diff --git a/README.md b/README.md
index f2385f315186f..b79c9703f44ef 100644
--- a/README.md
+++ b/README.md
@@ -89,6 +89,7 @@ build.
 $ pacman -S git \
             make \
             diffutils \
+            tar \
             mingw-w64-x86_64-python2 \
             mingw-w64-x86_64-cmake \
             mingw-w64-x86_64-gcc
diff --git a/RELEASES.md b/RELEASES.md
index 8817d7f88a738..e6f18f9f7ba55 100644
--- a/RELEASES.md
+++ b/RELEASES.md
@@ -1,3 +1,259 @@
+Version 1.12.0 (2016-09-29)
+===========================
+
+Highlights
+----------
+
+* [`rustc` translates code to LLVM IR via its own "middle" IR (MIR)]
+  (https://github.com/rust-lang/rust/pull/34096).
+  This translation pass is far simpler than the previous AST->LLVM pass, and
+  creates opportunities to perform new optimizations directly on the MIR. It
+  was previously described [on the Rust blog]
+  (https://blog.rust-lang.org/2016/04/19/MIR.html).
+* [`rustc` presents a new, more readable error format, along with
+  machine-readable JSON error output for use by IDEs]
+  (https://github.com/rust-lang/rust/pull/35401).
+  Most common editors supporting Rust have been updated to work with it. It was
+  previously described [on the Rust blog]
+  (https://blog.rust-lang.org/2016/08/10/Shape-of-errors-to-come.html).
+
+Compiler
+--------
+
+* [`rustc` translates code to LLVM IR via its own "middle" IR (MIR)]
+  (https://github.com/rust-lang/rust/pull/34096).
+  This translation pass is far simpler than the previous AST->LLVM pass, and
+  creates opportunities to perform new optimizations directly on the MIR. It
+  was previously described [on the Rust blog]
+  (https://blog.rust-lang.org/2016/04/19/MIR.html).
+* [Print the Rust target name, not the LLVM target name, with
+  `--print target-list`]
+  (https://github.com/rust-lang/rust/pull/35489)
+* [The computation of `TypeId` is correct in some cases where it was previously
+  producing inconsistent results]
+  (https://github.com/rust-lang/rust/pull/35267)
+* [The `mips-unknown-linux-gnu` target uses hardware floating point by default]
+  (https://github.com/rust-lang/rust/pull/34910)
+* [The `rustc` arguments, `--print target-cpus`, `--print target-features`,
+  `--print relocation-models`, and `--print code-models` print the available
+  options to the `-C target-cpu`, `-C target-feature`, `-C relocation-model` and
+  `-C code-model` code generation arguments]
+  (https://github.com/rust-lang/rust/pull/34845)
+* [`rustc` supports three new MUSL targets on ARM: `arm-unknown-linux-musleabi`,
+  `arm-unknown-linux-musleabihf`, and `armv7-unknown-linux-musleabihf`]
+  (https://github.com/rust-lang/rust/pull/35060).
+  These targets produce statically-linked binaries. There are no binary release
+  builds yet though.
+
+Diagnostics
+-----------
+
+* [`rustc` presents a new, more readable error format, along with
+  machine-readable JSON error output for use by IDEs]
+  (https://github.com/rust-lang/rust/pull/35401).
+  Most common editors supporting Rust have been updated to work with it. It was
+  previously described [on the Rust blog]
+  (https://blog.rust-lang.org/2016/08/10/Shape-of-errors-to-come.html).
+* [In error descriptions, references are now described in plain English,
+  instead of as "&-ptr"]
+  (https://github.com/rust-lang/rust/pull/35611)
+* [In error type descriptions, unknown numeric types are named `{integer}` or
+  `{float}` instead of `_`]
+  (https://github.com/rust-lang/rust/pull/35080)
+* [`rustc` emits a clearer error when inner attributes follow a doc comment]
+  (https://github.com/rust-lang/rust/pull/34676)
+
+Language
+--------
+
+* [`macro_rules!` invocations can be made within `macro_rules!` invocations]
+  (https://github.com/rust-lang/rust/pull/34925)
+* [`macro_rules!` meta-variables are hygienic]
+  (https://github.com/rust-lang/rust/pull/35453)
+* [`macro_rules!` `tt` matchers can be reparsed correctly, making them much more
+  useful]
+  (https://github.com/rust-lang/rust/pull/34908)
+* [`macro_rules!` `stmt` matchers correctly consume the entire contents when
+  inside non-braces invocations]
+  (https://github.com/rust-lang/rust/pull/34886)
+* [Semicolons are properly required as statement delimiters inside
+  `macro_rules!` invocations]
+  (https://github.com/rust-lang/rust/pull/34660)
+* [`cfg_attr` works on `path` attributes]
+  (https://github.com/rust-lang/rust/pull/34546)
+
+Stabilized APIs
+---------------
+
+* [`Cell::as_ptr`]
+  (https://doc.rust-lang.org/std/cell/struct.Cell.html#method.as_ptr)
+* [`RefCell::as_ptr`]
+  (https://doc.rust-lang.org/std/cell/struct.RefCell.html#method.as_ptr)
+* [`IpAddr::is_unspecified`]
+  (https://doc.rust-lang.org/std/net/enum.IpAddr.html#method.is_unspecified)
+* [`IpAddr::is_loopback`]
+  (https://doc.rust-lang.org/std/net/enum.IpAddr.html#method.is_loopback)
+* [`IpAddr::is_multicast`]
+  (https://doc.rust-lang.org/std/net/enum.IpAddr.html#method.is_multicast)
+* [`Ipv4Addr::is_unspecified`]
+  (https://doc.rust-lang.org/std/net/struct.Ipv4Addr.html#method.is_unspecified)
+* [`Ipv6Addr::octets`]
+  (https://doc.rust-lang.org/std/net/struct.Ipv6Addr.html#method.octets)
+* [`LinkedList::contains`]
+  (https://doc.rust-lang.org/std/collections/linked_list/struct.LinkedList.html#method.contains)
+* [`VecDeque::contains`]
+  (https://doc.rust-lang.org/std/collections/vec_deque/struct.VecDeque.html#method.contains)
+* [`ExitStatusExt::from_raw`]
+  (https://doc.rust-lang.org/std/os/unix/process/trait.ExitStatusExt.html#tymethod.from_raw).
+  Both on Unix and Windows.
+* [`Receiver::recv_timeout`]
+  (https://doc.rust-lang.org/std/sync/mpsc/struct.Receiver.html#method.recv_timeout)
+* [`RecvTimeoutError`]
+  (https://doc.rust-lang.org/std/sync/mpsc/enum.RecvTimeoutError.html)
+* [`BinaryHeap::peek_mut`]
+  (https://doc.rust-lang.org/std/collections/binary_heap/struct.BinaryHeap.html#method.peek_mut)
+* [`PeekMut`]
+  (https://doc.rust-lang.org/std/collections/binary_heap/struct.PeekMut.html)
+* [`iter::Product`]
+  (https://doc.rust-lang.org/std/iter/trait.Product.html)
+* [`iter::Sum`]
+  (https://doc.rust-lang.org/std/iter/trait.Sum.html)
+* [`OccupiedEntry::remove_entry`]
+  (https://doc.rust-lang.org/std/collections/btree_map/struct.OccupiedEntry.html#method.remove_entry)
+* [`VacantEntry::into_key`]
+  (https://doc.rust-lang.org/std/collections/btree_map/struct.VacantEntry.html#method.into_key)
+
+Libraries
+---------
+
+* [The `format!` macro and friends now allow a single argument to be formatted
+  in multiple styles]
+  (https://github.com/rust-lang/rust/pull/33642)
+* [The lifetime bounds on `[T]::binary_search_by` and
+  `[T]::binary_search_by_key` have been adjusted to be more flexible]
+  (https://github.com/rust-lang/rust/pull/34762)
+* [`Option` implements `From` for its contained type]
+  (https://github.com/rust-lang/rust/pull/34828)
+* [`Cell`, `RefCell` and `UnsafeCell` implement `From` for their contained type]
+  (https://github.com/rust-lang/rust/pull/35392)
+* [`RwLock` panics if the reader count overflows]
+  (https://github.com/rust-lang/rust/pull/35378)
+* [`vec_deque::Drain`, `hash_map::Drain` and `hash_set::Drain` are covariant]
+  (https://github.com/rust-lang/rust/pull/35354)
+* [`vec::Drain` and `binary_heap::Drain` are covariant]
+  (https://github.com/rust-lang/rust/pull/34951)
+* [`Cow` implements `FromIterator` for `char`, `&str` and `String`]
+  (https://github.com/rust-lang/rust/pull/35064)
+* [Sockets on Linux are correctly closed in subprocesses via `SOCK_CLOEXEC`]
+  (https://github.com/rust-lang/rust/pull/34946)
+* [`hash_map::Entry`, `hash_map::VacantEntry` and `hash_map::OccupiedEntry`
+  implement `Debug`]
+  (https://github.com/rust-lang/rust/pull/34937)
+* [`btree_map::Entry`, `btree_map::VacantEntry` and `btree_map::OccupiedEntry`
+  implement `Debug`]
+  (https://github.com/rust-lang/rust/pull/34885)
+* [`String` implements `AddAssign`]
+  (https://github.com/rust-lang/rust/pull/34890)
+* [Variadic `extern fn` pointers implement the `Clone`, `PartialEq`, `Eq`,
+  `PartialOrd`, `Ord`, `Hash`, `fmt::Pointer`, and `fmt::Debug` traits]
+  (https://github.com/rust-lang/rust/pull/34879)
+* [`FileType` implements `Debug`]
+  (https://github.com/rust-lang/rust/pull/34757)
+* [References to `Mutex` and `RwLock` are unwind-safe]
+  (https://github.com/rust-lang/rust/pull/34756)
+* [`mpsc::sync_channel` `Receiver`s return any available message before
+  reporting a disconnect]
+  (https://github.com/rust-lang/rust/pull/34731)
+* [Unicode definitions have been updated to 9.0]
+  (https://github.com/rust-lang/rust/pull/34599)
+* [`env` iterators implement `DoubleEndedIterator`]
+  (https://github.com/rust-lang/rust/pull/33312)
+
+Cargo
+-----
+
+* [Support local mirrors of registries]
+  (https://github.com/rust-lang/cargo/pull/2857)
+* [Add support for command aliases]
+  (https://github.com/rust-lang/cargo/pull/2679)
+* [Allow `opt-level="s"` / `opt-level="z"` in profile overrides]
+  (https://github.com/rust-lang/cargo/pull/3007)
+* [Make `cargo doc --open --target` work as expected]
+  (https://github.com/rust-lang/cargo/pull/2988)
+* [Speed up noop registry updates]
+  (https://github.com/rust-lang/cargo/pull/2974)
+* [Update OpenSSL]
+  (https://github.com/rust-lang/cargo/pull/2971)
+* [Fix `--panic=abort` with plugins]
+  (https://github.com/rust-lang/cargo/pull/2954)
+* [Always pass `-C metadata` to the compiler]
+  (https://github.com/rust-lang/cargo/pull/2946)
+* [Fix depending on git repos with workspaces]
+  (https://github.com/rust-lang/cargo/pull/2938)
+* [Add a `--lib` flag to `cargo new`]
+  (https://github.com/rust-lang/cargo/pull/2921)
+* [Add `http.cainfo` for custom certs]
+  (https://github.com/rust-lang/cargo/pull/2917)
+* [Indicate the compilation profile after compiling]
+  (https://github.com/rust-lang/cargo/pull/2909)
+* [Allow enabling features for dependencies with `--features`]
+  (https://github.com/rust-lang/cargo/pull/2876)
+* [Add `--jobs` flag to `cargo package`]
+  (https://github.com/rust-lang/cargo/pull/2867)
+* [Add `--dry-run` to `cargo publish`]
+  (https://github.com/rust-lang/cargo/pull/2849)
+* [Add support for `RUSTDOCFLAGS`]
+  (https://github.com/rust-lang/cargo/pull/2794)
+
+Performance
+-----------
+
+* [`panic::catch_unwind` is more optimized]
+  (https://github.com/rust-lang/rust/pull/35444)
+* [`panic::catch_unwind` no longer accesses thread-local storage on entry]
+  (https://github.com/rust-lang/rust/pull/34866)
+
+Tooling
+-------
+
+* [Test binaries now support a `--test-threads` argument to specify the number
+  of threads used to run tests, and which acts the same as the
+  `RUST_TEST_THREADS` environment variable]
+  (https://github.com/rust-lang/rust/pull/35414)
+* [The test runner now emits a warning when tests run over 60 seconds]
+  (https://github.com/rust-lang/rust/pull/35405)
+* [rustdoc: Fix methods in search results]
+  (https://github.com/rust-lang/rust/pull/34752)
+* [`rust-lldb` warns about unsupported versions of LLDB]
+  (https://github.com/rust-lang/rust/pull/34646)
+* [Rust releases now come with source packages that can be installed by rustup
+  via `rustup component add rust-src`]
+  (https://github.com/rust-lang/rust/pull/34366).
+  The resulting source code can be used by tools and IDEs, located in the
+  sysroot under `lib/rustlib/src`.
+
+Misc
+----
+
+* [The compiler can now be built against LLVM 3.9]
+  (https://github.com/rust-lang/rust/pull/35594)
+* Many minor improvements to the documentation.
+* [The Rust exception handling "personality" routine is now written in Rust]
+  (https://github.com/rust-lang/rust/pull/34832)
+
+Compatibility Notes
+-------------------
+
+* [When printing Windows `OsStr`s, unpaired surrogate codepoints are escaped
+  with the lowercase format instead of the uppercase]
+  (https://github.com/rust-lang/rust/pull/35084)
+* [When formatting strings, if "precision" is specified, the "fill",
+  "align" and "width" specifiers are no longer ignored]
+  (https://github.com/rust-lang/rust/pull/34544)
+* [The `Debug` impl for strings no longer escapes all non-ASCII characters]
+  (https://github.com/rust-lang/rust/pull/34485)
+
+
 Version 1.11.0 (2016-08-18)
 ===========================
 
@@ -629,7 +885,7 @@ Cargo
 Performance
 -----------
 
-* [The time complexity of comparing variables for equivalence during type 
+* [The time complexity of comparing variables for equivalence during type
   unification is reduced from _O_(_n_!) to _O_(_n_)][1.9tu]. This leads to major
   compilation time improvement in some scenarios.
 * [`ToString` is specialized for `str`, giving it the same performance
diff --git a/appveyor.yml b/appveyor.yml
new file mode 100644
index 0000000000000..686c48abb30cd
--- /dev/null
+++ b/appveyor.yml
@@ -0,0 +1,110 @@
+environment:
+  matrix:
+  # 32/64 bit MSVC
+  - MSYS_BITS: 64
+    TARGET: x86_64-pc-windows-msvc
+    CHECK: check
+    CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions
+  - MSYS_BITS: 32
+    TARGET: i686-pc-windows-msvc
+    CHECK: check
+    CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions
+
+  # MSVC rustbuild
+  - MSYS_BITS: 64
+    CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions
+    TARGET: x86_64-pc-windows-msvc
+    CHECK: check
+
+  # MSVC cargotest
+  - MSYS_BITS: 64
+    CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions
+    TARGET: x86_64-pc-windows-msvc
+    CHECK: check-cargotest
+
+  # 32/64-bit MinGW builds.
+  #
+  # The MinGW builds unfortunately have to both download a custom toolchain and
+  # avoid the one installed by AppVeyor by default. Interestingly, though, for
+  # different reasons!
+  #
+  # For 32-bit the installed gcc toolchain on AppVeyor uses the pthread
+  # threading model. This is unfortunately not what we want, and if we compile
+  # with it then there's lots of link errors in the standard library (undefined
+  # references to pthread symbols).
+  #
+  # For 64-bit the installed gcc toolchain is currently 5.3.0 which
+  # unfortunately segfaults on Windows with --enable-llvm-assertions (segfaults
+  # in LLVM). See rust-lang/rust#28445 for more information, but to work around
+  # this we go back in time to 4.9.2 specifically.
+  #
+  # Finally, note that the downloads below are all in the `rust-lang-ci` S3
+  # bucket, but they clearly didn't originate there! The downloads originally
+  # came from the mingw-w64 SourceForge download site. Unfortunately
+  # SourceForge is notoriously flaky, so we mirror it on our own infrastructure.
+  #
+  # And as a final point of note, the 32-bit MinGW build using the makefiles does
+  # *not* use debug assertions and llvm assertions. This is because they take
+  # too long on appveyor and this is tested by rustbuild below.
+  - MSYS_BITS: 32
+    TARGET: i686-pc-windows-gnu
+    CHECK: check
+    MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
+    MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z
+    MINGW_DIR: mingw32
+
+  - MSYS_BITS: 32
+    CONFIGURE_ARGS: --enable-rustbuild --enable-llvm-assertions --enable-debug-assertions
+    TARGET: i686-pc-windows-gnu
+    CHECK: check
+    MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
+    MINGW_ARCHIVE: i686-4.9.2-release-win32-dwarf-rt_v4-rev4.7z
+    MINGW_DIR: mingw32
+
+  - MSYS_BITS: 64
+    CONFIGURE_ARGS: --enable-llvm-assertions --enable-debug-assertions
+    TARGET: x86_64-pc-windows-gnu
+    CHECK: check
+    MINGW_URL: https://s3.amazonaws.com/rust-lang-ci
+    MINGW_ARCHIVE: x86_64-4.9.2-release-win32-seh-rt_v4-rev4.7z
+    MINGW_DIR: mingw64
+
+clone_depth: 1
+build: false
+
+install:
+  # If we need to download a custom MinGW, do so here and set the path
+  # appropriately.
+  #
+  # Note that this *also* means that we're not using what is typically
+  # /mingw32/bin/python2.7.exe, which is a "correct" python interpreter where
+  # /usr/bin/python2.7.exe is not. To ensure we use the right interpreter we
+  # move `C:\Python27` ahead in PATH and then also make sure the `python2.7.exe`
+  # file exists in there (which it doesn't by default).
+ - if defined MINGW_URL appveyor DownloadFile %MINGW_URL%/%MINGW_ARCHIVE% + - if defined MINGW_URL 7z x -y %MINGW_ARCHIVE% > nul + - if defined MINGW_URL set PATH=C:\Python27;%CD%\%MINGW_DIR%\bin;C:\msys64\usr\bin;%PATH% + - if defined MINGW_URL copy C:\Python27\python.exe C:\Python27\python2.7.exe + + # Otherwise pull in the MinGW installed on appveyor + - if NOT defined MINGW_URL set PATH=C:\msys64\mingw%MSYS_BITS%\bin;C:\msys64\usr\bin;%PATH% + +test_script: + - sh ./configure + %CONFIGURE_ARGS% + --build=%TARGET% + - bash -c "make -j$(nproc)" + - bash -c "make %CHECK% -j$(nproc)" + +cache: + - build/%TARGET%/llvm -> src/rustllvm/llvm-auto-clean-trigger + - "%TARGET%/llvm -> src/rustllvm/llvm-auto-clean-trigger" + +branches: + only: + - auto + +# init: +# - ps: iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1')) +# on_finish: +# - ps: $blockRdp = $true; iex ((new-object net.webclient).DownloadString('https://raw.githubusercontent.com/appveyor/ci/master/scripts/enable-rdp.ps1')) diff --git a/configure b/configure index a8bd3acdff1ac..2bc8c72e3ea1c 100755 --- a/configure +++ b/configure @@ -437,6 +437,10 @@ case $CFG_OSTYPE in CFG_CPUTYPE=$(isainfo -n) ;; + Haiku) + CFG_OSTYPE=unknown-haiku + ;; + MINGW*) # msys' `uname` does not print gcc configuration, but prints msys # configuration. so we cannot believe `uname -m`: @@ -532,6 +536,10 @@ case $CFG_CPUTYPE in CFG_CPUTYPE=x86_64 ;; + BePC) + CFG_CPUTYPE=i686 + ;; + *) err "unknown CPU type: $CFG_CPUTYPE" esac @@ -628,6 +636,7 @@ opt_nosave optimize-llvm 1 "build optimized LLVM" opt_nosave llvm-assertions 0 "build LLVM with assertions" opt_nosave debug-assertions 0 "build with debugging assertions" opt_nosave debuginfo 0 "build with debugger metadata" +opt_nosave debuginfo-lines 0 "build with line number debugger metadata" opt_nosave debug-jemalloc 0 "build jemalloc with --enable-debug --enable-fill" valopt localstatedir "/var/lib" "local state directory" @@ -637,7 +646,6 @@ valopt datadir "${CFG_PREFIX}/share" "install data" valopt infodir "${CFG_PREFIX}/share/info" "install additional info" valopt llvm-root "" "set LLVM root" valopt python "" "set path to python" -valopt nodejs "" "set path to nodejs" valopt jemalloc-root "" "set directory where libjemalloc_pic.a is located" valopt build "${DEFAULT_BUILD}" "GNUs ./configure syntax LLVM build triple" valopt android-cross-path "" "Android NDK standalone path (deprecated)" @@ -646,7 +654,12 @@ valopt arm-linux-androideabi-ndk "" "arm-linux-androideabi NDK standalone path" valopt armv7-linux-androideabi-ndk "" "armv7-linux-androideabi NDK standalone path" valopt aarch64-linux-android-ndk "" "aarch64-linux-android NDK standalone path" valopt nacl-cross-path "" "NaCl SDK path (Pepper Canary is recommended). Must be absolute!" 
-valopt musl-root "/usr/local" "MUSL root installation directory" +valopt musl-root "/usr/local" "MUSL root installation directory (deprecated)" +valopt musl-root-x86_64 "/usr/local" "x86_64-unknown-linux-musl install directory" +valopt musl-root-i686 "/usr/local" "i686-unknown-linux-musl install directory" +valopt musl-root-arm "/usr/local" "arm-unknown-linux-musleabi install directory" +valopt musl-root-armhf "/usr/local" "arm-unknown-linux-musleabihf install directory" +valopt musl-root-armv7 "/usr/local" "armv7-unknown-linux-musleabihf install directory" valopt extra-filename "" "Additional data that is hashed and passed to the -C extra-filename flag" if [ -e ${CFG_SRC_DIR}.git ] @@ -676,7 +689,7 @@ valopt_nosave local-rust-root "/usr/local" "set prefix for local rust binary" valopt_nosave host "${CFG_BUILD}" "GNUs ./configure syntax LLVM host triples" valopt_nosave target "${CFG_HOST}" "GNUs ./configure syntax LLVM target triples" valopt_nosave mandir "${CFG_PREFIX}/share/man" "install man pages in PATH" -valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install man pages in PATH" +valopt_nosave docdir "${CFG_PREFIX}/share/doc/rust" "install documentation in PATH" # On Windows this determines root of the subtree for target libraries. # Host runtime libs always go to 'bin'. @@ -709,8 +722,27 @@ case "$CFG_RELEASE_CHANNEL" in nightly ) msg "overriding settings for $CFG_RELEASE_CHANNEL" CFG_ENABLE_LLVM_ASSERTIONS=1 + + # FIXME(#37364) shouldn't have to disable this on windows-gnu + case "$CFG_BUILD" in + *-pc-windows-gnu) + ;; + *) + CFG_ENABLE_DEBUGINFO_LINES=1 + ;; + esac + ;; + beta | stable) + msg "overriding settings for $CFG_RELEASE_CHANNEL" + case "$CFG_BUILD" in + *-pc-windows-gnu) + ;; + *) + CFG_ENABLE_DEBUGINFO_LINES=1 + ;; + esac ;; - dev | beta | stable) + dev) ;; *) err "release channel must be 'dev', 'nightly', 'beta' or 'stable'" @@ -740,6 +772,7 @@ if [ -n "$CFG_DISABLE_OPTIMIZE_LLVM" ]; then putvar CFG_DISABLE_OPTIMIZE_LLVM; f if [ -n "$CFG_ENABLE_LLVM_ASSERTIONS" ]; then putvar CFG_ENABLE_LLVM_ASSERTIONS; fi if [ -n "$CFG_ENABLE_DEBUG_ASSERTIONS" ]; then putvar CFG_ENABLE_DEBUG_ASSERTIONS; fi if [ -n "$CFG_ENABLE_DEBUGINFO" ]; then putvar CFG_ENABLE_DEBUGINFO; fi +if [ -n "$CFG_ENABLE_DEBUGINFO_LINES" ]; then putvar CFG_ENABLE_DEBUGINFO_LINES; fi if [ -n "$CFG_ENABLE_DEBUG_JEMALLOC" ]; then putvar CFG_ENABLE_DEBUG_JEMALLOC; fi step_msg "looking for build programs" @@ -754,9 +787,6 @@ if [ $(echo $python_version | grep -c '^Python 2\.7') -ne 1 ]; then err "Found $python_version, but Python 2.7 is required" fi -# Checking for node, but not required -probe CFG_NODEJS nodejs node - # If we have no git directory then we are probably a tarball distribution # and shouldn't attempt to load submodules if [ ! -e ${CFG_SRC_DIR}.git ] @@ -1208,14 +1238,6 @@ do fi ;; - - x86_64-*-musl | arm-*-musleabi) - if [ ! 
-f $CFG_MUSL_ROOT/lib/libc.a ] - then - err "musl libc $CFG_MUSL_ROOT/lib/libc.a not found" - fi - ;; - *-msvc) # There are three builds of cmake on windows: MSVC, MinGW and Cygwin # The Cygwin build does not have generators for Visual Studio, so @@ -1634,8 +1656,8 @@ do ("ccache gcc") LLVM_CXX_32="ccache" LLVM_CC_32="ccache" - LLVM_CXX_32_ARG1="clang++" - LLVM_CC_32_ARG1="clang" + LLVM_CXX_32_ARG1="g++" + LLVM_CC_32_ARG1="gcc" LLVM_CXX_64="ccache" LLVM_CC_64="ccache" @@ -1760,7 +1782,7 @@ do CMAKE_ARGS="$CMAKE_ARGS -DLLVM_ENABLE_ASSERTIONS=ON" fi - CMAKE_ARGS="$CMAKE_ARGS -DLLVM_TARGETS_TO_BUILD='X86;ARM;AArch64;Mips;PowerPC;SystemZ'" + CMAKE_ARGS="$CMAKE_ARGS -DLLVM_TARGETS_TO_BUILD='X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend'" CMAKE_ARGS="$CMAKE_ARGS -G '$CFG_CMAKE_GENERATOR'" CMAKE_ARGS="$CMAKE_ARGS $CFG_LLVM_SRC_DIR" diff --git a/mk/cfg/aarch64-unknown-fuchsia.mk b/mk/cfg/aarch64-unknown-fuchsia.mk new file mode 100644 index 0000000000000..34aee77ae2107 --- /dev/null +++ b/mk/cfg/aarch64-unknown-fuchsia.mk @@ -0,0 +1 @@ +# rustbuild-only target diff --git a/mk/cfg/i686-unknown-haiku.mk b/mk/cfg/i686-unknown-haiku.mk new file mode 100644 index 0000000000000..cbacbff070e88 --- /dev/null +++ b/mk/cfg/i686-unknown-haiku.mk @@ -0,0 +1,27 @@ +# i686-unknown-haiku configuration +CROSS_PREFIX_i686-unknown-haiku=i586-pc-haiku- +CC_i686-unknown-haiku=$(CC) +CXX_i686-unknown-haiku=$(CXX) +CPP_i686-unknown-haiku=$(CPP) +AR_i686-unknown-haiku=$(AR) +CFG_LIB_NAME_i686-unknown-haiku=lib$(1).so +CFG_STATIC_LIB_NAME_i686-unknown-haiku=lib$(1).a +CFG_LIB_GLOB_i686-unknown-haiku=lib$(1)-*.so +CFG_LIB_DSYM_GLOB_i686-unknown-haiku=lib$(1)-*.dylib.dSYM +CFG_CFLAGS_i686-unknown-haiku := -m32 $(CFLAGS) +CFG_GCCISH_CFLAGS_i686-unknown-haiku := -Wall -Werror -g -fPIC -m32 $(CFLAGS) +CFG_GCCISH_CXXFLAGS_i686-unknown-haiku := -fno-rtti $(CXXFLAGS) +CFG_GCCISH_LINK_FLAGS_i686-unknown-haiku := -shared -fPIC -ldl -pthread -lrt -g -m32 +CFG_GCCISH_PRE_LIB_FLAGS_i686-unknown-haiku := -Wl,-whole-archive +CFG_GCCISH_POST_LIB_FLAGS_i686-unknown-haiku := -Wl,-no-whole-archive +CFG_DEF_SUFFIX_i686-unknown-haiku := .linux.def +CFG_LLC_FLAGS_i686-unknown-haiku := +CFG_INSTALL_NAME_i686-unknown-haiku = +CFG_EXE_SUFFIX_i686-unknown-haiku = +CFG_WINDOWSY_i686-unknown-haiku := +CFG_UNIXY_i686-unknown-haiku := 1 +CFG_PATH_MUNGE_i686-unknown-haiku := true +CFG_LDPATH_i686-unknown-haiku := +CFG_RUN_i686-unknown-haiku=$(2) +CFG_RUN_TARG_i686-unknown-haiku=$(call CFG_RUN_i686-unknown-haiku,,$(2)) +CFG_GNU_TRIPLE_i686-unknown-haiku := i686-unknown-haiku diff --git a/mk/cfg/wasm32-unknown-emscripten.mk b/mk/cfg/wasm32-unknown-emscripten.mk new file mode 100644 index 0000000000000..997bdfbf03ab1 --- /dev/null +++ b/mk/cfg/wasm32-unknown-emscripten.mk @@ -0,0 +1,24 @@ +# wasm32-unknown-emscripten configuration +CC_wasm32-unknown-emscripten=emcc +CXX_wasm32-unknown-emscripten=em++ +CPP_wasm32-unknown-emscripten=$(CPP) +AR_wasm32-unknown-emscripten=emar +CFG_LIB_NAME_wasm32-unknown-emscripten=lib$(1).so +CFG_STATIC_LIB_NAME_wasm32-unknown-emscripten=lib$(1).a +CFG_LIB_GLOB_wasm32-unknown-emscripten=lib$(1)-*.so +CFG_LIB_DSYM_GLOB_wasm32-unknown-emscripten=lib$(1)-*.dylib.dSYM +CFG_JEMALLOC_CFLAGS_wasm32-unknown-emscripten := -m32 $(CFLAGS) +CFG_GCCISH_CFLAGS_wasm32-unknown-emscripten := -g -fPIC -m32 -s BINARYEN=1 $(CFLAGS) +CFG_GCCISH_CXXFLAGS_wasm32-unknown-emscripten := -fno-rtti -s BINARYEN=1 $(CXXFLAGS) +CFG_GCCISH_LINK_FLAGS_wasm32-unknown-emscripten := -shared -fPIC -ldl -pthread -lrt -g -m32 -s BINARYEN=1 
+CFG_GCCISH_DEF_FLAG_wasm32-unknown-emscripten := -Wl,--export-dynamic,--dynamic-list= +CFG_LLC_FLAGS_wasm32-unknown-emscripten := +CFG_INSTALL_NAME_wasm32-unknown-emscripten = +CFG_EXE_SUFFIX_wasm32-unknown-emscripten = +CFG_WINDOWSY_wasm32-unknown-emscripten := +CFG_UNIXY_wasm32-unknown-emscripten := 1 +CFG_LDPATH_wasm32-unknown-emscripten := +CFG_RUN_wasm32-unknown-emscripten=$(2) +CFG_RUN_TARG_wasm32-unknown-emscripten=$(call CFG_RUN_wasm32-unknown-emscripten,,$(2)) +CFG_GNU_TRIPLE_wasm32-unknown-emscripten := wasm32-unknown-emscripten +CFG_DISABLE_JEMALLOC_wasm32-unknown-emscripten := 1 diff --git a/mk/cfg/x86_64-unknown-fuchsia.mk b/mk/cfg/x86_64-unknown-fuchsia.mk new file mode 100644 index 0000000000000..34aee77ae2107 --- /dev/null +++ b/mk/cfg/x86_64-unknown-fuchsia.mk @@ -0,0 +1 @@ +# rustbuild-only target diff --git a/mk/cfg/x86_64-unknown-haiku.mk b/mk/cfg/x86_64-unknown-haiku.mk new file mode 100644 index 0000000000000..4c2d888be06fb --- /dev/null +++ b/mk/cfg/x86_64-unknown-haiku.mk @@ -0,0 +1,27 @@ +# x86_64-unknown-haiku configuration +CROSS_PREFIX_x86_64-unknown-haiku=x86_64-unknown-haiku- +CC_x86_64-unknown-haiku=$(CC) +CXX_x86_64-unknown-haiku=$(CXX) +CPP_x86_64-unknown-haiku=$(CPP) +AR_x86_64-unknown-haiku=$(AR) +CFG_LIB_NAME_x86_64-unknown-haiku=lib$(1).so +CFG_STATIC_LIB_NAME_x86_64-unknown-haiku=lib$(1).a +CFG_LIB_GLOB_x86_64-unknown-haiku=lib$(1)-*.so +CFG_LIB_DSYM_GLOB_x86_64-unknown-haiku=lib$(1)-*.dylib.dSYM +CFG_CFLAGS_x86_64-unknown-haiku := -m64 $(CFLAGS) +CFG_GCCISH_CFLAGS_x86_64-unknown-haiku := -Wall -Werror -g -fPIC -m64 $(CFLAGS) +CFG_GCCISH_CXXFLAGS_x86_64-unknown-haiku := -fno-rtti $(CXXFLAGS) +CFG_GCCISH_LINK_FLAGS_x86_64-unknown-haiku := -shared -fPIC -ldl -pthread -lrt -g -m64 +CFG_GCCISH_PRE_LIB_FLAGS_x86_64-unknown-haiku := -Wl,-whole-archive +CFG_GCCISH_POST_LIB_FLAGS_x86_64-unknown-haiku := -Wl,-no-whole-archive +CFG_DEF_SUFFIX_x86_64-unknown-haiku := .linux.def +CFG_LLC_FLAGS_x86_64-unknown-haiku := +CFG_INSTALL_NAME_x86_64-unknown-haiku = +CFG_EXE_SUFFIX_x86_64-unknown-haiku = +CFG_WINDOWSY_x86_64-unknown-haiku := +CFG_UNIXY_x86_64-unknown-haiku := 1 +CFG_PATH_MUNGE_x86_64-unknown-haiku := true +CFG_LDPATH_x86_64-unknown-haiku := +CFG_RUN_x86_64-unknown-haiku=$(2) +CFG_RUN_TARG_x86_64-unknown-haiku=$(call CFG_RUN_x86_64-unknown-haiku,,$(2)) +CFG_GNU_TRIPLE_x86_64-unknown-haiku := x86_64-unknown-haiku diff --git a/mk/crates.mk b/mk/crates.mk index d2c79441d866f..25192bfd27a44 100644 --- a/mk/crates.mk +++ b/mk/crates.mk @@ -59,9 +59,9 @@ RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_ rustc_trans rustc_back rustc_llvm rustc_privacy rustc_lint \ rustc_data_structures rustc_platform_intrinsics rustc_errors \ rustc_plugin rustc_metadata rustc_passes rustc_save_analysis \ - rustc_const_eval rustc_const_math rustc_incremental rustc_macro -HOST_CRATES := syntax syntax_ext proc_macro syntax_pos $(RUSTC_CRATES) rustdoc fmt_macros \ - flate arena graphviz rbml log serialize + rustc_const_eval rustc_const_math rustc_incremental proc_macro +HOST_CRATES := syntax syntax_ext proc_macro_tokens proc_macro_plugin syntax_pos $(RUSTC_CRATES) \ + rustdoc fmt_macros flate arena graphviz log serialize TOOLS := compiletest rustdoc rustc rustbook error_index_generator DEPS_core := @@ -96,21 +96,21 @@ DEPS_getopts := std DEPS_graphviz := std DEPS_log := std DEPS_num := std -DEPS_rbml := std log serialize DEPS_serialize := std log DEPS_term := std DEPS_test := std getopts term native:rust_test_helpers DEPS_syntax := std term serialize 
log arena libc rustc_bitflags rustc_unicode rustc_errors syntax_pos -DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros rustc_macro -DEPS_proc_macro := syntax syntax_pos rustc_plugin log +DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros proc_macro DEPS_syntax_pos := serialize +DEPS_proc_macro_tokens := syntax syntax_pos log +DEPS_proc_macro_plugin := syntax syntax_pos rustc_plugin log proc_macro_tokens DEPS_rustc_const_math := std syntax log serialize DEPS_rustc_const_eval := rustc_const_math rustc syntax log serialize \ rustc_back graphviz syntax_pos -DEPS_rustc := syntax fmt_macros flate arena serialize getopts rbml \ +DEPS_rustc := syntax fmt_macros flate arena serialize getopts \ log graphviz rustc_llvm rustc_back rustc_data_structures\ rustc_const_math syntax_pos rustc_errors DEPS_rustc_back := std syntax flate log libc @@ -119,15 +119,15 @@ DEPS_rustc_data_structures := std log serialize libc DEPS_rustc_driver := arena flate getopts graphviz libc rustc rustc_back rustc_borrowck \ rustc_typeck rustc_mir rustc_resolve log syntax serialize rustc_llvm \ rustc_trans rustc_privacy rustc_lint rustc_plugin \ - rustc_metadata syntax_ext proc_macro \ + rustc_metadata syntax_ext proc_macro_plugin \ rustc_passes rustc_save_analysis rustc_const_eval \ - rustc_incremental syntax_pos rustc_errors rustc_macro + rustc_incremental syntax_pos rustc_errors proc_macro rustc_data_structures DEPS_rustc_errors := log libc serialize syntax_pos DEPS_rustc_lint := rustc log syntax syntax_pos rustc_const_eval DEPS_rustc_llvm := native:rustllvm libc std rustc_bitflags -DEPS_rustc_macro := std syntax -DEPS_rustc_metadata := rustc syntax syntax_pos rustc_errors rbml rustc_const_math \ - rustc_macro syntax_ext +DEPS_proc_macro := std syntax +DEPS_rustc_metadata := rustc syntax syntax_pos rustc_errors rustc_const_math \ + proc_macro syntax_ext DEPS_rustc_passes := syntax syntax_pos rustc core rustc_const_eval rustc_errors DEPS_rustc_mir := rustc syntax syntax_pos rustc_const_math rustc_const_eval rustc_bitflags DEPS_rustc_resolve := arena rustc log syntax syntax_pos rustc_errors @@ -137,7 +137,7 @@ DEPS_rustc_privacy := rustc log syntax syntax_pos DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back \ log syntax serialize rustc_llvm rustc_platform_intrinsics \ rustc_const_math rustc_const_eval rustc_incremental rustc_errors syntax_pos -DEPS_rustc_incremental := rbml rustc syntax_pos serialize rustc_data_structures +DEPS_rustc_incremental := rustc syntax_pos serialize rustc_data_structures DEPS_rustc_save_analysis := rustc log syntax syntax_pos serialize DEPS_rustc_typeck := rustc syntax syntax_pos rustc_platform_intrinsics rustc_const_math \ rustc_const_eval rustc_errors diff --git a/mk/llvm.mk b/mk/llvm.mk index d6f812049e03c..5a91f5fcaa483 100644 --- a/mk/llvm.mk +++ b/mk/llvm.mk @@ -36,22 +36,27 @@ endif # If CFG_LLVM_ROOT is defined then we don't build LLVM ourselves ifeq ($(CFG_LLVM_ROOT),) -LLVM_STAMP_$(1) = $$(CFG_LLVM_BUILD_DIR_$(1))/llvm-auto-clean-stamp +LLVM_STAMP_$(1) = $(S)src/rustllvm/llvm-auto-clean-trigger LLVM_DONE_$(1) = $$(CFG_LLVM_BUILD_DIR_$(1))/llvm-finished-building $$(LLVM_CONFIG_$(1)): $$(LLVM_DONE_$(1)) -$$(LLVM_DONE_$(1)): $$(LLVM_DEPS_TARGET_$(1)) $$(LLVM_STAMP_$(1)) - @$$(call E, cmake: llvm) ifneq ($$(CFG_NINJA),) - $$(Q)$$(CFG_NINJA) -C $$(CFG_LLVM_BUILD_DIR_$(1)) +BUILD_LLVM_$(1) := $$(CFG_NINJA) -C $$(CFG_LLVM_BUILD_DIR_$(1)) else ifeq ($$(findstring msvc,$(1)),msvc) - $$(Q)$$(CFG_CMAKE) --build $$(CFG_LLVM_BUILD_DIR_$(1)) \ - --config 
$$(LLVM_BUILD_CONFIG_MODE) +BUILD_LLVM_$(1) := $$(CFG_CMAKE) --build $$(CFG_LLVM_BUILD_DIR_$(1)) \ + --config $$(LLVM_BUILD_CONFIG_MODE) else - $$(Q)$$(MAKE) -C $$(CFG_LLVM_BUILD_DIR_$(1)) +BUILD_LLVM_$(1) := $$(MAKE) -C $$(CFG_LLVM_BUILD_DIR_$(1)) endif - $$(Q)touch $$@ + +$$(LLVM_DONE_$(1)): $$(LLVM_DEPS_TARGET_$(1)) $$(LLVM_STAMP_$(1)) + @$$(call E, cmake: llvm) + $$(Q)if ! cmp $$(LLVM_STAMP_$(1)) $$(LLVM_DONE_$(1)); then \ + $$(MAKE) clean-llvm$(1); \ + $$(BUILD_LLVM_$(1)); \ + fi + $$(Q)cp $$(LLVM_STAMP_$(1)) $$@ ifneq ($$(CFG_NINJA),) clean-llvm$(1): @@ -75,17 +80,6 @@ endif $$(LLVM_AR_$(1)): $$(LLVM_CONFIG_$(1)) -# This is used to independently force an LLVM clean rebuild -# when we changed something not otherwise captured by builtin -# dependencies. In these cases, commit a change that touches -# the stamp in the source dir. -$$(LLVM_STAMP_$(1)): $$(S)src/rustllvm/llvm-auto-clean-trigger - @$$(call E, make: cleaning llvm) - $$(Q)touch $$@.start_time - $$(Q)$$(MAKE) clean-llvm$(1) - @$$(call E, make: done cleaning llvm) - touch -r $$@.start_time $$@ && rm $$@.start_time - ifeq ($$(CFG_ENABLE_LLVM_STATIC_STDCPP),1) LLVM_STDCPP_RUSTFLAGS_$(1) = -L "$$(dir $$(shell $$(CC_$(1)) $$(CFG_GCCISH_CFLAGS_$(1)) \ -print-file-name=lib$(CFG_STDCPP_NAME).a))" diff --git a/mk/main.mk b/mk/main.mk index 275dbb55d7483..a5e3764122003 100644 --- a/mk/main.mk +++ b/mk/main.mk @@ -13,7 +13,7 @@ ###################################################################### # The version number -CFG_RELEASE_NUM=1.13.0 +CFG_RELEASE_NUM=1.14.0 # An optional number to put after the label, e.g. '.2' -> '-beta.2' # NB Make sure it starts with a dot to conform to semver pre-release @@ -53,34 +53,16 @@ endif # versions in the same place CFG_FILENAME_EXTRA=$(shell printf '%s' $(CFG_RELEASE)$(CFG_EXTRA_FILENAME) | $(CFG_HASH_COMMAND)) -# A magic value that allows the compiler to use unstable features during the -# bootstrap even when doing so would normally be an error because of feature -# staging or because the build turns on warnings-as-errors and unstable features -# default to warnings. The build has to match this key in an env var. -# -# This value is keyed off the release to ensure that all compilers for one -# particular release have the same bootstrap key. Note that this is -# intentionally not "secure" by any definition, this is largely just a deterrent -# from users enabling unstable features on the stable compiler. -CFG_BOOTSTRAP_KEY=$(CFG_FILENAME_EXTRA) - -# If local-rust is the same as the current version, then force a local-rebuild +# If local-rust is the same major.minor as the current version, then force a local-rebuild ifdef CFG_ENABLE_LOCAL_RUST -ifeq ($(CFG_RELEASE),\ - $(shell $(S)src/etc/local_stage0.sh --print-rustc-release $(CFG_LOCAL_RUST_ROOT))) - CFG_INFO := $(info cfg: auto-detected local-rebuild $(CFG_RELEASE)) +SEMVER_PREFIX=$(shell echo $(CFG_RELEASE_NUM) | grep -E -o '^[[:digit:]]+\.[[:digit:]]+') +LOCAL_RELEASE=$(shell $(S)src/etc/local_stage0.sh --print-rustc-release $(CFG_LOCAL_RUST_ROOT)) +ifneq (,$(filter $(SEMVER_PREFIX).%,$(LOCAL_RELEASE))) + CFG_INFO := $(info cfg: auto-detected local-rebuild using $(LOCAL_RELEASE)) CFG_ENABLE_LOCAL_REBUILD = 1 endif endif -# The stage0 compiler needs to use the previous key recorded in src/stage0.txt, -# except for local-rebuild when it just uses the same current key. 
-ifdef CFG_ENABLE_LOCAL_REBUILD -CFG_BOOTSTRAP_KEY_STAGE0=$(CFG_BOOTSTRAP_KEY) -else -CFG_BOOTSTRAP_KEY_STAGE0=$(shell sed -ne 's/^rustc_key: //p' $(S)src/stage0.txt) -endif - # The name of the package to use for creating tarballs, installers etc. CFG_PACKAGE_NAME=rustc-$(CFG_PACKAGE_VERS) @@ -160,6 +142,9 @@ endif ifdef CFG_ENABLE_DEBUGINFO $(info cfg: enabling debuginfo (CFG_ENABLE_DEBUGINFO)) CFG_RUSTC_FLAGS += -g +else ifdef CFG_ENABLE_DEBUGINFO_LINES + $(info cfg: enabling line number debuginfo (CFG_ENABLE_DEBUGINFO_LINES)) + CFG_RUSTC_FLAGS += -Cdebuginfo=1 endif ifdef SAVE_TEMPS @@ -300,7 +285,7 @@ endif # LLVM macros ###################################################################### -LLVM_OPTIONAL_COMPONENTS=x86 arm aarch64 mips powerpc pnacl systemz +LLVM_OPTIONAL_COMPONENTS=x86 arm aarch64 mips powerpc pnacl systemz jsbackend LLVM_REQUIRED_COMPONENTS=ipo bitreader bitwriter linker asmparser mcjit \ interpreter instrumentation @@ -387,13 +372,16 @@ CFG_INFO := $(info cfg: disabling unstable features (CFG_DISABLE_UNSTABLE_FEATUR # Turn on feature-staging export CFG_DISABLE_UNSTABLE_FEATURES # Subvert unstable feature lints to do the self-build -export RUSTC_BOOTSTRAP_KEY:=$(CFG_BOOTSTRAP_KEY) +export RUSTC_BOOTSTRAP endif -export CFG_BOOTSTRAP_KEY ifdef CFG_MUSL_ROOT export CFG_MUSL_ROOT endif +# FIXME: Transitionary measure to bootstrap using the old bootstrap logic. +# Remove this once the bootstrap compiler uses the new login in Issue #36548. +export RUSTC_BOOTSTRAP_KEY=62b3e239 + ###################################################################### # Per-stage targets and runner ###################################################################### @@ -512,10 +500,14 @@ ifeq ($$(OSTYPE_$(3)),apple-darwin) else ifeq ($$(CFG_WINDOWSY_$(3)),1) LD_LIBRARY_PATH_ENV_NAME$(1)_T_$(2)_H_$(3) := PATH +else +ifeq ($$(OSTYPE_$(3)),unknown-haiku) + LD_LIBRARY_PATH_ENV_NAME$(1)_T_$(2)_H_$(3) := LIBRARY_PATH else LD_LIBRARY_PATH_ENV_NAME$(1)_T_$(2)_H_$(3) := LD_LIBRARY_PATH endif endif +endif LD_LIBRARY_PATH_ENV_HOSTDIR$(1)_T_$(2)_H_$(3) := \ $$(CURDIR)/$$(HLIB$(1)_H_$(3)):$$(CFG_LLVM_INST_DIR_$(3))/lib @@ -633,7 +625,8 @@ ALL_TARGET_RULES = $(foreach target,$(CFG_TARGET), \ $(foreach host,$(CFG_HOST), \ all-target-$(target)-host-$(host))) -all: $(ALL_TARGET_RULES) $(GENERATED) docs +all-no-docs: $(ALL_TARGET_RULES) $(GENERATED) +all: all-no-docs docs ###################################################################### # Build system documentation diff --git a/mk/target.mk b/mk/target.mk index 2a08b7b046534..1b139909ab458 100644 --- a/mk/target.mk +++ b/mk/target.mk @@ -42,23 +42,6 @@ $(foreach host,$(CFG_HOST), \ $(foreach crate,$(CRATES), \ $(eval $(call RUST_CRATE_FULLDEPS,$(stage),$(target),$(host),$(crate))))))) -# $(1) stage -# $(2) target -# $(3) host -define DEFINE_BOOTSTRAP_KEY -BOOTSTRAP_KEY$(1)_T_$(2)_H_$(3) := $$(CFG_BOOTSTRAP_KEY) -ifeq ($(1),0) -ifeq ($(3),$$(CFG_BUILD)) -BOOTSTRAP_KEY$(1)_T_$(2)_H_$(3) := $$(CFG_BOOTSTRAP_KEY_STAGE0) -endif -endif -endef - -$(foreach host,$(CFG_TARGET), \ - $(foreach target,$(CFG_TARGET), \ - $(foreach stage,$(STAGES), \ - $(eval $(call DEFINE_BOOTSTRAP_KEY,$(stage),$(target),$(host)))))) - # RUST_TARGET_STAGE_N template: This defines how target artifacts are built # for all stage/target architecture combinations. 
This is one giant rule which # works as follows: @@ -83,8 +66,6 @@ $(foreach host,$(CFG_TARGET), \ define RUST_TARGET_STAGE_N $$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$(4): CFG_COMPILER_HOST_TRIPLE = $(2) -$$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$(4): \ - export RUSTC_BOOTSTRAP_KEY := $$(BOOTSTRAP_KEY$(1)_T_$(2)_H_$(3)) $$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$(4): \ $$(CRATEFILE_$(4)) \ $$(CRATE_FULLDEPS_$(1)_T_$(2)_H_$(3)_$(4)) \ @@ -132,8 +113,6 @@ endef # $(4) - name of the tool being built define TARGET_TOOL -$$(TBIN$(1)_T_$(2)_H_$(3))/$(4)$$(X_$(2)): \ - export RUSTC_BOOTSTRAP_KEY := $$(BOOTSTRAP_KEY$(1)_T_$(2)_H_$(3)) $$(TBIN$(1)_T_$(2)_H_$(3))/$(4)$$(X_$(2)): \ $$(TOOL_SOURCE_$(4)) \ $$(TOOL_INPUTS_$(4)) \ diff --git a/mk/tests.mk b/mk/tests.mk index c135aa9b8fb95..1957c989eb0d4 100644 --- a/mk/tests.mk +++ b/mk/tests.mk @@ -27,7 +27,7 @@ TEST_TARGET_CRATES = $(filter-out core rustc_unicode alloc_system libc \ panic_abort,$(TARGET_CRATES)) \ collectionstest coretest TEST_DOC_CRATES = $(DOC_CRATES) arena flate fmt_macros getopts graphviz \ - log rand rbml serialize syntax term test + log rand serialize syntax term test TEST_HOST_CRATES = $(filter-out rustc_typeck rustc_borrowck rustc_resolve \ rustc_trans rustc_lint,\ $(HOST_CRATES)) @@ -632,6 +632,7 @@ endif # is a separate choice from whether to pass `-g` when building the # compiler and standard library themselves. CTEST_RUSTC_FLAGS := $$(subst -g,,$$(CTEST_RUSTC_FLAGS)) +CTEST_RUSTC_FLAGS := $$(subst -Cdebuginfo=1,,$$(CTEST_RUSTC_FLAGS)) ifdef CFG_ENABLE_DEBUGINFO_TESTS CTEST_RUSTC_FLAGS += -g endif diff --git a/src/Cargo.lock b/src/Cargo.lock new file mode 100644 index 0000000000000..5826995cc3cfc --- /dev/null +++ b/src/Cargo.lock @@ -0,0 +1,765 @@ +[root] +name = "unwind" +version = "0.0.0" +dependencies = [ + "core 0.0.0", + "libc 0.0.0", +] + +[[package]] +name = "alloc" +version = "0.0.0" +dependencies = [ + "core 0.0.0", +] + +[[package]] +name = "alloc_jemalloc" +version = "0.0.0" +dependencies = [ + "build_helper 0.1.0", + "core 0.0.0", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.0.0", +] + +[[package]] +name = "alloc_system" +version = "0.0.0" +dependencies = [ + "core 0.0.0", + "libc 0.0.0", +] + +[[package]] +name = "arena" +version = "0.0.0" + +[[package]] +name = "bootstrap" +version = "0.0.0" +dependencies = [ + "build_helper 0.1.0", + "cmake 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", + "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.38 (git+https://github.com/alexcrichton/gcc-rs)", + "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", + "md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", + "toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "build_helper" +version = "0.1.0" + +[[package]] +name = "cargotest" +version = "0.1.0" + +[[package]] +name = "cmake" +version = "0.1.17" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "collections" 
+version = "0.0.0" +dependencies = [ + "alloc 0.0.0", + "core 0.0.0", + "rustc_unicode 0.0.0", +] + +[[package]] +name = "compiler_builtins" +version = "0.0.0" +dependencies = [ + "core 0.0.0", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "compiletest" +version = "0.0.0" +dependencies = [ + "env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", + "serialize 0.0.0", +] + +[[package]] +name = "core" +version = "0.0.0" + +[[package]] +name = "env_logger" +version = "0.3.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "error_index_generator" +version = "0.0.0" + +[[package]] +name = "filetime" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "flate" +version = "0.0.0" +dependencies = [ + "build_helper 0.1.0", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "fmt_macros" +version = "0.0.0" + +[[package]] +name = "gcc" +version = "0.3.38" +source = "git+https://github.com/alexcrichton/gcc-rs#be620ac6d3ddb498cd0c700d5312c6a4c3c19597" + +[[package]] +name = "gcc" +version = "0.3.38" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "getopts" +version = "0.0.0" + +[[package]] +name = "getopts" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "graphviz" +version = "0.0.0" + +[[package]] +name = "idna" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "matches 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", + "unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "kernel32-sys" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "libc" +version = "0.0.0" +dependencies = [ + "core 0.0.0", +] + +[[package]] +name = "libc" +version = "0.2.17" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "linkchecker" +version = "0.1.0" +dependencies = [ + "url 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "log" +version = "0.0.0" + +[[package]] +name = "log" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "matches" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "md5" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "num_cpus" +version = "0.2.13" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "panic_abort" +version = "0.0.0" +dependencies = [ + "core 0.0.0", + "libc 0.0.0", +] + +[[package]] +name = "panic_unwind" 
+version = "0.0.0" +dependencies = [ + "alloc 0.0.0", + "core 0.0.0", + "libc 0.0.0", + "unwind 0.0.0", +] + +[[package]] +name = "proc_macro" +version = "0.0.0" +dependencies = [ + "syntax 0.0.0", +] + +[[package]] +name = "proc_macro_plugin" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "proc_macro_tokens 0.0.0", + "rustc_plugin 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "proc_macro_tokens" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rand" +version = "0.0.0" +dependencies = [ + "core 0.0.0", +] + +[[package]] +name = "rustbook" +version = "0.0.0" + +[[package]] +name = "rustc" +version = "0.0.0" +dependencies = [ + "arena 0.0.0", + "flate 0.0.0", + "fmt_macros 0.0.0", + "graphviz 0.0.0", + "log 0.0.0", + "rustc_back 0.0.0", + "rustc_bitflags 0.0.0", + "rustc_const_math 0.0.0", + "rustc_data_structures 0.0.0", + "rustc_errors 0.0.0", + "rustc_llvm 0.0.0", + "serialize 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc-main" +version = "0.0.0" +dependencies = [ + "rustc_back 0.0.0", + "rustc_driver 0.0.0", + "rustdoc 0.0.0", +] + +[[package]] +name = "rustc-serialize" +version = "0.3.19" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "rustc_back" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "serialize 0.0.0", + "syntax 0.0.0", +] + +[[package]] +name = "rustc_bitflags" +version = "0.0.0" + +[[package]] +name = "rustc_borrowck" +version = "0.0.0" +dependencies = [ + "graphviz 0.0.0", + "log 0.0.0", + "rustc 0.0.0", + "rustc_data_structures 0.0.0", + "rustc_errors 0.0.0", + "rustc_mir 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_const_eval" +version = "0.0.0" +dependencies = [ + "arena 0.0.0", + "graphviz 0.0.0", + "log 0.0.0", + "rustc 0.0.0", + "rustc_back 0.0.0", + "rustc_const_math 0.0.0", + "rustc_data_structures 0.0.0", + "rustc_errors 0.0.0", + "serialize 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_const_math" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "serialize 0.0.0", + "syntax 0.0.0", +] + +[[package]] +name = "rustc_data_structures" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "serialize 0.0.0", +] + +[[package]] +name = "rustc_driver" +version = "0.0.0" +dependencies = [ + "arena 0.0.0", + "flate 0.0.0", + "graphviz 0.0.0", + "log 0.0.0", + "proc_macro_plugin 0.0.0", + "rustc 0.0.0", + "rustc_back 0.0.0", + "rustc_borrowck 0.0.0", + "rustc_const_eval 0.0.0", + "rustc_data_structures 0.0.0", + "rustc_errors 0.0.0", + "rustc_incremental 0.0.0", + "rustc_lint 0.0.0", + "rustc_llvm 0.0.0", + "rustc_metadata 0.0.0", + "rustc_mir 0.0.0", + "rustc_passes 0.0.0", + "rustc_plugin 0.0.0", + "rustc_privacy 0.0.0", + "rustc_resolve 0.0.0", + "rustc_save_analysis 0.0.0", + "rustc_trans 0.0.0", + "rustc_typeck 0.0.0", + "serialize 0.0.0", + "syntax 0.0.0", + "syntax_ext 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_errors" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "serialize 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_incremental" +version = "0.0.0" +dependencies = [ + "graphviz 0.0.0", + "log 0.0.0", + "rustc 0.0.0", + "rustc_data_structures 0.0.0", + "serialize 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_lint" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "rustc 0.0.0", + "rustc_back 0.0.0", + "rustc_const_eval 
0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_llvm" +version = "0.0.0" +dependencies = [ + "build_helper 0.1.0", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc_bitflags 0.0.0", +] + +[[package]] +name = "rustc_metadata" +version = "0.0.0" +dependencies = [ + "flate 0.0.0", + "log 0.0.0", + "proc_macro 0.0.0", + "rustc 0.0.0", + "rustc_back 0.0.0", + "rustc_const_math 0.0.0", + "rustc_data_structures 0.0.0", + "rustc_errors 0.0.0", + "rustc_llvm 0.0.0", + "serialize 0.0.0", + "syntax 0.0.0", + "syntax_ext 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_mir" +version = "0.0.0" +dependencies = [ + "graphviz 0.0.0", + "log 0.0.0", + "rustc 0.0.0", + "rustc_back 0.0.0", + "rustc_bitflags 0.0.0", + "rustc_const_eval 0.0.0", + "rustc_const_math 0.0.0", + "rustc_data_structures 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_passes" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "rustc 0.0.0", + "rustc_const_eval 0.0.0", + "rustc_const_math 0.0.0", + "rustc_errors 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_platform_intrinsics" +version = "0.0.0" + +[[package]] +name = "rustc_plugin" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "rustc 0.0.0", + "rustc_back 0.0.0", + "rustc_bitflags 0.0.0", + "rustc_errors 0.0.0", + "rustc_metadata 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_privacy" +version = "0.0.0" +dependencies = [ + "rustc 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_resolve" +version = "0.0.0" +dependencies = [ + "arena 0.0.0", + "log 0.0.0", + "rustc 0.0.0", + "rustc_errors 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_save_analysis" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "rustc 0.0.0", + "serialize 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_trans" +version = "0.0.0" +dependencies = [ + "arena 0.0.0", + "flate 0.0.0", + "graphviz 0.0.0", + "log 0.0.0", + "rustc 0.0.0", + "rustc_back 0.0.0", + "rustc_const_eval 0.0.0", + "rustc_const_math 0.0.0", + "rustc_data_structures 0.0.0", + "rustc_errors 0.0.0", + "rustc_incremental 0.0.0", + "rustc_llvm 0.0.0", + "rustc_platform_intrinsics 0.0.0", + "serialize 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_typeck" +version = "0.0.0" +dependencies = [ + "arena 0.0.0", + "fmt_macros 0.0.0", + "log 0.0.0", + "rustc 0.0.0", + "rustc_back 0.0.0", + "rustc_const_eval 0.0.0", + "rustc_const_math 0.0.0", + "rustc_errors 0.0.0", + "rustc_platform_intrinsics 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "rustc_unicode" +version = "0.0.0" +dependencies = [ + "core 0.0.0", +] + +[[package]] +name = "rustdoc" +version = "0.0.0" +dependencies = [ + "arena 0.0.0", + "build_helper 0.1.0", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.0.0", + "rustc 0.0.0", + "rustc_back 0.0.0", + "rustc_const_eval 0.0.0", + "rustc_const_math 0.0.0", + "rustc_data_structures 0.0.0", + "rustc_driver 0.0.0", + "rustc_errors 0.0.0", + "rustc_lint 0.0.0", + "rustc_metadata 0.0.0", + "rustc_resolve 0.0.0", + "rustc_trans 0.0.0", + "serialize 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "serialize" +version = "0.0.0" +dependencies = [ + "log 0.0.0", +] + +[[package]] +name = "std" +version = "0.0.0" +dependencies = [ + "alloc 0.0.0", + 
"alloc_jemalloc 0.0.0", + "alloc_system 0.0.0", + "build_helper 0.1.0", + "collections 0.0.0", + "compiler_builtins 0.0.0", + "core 0.0.0", + "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.0.0", + "panic_abort 0.0.0", + "panic_unwind 0.0.0", + "rand 0.0.0", + "rustc_unicode 0.0.0", + "unwind 0.0.0", +] + +[[package]] +name = "std_shim" +version = "0.1.0" +dependencies = [ + "core 0.0.0", + "std 0.0.0", +] + +[[package]] +name = "syntax" +version = "0.0.0" +dependencies = [ + "log 0.0.0", + "rustc_bitflags 0.0.0", + "rustc_errors 0.0.0", + "serialize 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "syntax_ext" +version = "0.0.0" +dependencies = [ + "fmt_macros 0.0.0", + "log 0.0.0", + "proc_macro 0.0.0", + "rustc_errors 0.0.0", + "syntax 0.0.0", + "syntax_pos 0.0.0", +] + +[[package]] +name = "syntax_pos" +version = "0.0.0" +dependencies = [ + "serialize 0.0.0", +] + +[[package]] +name = "term" +version = "0.0.0" + +[[package]] +name = "test" +version = "0.0.0" +dependencies = [ + "getopts 0.0.0", + "term 0.0.0", +] + +[[package]] +name = "test_shim" +version = "0.1.0" +dependencies = [ + "test 0.0.0", +] + +[[package]] +name = "tidy" +version = "0.1.0" + +[[package]] +name = "toml" +version = "0.1.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "unicode-bidi" +version = "0.2.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "matches 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "unicode-normalization" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "url" +version = "1.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "matches 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", +] + +[[package]] +name = "winapi" +version = "0.2.8" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "winapi-build" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" + +[metadata] +"checksum cmake 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "dfcf5bcece56ef953b8ea042509e9dcbdfe97820b7e20d86beb53df30ed94978" +"checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f" +"checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922" +"checksum gcc 0.3.38 (git+https://github.com/alexcrichton/gcc-rs)" = "" +"checksum gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)" = "553f11439bdefe755bf366b264820f1da70f3aaf3924e594b886beb9c831bcf5" +"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685" +"checksum idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1053236e00ce4f668aeca4a769a09b3bf5a682d802abd6f3cb39374f6b162c11" +"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" +"checksum libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = 
"044d1360593a78f5c8e5e710beccdc24ab71d1f01bc19a29bcacdba22e8475d8" +"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054" +"checksum matches 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "bcc3ad8109fa4b522f9b0cd81440422781f564aaf8c195de6b9d6642177ad0dd" +"checksum md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a5539a8dee9b4ae308c9c406a379838b435a8f2c84cf9fedc6d5a576be9888db" +"checksum num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "cee7e88156f3f9e19bdd598f8d6c9db7bf4078f99f8381f43a55b09648d1a6e3" +"checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b" +"checksum toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)" = "0590d72182e50e879c4da3b11c6488dae18fccb1ae0c7a3eda18e16795844796" +"checksum unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c1f7ceb96afdfeedee42bade65a0d585a6a0106f681b6749c8ff4daa8df30b3f" +"checksum unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "26643a2f83bac55f1976fb716c10234485f9202dcd65cfbdf9da49867b271172" +"checksum url 1.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9ba5a45db1d2e0effb7a1c00cc73ffc63a973da8c7d1fcd5b46f24285ade6c54" +"checksum winapi 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a" +"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" diff --git a/src/Cargo.toml b/src/Cargo.toml new file mode 100644 index 0000000000000..dbd2f7743dc08 --- /dev/null +++ b/src/Cargo.toml @@ -0,0 +1,13 @@ +[workspace] +members = [ + "bootstrap", + "rustc", + "rustc/std_shim", + "rustc/test_shim", + "tools/cargotest", + "tools/compiletest", + "tools/error_index_generator", + "tools/linkchecker", + "tools/rustbook", + "tools/tidy", +] diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock deleted file mode 100644 index 36b94e4ebea32..0000000000000 --- a/src/bootstrap/Cargo.lock +++ /dev/null @@ -1,180 +0,0 @@ -[root] -name = "bootstrap" -version = "0.0.0" -dependencies = [ - "build_helper 0.1.0", - "cmake 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", - "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", - "gcc 0.3.31 (git+https://github.com/alexcrichton/gcc-rs)", - "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", - "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", - "md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", - "num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.1.73 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", - "toml 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "aho-corasick" -version = "0.5.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = 
"build_helper" -version = "0.1.0" - -[[package]] -name = "cmake" -version = "0.1.17" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "gcc 0.3.31 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "filetime" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "gcc" -version = "0.3.31" -source = "git+https://github.com/alexcrichton/gcc-rs#b8e2400883f1a2749b323354dad372cdd1c838c7" - -[[package]] -name = "gcc" -version = "0.3.31" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "getopts" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "kernel32-sys" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "libc" -version = "0.2.10" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "md5" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "memchr" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "num_cpus" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "regex" -version = "0.1.73" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "aho-corasick 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", - "thread_local 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", - "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "regex-syntax" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "rustc-serialize" -version = "0.3.19" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "thread-id" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "thread_local" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "toml" -version = "0.1.28" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "utf8-ranges" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi" -version = "0.2.6" -source = "registry+https://github.com/rust-lang/crates.io-index" - 
-[[package]] -name = "winapi-build" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum aho-corasick 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "2b3fb52b09c1710b961acb35390d514be82e4ac96a9969a8e38565a29b878dc9" -"checksum cmake 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)" = "dfcf5bcece56ef953b8ea042509e9dcbdfe97820b7e20d86beb53df30ed94978" -"checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922" -"checksum gcc 0.3.31 (git+https://github.com/alexcrichton/gcc-rs)" = "" -"checksum gcc 0.3.31 (registry+https://github.com/rust-lang/crates.io-index)" = "cfe877476e53690ebb0ce7325d0bf43e198d9500291b54b3c65e518de5039b07" -"checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685" -"checksum kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d" -"checksum libc 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "55f3730be7e803cf350d32061958171731c2395831fbd67a61083782808183e0" -"checksum md5 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a5539a8dee9b4ae308c9c406a379838b435a8f2c84cf9fedc6d5a576be9888db" -"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" -"checksum num_cpus 0.2.11 (registry+https://github.com/rust-lang/crates.io-index)" = "51fedae97a05f7353612fe017ab705a37e6db8f4d67c5c6fe739a9e70d6eed09" -"checksum regex 0.1.73 (registry+https://github.com/rust-lang/crates.io-index)" = "56b7ee9f764ecf412c6e2fff779bca4b22980517ae335a21aeaf4e32625a5df2" -"checksum regex-syntax 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "31040aad7470ad9d8c46302dcffba337bb4289ca5da2e3cd6e37b64109a85199" -"checksum rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)" = "6159e4e6e559c81bd706afe9c8fd68f547d3e851ce12e76b1de7914bab61691b" -"checksum thread-id 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "a9539db560102d1cef46b8b78ce737ff0bb64e7e18d35b2a5688f7d097d0ff03" -"checksum thread_local 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "55dd963dbaeadc08aa7266bf7f91c3154a7805e32bb94b820b769d2ef3b4744d" -"checksum toml 0.1.28 (registry+https://github.com/rust-lang/crates.io-index)" = "fcd27a04ca509aff336ba5eb2abc58d456f52c4ff64d9724d88acb85ead560b6" -"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f" -"checksum winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "4dfaaa8fbdaa618fa6914b59b2769d690dd7521920a18d84b42d254678dd5fd4" -"checksum winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc" diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml index b19545590b9c0..c96690754387b 100644 --- a/src/bootstrap/Cargo.toml +++ b/src/bootstrap/Cargo.toml @@ -27,9 +27,10 @@ num_cpus = "0.2" toml = "0.1" getopts = "0.2" rustc-serialize = "0.3" -winapi = "0.2" -kernel32-sys = "0.2" gcc = { git = "https://github.com/alexcrichton/gcc-rs" } libc = "0.2" md5 = "0.1" -regex = "0.1.73" + +[target.'cfg(windows)'.dependencies] +winapi = 
"0.2" +kernel32-sys = "0.2" diff --git a/src/bootstrap/bin/rustc.rs b/src/bootstrap/bin/rustc.rs index 175e32125f272..879eca60cc751 100644 --- a/src/bootstrap/bin/rustc.rs +++ b/src/bootstrap/bin/rustc.rs @@ -36,8 +36,9 @@ fn main() { let args = env::args_os().skip(1).collect::>(); // Detect whether or not we're a build script depending on whether --target // is passed (a bit janky...) - let target = args.windows(2).find(|w| &*w[0] == "--target") - .and_then(|w| w[1].to_str()); + let target = args.windows(2) + .find(|w| &*w[0] == "--target") + .and_then(|w| w[1].to_str()); let version = args.iter().find(|w| &**w == "-vV"); // Build scripts always use the snapshot compiler which is guaranteed to be @@ -55,23 +56,24 @@ fn main() { } else { ("RUSTC_REAL", "RUSTC_LIBDIR") }; - let stage = env::var("RUSTC_STAGE").unwrap(); + let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set"); - let rustc = env::var_os(rustc).unwrap(); - let libdir = env::var_os(libdir).unwrap(); + let rustc = env::var_os(rustc).unwrap_or_else(|| panic!("{:?} was not set", rustc)); + let libdir = env::var_os(libdir).unwrap_or_else(|| panic!("{:?} was not set", libdir)); let mut dylib_path = bootstrap::util::dylib_path(); dylib_path.insert(0, PathBuf::from(libdir)); let mut cmd = Command::new(rustc); cmd.args(&args) - .arg("--cfg").arg(format!("stage{}", stage)) - .env(bootstrap::util::dylib_path_var(), - env::join_paths(&dylib_path).unwrap()); + .arg("--cfg") + .arg(format!("stage{}", stage)) + .env(bootstrap::util::dylib_path_var(), + env::join_paths(&dylib_path).unwrap()); if let Some(target) = target { // The stage0 compiler has a special sysroot distinct from what we // actually downloaded, so we just always pass the `--sysroot` option. - cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").unwrap()); + cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").expect("RUSTC_SYSROOT was not set")); // When we build Rust dylibs they're all intended for intermediate // usage, so make sure we pass the -Cprefer-dynamic flag instead of @@ -101,11 +103,9 @@ fn main() { // This... is a bit of a hack how we detect this. Ideally this // information should be encoded in the crate I guess? Would likely // require an RFC amendment to RFC 1513, however. - let is_panic_abort = args.windows(2).any(|a| { - &*a[0] == "--crate-name" && &*a[1] == "panic_abort" - }); - // FIXME(stage0): remove this `stage != "0"` condition - if is_panic_abort && stage != "0" { + let is_panic_abort = args.windows(2) + .any(|a| &*a[0] == "--crate-name" && &*a[1] == "panic_abort"); + if is_panic_abort { cmd.arg("-C").arg("panic=abort"); } @@ -113,9 +113,11 @@ fn main() { // code. if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) { cmd.arg("-g"); + } else if env::var("RUSTC_DEBUGINFO_LINES") == Ok("true".to_string()) { + cmd.arg("-Cdebuginfo=1"); } let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") { - Ok(s) => if s == "true" {"y"} else {"n"}, + Ok(s) => if s == "true" { "y" } else { "n" }, Err(..) 
=> "n", }; cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions)); diff --git a/src/bootstrap/bin/rustdoc.rs b/src/bootstrap/bin/rustdoc.rs index 79629bfd71711..67358e540dad0 100644 --- a/src/bootstrap/bin/rustdoc.rs +++ b/src/bootstrap/bin/rustdoc.rs @@ -20,21 +20,23 @@ use std::path::PathBuf; fn main() { let args = env::args_os().skip(1).collect::>(); - let rustdoc = env::var_os("RUSTDOC_REAL").unwrap(); - let libdir = env::var_os("RUSTC_LIBDIR").unwrap(); + let rustdoc = env::var_os("RUSTDOC_REAL").expect("RUSTDOC_REAL was not set"); + let libdir = env::var_os("RUSTC_LIBDIR").expect("RUSTC_LIBDIR was not set"); + let stage = env::var("RUSTC_STAGE").expect("RUSTC_STAGE was not set"); let mut dylib_path = bootstrap::util::dylib_path(); dylib_path.insert(0, PathBuf::from(libdir)); let mut cmd = Command::new(rustdoc); cmd.args(&args) - .arg("--cfg").arg(format!("stage{}", env::var("RUSTC_STAGE").unwrap())) - .arg("--cfg").arg("dox") - .env(bootstrap::util::dylib_path_var(), - env::join_paths(&dylib_path).unwrap()); + .arg("--cfg") + .arg(format!("stage{}", stage)) + .arg("--cfg") + .arg("dox") + .env(bootstrap::util::dylib_path_var(), + env::join_paths(&dylib_path).unwrap()); std::process::exit(match cmd.status() { Ok(s) => s.code().unwrap_or(1), Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e), }) } - diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 17a7c9ca66a26..2c2260a8e60c7 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -131,7 +131,8 @@ def stage0_data(rust_root): def format_build_time(duration): return str(datetime.timedelta(seconds=int(duration))) -class RustBuild: + +class RustBuild(object): def download_stage0(self): cache_dst = os.path.join(self.build_dir, "cache") rustc_cache = os.path.join(cache_dst, self.stage0_rustc_date()) @@ -142,7 +143,7 @@ def download_stage0(self): os.makedirs(cargo_cache) if self.rustc().startswith(self.bin_root()) and \ - (not os.path.exists(self.rustc()) or self.rustc_out_of_date()): + (not os.path.exists(self.rustc()) or self.rustc_out_of_date()): if os.path.exists(self.bin_root()): shutil.rmtree(self.bin_root()) channel = self.stage0_rustc_channel() @@ -165,7 +166,7 @@ def download_stage0(self): f.write(self.stage0_rustc_date()) if self.cargo().startswith(self.bin_root()) and \ - (not os.path.exists(self.cargo()) or self.cargo_out_of_date()): + (not os.path.exists(self.cargo()) or self.cargo_out_of_date()): channel = self.stage0_cargo_channel() filename = "cargo-{}-{}.tar.gz".format(channel, self.build) url = "https://static.rust-lang.org/cargo-dist/" + self.stage0_cargo_date() @@ -238,8 +239,8 @@ def rustc(self): def get_string(self, line): start = line.find('"') - end = start + 1 + line[start+1:].find('"') - return line[start+1:end] + end = start + 1 + line[start + 1:].find('"') + return line[start + 1:end] def exe_suffix(self): if sys.platform == 'win32': @@ -269,6 +270,7 @@ def run(self, args, env): sys.exit(ret) def build_triple(self): + default_encoding = sys.getdefaultencoding() config = self.get_toml('build') if config: return config @@ -276,8 +278,8 @@ def build_triple(self): if config: return config try: - ostype = subprocess.check_output(['uname', '-s']).strip() - cputype = subprocess.check_output(['uname', '-m']).strip() + ostype = subprocess.check_output(['uname', '-s']).strip().decode(default_encoding) + cputype = subprocess.check_output(['uname', '-m']).strip().decode(default_encoding) except (subprocess.CalledProcessError, WindowsError): if sys.platform == 
'win32': return 'x86_64-pc-windows-msvc' @@ -289,7 +291,8 @@ def build_triple(self): # Darwin's `uname -s` lies and always returns i386. We have to use # sysctl instead. if ostype == 'Darwin' and cputype == 'i686': - sysctl = subprocess.check_output(['sysctl', 'hw.optional.x86_64']) + args = ['sysctl', 'hw.optional.x86_64'] + sysctl = subprocess.check_output(args).decode(default_encoding) if ': 1' in sysctl: cputype = 'x86_64' diff --git a/src/bootstrap/check.rs b/src/bootstrap/check.rs index 2b9d717cbd48d..af76a49fed045 100644 --- a/src/bootstrap/check.rs +++ b/src/bootstrap/check.rs @@ -13,19 +13,44 @@ //! This file implements the various regression test suites that we execute on //! our CI. +use std::collections::{HashMap, HashSet}; use std::env; -use std::fs::{self, File}; -use std::io::prelude::*; +use std::fs; use std::path::{PathBuf, Path}; use std::process::Command; use build_helper::output; +use rustc_serialize::json; use {Build, Compiler, Mode}; use util::{self, dylib_path, dylib_path_var}; const ADB_TEST_DIR: &'static str = "/data/tmp"; +#[derive(RustcDecodable)] +struct Output { + packages: Vec, + resolve: Resolve, +} + +#[derive(RustcDecodable)] +struct Package { + id: String, + name: String, + source: Option, +} + +#[derive(RustcDecodable)] +struct Resolve { + nodes: Vec, +} + +#[derive(RustcDecodable)] +struct ResolveNode { + id: String, + dependencies: Vec, +} + /// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler. /// /// This tool in `src/tools` will verify the validity of all our links in the @@ -108,6 +133,10 @@ pub fn compiletest(build: &Build, cmd.arg("--host").arg(compiler.host); cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build)); + if let Some(nodejs) = build.config.nodejs.as_ref() { + cmd.arg("--nodejs").arg(nodejs); + } + let mut flags = vec!["-Crpath".to_string()]; if build.config.rust_optimize_tests { flags.push("-O".to_string()); @@ -185,7 +214,7 @@ pub fn compiletest(build: &Build, } } } - build.add_bootstrap_key(compiler, &mut cmd); + build.add_bootstrap_key(&mut cmd); cmd.arg("--adb-path").arg("adb"); cmd.arg("--adb-test-dir").arg(ADB_TEST_DIR); @@ -259,56 +288,74 @@ fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) { /// It essentially is the driver for running `cargo test`. /// /// Currently this runs all tests for a DAG by passing a bunch of `-p foo` -/// arguments, and those arguments are discovered from `Cargo.lock`. +/// arguments, and those arguments are discovered from `cargo metadata`. pub fn krate(build: &Build, compiler: &Compiler, target: &str, mode: Mode) { - let (name, path, features) = match mode { - Mode::Libstd => ("libstd", "src/rustc/std_shim", build.std_features()), - Mode::Libtest => ("libtest", "src/rustc/test_shim", String::new()), - Mode::Librustc => ("librustc", "src/rustc", build.rustc_features()), + let (name, path, features, root) = match mode { + Mode::Libstd => { + ("libstd", "src/rustc/std_shim", build.std_features(), "std_shim") + } + Mode::Libtest => { + ("libtest", "src/rustc/test_shim", String::new(), "test_shim") + } + Mode::Librustc => { + ("librustc", "src/rustc", build.rustc_features(), "rustc-main") + } _ => panic!("can only test libraries"), }; println!("Testing {} stage{} ({} -> {})", name, compiler.stage, compiler.host, target); + // Run `cargo metadata` to figure out what crates we're testing. 
+ // + // Down below we're going to call `cargo test`, but to test the right set + // of packages we're going to have to know what `-p` arguments to pass it + // to know what crates to test. Here we run `cargo metadata` to learn about + // the dependency graph and what `-p` arguments there are. + let mut cargo = Command::new(&build.cargo); + cargo.arg("metadata") + .arg("--manifest-path").arg(build.src.join(path).join("Cargo.toml")); + let output = output(&mut cargo); + let output: Output = json::decode(&output).unwrap(); + let id2pkg = output.packages.iter() + .map(|pkg| (&pkg.id, pkg)) + .collect::>(); + let id2deps = output.resolve.nodes.iter() + .map(|node| (&node.id, &node.dependencies)) + .collect::>(); + // Build up the base `cargo test` command. + // + // Pass in some standard flags then iterate over the graph we've discovered + // in `cargo metadata` with the maps above and figure out what `-p` + // arguments need to get passed. let mut cargo = build.cargo(compiler, mode, target, "test"); cargo.arg("--manifest-path") .arg(build.src.join(path).join("Cargo.toml")) .arg("--features").arg(features); - // Generate a list of `-p` arguments to pass to the `cargo test` invocation - // by crawling the corresponding Cargo.lock file. - let lockfile = build.src.join(path).join("Cargo.lock"); - let mut contents = String::new(); - t!(t!(File::open(&lockfile)).read_to_string(&mut contents)); - let mut lines = contents.lines(); - while let Some(line) = lines.next() { - let prefix = "name = \""; - if !line.starts_with(prefix) { + let mut visited = HashSet::new(); + let root_pkg = output.packages.iter().find(|p| p.name == root).unwrap(); + let mut next = vec![&root_pkg.id]; + while let Some(id) = next.pop() { + // Skip any packages with sources listed, as these come from crates.io + // and we shouldn't be testing them. + if id2pkg[id].source.is_some() { continue } - lines.next(); // skip `version = ...` - - // skip crates.io or otherwise non-path crates - if let Some(line) = lines.next() { - if line.starts_with("source") { - continue - } - } - - let crate_name = &line[prefix.len()..line.len() - 1]; - // Right now jemalloc is our only target-specific crate in the sense // that it's not present on all platforms. Custom skip it here for now, // but if we add more this probably wants to get more generalized. 
- if crate_name.contains("jemalloc") { - continue + if !id.contains("jemalloc") { + cargo.arg("-p").arg(&id2pkg[id].name); + } + for dep in id2deps[id] { + if visited.insert(dep) { + next.push(dep); + } } - - cargo.arg("-p").arg(crate_name); } // The tests are going to run with the *target* libraries, so we need to @@ -323,6 +370,9 @@ pub fn krate(build: &Build, if target.contains("android") { build.run(cargo.arg("--no-run")); krate_android(build, compiler, target, mode); + } else if target.contains("emscripten") { + build.run(cargo.arg("--no-run")); + krate_emscripten(build, compiler, target, mode); } else { cargo.args(&build.flags.args); build.run(&mut cargo); @@ -371,6 +421,35 @@ fn krate_android(build: &Build, } } +fn krate_emscripten(build: &Build, + compiler: &Compiler, + target: &str, + mode: Mode) { + let mut tests = Vec::new(); + let out_dir = build.cargo_out(compiler, mode, target); + find_tests(&out_dir, target, &mut tests); + find_tests(&out_dir.join("deps"), target, &mut tests); + + for test in tests { + let test_file_name = test.to_string_lossy().into_owned(); + println!("running {}", test_file_name); + let nodejs = build.config.nodejs.as_ref().expect("nodejs not configured"); + let status = Command::new(nodejs) + .arg(&test_file_name) + .stderr(::std::process::Stdio::inherit()) + .status(); + match status { + Ok(status) => { + if !status.success() { + panic!("some tests failed"); + } + } + Err(e) => panic!(format!("failed to execute command: {}", e)), + }; + } + } + + fn find_tests(dir: &Path, target: &str, dst: &mut Vec) { @@ -381,7 +460,8 @@ fn find_tests(dir: &Path, } let filename = e.file_name().into_string().unwrap(); if (target.contains("windows") && filename.ends_with(".exe")) || - (!target.contains("windows") && !filename.contains(".")) { + (!target.contains("windows") && !filename.contains(".")) || + (target.contains("emscripten") && filename.contains(".js")){ dst.push(e.path()); } } diff --git a/src/bootstrap/compile.rs b/src/bootstrap/compile.rs index 9de438cfa7d50..ff8e4757bd1f1 100644 --- a/src/bootstrap/compile.rs +++ b/src/bootstrap/compile.rs @@ -25,7 +25,7 @@ use std::process::Command; use build_helper::output; use filetime::FileTime; -use util::{exe, staticlib, libdir, mtime, is_dylib, copy}; +use util::{exe, libdir, mtime, is_dylib, copy}; use {Build, Compiler, Mode}; /// Build the standard library. @@ -40,20 +40,6 @@ pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { let libdir = build.sysroot_libdir(compiler, target); let _ = fs::remove_dir_all(&libdir); t!(fs::create_dir_all(&libdir)); - // FIXME(stage0) remove this `if` after the next snapshot - // The stage0 compiler still passes the `-lcompiler-rt` flag to the linker but now `bootstrap` - // never builds a `libcopmiler-rt.a`! We'll fill the hole by simply copying stage0's - // `libcompiler-rt.a` to where the stage1's one is expected (though we could as well just use - // an empty `.a` archive). Note that the symbols of that stage0 `libcompiler-rt.a` won't make - // it to the final binary because now `libcore.rlib` also contains the symbols that - // `libcompiler-rt.a` provides. Since that rlib appears first in the linker arguments, its - // symbols are used instead of `libcompiler-rt.a`'s. 
- if compiler.stage == 0 { - let rtlib = &staticlib("compiler-rt", target); - let src = build.rustc.parent().unwrap().parent().unwrap().join("lib").join("rustlib") - .join(target).join("lib").join(rtlib); - copy(&src, &libdir.join(rtlib)); - } // Some platforms have startup objects that may be required to produce the // libstd dynamic library, for example. @@ -104,16 +90,16 @@ pub fn std_link(build: &Build, add_to_sysroot(&out_dir, &libdir); if target.contains("musl") && !target.contains("mips") { - copy_musl_third_party_objects(build, &libdir); + copy_musl_third_party_objects(build, target, &libdir); } } /// Copies the crt(1,i,n).o startup objects /// /// Only required for musl targets that statically link to libc -fn copy_musl_third_party_objects(build: &Build, into: &Path) { +fn copy_musl_third_party_objects(build: &Build, target: &str, into: &Path) { for &obj in &["crt1.o", "crti.o", "crtn.o"] { - copy(&build.config.musl_root.as_ref().unwrap().join("lib").join(obj), &into.join(obj)); + copy(&build.musl_root(target).unwrap().join("lib").join(obj), &into.join(obj)); } } @@ -133,7 +119,7 @@ fn build_startup_objects(build: &Build, target: &str, into: &Path) { for file in t!(fs::read_dir(build.src.join("src/rtstartup"))) { let file = t!(file); let mut cmd = Command::new(&compiler_path); - build.add_bootstrap_key(&compiler, &mut cmd); + build.add_bootstrap_key(&mut cmd); build.run(cmd.arg("--target").arg(target) .arg("--emit=obj") .arg("--out-dir").arg(into) @@ -199,7 +185,6 @@ pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { cargo.env("CFG_RELEASE", &build.release) .env("CFG_RELEASE_CHANNEL", &build.config.channel) .env("CFG_VERSION", &build.version) - .env("CFG_BOOTSTRAP_KEY", &build.bootstrap_key) .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(String::new())) .env("CFG_LIBDIR_RELATIVE", "lib"); diff --git a/src/bootstrap/config.rs b/src/bootstrap/config.rs index 0f69bcfbb649d..8c0ad1ccf825f 100644 --- a/src/bootstrap/config.rs +++ b/src/bootstrap/config.rs @@ -56,6 +56,7 @@ pub struct Config { pub rust_codegen_units: u32, pub rust_debug_assertions: bool, pub rust_debuginfo: bool, + pub rust_debuginfo_lines: bool, pub rust_rpath: bool, pub rustc_default_linker: Option, pub rustc_default_ar: Option, @@ -79,6 +80,9 @@ pub struct Config { // Fallback musl-root for all targets pub musl_root: Option, pub prefix: Option, + pub docdir: Option, + pub libdir: Option, + pub mandir: Option, pub codegen_tests: bool, pub nodejs: Option, } @@ -117,6 +121,7 @@ struct Build { rustc: Option, compiler_docs: Option, docs: Option, + submodules: Option, } /// TOML representation of how the LLVM build is configured. 
@@ -137,6 +142,7 @@ struct Rust { codegen_units: Option, debug_assertions: Option, debuginfo: Option, + debuginfo_lines: Option, debug_jemalloc: Option, use_jemalloc: Option, backtrace: Option, @@ -158,6 +164,7 @@ struct TomlTarget { cc: Option, cxx: Option, android_ndk: Option, + musl_root: Option, } impl Config { @@ -221,6 +228,7 @@ impl Config { config.cargo = build.cargo.map(PathBuf::from); set(&mut config.compiler_docs, build.compiler_docs); set(&mut config.docs, build.docs); + set(&mut config.submodules, build.submodules); if let Some(ref llvm) = toml.llvm { set(&mut config.ccache, llvm.ccache); @@ -233,6 +241,7 @@ impl Config { if let Some(ref rust) = toml.rust { set(&mut config.rust_debug_assertions, rust.debug_assertions); set(&mut config.rust_debuginfo, rust.debuginfo); + set(&mut config.rust_debuginfo_lines, rust.debuginfo_lines); set(&mut config.rust_optimize, rust.optimize); set(&mut config.rust_optimize_tests, rust.optimize_tests); set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests); @@ -268,6 +277,7 @@ impl Config { } target.cxx = cfg.cxx.clone().map(PathBuf::from); target.cc = cfg.cc.clone().map(PathBuf::from); + target.musl_root = cfg.musl_root.clone().map(PathBuf::from); config.target_config.insert(triple.clone(), target); } @@ -322,6 +332,7 @@ impl Config { ("OPTIMIZE", self.rust_optimize), ("DEBUG_ASSERTIONS", self.rust_debug_assertions), ("DEBUGINFO", self.rust_debuginfo), + ("DEBUGINFO_LINES", self.rust_debuginfo_lines), ("JEMALLOC", self.use_jemalloc), ("DEBUG_JEMALLOC", self.debug_jemalloc), ("RPATH", self.rust_rpath), @@ -345,6 +356,36 @@ impl Config { "CFG_MUSL_ROOT" if value.len() > 0 => { self.musl_root = Some(PathBuf::from(value)); } + "CFG_MUSL_ROOT_X86_64" if value.len() > 0 => { + let target = "x86_64-unknown-linux-musl".to_string(); + let target = self.target_config.entry(target) + .or_insert(Target::default()); + target.musl_root = Some(PathBuf::from(value)); + } + "CFG_MUSL_ROOT_I686" if value.len() > 0 => { + let target = "i686-unknown-linux-musl".to_string(); + let target = self.target_config.entry(target) + .or_insert(Target::default()); + target.musl_root = Some(PathBuf::from(value)); + } + "CFG_MUSL_ROOT_ARM" if value.len() > 0 => { + let target = "arm-unknown-linux-musleabi".to_string(); + let target = self.target_config.entry(target) + .or_insert(Target::default()); + target.musl_root = Some(PathBuf::from(value)); + } + "CFG_MUSL_ROOT_ARMHF" if value.len() > 0 => { + let target = "arm-unknown-linux-musleabihf".to_string(); + let target = self.target_config.entry(target) + .or_insert(Target::default()); + target.musl_root = Some(PathBuf::from(value)); + } + "CFG_MUSL_ROOT_ARMV7" if value.len() > 0 => { + let target = "armv7-unknown-linux-musleabihf".to_string(); + let target = self.target_config.entry(target) + .or_insert(Target::default()); + target.musl_root = Some(PathBuf::from(value)); + } "CFG_DEFAULT_AR" if value.len() > 0 => { self.rustc_default_ar = Some(value.to_string()); } @@ -357,6 +398,15 @@ impl Config { "CFG_PREFIX" => { self.prefix = Some(value.to_string()); } + "CFG_DOCDIR" => { + self.docdir = Some(value.to_string()); + } + "CFG_LIBDIR" => { + self.libdir = Some(value.to_string()); + } + "CFG_MANDIR" => { + self.mandir = Some(value.to_string()); + } "CFG_LLVM_ROOT" if value.len() > 0 => { let target = self.target_config.entry(self.build.clone()) .or_insert(Target::default()); @@ -396,9 +446,6 @@ impl Config { self.rustc = Some(PathBuf::from(value).join("bin/rustc")); self.cargo = 
Some(PathBuf::from(value).join("bin/cargo")); } - "CFG_NODEJS" if value.len() > 0 => { - self.nodejs = Some(PathBuf::from(value)); - } _ => {} } } diff --git a/src/bootstrap/config.toml.example b/src/bootstrap/config.toml.example index f054b29d0b140..b4730c003d646 100644 --- a/src/bootstrap/config.toml.example +++ b/src/bootstrap/config.toml.example @@ -76,6 +76,9 @@ # library and facade crates. #compiler-docs = false +# Indicate whether submodules are managed and updated automatically. +#submodules = true + # ============================================================================= # Options for compiling Rust code itself # ============================================================================= @@ -96,6 +99,9 @@ # Whether or not debuginfo is emitted #debuginfo = false +# Whether or not line number debug information is emitted +#debuginfo-lines = false + # Whether or not jemalloc is built and enabled #use-jemalloc = true diff --git a/src/bootstrap/dist.rs b/src/bootstrap/dist.rs index 31b7db168b48f..8676f5cc4a1ed 100644 --- a/src/bootstrap/dist.rs +++ b/src/bootstrap/dist.rs @@ -25,9 +25,8 @@ use std::process::Command; use {Build, Compiler}; use util::{cp_r, libdir, is_dylib, cp_filtered, copy}; -use regex::{RegexSet, quote}; -fn package_vers(build: &Build) -> &str { +pub fn package_vers(build: &Build) -> &str { match &build.config.channel[..] { "stable" => &build.release, "beta" => "beta", @@ -40,7 +39,7 @@ fn distdir(build: &Build) -> PathBuf { build.out.join("dist") } -fn tmpdir(build: &Build) -> PathBuf { +pub fn tmpdir(build: &Build) -> PathBuf { build.out.join("tmp/dist") } @@ -315,49 +314,31 @@ pub fn rust_src(build: &Build) { "mk" ]; - // Exclude paths matching these wildcard expressions - let excludes = [ - // exclude-vcs - "CVS", "RCS", "SCCS", ".git", ".gitignore", ".gitmodules", ".gitattributes", ".cvsignore", - ".svn", ".arch-ids", "{arch}", "=RELEASE-ID", "=meta-update", "=update", ".bzr", - ".bzrignore", ".bzrtags", ".hg", ".hgignore", ".hgrags", "_darcs", - // extensions - "*~", "*.pyc", - // misc - "llvm/test/*/*.ll", - "llvm/test/*/*.td", - "llvm/test/*/*.s", - "llvm/test/*/*/*.ll", - "llvm/test/*/*/*.td", - "llvm/test/*/*/*.s" - ]; - - // Construct a set of regexes for efficiently testing whether paths match one of the above - // expressions. - let regex_set = t!(RegexSet::new( - // This converts a wildcard expression to a regex - excludes.iter().map(|&s| { - // Prefix ensures that matching starts on a path separator boundary - r"^(.*[\\/])?".to_owned() + ( - // Escape the expression to produce a regex matching exactly that string - "e(s) - // Replace slashes with a pattern matching either forward or backslash - .replace(r"/", r"[\\/]") - // Replace wildcards with a pattern matching a single path segment, ie. containing - // no slashes. 
- .replace(r"\*", r"[^\\/]*") - // Suffix anchors to the end of the path - ) + "$" - }) - )); - - // Create a filter which skips files which match the regex set or contain invalid unicode let filter_fn = move |path: &Path| { - if let Some(path) = path.to_str() { - !regex_set.is_match(path) - } else { - false + let spath = match path.to_str() { + Some(path) => path, + None => return false, + }; + if spath.ends_with("~") || spath.ends_with(".pyc") { + return false } + if spath.contains("llvm/test") || spath.contains("llvm\\test") { + if spath.ends_with(".ll") || + spath.ends_with(".td") || + spath.ends_with(".s") { + return false + } + } + + let excludes = [ + "CVS", "RCS", "SCCS", ".git", ".gitignore", ".gitmodules", + ".gitattributes", ".cvsignore", ".svn", ".arch-ids", "{arch}", + "=RELEASE-ID", "=meta-update", "=update", ".bzr", ".bzrignore", + ".bzrtags", ".hg", ".hgignore", ".hgrags", "_darcs", + ]; + !path.iter() + .map(|s| s.to_str().unwrap()) + .any(|s| excludes.contains(&s)) }; // Copy the directories using our filter @@ -418,7 +399,7 @@ fn chmod(_path: &Path, _perms: u32) {} // We have to run a few shell scripts, which choke quite a bit on both `\` // characters and on `C:\` paths, so normalize both of them away. -fn sanitize_sh(path: &Path) -> String { +pub fn sanitize_sh(path: &Path) -> String { let path = path.to_str().unwrap().replace("\\", "/"); return change_drive(&path).unwrap_or(path); diff --git a/src/bootstrap/install.rs b/src/bootstrap/install.rs new file mode 100644 index 0000000000000..9bc5a7c00abaf --- /dev/null +++ b/src/bootstrap/install.rs @@ -0,0 +1,61 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Implementation of the install aspects of the compiler. +//! +//! This module is responsible for installing the standard library, +//! compiler, and documentation. + +use std::fs; +use std::borrow::Cow; +use std::path::Path; +use std::process::Command; + +use Build; +use dist::{package_vers, sanitize_sh, tmpdir}; + +/// Installs everything. 
+pub fn install(build: &Build, stage: u32, host: &str) { + let prefix = build.config.prefix.as_ref().clone().map(|x| Path::new(x)) + .unwrap_or(Path::new("/usr/local")); + let docdir = build.config.docdir.as_ref().clone().map(|x| Cow::Borrowed(Path::new(x))) + .unwrap_or(Cow::Owned(prefix.join("share/doc/rust"))); + let libdir = build.config.libdir.as_ref().clone().map(|x| Cow::Borrowed(Path::new(x))) + .unwrap_or(Cow::Owned(prefix.join("lib"))); + let mandir = build.config.mandir.as_ref().clone().map(|x| Cow::Borrowed(Path::new(x))) + .unwrap_or(Cow::Owned(prefix.join("share/man"))); + let empty_dir = build.out.join("tmp/empty_dir"); + t!(fs::create_dir_all(&empty_dir)); + if build.config.docs { + install_sh(&build, "docs", "rust-docs", stage, host, prefix, + &docdir, &libdir, &mandir, &empty_dir); + } + install_sh(&build, "std", "rust-std", stage, host, prefix, + &docdir, &libdir, &mandir, &empty_dir); + install_sh(&build, "rustc", "rustc", stage, host, prefix, + &docdir, &libdir, &mandir, &empty_dir); + t!(fs::remove_dir_all(&empty_dir)); +} + +fn install_sh(build: &Build, package: &str, name: &str, stage: u32, host: &str, + prefix: &Path, docdir: &Path, libdir: &Path, mandir: &Path, empty_dir: &Path) { + println!("Install {} stage{} ({})", package, stage, host); + let package_name = format!("{}-{}-{}", name, package_vers(build), host); + + let mut cmd = Command::new("sh"); + cmd.current_dir(empty_dir) + .arg(sanitize_sh(&tmpdir(build).join(&package_name).join("install.sh"))) + .arg(format!("--prefix={}", sanitize_sh(prefix))) + .arg(format!("--docdir={}", sanitize_sh(docdir))) + .arg(format!("--libdir={}", sanitize_sh(libdir))) + .arg(format!("--mandir={}", sanitize_sh(mandir))) + .arg("--disable-ldconfig"); + build.run(&mut cmd); +} diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index c5dbb2a0319f2..7c5a0c7373f88 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -26,7 +26,6 @@ extern crate md5; extern crate num_cpus; extern crate rustc_serialize; extern crate toml; -extern crate regex; use std::collections::HashMap; use std::env; @@ -62,6 +61,7 @@ mod config; mod dist; mod doc; mod flags; +mod install; mod native; mod sanity; mod step; @@ -220,14 +220,14 @@ impl Build { sanity::check(self); self.verbose("collecting channel variables"); channel::collect(self); - // If local-rust is the same as the current version, then force a local-rebuild + // If local-rust is the same major.minor as the current version, then force a local-rebuild let local_version_verbose = output( Command::new(&self.rustc).arg("--version").arg("--verbose")); let local_release = local_version_verbose .lines().filter(|x| x.starts_with("release:")) .next().unwrap().trim_left_matches("release:").trim(); - if local_release == self.release { - self.verbose(&format!("auto-detected local-rebuild {}", self.release)); + if local_release.split('.').take(2).eq(self.release.split('.').take(2)) { + self.verbose(&format!("auto-detected local-rebuild {}", local_release)); self.local_rebuild = true; } self.verbose("updating submodules"); @@ -243,7 +243,14 @@ impl Build { // Almost all of these are simple one-liners that shell out to the // corresponding functionality in the extra modules, where more // documentation can be found. 
- for target in step::all(self) { + let steps = step::all(self); + + self.verbose("bootstrap build plan:"); + for step in &steps { + self.verbose(&format!("{:?}", step)); + } + + for target in steps { let doc_out = self.out.join(&target.target).join("doc"); match target.src { Llvm { _dummy } => { @@ -446,6 +453,8 @@ impl Build { DistStd { compiler } => dist::std(self, &compiler, target.target), DistSrc { _dummy } => dist::rust_src(self), + Install { stage } => install::install(self, stage, target.target), + DebuggerScripts { stage } => { let compiler = Compiler::new(stage, target.target); dist::debugger_scripts(self, @@ -550,12 +559,23 @@ impl Build { continue } + // `submodule.path` is the relative path to a submodule (from the repository root) + // `submodule_path` is the path to a submodule from the cwd + + // use `submodule.path` when e.g. executing a submodule specific command from the + // repository root + // use `submodule_path` when e.g. executing a normal git command for the submodule + // (set via `current_dir`) + let submodule_path = self.src.join(submodule.path); + match submodule.state { State::MaybeDirty => { // drop staged changes - self.run(git().arg("-C").arg(submodule.path).args(&["reset", "--hard"])); + self.run(git().current_dir(&submodule_path) + .args(&["reset", "--hard"])); // drops unstaged changes - self.run(git().arg("-C").arg(submodule.path).args(&["clean", "-fdx"])); + self.run(git().current_dir(&submodule_path) + .args(&["clean", "-fdx"])); }, State::NotInitialized => { self.run(git_submodule().arg("init").arg(submodule.path)); @@ -564,8 +584,10 @@ impl Build { State::OutOfSync => { // drops submodule commits that weren't reported to the (outer) git repository self.run(git_submodule().arg("update").arg(submodule.path)); - self.run(git().arg("-C").arg(submodule.path).args(&["reset", "--hard"])); - self.run(git().arg("-C").arg(submodule.path).args(&["clean", "-fdx"])); + self.run(git().current_dir(&submodule_path) + .args(&["reset", "--hard"])); + self.run(git().current_dir(&submodule_path) + .args(&["clean", "-fdx"])); }, } } @@ -627,6 +649,7 @@ impl Build { .env("RUSTC_REAL", self.compiler_path(compiler)) .env("RUSTC_STAGE", stage.to_string()) .env("RUSTC_DEBUGINFO", self.config.rust_debuginfo.to_string()) + .env("RUSTC_DEBUGINFO_LINES", self.config.rust_debuginfo_lines.to_string()) .env("RUSTC_CODEGEN_UNITS", self.config.rust_codegen_units.to_string()) .env("RUSTC_DEBUG_ASSERTIONS", @@ -640,7 +663,7 @@ impl Build { .env("RUSTDOC_REAL", self.rustdoc(compiler)) .env("RUSTC_FLAGS", self.rustc_flags(target).join(" ")); - self.add_bootstrap_key(compiler, &mut cargo); + self.add_bootstrap_key(&mut cargo); // Specify some various options for build scripts used throughout // the build. @@ -652,12 +675,6 @@ impl Build { .env(format!("CFLAGS_{}", target), self.cflags(target).join(" ")); } - // If we're building for OSX, inform the compiler and the linker that - // we want to build a compiler runnable on 10.7 - if target.contains("apple-darwin") { - cargo.env("MACOSX_DEPLOYMENT_TARGET", "10.7"); - } - // Environment variables *required* needed throughout the build // // FIXME: should update code to not require this env var @@ -855,16 +872,11 @@ impl Build { } /// Adds the compiler's bootstrap key to the environment of `cmd`. - fn add_bootstrap_key(&self, compiler: &Compiler, cmd: &mut Command) { - // In stage0 we're using a previously released stable compiler, so we - // use the stage0 bootstrap key. Otherwise we use our own build's - // bootstrap key. 
- let bootstrap_key = if compiler.is_snapshot(self) && !self.local_rebuild { - &self.bootstrap_key_stage0 - } else { - &self.bootstrap_key - }; - cmd.env("RUSTC_BOOTSTRAP_KEY", bootstrap_key); + fn add_bootstrap_key(&self, cmd: &mut Command) { + cmd.env("RUSTC_BOOTSTRAP", ""); + // FIXME: Transitionary measure to bootstrap using the old bootstrap logic. + // Remove this once the bootstrap compiler uses the new logic in Issue #36548. + cmd.env("RUSTC_BOOTSTRAP_KEY", "62b3e239"); } /// Returns the compiler's libdir where it stores the dynamic libraries that @@ -926,7 +938,6 @@ impl Build { // LLVM/jemalloc/etc are all properly compiled. if target.contains("apple-darwin") { base.push("-stdlib=libc++".into()); - base.push("-mmacosx-version-min=10.7".into()); } // This is a hack, because newer binutils broke things on some vms/distros // (i.e., linking against unknown relocs disabled by the following flag) @@ -950,7 +961,11 @@ impl Build { /// Returns the path to the C++ compiler for the target specified, may panic /// if no C++ compiler was configured for the target. fn cxx(&self, target: &str) -> &Path { - self.cxx[target].path() + match self.cxx.get(target) { + Some(p) => p.path(), + None => panic!("\n\ntarget `{}` is not configured as a host, + only as a target\n\n", target), + } } /// Returns flags to pass to the compiler to generate code for `target`. @@ -963,7 +978,8 @@ impl Build { // than an entry here. let mut base = Vec::new(); - if target != self.config.build && !target.contains("msvc") { + if target != self.config.build && !target.contains("msvc") && + !target.contains("emscripten") { base.push(format!("-Clinker={}", self.cc(target).display())); } return base @@ -971,7 +987,8 @@ impl Build { /// Returns the "musl root" for this `target`, if defined fn musl_root(&self, target: &str) -> Option<&Path> { - self.config.target_config[target].musl_root.as_ref() + self.config.target_config.get(target) + .and_then(|t| t.musl_root.as_ref()) .or(self.config.musl_root.as_ref()) .map(|p| &**p) } diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in index cc44d45c2cc75..0762ed98472be 100644 --- a/src/bootstrap/mk/Makefile.in +++ b/src/bootstrap/mk/Makefile.in @@ -51,6 +51,12 @@ check-cargotest: $(Q)$(BOOTSTRAP) --step check-cargotest dist: $(Q)$(BOOTSTRAP) --step dist +install: +ifeq (root user, $(USER) $(patsubst %,user,$(SUDO_USER))) + $(Q)echo "'sudo make install' is not supported currently." +else + $(Q)$(BOOTSTRAP) --step install +endif tidy: $(Q)$(BOOTSTRAP) --step check-tidy --stage 0 diff --git a/src/bootstrap/native.rs b/src/bootstrap/native.rs index df6408e5fe1c8..1b4e86fb30f25 100644 --- a/src/bootstrap/native.rs +++ b/src/bootstrap/native.rs @@ -18,9 +18,10 @@ //! LLVM and compiler-rt are essentially just wired up to everything else to //! ensure that they're always in place if needed. +use std::fs::{self, File}; +use std::io::{Read, Write}; use std::path::Path; use std::process::Command; -use std::fs::{self, File}; use build_helper::output; use cmake; @@ -43,11 +44,17 @@ pub fn llvm(build: &Build, target: &str) { // artifacts are missing) then we keep going, otherwise we bail out. 
let dst = build.llvm_out(target); let stamp = build.src.join("src/rustllvm/llvm-auto-clean-trigger"); + let mut stamp_contents = String::new(); + t!(t!(File::open(&stamp)).read_to_string(&mut stamp_contents)); let done_stamp = dst.join("llvm-finished-building"); - build.clear_if_dirty(&dst, &stamp); - if fs::metadata(&done_stamp).is_ok() { - return + if done_stamp.exists() { + let mut done_contents = String::new(); + t!(t!(File::open(&done_stamp)).read_to_string(&mut done_contents)); + if done_contents == stamp_contents { + return + } } + drop(fs::remove_dir_all(&dst)); println!("Building LLVM for {}", target); @@ -65,7 +72,7 @@ pub fn llvm(build: &Build, target: &str) { .out_dir(&dst) .profile(if build.config.llvm_optimize {"Release"} else {"Debug"}) .define("LLVM_ENABLE_ASSERTIONS", assertions) - .define("LLVM_TARGETS_TO_BUILD", "X86;ARM;AArch64;Mips;PowerPC;SystemZ") + .define("LLVM_TARGETS_TO_BUILD", "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend") .define("LLVM_INCLUDE_EXAMPLES", "OFF") .define("LLVM_INCLUDE_TESTS", "OFF") .define("LLVM_INCLUDE_DOCS", "OFF") @@ -73,7 +80,9 @@ pub fn llvm(build: &Build, target: &str) { .define("WITH_POLLY", "OFF") .define("LLVM_ENABLE_TERMINFO", "OFF") .define("LLVM_ENABLE_LIBEDIT", "OFF") - .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string()); + .define("LLVM_PARALLEL_COMPILE_JOBS", build.jobs().to_string()) + .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap()) + .define("LLVM_DEFAULT_TARGET_TRIPLE", target); if target.starts_with("i686") { cfg.define("LLVM_BUILD_32_BITS", "ON"); @@ -86,9 +95,7 @@ pub fn llvm(build: &Build, target: &str) { // actually exists most of the time in normal installs of LLVM. let host = build.llvm_out(&build.config.build).join("bin/llvm-tblgen"); cfg.define("CMAKE_CROSSCOMPILING", "True") - .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap()) - .define("LLVM_TABLEGEN", &host) - .define("LLVM_DEFAULT_TARGET_TRIPLE", target); + .define("LLVM_TABLEGEN", &host); } // MSVC handles compiler business itself @@ -114,7 +121,7 @@ pub fn llvm(build: &Build, target: &str) { // tools and libs on all platforms. cfg.build(); - t!(File::create(&done_stamp)); + t!(t!(File::create(&done_stamp)).write_all(stamp_contents.as_bytes())); } fn check_llvm_version(build: &Build, llvm_config: &Path) { diff --git a/src/bootstrap/sanity.rs b/src/bootstrap/sanity.rs index 05c35543e3e5b..969cd70fd57eb 100644 --- a/src/bootstrap/sanity.rs +++ b/src/bootstrap/sanity.rs @@ -40,17 +40,23 @@ pub fn check(build: &mut Build) { panic!("PATH contains invalid character '\"'"); } } - let mut need_cmd = |cmd: &OsStr| { - if !checked.insert(cmd.to_owned()) { - return - } + let have_cmd = |cmd: &OsStr| { for path in env::split_paths(&path).map(|p| p.join(cmd)) { if fs::metadata(&path).is_ok() || fs::metadata(path.with_extension("exe")).is_ok() { - return + return Some(path); } } - panic!("\n\ncouldn't find required command: {:?}\n\n", cmd); + return None; + }; + + let mut need_cmd = |cmd: &OsStr| { + if !checked.insert(cmd.to_owned()) { + return + } + if have_cmd(cmd).is_none() { + panic!("\n\ncouldn't find required command: {:?}\n\n", cmd); + } }; // If we've got a git directory we're gona need git to update @@ -75,8 +81,13 @@ pub fn check(build: &mut Build) { need_cmd("python".as_ref()); - // If a manual nodejs was added to the config, - // of if a nodejs install is detected through config, use it. 
+ // Look for the nodejs command, needed for emscripten testing + if let Some(node) = have_cmd("node".as_ref()) { + build.config.nodejs = Some(node); + } else if let Some(node) = have_cmd("nodejs".as_ref()) { + build.config.nodejs = Some(node); + } + if let Some(ref s) = build.config.nodejs { need_cmd(s.as_ref()); } @@ -84,6 +95,13 @@ pub fn check(build: &mut Build) { // We're gonna build some custom C code here and there, host triples // also build some C++ shims for LLVM so we need a C++ compiler. for target in build.config.target.iter() { + // On emscripten we don't actually need the C compiler to just + // build the target artifacts, only for testing. For the sake + // of easier bot configuration, just skip detection. + if target.contains("emscripten") { + continue; + } + need_cmd(build.cc(target).as_ref()); if let Some(ar) = build.ar(target) { need_cmd(ar.as_ref()); @@ -93,6 +111,14 @@ pub fn check(build: &mut Build) { need_cmd(build.cxx(host).as_ref()); } + // The msvc hosts don't use jemalloc, turn it off globally to + // avoid packaging the dummy liballoc_jemalloc on that platform. + for host in build.config.host.iter() { + if host.contains("msvc") { + build.config.use_jemalloc = false; + } + } + // Externally configured LLVM requires FileCheck to exist let filecheck = build.llvm_filecheck(&build.config.build); if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests { @@ -100,15 +126,6 @@ pub fn check(build: &mut Build) { } for target in build.config.target.iter() { - // Either can't build or don't want to run jemalloc on these targets - if target.contains("rumprun") || - target.contains("bitrig") || - target.contains("openbsd") || - target.contains("msvc") || - target.contains("emscripten") { - build.config.use_jemalloc = false; - } - // Can't compile for iOS unless we're on OSX if target.contains("apple-ios") && !build.config.build.contains("apple-darwin") { @@ -129,8 +146,8 @@ pub fn check(build: &mut Build) { } } None => { - panic!("when targeting MUSL either the build.musl-root \ - option or the target.$TARGET.musl-root one must \ + panic!("when targeting MUSL either the rust.musl-root \ + option or the target.$TARGET.musl-root option must \ be specified in config.toml") } } diff --git a/src/bootstrap/step.rs b/src/bootstrap/step.rs index 5f391b70fbe88..3bf0f21192147 100644 --- a/src/bootstrap/step.rs +++ b/src/bootstrap/step.rs @@ -140,6 +140,9 @@ macro_rules! targets { (dist_std, DistStd { compiler: Compiler<'a> }), (dist_src, DistSrc { _dummy: () }), + // install target + (install, Install { stage: u32 }), + // Misc targets (android_copy_libs, AndroidCopyLibs { compiler: Compiler<'a> }), } @@ -171,6 +174,8 @@ targets!(define_source); /// into a topologically sorted list which when executed left-to-right will /// correctly sequence the entire build. 
pub fn all(build: &Build) -> Vec { + build.verbose("inferred build steps:"); + let mut ret = Vec::new(); let mut all = HashSet::new(); for target in top_level(build) { @@ -184,6 +189,7 @@ pub fn all(build: &Build) -> Vec { set: &mut HashSet>) { if set.insert(target.clone()) { for dep in target.deps(build) { + build.verbose(&format!("{:?}\n -> {:?}", target, dep)); fill(build, &dep, ret, set); } ret.push(target.clone()); @@ -246,8 +252,7 @@ fn top_level(build: &Build) -> Vec { } } - return targets - + targets } fn add_steps<'a>(build: &'a Build, @@ -415,7 +420,6 @@ impl<'a> Step<'a> { self.check_crate_std(compiler), self.check_crate_test(compiler), self.check_debuginfo(compiler), - self.dist(stage), ]; // If we're testing the build triple, then we know we can @@ -460,9 +464,12 @@ impl<'a> Step<'a> { // misc self.check_linkcheck(stage), self.check_tidy(stage), + + // can we make the distributables? + self.dist(stage), ]); } - return base + base } Source::CheckLinkcheck { stage } => { vec![self.tool_linkchecker(stage), self.doc(stage)] @@ -483,7 +490,6 @@ impl<'a> Step<'a> { Source::CheckCodegenUnits { compiler } | Source::CheckIncremental { compiler } | Source::CheckUi { compiler } | - Source::CheckRustdoc { compiler } | Source::CheckPretty { compiler } | Source::CheckCFail { compiler } | Source::CheckRPassValgrind { compiler } | @@ -506,6 +512,7 @@ impl<'a> Step<'a> { self.debugger_scripts(compiler.stage), ] } + Source::CheckRustdoc { compiler } | Source::CheckRPassFull { compiler } | Source::CheckRFailFull { compiler } | Source::CheckCFailFull { compiler } | @@ -517,7 +524,7 @@ impl<'a> Step<'a> { self.target(compiler.host).tool_compiletest(compiler.stage)] } Source::CheckDocs { compiler } => { - vec![self.libstd(compiler)] + vec![self.libtest(compiler)] } Source::CheckErrorIndex { compiler } => { vec![self.libstd(compiler), @@ -585,7 +592,11 @@ impl<'a> Step<'a> { base.push(target.dist_std(compiler)); } } - return base + base + } + + Source::Install { stage } => { + vec![self.dist(stage)] } Source::AndroidCopyLibs { compiler } => { diff --git a/src/build_helper/lib.rs b/src/build_helper/lib.rs index 838cc4f07a9a1..38844fb6c9ef0 100644 --- a/src/build_helper/lib.rs +++ b/src/build_helper/lib.rs @@ -25,7 +25,9 @@ pub fn run_silent(cmd: &mut Command) { }; if !status.success() { fail(&format!("command did not execute successfully: {:?}\n\ - expected success, got: {}", cmd, status)); + expected success, got: {}", + cmd, + status)); } } @@ -65,7 +67,9 @@ pub fn output(cmd: &mut Command) -> String { }; if !output.status.success() { panic!("command did not execute successfully: {:?}\n\ - expected success, got: {}", cmd, output.status); + expected success, got: {}", + cmd, + output.status); } String::from_utf8(output.stdout).unwrap() } diff --git a/src/compiler-rt b/src/compiler-rt index 8598065bd965d..f03ba5a4e8bf1 160000 --- a/src/compiler-rt +++ b/src/compiler-rt @@ -1 +1 @@ -Subproject commit 8598065bd965d9713bfafb6c1e766d63a7b17b89 +Subproject commit f03ba5a4e8bf16dcf42dd742a4ce255c36321356 diff --git a/src/doc/book/concurrency.md b/src/doc/book/concurrency.md index a783650f8ea2d..41d8345b72094 100644 --- a/src/doc/book/concurrency.md +++ b/src/doc/book/concurrency.md @@ -4,7 +4,7 @@ Concurrency and parallelism are incredibly important topics in computer science, and are also a hot topic in industry today. Computers are gaining more and more cores, yet many programmers aren't prepared to fully utilize them. -Rust's memory safety features also apply to its concurrency story too. 
Even +Rust's memory safety features also apply to its concurrency story. Even concurrent Rust programs must be memory safe, having no data races. Rust's type system is up to the task, and gives you powerful ways to reason about concurrent code at compile time. @@ -281,8 +281,8 @@ And... still gives us an error. ``` `Arc` by default has immutable contents. It allows the _sharing_ of data -between threads, but shared mutable data is unsafe and when threads are -involved can cause data races! +between threads, but shared mutable data is unsafe—and when threads are +involved—can cause data races! Usually when we wish to make something in an immutable position mutable, we use diff --git a/src/doc/book/const-and-static.md b/src/doc/book/const-and-static.md index 11aa25ac811c9..e8f17a41cbeab 100644 --- a/src/doc/book/const-and-static.md +++ b/src/doc/book/const-and-static.md @@ -1,4 +1,4 @@ -% `const` and `static` +% const and static Rust has a way of defining constants with the `const` keyword: diff --git a/src/doc/book/deref-coercions.md b/src/doc/book/deref-coercions.md index beb65c4ce358a..cabe66f5b2282 100644 --- a/src/doc/book/deref-coercions.md +++ b/src/doc/book/deref-coercions.md @@ -69,7 +69,7 @@ foo(&counted); All we’ve done is wrap our `String` in an `Rc`. But we can now pass the `Rc` around anywhere we’d have a `String`. The signature of `foo` didn’t change, but works just as well with either type. This example has two -conversions: `Rc` to `String` and then `String` to `&str`. Rust will do +conversions: `&Rc` to `&String` and then `&String` to `&str`. Rust will do this as many times as possible until the types match. Another very common implementation provided by the standard library is: diff --git a/src/doc/book/getting-started.md b/src/doc/book/getting-started.md index 700ab2be58932..5add235928272 100644 --- a/src/doc/book/getting-started.md +++ b/src/doc/book/getting-started.md @@ -166,12 +166,22 @@ you can find the Rust executables in a directory like `"C:\Program Files\Rust stable GNU 1.x\bin"`. Rust does not do its own linking, and so you’ll need to have a linker -installed. Doing so will depend on your specific system, consult its -documentation for more details. - -If not, there are a number of places where we can get help. The easiest is -[the #rust-beginners IRC channel on irc.mozilla.org][irc-beginners] and for -general discussion [the #rust IRC channel on irc.mozilla.org][irc], which we +installed. Doing so will depend on your specific system. For +Linux-based systems, Rust will attempt to call `cc` for linking. On +`windows-msvc` (Rust built on Windows with Microsoft Visual Studio), +this depends on having [Microsoft Visual C++ Build Tools][msvbt] +installed. These do not need to be in `%PATH%` as `rustc` will find +them automatically. In general, if you have your linker in a +non-traditional location you can call `rustc +linker=/path/to/cc`, where `/path/to/cc` should point to your linker path. + +[msvbt]: http://landinghub.visualstudio.com/visual-cpp-build-tools + +If you are still stuck, there are a number of places where we can get +help. The easiest is +[the #rust-beginners IRC channel on irc.mozilla.org][irc-beginners] +and for general discussion +[the #rust IRC channel on irc.mozilla.org][irc], which we can access through [Mibbit][mibbit]. Then we'll be chatting with other Rustaceans (a silly nickname we call ourselves) who can help us out. Other great resources include [the user’s forum][users] and [Stack Overflow][stackoverflow]. 
@@ -230,12 +240,13 @@ $ cd hello_world ## Writing and Running a Rust Program -Next, make a new source file and call it *main.rs*. Rust files always end -in a *.rs* extension. If you’re using more than one word in your filename, use -an underscore to separate them; for example, you'd use *hello_world.rs* rather -than *helloworld.rs*. +We need to create a source file for our Rust program. Rust files always end +in a *.rs* extension. If you are using more than one word in your filename, +use an underscore to separate them; for example, you would use +*my_program.rs* rather than *myprogram.rs*. -Now open the *main.rs* file you just created, and type the following code: +Now, make a new file and call it *main.rs*. Open the file and type +the following code: ```rust fn main() { @@ -494,6 +505,9 @@ $ cargo run Hello, world! ``` +The `run` command comes in handy when you need to rapidly iterate on a +project. + Notice that this example didn’t re-build the project. Cargo figured out that the file hasn’t changed, and so it just ran the binary. If you'd modified your source code, Cargo would have rebuilt the project before running it, and you diff --git a/src/doc/book/guessing-game.md b/src/doc/book/guessing-game.md index 22cf6068e4d5a..4e0e372868941 100644 --- a/src/doc/book/guessing-game.md +++ b/src/doc/book/guessing-game.md @@ -56,9 +56,7 @@ $ cargo build Excellent! Open up your `src/main.rs` again. We’ll be writing all of our code in this file. -Before we move on, let me show you one more Cargo command: `run`. `cargo run` -is kind of like `cargo build`, but it also then runs the produced executable. -Try it out: +Remember the `run` command from last chapter? Try it out again here: ```bash $ cargo run @@ -67,9 +65,8 @@ $ cargo run Hello, world! ``` -Great! The `run` command comes in handy when you need to rapidly iterate on a -project. Our game is such a project, we need to quickly test each -iteration before moving on to the next one. +Great! Our game is just the kind of project `run` is good for: we need +to quickly test each iteration before moving on to the next one. # Processing a Guess @@ -279,7 +276,7 @@ displaying the message. [expect]: ../std/result/enum.Result.html#method.expect [panic]: error-handling.html -If we leave off calling this method, our program will compile, but +If we do not call `expect()`, our program will compile, but we’ll get a warning: ```bash diff --git a/src/doc/book/lifetimes.md b/src/doc/book/lifetimes.md index f7d9c94bc454f..df1ee5a293c9d 100644 --- a/src/doc/book/lifetimes.md +++ b/src/doc/book/lifetimes.md @@ -50,29 +50,94 @@ complicated. For example, imagine this set of operations: 4. You decide to use the resource. Uh oh! Your reference is pointing to an invalid resource. This is called a -dangling pointer or ‘use after free’, when the resource is memory. +dangling pointer or ‘use after free’, when the resource is memory. A small +example of such a situation would be: + +```rust,compile_fail +let r; // Introduce reference: r +{ + let i = 1; // Introduce scoped value: i + r = &i; // Store reference of i in r +} // i goes out of scope and is dropped. + +println!("{}", r); // r still refers to i +``` To fix this, we have to make sure that step four never happens after step -three. The ownership system in Rust does this through a concept called -lifetimes, which describe the scope that a reference is valid for. +three. In the small example above the Rust compiler is able to report the issue +as it can see the lifetimes of the various values in the function. 
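(For contrast with the `compile_fail` snippet added just above, here is a minimal sketch of a version that does compile: the referent only needs to outlive every use of the reference. This is an illustration for the reader, not part of the patch itself.)

```rust
fn main() {
    let i = 1;          // `i` now lives until the end of `main`...
    let r = &i;         // ...so borrowing it here is fine,
    println!("{}", r);  // and using the reference here is too.
}
```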
-When we have a function that takes an argument by reference, we can be -implicit or explicit about the lifetime of the reference: +When we have a function that takes arguments by reference the situation becomes +more complex. Consider the following example: -```rust -// implicit -fn foo(x: &i32) { +```rust,compile_fail,E0106 +fn skip_prefix(line: &str, prefix: &str) -> &str { + // ... +# line } -// explicit -fn bar<'a>(x: &'a i32) { +let line = "lang:en=Hello World!"; +let lang = "en"; + +let v; +{ + let p = format!("lang:{}=", lang); // -+ p goes into scope + v = skip_prefix(line, p.as_str()); // | +} // -+ p goes out of scope +println!("{}", v); +``` + +Here we have a function `skip_prefix` which takes two `&str` references +as parameters and returns a single `&str` reference. We call it +by passing in references to `line` and `p`: Two variables with different +lifetimes. Now the safety of the `println!`-line depends on whether the +reference returned by `skip_prefix` function references the still living +`line` or the already dropped `p` string. + +Because of the above ambiguity, Rust will refuse to compile the example +code. To get it to compile we need to tell the compiler more about the +lifetimes of the references. This can be done by making the lifetimes +explicit in the function declaration: + +```rust +fn skip_prefix<'a, 'b>(line: &'a str, prefix: &'b str) -> &'a str { + // ... +# line } ``` +Let's examine the changes without going too deep into the syntax for now - +we'll get to that later. The first change was adding the `<'a, 'b>` after the +method name. This introduces two lifetime parameters: `'a` and `'b`. Next each +reference in the function signature was associated with one of the lifetime +parameters by adding the lifetime name after the `&`. This tells the compiler +how the lifetimes between different references are related. + +As a result the compiler is now able to deduce that the return value of +`skip_prefix` has the same lifetime as the `line` parameter, which makes the `v` +reference safe to use even after the `p` goes out of scope in the original +example. + +In addition to the compiler being able to validate the usage of `skip_prefix` +return value, it can also ensure that the implementation follows the contract +established by the function declaration. This is useful especially when you are +implementing traits that are introduced [later in the book][traits]. + +**Note** It's important to understand that lifetime annotations are +_descriptive_, not _prescriptive_. This means that how long a reference is valid +is determined by the code, not by the annotations. The annotations, however, +give information about lifetimes to the compiler that uses them to check the +validity of references. The compiler can do so without annotations in simple +cases, but needs the programmers support in complex scenarios. + +[traits]: traits.html + +# Syntax + The `'a` reads ‘the lifetime a’. Technically, every reference has some lifetime associated with it, but the compiler lets you elide (i.e. omit, see -["Lifetime Elision"][lifetime-elision] below) them in common cases. -Before we get to that, though, let’s break the explicit example down: +["Lifetime Elision"][lifetime-elision] below) them in common cases. Before we +get to that, though, let’s look at a short example with explicit lifetimes: [lifetime-elision]: #lifetime-elision @@ -90,7 +155,8 @@ focus on the lifetimes aspect. [generics]: generics.html We use `<>` to declare our lifetimes. 
This says that `bar` has one lifetime, -`'a`. If we had two reference parameters, it would look like this: +`'a`. If we had two reference parameters with different lifetimes, it would +look like this: ```rust,ignore diff --git a/src/doc/book/ownership.md b/src/doc/book/ownership.md index 23ca21b3b4992..a711397b211db 100644 --- a/src/doc/book/ownership.md +++ b/src/doc/book/ownership.md @@ -57,13 +57,13 @@ of scope at the end of `foo()`, Rust will clean up everything related to the vector, even the heap-allocated memory. This happens deterministically, at the end of the scope. -We'll cover [vectors] in detail later in this chapter; we only use them +We covered [vectors] in the previous chapter; we use them here as an example of a type that allocates space on the heap at runtime. They behave like [arrays], except their size may change by `push()`ing more elements onto them. Vectors have a [generic type][generics] `Vec`, so in this example `v` will have type -`Vec`. We'll cover generics in detail later in this chapter. +`Vec`. We'll cover [generics] in detail in a later chapter. [arrays]: primitive-types.html#arrays [vectors]: vectors.html diff --git a/src/doc/book/references-and-borrowing.md b/src/doc/book/references-and-borrowing.md index 2ec3a00c0df51..1e2f061b06745 100644 --- a/src/doc/book/references-and-borrowing.md +++ b/src/doc/book/references-and-borrowing.md @@ -86,7 +86,7 @@ fn main() { return v.iter().fold(0, |a, &b| a + b); } // Borrow two vectors and sum them. - // This kind of borrowing does not allow mutation to the borrowed. + // This kind of borrowing does not allow mutation through the borrowed reference. fn foo(v1: &Vec, v2: &Vec) -> i32 { // do stuff with v1 and v2 let s1 = sum_vec(v1); @@ -240,7 +240,7 @@ fn main() { In other words, the mutable borrow is held through the rest of our example. What we want is for the mutable borrow by `y` to end so that the resource can be -returned to the owner, `x`. `x` can then provide a immutable borrow to `println!`. +returned to the owner, `x`. `x` can then provide an immutable borrow to `println!`. In Rust, borrowing is tied to the scope that the borrow is valid for. And our scopes look like this: diff --git a/src/doc/book/syntax-index.md b/src/doc/book/syntax-index.md index 0259db221b6bc..1e05b01d30d46 100644 --- a/src/doc/book/syntax-index.md +++ b/src/doc/book/syntax-index.md @@ -61,7 +61,6 @@ * `-` (`- expr`): arithmetic negation. Overloadable (`Neg`). * `-=` (`var -= expr`): arithmetic subtraction & assignment. Overloadable (`SubAssign`). * `->` (`fn(…) -> type`, `|…| -> type`): function and closure return type. See [Functions], [Closures]. -* `-> !` (`fn(…) -> !`, `|…| -> !`): diverging function or closure. See [Diverging Functions]. * `.` (`expr.ident`): member access. See [Structs], [Method Syntax]. * `..` (`..`, `expr..`, `..expr`, `expr..expr`): right-exclusive range literal. * `..` (`..expr`): struct literal update syntax. See [Structs (Update syntax)]. @@ -159,6 +158,10 @@ * `/*!…*/`: inner block doc comment. See [Comments]. * `/**…*/`: outer block doc comment. See [Comments]. + + +* `!`: always empty Never type. See [Diverging Functions]. + * `()`: empty tuple (*a.k.a.* unit), both literal and type. diff --git a/src/doc/book/testing.md b/src/doc/book/testing.md index 86729147ed065..3bdf1b7b7f229 100644 --- a/src/doc/book/testing.md +++ b/src/doc/book/testing.md @@ -380,9 +380,9 @@ the `tests` directory. # The `tests` directory -Each file in `tests/*.rs` directory is treated as individual crate. 
-So, to write an integration test, let's make a `tests` directory, and -put a `tests/integration_test.rs` file inside, with this as its contents: +Each file in `tests/*.rs` directory is treated as an individual crate. +To write an integration test, let's make a `tests` directory and +put a `tests/integration_test.rs` file inside with this as its contents: ```rust,ignore extern crate adder; diff --git a/src/doc/book/traits.md b/src/doc/book/traits.md index d07fb6b7c45bf..b0d954adf6771 100644 --- a/src/doc/book/traits.md +++ b/src/doc/book/traits.md @@ -291,7 +291,7 @@ let result = f.write(buf); We need to `use` the `Write` trait first: -```rust,ignore +```rust,no_run use std::io::Write; let mut f = std::fs::File::create("foo.txt").expect("Couldn’t create foo.txt"); diff --git a/src/doc/book/type-aliases.md b/src/doc/book/type-aliases.md index def2e31f3514b..3798336f0a524 100644 --- a/src/doc/book/type-aliases.md +++ b/src/doc/book/type-aliases.md @@ -1,4 +1,4 @@ -% `type` Aliases +% Type Aliases The `type` keyword lets you declare an alias of another type: diff --git a/src/doc/book/variable-bindings.md b/src/doc/book/variable-bindings.md index 30e922d7f4dc0..03f17371de68d 100644 --- a/src/doc/book/variable-bindings.md +++ b/src/doc/book/variable-bindings.md @@ -161,7 +161,7 @@ Could not compile `hello_world`. Rust will not let us use a value that has not been initialized. -Let take a minute to talk about this stuff we've added to `println!`. +Let us take a minute to talk about this stuff we've added to `println!`. If you include two curly braces (`{}`, some call them moustaches...) in your string to print, Rust will interpret this as a request to interpolate some sort diff --git a/src/doc/footer.inc b/src/doc/footer.inc index 7513e524e73a1..77e151235e822 100644 --- a/src/doc/footer.inc +++ b/src/doc/footer.inc @@ -5,4 +5,3 @@ or the MIT license, at your op

This file may not be copied, modified, or distributed except according to those terms.

- diff --git a/src/doc/grammar.md b/src/doc/grammar.md index be64379b516e7..690d44cc2cb7b 100644 --- a/src/doc/grammar.md +++ b/src/doc/grammar.md @@ -764,6 +764,13 @@ bound-list := bound | bound '+' bound-list bound := path | lifetime ``` +### Never type +An empty type + +```antlr +never_type : "!" ; +``` + ### Object types **FIXME:** grammar? diff --git a/src/doc/reference.md b/src/doc/reference.md index b72c3743a69ce..4838ecd2d42e6 100644 --- a/src/doc/reference.md +++ b/src/doc/reference.md @@ -2472,8 +2472,7 @@ The currently implemented features of the reference compiler are: * - `default_type_parameter_fallback` - Allows type parameter defaults to influence type inference. -* - `stmt_expr_attributes` - Allows attributes on expressions and - non-item statements. +* - `stmt_expr_attributes` - Allows attributes on expressions. * - `type_ascription` - Allows type ascription expressions `expr: Type`. @@ -3110,10 +3109,12 @@ the lambda expression captures its environment by reference, effectively borrowing pointers to all outer variables mentioned inside the function. Alternately, the compiler may infer that a lambda expression should copy or move values (depending on their type) from the environment into the lambda -expression's captured environment. +expression's captured environment. A lambda can be forced to capture its +environment by moving values by prefixing it with the `move` keyword. In this example, we define a function `ten_times` that takes a higher-order -function argument, and we then call it with a lambda expression as an argument: +function argument, and we then call it with a lambda expression as an argument, +followed by a lambda expression that moves values from its environment. ``` fn ten_times(f: F) where F: Fn(i32) { @@ -3123,6 +3124,9 @@ fn ten_times(f: F) where F: Fn(i32) { } ten_times(|j| println!("hello, {}", j)); + +let word = "konnichiwa".to_owned(); +ten_times(move |j| println!("{}, {}", word, j)); ``` ### Infinite loops @@ -3959,6 +3963,16 @@ the top-level type for the implementation of the called method. If no such metho found, `.deref()` is called and the compiler continues to search for the method implementation in the returned type `U`. +## The `Send` trait + +The `Send` trait indicates that a value of this type is safe to send from one +thread to another. + +## The `Sync` trait + +The `Sync` trait indicates that a value of this type is safe to share between +multiple threads. + # Memory model A Rust program's memory consists of a static set of *items* and a *heap*. @@ -4009,9 +4023,9 @@ Methods that take either `self` or `Box` can optionally place them in a mutable variable by prefixing them with `mut` (similar to regular arguments): ``` -trait Changer { - fn change(mut self) -> Self; - fn modify(mut self: Box) -> Box; +trait Changer: Sized { + fn change(mut self) {} + fn modify(mut self: Box) {} } ``` @@ -4064,6 +4078,12 @@ be ignored in favor of only building the artifacts specified by command line. Rust code into an existing non-Rust application because it will not have dynamic dependencies on other Rust code. +* `--crate-type=cdylib`, `#[crate_type = "cdylib"]` - A dynamic system + library will be produced. This is used when compiling Rust code as + a dynamic library to be loaded from another language. This output type will + create `*.so` files on Linux, `*.dylib` files on OSX, and `*.dll` files on + Windows. + * `--crate-type=rlib`, `#[crate_type = "rlib"]` - A "Rust library" file will be produced. 
This is used as an intermediate artifact and can be thought of as a "static Rust library". These `rlib` files, unlike `staticlib` files, are diff --git a/src/doc/rust.css b/src/doc/rust.css index 9c1b3724d8d81..932594b99126d 100644 --- a/src/doc/rust.css +++ b/src/doc/rust.css @@ -159,7 +159,7 @@ em { footer { border-top: 1px solid #ddd; - font-size: 14.3px; + font-size: 14px; font-style: italic; padding-top: 5px; margin-top: 3em; @@ -336,13 +336,11 @@ table th { /* Code snippets */ -.rusttest { display: none; } pre.rust { position: relative; } a.test-arrow { + background-color: rgba(78, 139, 202, 0.2); display: inline-block; position: absolute; - - background-color: #4e8bca; color: #f5f5f5; padding: 5px 10px 5px 10px; border-radius: 5px; @@ -350,6 +348,10 @@ a.test-arrow { top: 5px; right: 5px; } +a.test-arrow:hover{ + background-color: #4e8bca; + text-decoration: none; +} .unstable-feature { border: 2px solid red; diff --git a/src/etc/debugger_pretty_printers_common.py b/src/etc/debugger_pretty_printers_common.py index e713c7c8387fc..eb562877c8573 100644 --- a/src/etc/debugger_pretty_printers_common.py +++ b/src/etc/debugger_pretty_printers_common.py @@ -328,7 +328,7 @@ def extract_length_and_ptr_from_slice(slice_val): UNQUALIFIED_TYPE_MARKERS = frozenset(["(", "[", "&", "*"]) def extract_type_name(qualified_type_name): - '''Extracts the type name from a fully qualified path''' + """Extracts the type name from a fully qualified path""" if qualified_type_name[0] in UNQUALIFIED_TYPE_MARKERS: return qualified_type_name diff --git a/src/etc/gdb_rust_pretty_printing.py b/src/etc/gdb_rust_pretty_printing.py index 9b163c835c650..afac8d6bbaefc 100755 --- a/src/etc/gdb_rust_pretty_printing.py +++ b/src/etc/gdb_rust_pretty_printing.py @@ -170,7 +170,7 @@ def rust_pretty_printer_lookup_function(gdb_val): #=------------------------------------------------------------------------------ # Pretty Printer Classes #=------------------------------------------------------------------------------ -class RustStructPrinter: +class RustStructPrinter(object): def __init__(self, val, omit_first_field, omit_type_name, is_tuple_like): self.__val = val self.__omit_first_field = omit_first_field @@ -205,11 +205,12 @@ def display_hint(self): return "" -class RustSlicePrinter: +class RustSlicePrinter(object): def __init__(self, val): self.__val = val - def display_hint(self): + @staticmethod + def display_hint(): return "array" def to_string(self): @@ -226,7 +227,7 @@ def children(self): yield (str(index), (raw_ptr + index).dereference()) -class RustStringSlicePrinter: +class RustStringSlicePrinter(object): def __init__(self, val): self.__val = val @@ -236,11 +237,12 @@ def to_string(self): return '"%s"' % raw_ptr.string(encoding="utf-8", length=length) -class RustStdVecPrinter: +class RustStdVecPrinter(object): def __init__(self, val): self.__val = val - def display_hint(self): + @staticmethod + def display_hint(): return "array" def to_string(self): @@ -255,7 +257,7 @@ def children(self): yield (str(index), (gdb_ptr + index).dereference()) -class RustStdStringPrinter: +class RustStdStringPrinter(object): def __init__(self, val): self.__val = val @@ -266,7 +268,7 @@ def to_string(self): length=length) -class RustCStyleVariantPrinter: +class RustCStyleVariantPrinter(object): def __init__(self, val): assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_ENUM self.__val = val @@ -275,7 +277,7 @@ def to_string(self): return str(self.__val.get_wrapped_value()) -class IdentityPrinter: +class 
IdentityPrinter(object): def __init__(self, string): self.string = string diff --git a/src/etc/lldb_batchmode.py b/src/etc/lldb_batchmode.py index 7bbb3577f8d93..4952cf4f82c3b 100644 --- a/src/etc/lldb_batchmode.py +++ b/src/etc/lldb_batchmode.py @@ -37,14 +37,14 @@ def print_debug(s): - "Print something if DEBUG_OUTPUT is True" + """Print something if DEBUG_OUTPUT is True""" global DEBUG_OUTPUT if DEBUG_OUTPUT: print("DEBUG: " + str(s)) def normalize_whitespace(s): - "Replace newlines, tabs, multiple spaces, etc with exactly one space" + """Replace newlines, tabs, multiple spaces, etc with exactly one space""" return re.sub("\s+", " ", s) @@ -71,7 +71,7 @@ def breakpoint_callback(frame, bp_loc, dict): def execute_command(command_interpreter, command): - "Executes a single CLI command" + """Executes a single CLI command""" global new_breakpoints global registered_breakpoints diff --git a/src/etc/lldb_rust_formatters.py b/src/etc/lldb_rust_formatters.py index c0a4c3e9ece93..335acae5fb6f7 100644 --- a/src/etc/lldb_rust_formatters.py +++ b/src/etc/lldb_rust_formatters.py @@ -171,10 +171,10 @@ def print_val(lldb_val, internal_dict): #=-------------------------------------------------------------------------------------------------- def print_struct_val(val, internal_dict, omit_first_field, omit_type_name, is_tuple_like): - ''' + """ Prints a struct, tuple, or tuple struct value with Rust syntax. Ignores any fields before field_start_index. - ''' + """ assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_STRUCT if omit_type_name: @@ -221,7 +221,7 @@ def render_child(child_index): "body": body} def print_pointer_val(val, internal_dict): - '''Prints a pointer value with Rust syntax''' + """Prints a pointer value with Rust syntax""" assert val.type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR sigil = "&" type_name = val.type.get_unqualified_type_name() @@ -275,8 +275,8 @@ def print_std_string_val(val, internal_dict): #=-------------------------------------------------------------------------------------------------- def print_array_of_values(array_name, data_ptr_val, length, internal_dict): - '''Prints a contigous memory range, interpreting it as values of the - pointee-type of data_ptr_val.''' + """Prints a contigous memory range, interpreting it as values of the + pointee-type of data_ptr_val.""" data_ptr_type = data_ptr_val.type assert data_ptr_type.get_dwarf_type_kind() == rustpp.DWARF_TYPE_CODE_PTR diff --git a/src/etc/local_stage0.sh b/src/etc/local_stage0.sh index f5f39d264a6b0..ee77206640eab 100755 --- a/src/etc/local_stage0.sh +++ b/src/etc/local_stage0.sh @@ -18,7 +18,7 @@ LIB_PREFIX=lib OS=`uname -s` case $OS in - ("Linux"|"FreeBSD"|"DragonFly"|"Bitrig"|"OpenBSD"|"SunOS") + ("Linux"|"FreeBSD"|"DragonFly"|"Bitrig"|"OpenBSD"|"SunOS"|"Haiku") BIN_SUF= LIB_SUF=.so ;; @@ -71,6 +71,7 @@ cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}log*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}rbml*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}serialize*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}term*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ +cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}proc_macro*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ # do not fail if one of the above fails, as all we need is a working rustc! 
exit 0 diff --git a/src/etc/platform-intrinsics/generator.py b/src/etc/platform-intrinsics/generator.py index a4a91170efb3b..e3c08bb35e075 100644 --- a/src/etc/platform-intrinsics/generator.py +++ b/src/etc/platform-intrinsics/generator.py @@ -119,16 +119,19 @@ class Void(Type): def __init__(self): Type.__init__(self, 0) - def compiler_ctor(self): + @staticmethod + def compiler_ctor(): return '::VOID' def compiler_ctor_ref(self): return '&' + self.compiler_ctor() - def rust_name(self): + @staticmethod + def rust_name(): return '()' - def type_info(self, platform_info): + @staticmethod + def type_info(platform_info): return None def __eq__(self, other): @@ -282,7 +285,7 @@ def __eq__(self, other): class Pointer(Type): def __init__(self, elem, llvm_elem, const): - self._elem = elem; + self._elem = elem self._llvm_elem = llvm_elem self._const = const Type.__init__(self, BITWIDTH_POINTER) @@ -503,7 +506,7 @@ def monomorphise(self): # must be a power of two assert width & (width - 1) == 0 def recur(processed, untouched): - if untouched == []: + if not untouched: ret = processed[0] args = processed[1:] yield MonomorphicIntrinsic(self._platform, self.intrinsic, width, @@ -756,22 +759,26 @@ class ExternBlock(object): def __init__(self): pass - def open(self, platform): + @staticmethod + def open(platform): return 'extern "platform-intrinsic" {' - def render(self, mono): + @staticmethod + def render(mono): return ' fn {}{}{};'.format(mono.platform_prefix(), mono.intrinsic_name(), mono.intrinsic_signature()) - def close(self): + @staticmethod + def close(): return '}' class CompilerDefs(object): def __init__(self): pass - def open(self, platform): + @staticmethod + def open(platform): return '''\ // Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at @@ -798,7 +805,8 @@ def open(self, platform): if !name.starts_with("{0}") {{ return None }} Some(match &name["{0}".len()..] 
{{'''.format(platform.platform_prefix()) - def render(self, mono): + @staticmethod + def render(mono): return '''\ "{}" => Intrinsic {{ inputs: {{ static INPUTS: [&'static Type; {}] = [{}]; &INPUTS }}, @@ -810,7 +818,8 @@ def render(self, mono): mono.compiler_ret(), mono.llvm_name()) - def close(self): + @staticmethod + def close(): return '''\ _ => return None, }) diff --git a/src/etc/test-float-parse/runtests.py b/src/etc/test-float-parse/runtests.py index 896d63b9f0a01..bc141877b373f 100644 --- a/src/etc/test-float-parse/runtests.py +++ b/src/etc/test-float-parse/runtests.py @@ -177,7 +177,6 @@ def run(test): def interact(proc, queue): - line = "" n = 0 while proc.poll() is None: line = proc.stdout.readline() @@ -185,7 +184,6 @@ def interact(proc, queue): continue assert line.endswith('\n'), "incomplete line: " + repr(line) queue.put(line) - line = "" n += 1 if n % UPDATE_EVERY_N == 0: msg("got", str(n // 1000) + "k", "records") diff --git a/src/etc/unicode.py b/src/etc/unicode.py index 822a3894fccb7..bddc83f63d25d 100755 --- a/src/etc/unicode.py +++ b/src/etc/unicode.py @@ -82,28 +82,28 @@ def load_unicode_data(f): canon_decomp = {} compat_decomp = {} - udict = {}; - range_start = -1; + udict = {} + range_start = -1 for line in fileinput.input(f): - data = line.split(';'); + data = line.split(';') if len(data) != 15: continue - cp = int(data[0], 16); + cp = int(data[0], 16) if is_surrogate(cp): continue if range_start >= 0: for i in xrange(range_start, cp): - udict[i] = data; - range_start = -1; + udict[i] = data + range_start = -1 if data[1].endswith(", First>"): - range_start = cp; - continue; - udict[cp] = data; + range_start = cp + continue + udict[cp] = data for code in udict: - [code_org, name, gencat, combine, bidi, + (code_org, name, gencat, combine, bidi, decomp, deci, digit, num, mirror, - old, iso, upcase, lowcase, titlecase ] = udict[code]; + old, iso, upcase, lowcase, titlecase) = udict[code] # generate char to char direct common and simple conversions # uppercase to lowercase @@ -382,7 +382,7 @@ def emit_bool_trie(f, name, t_data, is_pub=True): global bytes_old, bytes_new bytes_old += 8 * len(t_data) CHUNK = 64 - rawdata = [False] * 0x110000; + rawdata = [False] * 0x110000 for (lo, hi) in t_data: for cp in range(lo, hi + 1): rawdata[cp] = True diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index 5f9ccd1820ca6..7a07e007ce1c4 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -10,35 +10,11 @@ #![stable(feature = "rust1", since = "1.0.0")] -//! Threadsafe reference-counted boxes (the `Arc` type). +//! Thread-safe reference-counting pointers. //! -//! The `Arc` type provides shared ownership of an immutable value through -//! atomic reference counting. +//! See the [`Arc`][arc] documentation for more details. //! -//! `Weak` is a weak reference to the `Arc` box, and it is created by -//! the `downgrade` method. -//! # Examples -//! -//! Sharing some immutable data between threads: -//! -// Note that we **do not** run these tests here. The windows builders get super -// unhappy of a thread outlives the main thread and then exits at the same time -// (something deadlocks) so we just avoid this entirely by not running these -// tests. -//! ```no_run -//! use std::sync::Arc; -//! use std::thread; -//! -//! let five = Arc::new(5); -//! -//! for _ in 0..10 { -//! let five = five.clone(); -//! -//! thread::spawn(move || { -//! println!("{:?}", five); -//! }); -//! } -//! ``` +//! 
[arc]: struct.Arc.html use boxed::Box; @@ -62,72 +38,114 @@ use heap::deallocate; const MAX_REFCOUNT: usize = (isize::MAX) as usize; -/// An atomically reference counted wrapper for shared state. -/// Destruction is deterministic, and will occur as soon as the last owner is -/// gone. It is marked as `Send` because it uses atomic reference counting. +/// A thread-safe reference-counting pointer. /// -/// If you do not need thread-safety, and just need shared ownership, consider -/// the [`Rc` type](../rc/struct.Rc.html). It is the same as `Arc`, but -/// does not use atomics, making it both thread-unsafe as well as significantly -/// faster when updating the reference count. +/// The type `Arc` provides shared ownership of a value of type `T`, +/// allocated in the heap. Invoking [`clone`][clone] on `Arc` produces +/// a new pointer to the same value in the heap. When the last `Arc` +/// pointer to a given value is destroyed, the pointed-to value is +/// also destroyed. /// -/// Note: the inherent methods defined on `Arc` are all associated functions, -/// which means that you have to call them as e.g. `Arc::get_mut(&value)` -/// instead of `value.get_mut()`. This is so that there are no conflicts with -/// methods on the inner type `T`, which are what you want to call in the -/// majority of cases. +/// Shared references in Rust disallow mutation by default, and `Arc` is no +/// exception. If you need to mutate through an `Arc`, use [`Mutex`][mutex], +/// [`RwLock`][rwlock], or one of the [`Atomic`][atomic] types. /// -/// # Examples +/// `Arc` uses atomic operations for reference counting, so `Arc`s can be +/// sent between threads. In other words, `Arc` implements [`Send`][send] +/// as long as `T` implements `Send` and [`Sync`][sync]. The disadvantage is +/// that atomic operations are more expensive than ordinary memory accesses. +/// If you are not sharing reference-counted values between threads, consider +/// using [`rc::Rc`][rc] for lower overhead. `Rc` is a safe default, because +/// the compiler will catch any attempt to send an `Rc` between threads. +/// However, a library might choose `Arc` in order to give library consumers +/// more flexibility. +/// +/// The [`downgrade`][downgrade] method can be used to create a non-owning +/// [`Weak`][weak] pointer. A `Weak` pointer can be [`upgrade`][upgrade]d +/// to an `Arc`, but this will return [`None`][option] if the value has +/// already been dropped. /// -/// In this example, a large vector of data will be shared by several threads. First we -/// wrap it with a `Arc::new` and then clone the `Arc` reference for every thread (which will -/// increase the reference count atomically). +/// A cycle between `Arc` pointers will never be deallocated. For this reason, +/// `Weak` is used to break cycles. For example, a tree could have strong +/// `Arc` pointers from parent nodes to children, and `Weak` pointers from +/// children back to their parents. +/// +/// `Arc` automatically dereferences to `T` (via the [`Deref`][deref] trait), +/// so you can call `T`'s methods on a value of type `Arc`. 
To avoid name +/// clashes with `T`'s methods, the methods of `Arc` itself are [associated +/// functions][assoc], called using function-like syntax: /// /// ``` /// use std::sync::Arc; -/// use std::thread; +/// let my_arc = Arc::new(()); +/// +/// Arc::downgrade(&my_arc); +/// ``` /// -/// fn main() { -/// let numbers: Vec<_> = (0..100).collect(); -/// let shared_numbers = Arc::new(numbers); +/// `Weak` does not auto-dereference to `T`, because the value may have +/// already been destroyed. /// -/// for _ in 0..10 { -/// // prepare a copy of reference here and it will be moved to the thread -/// let child_numbers = shared_numbers.clone(); +/// [arc]: struct.Arc.html +/// [weak]: struct.Weak.html +/// [rc]: ../../std/rc/struct.Rc.html +/// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone +/// [mutex]: ../../std/sync/struct.Mutex.html +/// [rwlock]: ../../std/sync/struct.RwLock.html +/// [atomic]: ../../std/sync/atomic/index.html +/// [send]: ../../std/marker/trait.Send.html +/// [sync]: ../../std/marker/trait.Sync.html +/// [deref]: ../../std/ops/trait.Deref.html +/// [downgrade]: struct.Arc.html#method.downgrade +/// [upgrade]: struct.Weak.html#method.upgrade +/// [option]: ../../std/option/enum.Option.html +/// [assoc]: ../../book/method-syntax.html#associated-functions /// -/// thread::spawn(move || { -/// let local_numbers = &child_numbers[..]; +/// # Examples /// -/// // Work with the local numbers -/// }); -/// } -/// } -/// ``` -/// You can also share mutable data between threads safely -/// by putting it inside `Mutex` and then share `Mutex` immutably -/// with `Arc` as shown below. +/// Sharing some immutable data between threads: /// -// See comment at the top of this file for why the test is no_run +// Note that we **do not** run these tests here. The windows builders get super +// unhappy if a thread outlives the main thread and then exits at the same time +// (something deadlocks) so we just avoid this entirely by not running these +// tests. /// ```no_run -/// use std::sync::{Arc, Mutex}; +/// use std::sync::Arc; /// use std::thread; /// -/// let five = Arc::new(Mutex::new(5)); +/// let five = Arc::new(5); /// /// for _ in 0..10 { /// let five = five.clone(); /// /// thread::spawn(move || { -/// let mut number = five.lock().unwrap(); +/// println!("{:?}", five); +/// }); +/// } +/// ``` /// -/// *number += 1; +/// Sharing a mutable `AtomicUsize`: /// -/// println!("{}", *number); // prints 6 +/// ```no_run +/// use std::sync::Arc; +/// use std::sync::atomic::{AtomicUsize, Ordering}; +/// use std::thread; +/// +/// let val = Arc::new(AtomicUsize::new(5)); +/// +/// for _ in 0..10 { +/// let val = val.clone(); +/// +/// thread::spawn(move || { +/// let v = val.fetch_add(1, Ordering::SeqCst); +/// println!("{:?}", v); /// }); /// } /// ``` - -#[cfg_attr(stage0, unsafe_no_drop_flag)] +/// +/// See the [`rc` documentation][rc_examples] for more examples of reference +/// counting in general. +/// +/// [rc_examples]: ../../std/rc/index.html#examples #[stable(feature = "rust1", since = "1.0.0")] pub struct Arc { ptr: Shared>, @@ -141,19 +159,18 @@ unsafe impl Sync for Arc {} #[unstable(feature = "coerce_unsized", issue = "27732")] impl, U: ?Sized> CoerceUnsized> for Arc {} -/// A weak pointer to an `Arc`. +/// A weak version of [`Arc`][arc]. /// -/// Weak pointers will not keep the data inside of the `Arc` alive, and can be -/// used to break cycles between `Arc` pointers. +/// `Weak` pointers do not count towards determining if the inner value +/// should be dropped. 
/// -/// A `Weak` pointer can be upgraded to an `Arc` pointer, but -/// will return `None` if the value has already been dropped. +/// The typical way to obtain a `Weak` pointer is to call +/// [`Arc::downgrade`][downgrade]. /// -/// For example, a tree with parent pointers can be represented by putting the -/// nodes behind strong `Arc` pointers, and then storing the parent pointers -/// as `Weak` pointers. - -#[cfg_attr(stage0, unsafe_no_drop_flag)] +/// See the [`Arc`][arc] documentation for more details. +/// +/// [arc]: struct.Arc.html +/// [downgrade]: struct.Arc.html#method.downgrade #[stable(feature = "arc_weak", since = "1.4.0")] pub struct Weak { ptr: Shared>, @@ -211,12 +228,15 @@ impl Arc { Arc { ptr: unsafe { Shared::new(Box::into_raw(x)) } } } - /// Unwraps the contained value if the `Arc` has exactly one strong reference. + /// Returns the contained value, if the `Arc` has exactly one strong reference. /// - /// Otherwise, an `Err` is returned with the same `Arc`. + /// Otherwise, an [`Err`][result] is returned with the same `Arc` that was + /// passed in. /// /// This will succeed even if there are outstanding weak references. /// + /// [result]: ../../std/result/enum.Result.html + /// /// # Examples /// /// ``` @@ -227,7 +247,7 @@ impl Arc { /// /// let x = Arc::new(4); /// let _y = x.clone(); - /// assert_eq!(Arc::try_unwrap(x), Err(Arc::new(4))); + /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4); /// ``` #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] @@ -253,7 +273,9 @@ impl Arc { } impl Arc { - /// Downgrades the `Arc` to a `Weak` reference. + /// Creates a new [`Weak`][weak] pointer to this value. + /// + /// [weak]: struct.Weak.html /// /// # Examples /// @@ -291,7 +313,27 @@ impl Arc { } } - /// Get the number of weak references to this value. + /// Gets the number of [`Weak`][weak] pointers to this value. + /// + /// Be careful how you use this information, because another thread + /// may change the weak count at any time. + /// + /// [weak]: struct.Weak.html + /// + /// # Examples + /// + /// ``` + /// #![feature(arc_counts)] + /// + /// use std::sync::Arc; + /// + /// let five = Arc::new(5); + /// let _weak_five = Arc::downgrade(&five); + /// + /// // This assertion is deterministic because we haven't shared + /// // the `Arc` or `Weak` between threads. + /// assert_eq!(1, Arc::weak_count(&five)); + /// ``` #[inline] #[unstable(feature = "arc_counts", reason = "not clearly useful, and racy", issue = "28356")] @@ -299,7 +341,25 @@ impl Arc { this.inner().weak.load(SeqCst) - 1 } - /// Get the number of strong references to this value. + /// Gets the number of strong (`Arc`) pointers to this value. + /// + /// Be careful how you use this information, because another thread + /// may change the strong count at any time. + /// + /// # Examples + /// + /// ``` + /// #![feature(arc_counts)] + /// + /// use std::sync::Arc; + /// + /// let five = Arc::new(5); + /// let _also_five = five.clone(); + /// + /// // This assertion is deterministic because we haven't shared + /// // the `Arc` between threads. + /// assert_eq!(2, Arc::strong_count(&five)); + /// ``` #[inline] #[unstable(feature = "arc_counts", reason = "not clearly useful, and racy", issue = "28356")] @@ -336,8 +396,8 @@ impl Arc { #[unstable(feature = "ptr_eq", reason = "newly added", issue = "36497")] - /// Return whether two `Arc` references point to the same value - /// (not just values that compare equal). 
+ /// Returns true if the two `Arc`s point to the same value (not + /// just values that compare as equal). /// /// # Examples /// @@ -362,9 +422,10 @@ impl Arc { #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Arc { - /// Makes a clone of the `Arc`. + /// Makes a clone of the `Arc` pointer. /// - /// This increases the strong reference count. + /// This creates another pointer to the same inner value, increasing the + /// strong reference count. /// /// # Examples /// @@ -420,11 +481,17 @@ impl Deref for Arc { } impl Arc { - /// Make a mutable reference into the given `Arc`. - /// If the `Arc` has more than one strong reference, or any weak - /// references, the inner data is cloned. + /// Makes a mutable reference into the given `Arc`. + /// + /// If there are other `Arc` or [`Weak`][weak] pointers to the same value, + /// then `make_mut` will invoke [`clone`][clone] on the inner value to + /// ensure unique ownership. This is also referred to as clone-on-write. + /// + /// See also [`get_mut`][get_mut], which will fail rather than cloning. /// - /// This is also referred to as a copy-on-write. + /// [weak]: struct.Weak.html + /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone + /// [get_mut]: struct.Arc.html#method.get_mut /// /// # Examples /// @@ -439,10 +506,9 @@ impl Arc { /// *Arc::make_mut(&mut data) += 1; // Won't clone anything /// *Arc::make_mut(&mut other_data) *= 2; // Won't clone anything /// - /// // Note: data and other_data now point to different numbers + /// // Now `data` and `other_data` point to different values. /// assert_eq!(*data, 8); /// assert_eq!(*other_data, 12); - /// /// ``` #[inline] #[stable(feature = "arc_unique", since = "1.4.0")] @@ -501,8 +567,19 @@ impl Arc { } impl Arc { - /// Returns a mutable reference to the contained value if the `Arc` has - /// one strong reference and no weak references. + /// Returns a mutable reference to the inner value, if there are + /// no other `Arc` or [`Weak`][weak] pointers to the same value. + /// + /// Returns [`None`][option] otherwise, because it is not safe to + /// mutate a shared value. + /// + /// See also [`make_mut`][make_mut], which will [`clone`][clone] + /// the inner value when it's shared. + /// + /// [weak]: struct.Weak.html + /// [option]: ../../std/option/enum.Option.html + /// [make_mut]: struct.Arc.html#method.make_mut + /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone /// /// # Examples /// @@ -564,30 +641,32 @@ impl Arc { #[stable(feature = "rust1", since = "1.0.0")] impl Drop for Arc { - /// Drops the `Arc`. + /// Drops the `Arc`. /// /// This will decrement the strong reference count. If the strong reference - /// count becomes zero and the only other references are `Weak` ones, - /// `drop`s the inner value. + /// count reaches zero then the only other references (if any) are + /// [`Weak`][weak], so we `drop` the inner value. + /// + /// [weak]: struct.Weak.html /// /// # Examples /// /// ``` /// use std::sync::Arc; /// - /// { - /// let five = Arc::new(5); - /// - /// // stuff + /// struct Foo; /// - /// drop(five); // explicit drop + /// impl Drop for Foo { + /// fn drop(&mut self) { + /// println!("dropped!"); + /// } /// } - /// { - /// let five = Arc::new(5); /// - /// // stuff + /// let foo = Arc::new(Foo); + /// let foo2 = foo.clone(); /// - /// } // implicit drop + /// drop(foo); // Doesn't print anything + /// drop(foo2); // Prints "dropped!" 
/// ``` #[unsafe_destructor_blind_to_params] #[inline] @@ -625,10 +704,14 @@ impl Drop for Arc { } impl Weak { - /// Constructs a new `Weak` without an accompanying instance of T. + /// Constructs a new `Weak`, without an accompanying instance of `T`. /// - /// This allocates memory for T, but does not initialize it. Calling - /// Weak::upgrade() on the return value always gives None. + /// This allocates memory for `T`, but does not initialize it. Calling + /// [`upgrade`][upgrade] on the return value always gives + /// [`None`][option]. + /// + /// [upgrade]: struct.Weak.html#method.upgrade + /// [option]: ../../std/option/enum.Option.html /// /// # Examples /// @@ -636,6 +719,7 @@ impl Weak { /// use std::sync::Weak; /// /// let empty: Weak = Weak::new(); + /// assert!(empty.upgrade().is_none()); /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { @@ -652,12 +736,13 @@ impl Weak { } impl Weak { - /// Upgrades a weak reference to a strong reference. + /// Upgrades the `Weak` pointer to an [`Arc`][arc], if possible. /// - /// Upgrades the `Weak` reference to an `Arc`, if possible. + /// Returns [`None`][option] if the strong count has reached zero and the + /// inner value was destroyed. /// - /// Returns `None` if there were no strong references and the data was - /// destroyed. + /// [arc]: struct.Arc.html + /// [option]: ../../std/option/enum.Option.html /// /// # Examples /// @@ -669,6 +754,13 @@ impl Weak { /// let weak_five = Arc::downgrade(&five); /// /// let strong_five: Option> = weak_five.upgrade(); + /// assert!(strong_five.is_some()); + /// + /// // Destroy all strong pointers. + /// drop(strong_five); + /// drop(five); + /// + /// assert!(weak_five.upgrade().is_none()); /// ``` #[stable(feature = "arc_weak", since = "1.4.0")] pub fn upgrade(&self) -> Option> { @@ -711,9 +803,10 @@ impl Weak { #[stable(feature = "arc_weak", since = "1.4.0")] impl Clone for Weak { - /// Makes a clone of the `Weak`. + /// Makes a clone of the `Weak` pointer. /// - /// This increases the weak reference count. + /// This creates another pointer to the same inner value, increasing the + /// weak reference count. /// /// # Examples /// @@ -745,7 +838,23 @@ impl Clone for Weak { #[stable(feature = "downgraded_weak", since = "1.10.0")] impl Default for Weak { - /// Constructs a new `Weak` without an accompanying instance of T. + /// Constructs a new `Weak`, without an accompanying instance of `T`. + /// + /// This allocates memory for `T`, but does not initialize it. Calling + /// [`upgrade`][upgrade] on the return value always gives + /// [`None`][option]. + /// + /// [upgrade]: struct.Weak.html#method.upgrade + /// [option]: ../../std/option/enum.Option.html + /// + /// # Examples + /// + /// ``` + /// use std::sync::Weak; + /// + /// let empty: Weak = Default::default(); + /// assert!(empty.upgrade().is_none()); + /// ``` fn default() -> Weak { Weak::new() } @@ -753,7 +862,7 @@ impl Default for Weak { #[stable(feature = "arc_weak", since = "1.4.0")] impl Drop for Weak { - /// Drops the `Weak`. + /// Drops the `Weak` pointer. /// /// This will decrement the weak reference count. 
/// @@ -762,21 +871,22 @@ impl Drop for Weak { /// ``` /// use std::sync::Arc; /// - /// { - /// let five = Arc::new(5); - /// let weak_five = Arc::downgrade(&five); - /// - /// // stuff + /// struct Foo; /// - /// drop(weak_five); // explicit drop + /// impl Drop for Foo { + /// fn drop(&mut self) { + /// println!("dropped!"); + /// } /// } - /// { - /// let five = Arc::new(5); - /// let weak_five = Arc::downgrade(&five); /// - /// // stuff + /// let foo = Arc::new(Foo); + /// let weak_foo = Arc::downgrade(&foo); + /// let other_weak_foo = weak_foo.clone(); /// - /// } // implicit drop + /// drop(weak_foo); // Doesn't print anything + /// drop(foo); // Prints "dropped!" + /// + /// assert!(other_weak_foo.upgrade().is_none()); /// ``` fn drop(&mut self) { let ptr = *self.ptr; @@ -798,9 +908,9 @@ impl Drop for Weak { #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for Arc { - /// Equality for two `Arc`s. + /// Equality for two `Arc`s. /// - /// Two `Arc`s are equal if their inner value are equal. + /// Two `Arc`s are equal if their inner values are equal. /// /// # Examples /// @@ -809,15 +919,15 @@ impl PartialEq for Arc { /// /// let five = Arc::new(5); /// - /// five == Arc::new(5); + /// assert!(five == Arc::new(5)); /// ``` fn eq(&self, other: &Arc) -> bool { *(*self) == *(*other) } - /// Inequality for two `Arc`s. + /// Inequality for two `Arc`s. /// - /// Two `Arc`s are unequal if their inner value are unequal. + /// Two `Arc`s are unequal if their inner values are unequal. /// /// # Examples /// @@ -826,7 +936,7 @@ impl PartialEq for Arc { /// /// let five = Arc::new(5); /// - /// five != Arc::new(5); + /// assert!(five != Arc::new(6)); /// ``` fn ne(&self, other: &Arc) -> bool { *(*self) != *(*other) @@ -834,7 +944,7 @@ impl PartialEq for Arc { } #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for Arc { - /// Partial comparison for two `Arc`s. + /// Partial comparison for two `Arc`s. /// /// The two are compared by calling `partial_cmp()` on their inner values. /// @@ -842,16 +952,17 @@ impl PartialOrd for Arc { /// /// ``` /// use std::sync::Arc; + /// use std::cmp::Ordering; /// /// let five = Arc::new(5); /// - /// five.partial_cmp(&Arc::new(5)); + /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Arc::new(6))); /// ``` fn partial_cmp(&self, other: &Arc) -> Option { (**self).partial_cmp(&**other) } - /// Less-than comparison for two `Arc`s. + /// Less-than comparison for two `Arc`s. /// /// The two are compared by calling `<` on their inner values. /// @@ -862,13 +973,13 @@ impl PartialOrd for Arc { /// /// let five = Arc::new(5); /// - /// five < Arc::new(5); + /// assert!(five < Arc::new(6)); /// ``` fn lt(&self, other: &Arc) -> bool { *(*self) < *(*other) } - /// 'Less-than or equal to' comparison for two `Arc`s. + /// 'Less than or equal to' comparison for two `Arc`s. /// /// The two are compared by calling `<=` on their inner values. /// @@ -879,13 +990,13 @@ impl PartialOrd for Arc { /// /// let five = Arc::new(5); /// - /// five <= Arc::new(5); + /// assert!(five <= Arc::new(5)); /// ``` fn le(&self, other: &Arc) -> bool { *(*self) <= *(*other) } - /// Greater-than comparison for two `Arc`s. + /// Greater-than comparison for two `Arc`s. /// /// The two are compared by calling `>` on their inner values. 
/// @@ -896,13 +1007,13 @@ impl PartialOrd for Arc { /// /// let five = Arc::new(5); /// - /// five > Arc::new(5); + /// assert!(five > Arc::new(4)); /// ``` fn gt(&self, other: &Arc) -> bool { *(*self) > *(*other) } - /// 'Greater-than or equal to' comparison for two `Arc`s. + /// 'Greater than or equal to' comparison for two `Arc`s. /// /// The two are compared by calling `>=` on their inner values. /// @@ -913,7 +1024,7 @@ impl PartialOrd for Arc { /// /// let five = Arc::new(5); /// - /// five >= Arc::new(5); + /// assert!(five >= Arc::new(5)); /// ``` fn ge(&self, other: &Arc) -> bool { *(*self) >= *(*other) @@ -921,6 +1032,20 @@ impl PartialOrd for Arc { } #[stable(feature = "rust1", since = "1.0.0")] impl Ord for Arc { + /// Comparison for two `Arc`s. + /// + /// The two are compared by calling `cmp()` on their inner values. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// use std::cmp::Ordering; + /// + /// let five = Arc::new(5); + /// + /// assert_eq!(Ordering::Less, five.cmp(&Arc::new(6))); + /// ``` fn cmp(&self, other: &Arc) -> Ordering { (**self).cmp(&**other) } @@ -951,7 +1076,16 @@ impl fmt::Pointer for Arc { #[stable(feature = "rust1", since = "1.0.0")] impl Default for Arc { - /// Creates a new `Arc`, with the `Default` value for T. + /// Creates a new `Arc`, with the `Default` value for `T`. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Arc; + /// + /// let x: Arc = Default::default(); + /// assert_eq!(*x, 0); + /// ``` fn default() -> Arc { Arc::new(Default::default()) } @@ -1002,6 +1136,7 @@ mod tests { } #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn manually_share_arc() { let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; let arc_v = Arc::new(v); diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index bc9b6e805efc9..28f4dda140883 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -244,12 +244,14 @@ impl Box { /// the destructor of `T` and free the allocated memory. Since the /// way `Box` allocates and releases memory is unspecified, the /// only valid pointer to pass to this function is the one taken - /// from another `Box` via the `Box::into_raw` function. + /// from another `Box` via the [`Box::into_raw`] function. /// /// This function is unsafe because improper use may lead to /// memory problems. For example, a double-free may occur if the /// function is called twice on the same raw pointer. /// + /// [`Box::into_raw`]: struct.Box.html#method.into_raw + /// /// # Examples /// /// ``` @@ -269,12 +271,14 @@ impl Box { /// memory previously managed by the `Box`. In particular, the /// caller should properly destroy `T` and release the memory. The /// proper way to do so is to convert the raw pointer back into a - /// `Box` with the `Box::from_raw` function. + /// `Box` with the [`Box::from_raw`] function. /// /// Note: this is an associated function, which means that you have /// to call it as `Box::into_raw(b)` instead of `b.into_raw()`. This /// is so that there is no conflict with a method on the inner type. 
/// + /// [`Box::from_raw`]: struct.Box.html#method.from_raw + /// /// # Examples /// /// ``` diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index c6453da3f4697..31491106d97ee 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -88,7 +88,6 @@ #![feature(staged_api)] #![feature(unboxed_closures)] #![feature(unique)] -#![cfg_attr(stage0, feature(unsafe_no_drop_flag))] #![feature(unsize)] #![cfg_attr(not(test), feature(fused, fn_traits, placement_new_protocol))] diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index 23542215fa890..f23ea0ea8bf71 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -44,7 +44,6 @@ use core::cmp; /// `shrink_to_fit`, and `from_box` will actually set RawVec's private capacity /// field. This allows zero-sized types to not be special-cased by consumers of /// this type. -#[cfg_attr(stage0, unsafe_no_drop_flag)] pub struct RawVec { ptr: Unique, cap: usize, @@ -58,11 +57,7 @@ impl RawVec { pub fn new() -> Self { unsafe { // !0 is usize::MAX. This branch should be stripped at compile time. - let cap = if mem::size_of::() == 0 { - !0 - } else { - 0 - }; + let cap = if mem::size_of::() == 0 { !0 } else { 0 }; // heap::EMPTY doubles as "unallocated" and "zero-sized allocation" RawVec { @@ -210,11 +205,7 @@ impl RawVec { let (new_cap, ptr) = if self.cap == 0 { // skip to 4 because tiny Vec's are dumb; but not if that would cause overflow - let new_cap = if elem_size > (!0) / 8 { - 1 - } else { - 4 - }; + let new_cap = if elem_size > (!0) / 8 { 1 } else { 4 }; let ptr = heap::allocate(new_cap * elem_size, align); (new_cap, ptr) } else { @@ -348,7 +339,7 @@ impl RawVec { let elem_size = mem::size_of::(); // Nothing we can really do about these checks :( let required_cap = used_cap.checked_add(needed_extra_cap) - .expect("capacity overflow"); + .expect("capacity overflow"); // Cannot overflow, because `cap <= isize::MAX`, and type of `cap` is `usize`. let double_cap = self.cap * 2; // `double_cap` guarantees exponential growth. diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index 32e5587ff4128..740d13c476222 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -10,90 +10,139 @@ #![allow(deprecated)] -//! Unsynchronized reference-counted boxes (the `Rc` type) which are usable -//! only within a single thread. +//! Single-threaded reference-counting pointers. //! -//! The `Rc` type provides shared ownership of an immutable value. -//! Destruction is deterministic, and will occur as soon as the last owner is -//! gone. It is marked as non-sendable because it avoids the overhead of atomic -//! reference counting. +//! The type [`Rc`][rc] provides shared ownership of a value of type `T`, +//! allocated in the heap. Invoking [`clone`][clone] on `Rc` produces a new +//! pointer to the same value in the heap. When the last `Rc` pointer to a +//! given value is destroyed, the pointed-to value is also destroyed. //! -//! The `downgrade` method can be used to create a non-owning `Weak` pointer -//! to the box. A `Weak` pointer can be upgraded to an `Rc` pointer, but -//! will return `None` if the value has already been dropped. +//! Shared references in Rust disallow mutation by default, and `Rc` is no +//! exception. If you need to mutate through an `Rc`, use [`Cell`][cell] or +//! [`RefCell`][refcell]. //! -//! For example, a tree with parent pointers can be represented by putting the -//! nodes behind strong `Rc` pointers, and then storing the parent pointers -//! as `Weak` pointers. +//! 
`Rc` uses non-atomic reference counting. This means that overhead is very +//! low, but an `Rc` cannot be sent between threads, and consequently `Rc` +//! does not implement [`Send`][send]. As a result, the Rust compiler +//! will check *at compile time* that you are not sending `Rc`s between +//! threads. If you need multi-threaded, atomic reference counting, use +//! [`sync::Arc`][arc]. +//! +//! The [`downgrade`][downgrade] method can be used to create a non-owning +//! [`Weak`][weak] pointer. A `Weak` pointer can be [`upgrade`][upgrade]d +//! to an `Rc`, but this will return [`None`][option] if the value has +//! already been dropped. +//! +//! A cycle between `Rc` pointers will never be deallocated. For this reason, +//! `Weak` is used to break cycles. For example, a tree could have strong +//! `Rc` pointers from parent nodes to children, and `Weak` pointers from +//! children back to their parents. +//! +//! `Rc` automatically dereferences to `T` (via the [`Deref`][deref] trait), +//! so you can call `T`'s methods on a value of type `Rc`. To avoid name +//! clashes with `T`'s methods, the methods of `Rc` itself are [associated +//! functions][assoc], called using function-like syntax: +//! +//! ``` +//! use std::rc::Rc; +//! let my_rc = Rc::new(()); +//! +//! Rc::downgrade(&my_rc); +//! ``` +//! +//! `Weak` does not auto-dereference to `T`, because the value may have +//! already been destroyed. +//! +//! [rc]: struct.Rc.html +//! [weak]: struct.Weak.html +//! [clone]: ../../std/clone/trait.Clone.html#tymethod.clone +//! [cell]: ../../std/cell/struct.Cell.html +//! [refcell]: ../../std/cell/struct.RefCell.html +//! [send]: ../../std/marker/trait.Send.html +//! [arc]: ../../std/sync/struct.Arc.html +//! [deref]: ../../std/ops/trait.Deref.html +//! [downgrade]: struct.Rc.html#method.downgrade +//! [upgrade]: struct.Weak.html#method.upgrade +//! [option]: ../../std/option/enum.Option.html +//! [assoc]: ../../book/method-syntax.html#associated-functions //! //! # Examples //! //! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`. //! We want to have our `Gadget`s point to their `Owner`. We can't do this with //! unique ownership, because more than one gadget may belong to the same -//! `Owner`. `Rc` allows us to share an `Owner` between multiple `Gadget`s, +//! `Owner`. `Rc` allows us to share an `Owner` between multiple `Gadget`s, //! and have the `Owner` remain allocated as long as any `Gadget` points at it. //! -//! ```rust +//! ``` //! use std::rc::Rc; //! //! struct Owner { -//! name: String +//! name: String, //! // ...other fields //! } //! //! struct Gadget { //! id: i32, -//! owner: Rc +//! owner: Rc, //! // ...other fields //! } //! //! fn main() { -//! // Create a reference counted Owner. -//! let gadget_owner : Rc = Rc::new( -//! Owner { name: String::from("Gadget Man") } +//! // Create a reference-counted `Owner`. +//! let gadget_owner: Rc = Rc::new( +//! Owner { +//! name: "Gadget Man".to_string(), +//! } //! ); //! -//! // Create Gadgets belonging to gadget_owner. To increment the reference -//! // count we clone the `Rc` object. -//! let gadget1 = Gadget { id: 1, owner: gadget_owner.clone() }; -//! let gadget2 = Gadget { id: 2, owner: gadget_owner.clone() }; +//! // Create `Gadget`s belonging to `gadget_owner`. Cloning the `Rc` +//! // value gives us a new pointer to the same `Owner` value, incrementing +//! // the reference count in the process. +//! let gadget1 = Gadget { +//! id: 1, +//! owner: gadget_owner.clone(), +//! }; +//! 
let gadget2 = Gadget { +//! id: 2, +//! owner: gadget_owner.clone(), +//! }; //! +//! // Dispose of our local variable `gadget_owner`. //! drop(gadget_owner); //! -//! // Despite dropping gadget_owner, we're still able to print out the name -//! // of the Owner of the Gadgets. This is because we've only dropped the -//! // reference count object, not the Owner it wraps. As long as there are -//! // other `Rc` objects pointing at the same Owner, it will remain -//! // allocated. Notice that the `Rc` wrapper around Gadget.owner gets -//! // automatically dereferenced for us. +//! // Despite dropping `gadget_owner`, we're still able to print out the name +//! // of the `Owner` of the `Gadget`s. This is because we've only dropped a +//! // single `Rc`, not the `Owner` it points to. As long as there are +//! // other `Rc` values pointing at the same `Owner`, it will remain +//! // allocated. The field projection `gadget1.owner.name` works because +//! // `Rc` automatically dereferences to `Owner`. //! println!("Gadget {} owned by {}", gadget1.id, gadget1.owner.name); //! println!("Gadget {} owned by {}", gadget2.id, gadget2.owner.name); //! -//! // At the end of the method, gadget1 and gadget2 get destroyed, and with -//! // them the last counted references to our Owner. Gadget Man now gets -//! // destroyed as well. +//! // At the end of the function, `gadget1` and `gadget2` are destroyed, and +//! // with them the last counted references to our `Owner`. Gadget Man now +//! // gets destroyed as well. //! } //! ``` //! //! If our requirements change, and we also need to be able to traverse from -//! Owner → Gadget, we will run into problems: an `Rc` pointer from Owner -//! → Gadget introduces a cycle between the objects. This means that their -//! reference counts can never reach 0, and the objects will remain allocated: a -//! memory leak. In order to get around this, we can use `Weak` pointers. -//! These pointers don't contribute to the total count. +//! `Owner` to `Gadget`, we will run into problems. An `Rc` pointer from `Owner` +//! to `Gadget` introduces a cycle between the values. This means that their +//! reference counts can never reach 0, and the values will remain allocated +//! forever: a memory leak. In order to get around this, we can use `Weak` +//! pointers. //! //! Rust actually makes it somewhat difficult to produce this loop in the first -//! place: in order to end up with two objects that point at each other, one of -//! them needs to be mutable. This is problematic because `Rc` enforces -//! memory safety by only giving out shared references to the object it wraps, +//! place. In order to end up with two values that point at each other, one of +//! them needs to be mutable. This is difficult because `Rc` enforces +//! memory safety by only giving out shared references to the value it wraps, //! and these don't allow direct mutation. We need to wrap the part of the -//! object we wish to mutate in a `RefCell`, which provides *interior +//! value we wish to mutate in a [`RefCell`][refcell], which provides *interior //! mutability*: a method to achieve mutability through a shared reference. -//! `RefCell` enforces Rust's borrowing rules at runtime. Read the `Cell` -//! documentation for more details on interior mutability. +//! `RefCell` enforces Rust's borrowing rules at runtime. //! -//! ```rust +//! ``` //! use std::rc::Rc; //! use std::rc::Weak; //! use std::cell::RefCell; @@ -111,41 +160,58 @@ //! } //! //! fn main() { -//! // Create a reference counted Owner. 
Note the fact that we've put the -//! // Owner's vector of Gadgets inside a RefCell so that we can mutate it -//! // through a shared reference. -//! let gadget_owner : Rc = Rc::new( +//! // Create a reference-counted `Owner`. Note that we've put the `Owner`'s +//! // vector of `Gadget`s inside a `RefCell` so that we can mutate it through +//! // a shared reference. +//! let gadget_owner: Rc = Rc::new( //! Owner { //! name: "Gadget Man".to_string(), -//! gadgets: RefCell::new(Vec::new()), +//! gadgets: RefCell::new(vec![]), +//! } +//! ); +//! +//! // Create `Gadget`s belonging to `gadget_owner`, as before. +//! let gadget1 = Rc::new( +//! Gadget { +//! id: 1, +//! owner: gadget_owner.clone(), +//! } +//! ); +//! let gadget2 = Rc::new( +//! Gadget { +//! id: 2, +//! owner: gadget_owner.clone(), //! } //! ); //! -//! // Create Gadgets belonging to gadget_owner as before. -//! let gadget1 = Rc::new(Gadget{id: 1, owner: gadget_owner.clone()}); -//! let gadget2 = Rc::new(Gadget{id: 2, owner: gadget_owner.clone()}); +//! // Add the `Gadget`s to their `Owner`. +//! { +//! let mut gadgets = gadget_owner.gadgets.borrow_mut(); +//! gadgets.push(Rc::downgrade(&gadget1)); +//! gadgets.push(Rc::downgrade(&gadget2)); //! -//! // Add the Gadgets to their Owner. To do this we mutably borrow from -//! // the RefCell holding the Owner's Gadgets. -//! gadget_owner.gadgets.borrow_mut().push(Rc::downgrade(&gadget1)); -//! gadget_owner.gadgets.borrow_mut().push(Rc::downgrade(&gadget2)); +//! // `RefCell` dynamic borrow ends here. +//! } //! -//! // Iterate over our Gadgets, printing their details out -//! for gadget_opt in gadget_owner.gadgets.borrow().iter() { +//! // Iterate over our `Gadget`s, printing their details out. +//! for gadget_weak in gadget_owner.gadgets.borrow().iter() { //! -//! // gadget_opt is a Weak. Since weak pointers can't guarantee -//! // that their object is still allocated, we need to call upgrade() -//! // on them to turn them into a strong reference. This returns an -//! // Option, which contains a reference to our object if it still -//! // exists. -//! let gadget = gadget_opt.upgrade().unwrap(); +//! // `gadget_weak` is a `Weak`. Since `Weak` pointers can't +//! // guarantee the value is still allocated, we need to call +//! // `upgrade`, which returns an `Option>`. +//! // +//! // In this case we know the value still exists, so we simply +//! // `unwrap` the `Option`. In a more complicated program, you might +//! // need graceful error handling for a `None` result. +//! +//! let gadget = gadget_weak.upgrade().unwrap(); //! println!("Gadget {} owned by {}", gadget.id, gadget.owner.name); //! } //! -//! // At the end of the method, gadget_owner, gadget1 and gadget2 get -//! // destroyed. There are now no strong (`Rc`) references to the gadgets. -//! // Once they get destroyed, the Gadgets get destroyed. This zeroes the -//! // reference count on Gadget Man, they get destroyed as well. +//! // At the end of the function, `gadget_owner`, `gadget1`, and `gadget2` +//! // are destroyed. There are now no strong (`Rc`) pointers to the +//! // gadgets, so they are destroyed. This zeroes the reference count on +//! // Gadget Man, so he gets destroyed as well. //! } //! 
``` @@ -164,13 +230,14 @@ use core::hash::{Hash, Hasher}; use core::intrinsics::{abort, assume}; use core::marker; use core::marker::Unsize; -use core::mem::{self, align_of_val, forget, size_of_val, uninitialized}; +use core::mem::{self, align_of_val, forget, size_of, size_of_val, uninitialized}; use core::ops::Deref; use core::ops::CoerceUnsized; use core::ptr::{self, Shared}; use core::convert::From; use heap::deallocate; +use raw_vec::RawVec; struct RcBox { strong: Cell, @@ -179,16 +246,14 @@ struct RcBox { } -/// A reference-counted pointer type over an immutable value. +/// A single-threaded reference-counting pointer. /// -/// See the [module level documentation](./index.html) for more details. +/// See the [module-level documentation](./index.html) for more details. /// -/// Note: the inherent methods defined on `Rc` are all associated functions, -/// which means that you have to call them as e.g. `Rc::get_mut(&value)` instead -/// of `value.get_mut()`. This is so that there are no conflicts with methods -/// on the inner type `T`, which are what you want to call in the majority of -/// cases. -#[cfg_attr(stage0, unsafe_no_drop_flag)] +/// The inherent methods of `Rc` are all associated functions, which means +/// that you have to call them as e.g. `Rc::get_mut(&value)` instead of +/// `value.get_mut()`. This avoids conflicts with methods of the inner +/// type `T`. #[stable(feature = "rust1", since = "1.0.0")] pub struct Rc { ptr: Shared>, @@ -229,12 +294,15 @@ impl Rc { } } - /// Unwraps the contained value if the `Rc` has exactly one strong reference. + /// Returns the contained value, if the `Rc` has exactly one strong reference. /// - /// Otherwise, an `Err` is returned with the same `Rc`. + /// Otherwise, an [`Err`][result] is returned with the same `Rc` that was + /// passed in. /// /// This will succeed even if there are outstanding weak references. /// + /// [result]: ../../std/result/enum.Result.html + /// /// # Examples /// /// ``` @@ -245,7 +313,7 @@ impl Rc { /// /// let x = Rc::new(4); /// let _y = x.clone(); - /// assert_eq!(Rc::try_unwrap(x), Err(Rc::new(4))); + /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4); /// ``` #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] @@ -268,7 +336,11 @@ impl Rc { } } - /// Checks if `Rc::try_unwrap` would return `Ok`. + /// Checks whether [`Rc::try_unwrap`][try_unwrap] would return + /// [`Ok`][result]. + /// + /// [try_unwrap]: struct.Rc.html#method.try_unwrap + /// [result]: ../../std/result/enum.Result.html /// /// # Examples /// @@ -284,7 +356,7 @@ impl Rc { /// let x = Rc::new(4); /// let _y = x.clone(); /// assert!(!Rc::would_unwrap(&x)); - /// assert_eq!(Rc::try_unwrap(x), Err(Rc::new(4))); + /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4); /// ``` #[unstable(feature = "rc_would_unwrap", reason = "just added for niche usecase", @@ -294,8 +366,35 @@ impl Rc { } } +impl Rc { + /// Constructs a new `Rc` from a string slice. + #[doc(hidden)] + #[unstable(feature = "rustc_private", + reason = "for internal use in rustc", + issue = "0")] + pub fn __from_str(value: &str) -> Rc { + unsafe { + // Allocate enough space for `RcBox`. + let aligned_len = 2 + (value.len() + size_of::() - 1) / size_of::(); + let vec = RawVec::::with_capacity(aligned_len); + let ptr = vec.ptr(); + forget(vec); + // Initialize fields of `RcBox`. 
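+            // The buffer is laid out like `RcBox<str>`: the first two `usize`
+            // words hold the strong and weak counts, and the string bytes are
+            // copied immediately after them (padded up to a whole number of
+            // `usize`s by `aligned_len`).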
+ *ptr.offset(0) = 1; // strong: Cell::new(1) + *ptr.offset(1) = 1; // weak: Cell::new(1) + ptr::copy_nonoverlapping(value.as_ptr(), ptr.offset(2) as *mut u8, value.len()); + // Combine the allocation address and the string length into a fat pointer to `RcBox`. + let rcbox_ptr: *mut RcBox = mem::transmute([ptr as usize, value.len()]); + assert!(aligned_len * size_of::() == size_of_val(&*rcbox_ptr)); + Rc { ptr: Shared::new(rcbox_ptr) } + } + } +} + impl Rc { - /// Creates a new `Weak` reference from this value. + /// Creates a new [`Weak`][weak] pointer to this value. + /// + /// [weak]: struct.Weak.html /// /// # Examples /// @@ -312,7 +411,22 @@ impl Rc { Weak { ptr: this.ptr } } - /// Get the number of weak references to this value. + /// Gets the number of [`Weak`][weak] pointers to this value. + /// + /// [weak]: struct.Weak.html + /// + /// # Examples + /// + /// ``` + /// #![feature(rc_counts)] + /// + /// use std::rc::Rc; + /// + /// let five = Rc::new(5); + /// let _weak_five = Rc::downgrade(&five); + /// + /// assert_eq!(1, Rc::weak_count(&five)); + /// ``` #[inline] #[unstable(feature = "rc_counts", reason = "not clearly useful", issue = "28356")] @@ -320,7 +434,20 @@ impl Rc { this.weak() - 1 } - /// Get the number of strong references to this value. + /// Gets the number of strong (`Rc`) pointers to this value. + /// + /// # Examples + /// + /// ``` + /// #![feature(rc_counts)] + /// + /// use std::rc::Rc; + /// + /// let five = Rc::new(5); + /// let _also_five = five.clone(); + /// + /// assert_eq!(2, Rc::strong_count(&five)); + /// ``` #[inline] #[unstable(feature = "rc_counts", reason = "not clearly useful", issue = "28356")] @@ -328,8 +455,10 @@ impl Rc { this.strong() } - /// Returns true if there are no other `Rc` or `Weak` values that share - /// the same inner value. + /// Returns true if there are no other `Rc` or [`Weak`][weak] pointers to + /// this inner value. + /// + /// [weak]: struct.Weak.html /// /// # Examples /// @@ -349,10 +478,19 @@ impl Rc { Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1 } - /// Returns a mutable reference to the contained value if the `Rc` has - /// one strong reference and no weak references. + /// Returns a mutable reference to the inner value, if there are + /// no other `Rc` or [`Weak`][weak] pointers to the same value. + /// + /// Returns [`None`][option] otherwise, because it is not safe to + /// mutate a shared value. + /// + /// See also [`make_mut`][make_mut], which will [`clone`][clone] + /// the inner value when it's shared. /// - /// Returns `None` if the `Rc` is not unique. + /// [weak]: struct.Weak.html + /// [option]: ../../std/option/enum.Option.html + /// [make_mut]: struct.Rc.html#method.make_mut + /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone /// /// # Examples /// @@ -381,8 +519,8 @@ impl Rc { #[unstable(feature = "ptr_eq", reason = "newly added", issue = "36497")] - /// Return whether two `Rc` references point to the same value - /// (not just values that compare equal). + /// Returns true if the two `Rc`s point to the same value (not + /// just values that compare as equal). /// /// # Examples /// @@ -406,11 +544,17 @@ impl Rc { } impl Rc { - /// Make a mutable reference into the given `Rc` by cloning the inner - /// data if the `Rc` doesn't have one strong reference and no weak - /// references. + /// Makes a mutable reference into the given `Rc`. 
+ /// + /// If there are other `Rc` or [`Weak`][weak] pointers to the same value, + /// then `make_mut` will invoke [`clone`][clone] on the inner value to + /// ensure unique ownership. This is also referred to as clone-on-write. + /// + /// See also [`get_mut`][get_mut], which will fail rather than cloning. /// - /// This is also referred to as a copy-on-write. + /// [weak]: struct.Weak.html + /// [clone]: ../../std/clone/trait.Clone.html#tymethod.clone + /// [get_mut]: struct.Rc.html#method.get_mut /// /// # Examples /// @@ -419,16 +563,15 @@ impl Rc { /// /// let mut data = Rc::new(5); /// - /// *Rc::make_mut(&mut data) += 1; // Won't clone anything - /// let mut other_data = data.clone(); // Won't clone inner data - /// *Rc::make_mut(&mut data) += 1; // Clones inner data - /// *Rc::make_mut(&mut data) += 1; // Won't clone anything - /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything + /// *Rc::make_mut(&mut data) += 1; // Won't clone anything + /// let mut other_data = data.clone(); // Won't clone inner data + /// *Rc::make_mut(&mut data) += 1; // Clones inner data + /// *Rc::make_mut(&mut data) += 1; // Won't clone anything + /// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything /// - /// // Note: data and other_data now point to different numbers + /// // Now `data` and `other_data` point to different values. /// assert_eq!(*data, 8); /// assert_eq!(*other_data, 12); - /// /// ``` #[inline] #[stable(feature = "rc_unique", since = "1.4.0")] @@ -470,30 +613,32 @@ impl Deref for Rc { #[stable(feature = "rust1", since = "1.0.0")] impl Drop for Rc { - /// Drops the `Rc`. + /// Drops the `Rc`. /// /// This will decrement the strong reference count. If the strong reference - /// count becomes zero and the only other references are `Weak` ones, - /// `drop`s the inner value. + /// count reaches zero then the only other references (if any) are + /// [`Weak`][weak], so we `drop` the inner value. + /// + /// [weak]: struct.Weak.html /// /// # Examples /// /// ``` /// use std::rc::Rc; /// - /// { - /// let five = Rc::new(5); - /// - /// // stuff + /// struct Foo; /// - /// drop(five); // explicit drop + /// impl Drop for Foo { + /// fn drop(&mut self) { + /// println!("dropped!"); + /// } /// } - /// { - /// let five = Rc::new(5); /// - /// // stuff + /// let foo = Rc::new(Foo); + /// let foo2 = foo.clone(); /// - /// } // implicit drop + /// drop(foo); // Doesn't print anything + /// drop(foo2); // Prints "dropped!" /// ``` #[unsafe_destructor_blind_to_params] fn drop(&mut self) { @@ -519,10 +664,10 @@ impl Drop for Rc { #[stable(feature = "rust1", since = "1.0.0")] impl Clone for Rc { - /// Makes a clone of the `Rc`. + /// Makes a clone of the `Rc` pointer. /// - /// When you clone an `Rc`, it will create another pointer to the data and - /// increase the strong reference counter. + /// This creates another pointer to the same inner value, increasing the + /// strong reference count. /// /// # Examples /// @@ -550,6 +695,7 @@ impl Default for Rc { /// use std::rc::Rc; /// /// let x: Rc = Default::default(); + /// assert_eq!(*x, 0); /// ``` #[inline] fn default() -> Rc { @@ -559,9 +705,9 @@ impl Default for Rc { #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for Rc { - /// Equality for two `Rc`s. + /// Equality for two `Rc`s. /// - /// Two `Rc`s are equal if their inner value are equal. + /// Two `Rc`s are equal if their inner values are equal. 
/// /// # Examples /// @@ -570,16 +716,16 @@ impl PartialEq for Rc { /// /// let five = Rc::new(5); /// - /// five == Rc::new(5); + /// assert!(five == Rc::new(5)); /// ``` #[inline(always)] fn eq(&self, other: &Rc) -> bool { **self == **other } - /// Inequality for two `Rc`s. + /// Inequality for two `Rc`s. /// - /// Two `Rc`s are unequal if their inner value are unequal. + /// Two `Rc`s are unequal if their inner values are unequal. /// /// # Examples /// @@ -588,7 +734,7 @@ impl PartialEq for Rc { /// /// let five = Rc::new(5); /// - /// five != Rc::new(5); + /// assert!(five != Rc::new(6)); /// ``` #[inline(always)] fn ne(&self, other: &Rc) -> bool { @@ -601,7 +747,7 @@ impl Eq for Rc {} #[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for Rc { - /// Partial comparison for two `Rc`s. + /// Partial comparison for two `Rc`s. /// /// The two are compared by calling `partial_cmp()` on their inner values. /// @@ -609,17 +755,18 @@ impl PartialOrd for Rc { /// /// ``` /// use std::rc::Rc; + /// use std::cmp::Ordering; /// /// let five = Rc::new(5); /// - /// five.partial_cmp(&Rc::new(5)); + /// assert_eq!(Some(Ordering::Less), five.partial_cmp(&Rc::new(6))); /// ``` #[inline(always)] fn partial_cmp(&self, other: &Rc) -> Option { (**self).partial_cmp(&**other) } - /// Less-than comparison for two `Rc`s. + /// Less-than comparison for two `Rc`s. /// /// The two are compared by calling `<` on their inner values. /// @@ -630,14 +777,14 @@ impl PartialOrd for Rc { /// /// let five = Rc::new(5); /// - /// five < Rc::new(5); + /// assert!(five < Rc::new(6)); /// ``` #[inline(always)] fn lt(&self, other: &Rc) -> bool { **self < **other } - /// 'Less-than or equal to' comparison for two `Rc`s. + /// 'Less than or equal to' comparison for two `Rc`s. /// /// The two are compared by calling `<=` on their inner values. /// @@ -648,14 +795,14 @@ impl PartialOrd for Rc { /// /// let five = Rc::new(5); /// - /// five <= Rc::new(5); + /// assert!(five <= Rc::new(5)); /// ``` #[inline(always)] fn le(&self, other: &Rc) -> bool { **self <= **other } - /// Greater-than comparison for two `Rc`s. + /// Greater-than comparison for two `Rc`s. /// /// The two are compared by calling `>` on their inner values. /// @@ -666,14 +813,14 @@ impl PartialOrd for Rc { /// /// let five = Rc::new(5); /// - /// five > Rc::new(5); + /// assert!(five > Rc::new(4)); /// ``` #[inline(always)] fn gt(&self, other: &Rc) -> bool { **self > **other } - /// 'Greater-than or equal to' comparison for two `Rc`s. + /// 'Greater than or equal to' comparison for two `Rc`s. /// /// The two are compared by calling `>=` on their inner values. /// @@ -684,7 +831,7 @@ impl PartialOrd for Rc { /// /// let five = Rc::new(5); /// - /// five >= Rc::new(5); + /// assert!(five >= Rc::new(5)); /// ``` #[inline(always)] fn ge(&self, other: &Rc) -> bool { @@ -694,7 +841,7 @@ impl PartialOrd for Rc { #[stable(feature = "rust1", since = "1.0.0")] impl Ord for Rc { - /// Comparison for two `Rc`s. + /// Comparison for two `Rc`s. /// /// The two are compared by calling `cmp()` on their inner values. /// @@ -702,10 +849,11 @@ impl Ord for Rc { /// /// ``` /// use std::rc::Rc; + /// use std::cmp::Ordering; /// /// let five = Rc::new(5); /// - /// five.partial_cmp(&Rc::new(5)); + /// assert_eq!(Ordering::Less, five.cmp(&Rc::new(6))); /// ``` #[inline] fn cmp(&self, other: &Rc) -> Ordering { @@ -748,13 +896,18 @@ impl From for Rc { } } -/// A weak version of `Rc`. +/// A weak version of [`Rc`][rc]. 
+/// +/// `Weak` pointers do not count towards determining if the inner value +/// should be dropped. +/// +/// The typical way to obtain a `Weak` pointer is to call +/// [`Rc::downgrade`][downgrade]. /// -/// Weak references do not count when determining if the inner value should be -/// dropped. +/// See the [module-level documentation](./index.html) for more details. /// -/// See the [module level documentation](./index.html) for more. -#[cfg_attr(stage0, unsafe_no_drop_flag)] +/// [rc]: struct.Rc.html +/// [downgrade]: struct.Rc.html#method.downgrade #[stable(feature = "rc_weak", since = "1.4.0")] pub struct Weak { ptr: Shared>, @@ -769,10 +922,14 @@ impl !marker::Sync for Weak {} impl, U: ?Sized> CoerceUnsized> for Weak {} impl Weak { - /// Constructs a new `Weak` without an accompanying instance of T. + /// Constructs a new `Weak`, without an accompanying instance of `T`. /// - /// This allocates memory for T, but does not initialize it. Calling - /// Weak::upgrade() on the return value always gives None. + /// This allocates memory for `T`, but does not initialize it. Calling + /// [`upgrade`][upgrade] on the return value always gives + /// [`None`][option]. + /// + /// [upgrade]: struct.Weak.html#method.upgrade + /// [option]: ../../std/option/enum.Option.html /// /// # Examples /// @@ -780,6 +937,7 @@ impl Weak { /// use std::rc::Weak; /// /// let empty: Weak = Weak::new(); + /// assert!(empty.upgrade().is_none()); /// ``` #[stable(feature = "downgraded_weak", since = "1.10.0")] pub fn new() -> Weak { @@ -796,12 +954,13 @@ impl Weak { } impl Weak { - /// Upgrades a weak reference to a strong reference. + /// Upgrades the `Weak` pointer to an [`Rc`][rc], if possible. /// - /// Upgrades the `Weak` reference to an `Rc`, if possible. + /// Returns [`None`][option] if the strong count has reached zero and the + /// inner value was destroyed. /// - /// Returns `None` if there were no strong references and the data was - /// destroyed. + /// [rc]: struct.Rc.html + /// [option]: ../../std/option/enum.Option.html /// /// # Examples /// @@ -813,6 +972,13 @@ impl Weak { /// let weak_five = Rc::downgrade(&five); /// /// let strong_five: Option> = weak_five.upgrade(); + /// assert!(strong_five.is_some()); + /// + /// // Destroy all strong pointers. + /// drop(strong_five); + /// drop(five); + /// + /// assert!(weak_five.upgrade().is_none()); /// ``` #[stable(feature = "rc_weak", since = "1.4.0")] pub fn upgrade(&self) -> Option> { @@ -827,7 +993,7 @@ impl Weak { #[stable(feature = "rc_weak", since = "1.4.0")] impl Drop for Weak { - /// Drops the `Weak`. + /// Drops the `Weak` pointer. /// /// This will decrement the weak reference count. /// @@ -836,21 +1002,22 @@ impl Drop for Weak { /// ``` /// use std::rc::Rc; /// - /// { - /// let five = Rc::new(5); - /// let weak_five = Rc::downgrade(&five); + /// struct Foo; /// - /// // stuff - /// - /// drop(weak_five); // explicit drop + /// impl Drop for Foo { + /// fn drop(&mut self) { + /// println!("dropped!"); + /// } /// } - /// { - /// let five = Rc::new(5); - /// let weak_five = Rc::downgrade(&five); /// - /// // stuff + /// let foo = Rc::new(Foo); + /// let weak_foo = Rc::downgrade(&foo); + /// let other_weak_foo = weak_foo.clone(); + /// + /// drop(weak_foo); // Doesn't print anything + /// drop(foo); // Prints "dropped!" 
/// - /// } // implicit drop + /// assert!(other_weak_foo.upgrade().is_none()); /// ``` fn drop(&mut self) { unsafe { @@ -868,9 +1035,10 @@ impl Drop for Weak { #[stable(feature = "rc_weak", since = "1.4.0")] impl Clone for Weak { - /// Makes a clone of the `Weak`. + /// Makes a clone of the `Weak` pointer. /// - /// This increases the weak reference count. + /// This creates another pointer to the same inner value, increasing the + /// weak reference count. /// /// # Examples /// @@ -897,7 +1065,23 @@ impl fmt::Debug for Weak { #[stable(feature = "downgraded_weak", since = "1.10.0")] impl Default for Weak { - /// Creates a new `Weak`. + /// Constructs a new `Weak`, without an accompanying instance of `T`. + /// + /// This allocates memory for `T`, but does not initialize it. Calling + /// [`upgrade`][upgrade] on the return value always gives + /// [`None`][option]. + /// + /// [upgrade]: struct.Weak.html#method.upgrade + /// [option]: ../../std/option/enum.Option.html + /// + /// # Examples + /// + /// ``` + /// use std::rc::Weak; + /// + /// let empty: Weak = Default::default(); + /// assert!(empty.upgrade().is_none()); + /// ``` fn default() -> Weak { Weak::new() } diff --git a/src/liballoc_jemalloc/build.rs b/src/liballoc_jemalloc/build.rs index dc1b8d6ea9835..08a1f8ae8c6ca 100644 --- a/src/liballoc_jemalloc/build.rs +++ b/src/liballoc_jemalloc/build.rs @@ -22,11 +22,25 @@ fn main() { println!("cargo:rustc-cfg=cargobuild"); println!("cargo:rerun-if-changed=build.rs"); - let target = env::var("TARGET").unwrap(); - let host = env::var("HOST").unwrap(); + let target = env::var("TARGET").expect("TARGET was not set"); + let host = env::var("HOST").expect("HOST was not set"); let build_dir = PathBuf::from(env::var_os("OUT_DIR").unwrap()); let src_dir = env::current_dir().unwrap(); + // FIXME: This is a hack to support building targets that don't + // support jemalloc alongside hosts that do. The jemalloc build is + // controlled by a feature of the std crate, and if that feature + // changes between targets, it invalidates the fingerprint of + // std's build script (this is a cargo bug); so we must ensure + // that the feature set used by std is the same across all + // targets, which means we have to build the alloc_jemalloc crate + // for targets like emscripten, even if we don't use it. 
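+    // When this branch is taken, the `dummy_jemalloc` module in lib.rs supplies
+    // stub allocator entry points that panic if they are ever called.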
+ if target.contains("rumprun") || target.contains("bitrig") || target.contains("openbsd") || + target.contains("msvc") || target.contains("emscripten") || target.contains("fuchsia") { + println!("cargo:rustc-cfg=dummy_jemalloc"); + return; + } + if let Some(jemalloc) = env::var_os("JEMALLOC_OVERRIDE") { let jemalloc = PathBuf::from(jemalloc); println!("cargo:rustc-link-search=native={}", @@ -46,16 +60,16 @@ fn main() { // only msvc returns None for ar so unwrap is okay let ar = build_helper::cc2ar(compiler.path(), &target).unwrap(); let cflags = compiler.args() - .iter() - .map(|s| s.to_str().unwrap()) - .collect::>() - .join(" "); + .iter() + .map(|s| s.to_str().unwrap()) + .collect::>() + .join(" "); let mut stack = src_dir.join("../jemalloc") - .read_dir() - .unwrap() - .map(|e| e.unwrap()) - .collect::>(); + .read_dir() + .unwrap() + .map(|e| e.unwrap()) + .collect::>(); while let Some(entry) = stack.pop() { let path = entry.path(); if entry.file_type().unwrap().is_dir() { @@ -137,10 +151,10 @@ fn main() { run(&mut cmd); run(Command::new("make") - .current_dir(&build_dir) - .arg("build_lib_static") - .arg("-j") - .arg(env::var("NUM_JOBS").unwrap())); + .current_dir(&build_dir) + .arg("build_lib_static") + .arg("-j") + .arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set"))); if target.contains("windows") { println!("cargo:rustc-link-lib=static=jemalloc"); diff --git a/src/liballoc_jemalloc/lib.rs b/src/liballoc_jemalloc/lib.rs index 5bbf1c35e0dd4..21e45f9c4b20c 100644 --- a/src/liballoc_jemalloc/lib.rs +++ b/src/liballoc_jemalloc/lib.rs @@ -23,124 +23,170 @@ extern crate libc; -use libc::{c_int, c_void, size_t}; +pub use imp::*; -// Linkage directives to pull in jemalloc and its dependencies. -// -// On some platforms we need to be sure to link in `pthread` which jemalloc -// depends on, and specifically on android we need to also link to libgcc. -// Currently jemalloc is compiled with gcc which will generate calls to -// intrinsics that are libgcc specific (e.g. those intrinsics aren't present in -// libcompiler-rt), so link that in to get that support. -#[link(name = "jemalloc", kind = "static")] -#[cfg_attr(target_os = "android", link(name = "gcc"))] -#[cfg_attr(all(not(windows), - not(target_os = "android"), - not(target_env = "musl")), - link(name = "pthread"))] -#[cfg(not(cargobuild))] -extern "C" {} - -// Note that the symbols here are prefixed by default on OSX and Windows (we -// don't explicitly request it), and on Android and DragonFly we explicitly -// request it as unprefixing cause segfaults (mismatches in allocators). 
-extern "C" { - #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", - target_os = "dragonfly", target_os = "windows"), - link_name = "je_mallocx")] - fn mallocx(size: size_t, flags: c_int) -> *mut c_void; - #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", - target_os = "dragonfly", target_os = "windows"), - link_name = "je_rallocx")] - fn rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void; - #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", - target_os = "dragonfly", target_os = "windows"), - link_name = "je_xallocx")] - fn xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t; - #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", - target_os = "dragonfly", target_os = "windows"), - link_name = "je_sdallocx")] - fn sdallocx(ptr: *mut c_void, size: size_t, flags: c_int); - #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", - target_os = "dragonfly", target_os = "windows"), - link_name = "je_nallocx")] - fn nallocx(size: size_t, flags: c_int) -> size_t; -} +// See comments in build.rs for why we sometimes build a crate that does nothing +#[cfg(not(dummy_jemalloc))] +mod imp { + use libc::{c_int, c_void, size_t}; -// The minimum alignment guaranteed by the architecture. This value is used to -// add fast paths for low alignment values. In practice, the alignment is a -// constant at the call site and the branch will be optimized out. -#[cfg(all(any(target_arch = "arm", - target_arch = "mips", - target_arch = "powerpc")))] -const MIN_ALIGN: usize = 8; -#[cfg(all(any(target_arch = "x86", - target_arch = "x86_64", - target_arch = "aarch64", - target_arch = "powerpc64", - target_arch = "mips64", - target_arch = "s390x")))] -const MIN_ALIGN: usize = 16; - -// MALLOCX_ALIGN(a) macro -fn mallocx_align(a: usize) -> c_int { - a.trailing_zeros() as c_int -} + // Linkage directives to pull in jemalloc and its dependencies. + // + // On some platforms we need to be sure to link in `pthread` which jemalloc + // depends on, and specifically on android we need to also link to libgcc. + // Currently jemalloc is compiled with gcc which will generate calls to + // intrinsics that are libgcc specific (e.g. those intrinsics aren't present in + // libcompiler-rt), so link that in to get that support. + #[link(name = "jemalloc", kind = "static")] + #[cfg_attr(target_os = "android", link(name = "gcc"))] + #[cfg_attr(all(not(windows), + not(target_os = "android"), + not(target_env = "musl")), + link(name = "pthread"))] + #[cfg(not(cargobuild))] + extern "C" {} + + // Note that the symbols here are prefixed by default on OSX and Windows (we + // don't explicitly request it), and on Android and DragonFly we explicitly + // request it as unprefixing cause segfaults (mismatches in allocators). 
+ extern "C" { + #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", + target_os = "dragonfly", target_os = "windows"), + link_name = "je_mallocx")] + fn mallocx(size: size_t, flags: c_int) -> *mut c_void; + #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", + target_os = "dragonfly", target_os = "windows"), + link_name = "je_rallocx")] + fn rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void; + #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", + target_os = "dragonfly", target_os = "windows"), + link_name = "je_xallocx")] + fn xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t; + #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", + target_os = "dragonfly", target_os = "windows"), + link_name = "je_sdallocx")] + fn sdallocx(ptr: *mut c_void, size: size_t, flags: c_int); + #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", + target_os = "dragonfly", target_os = "windows"), + link_name = "je_nallocx")] + fn nallocx(size: size_t, flags: c_int) -> size_t; + } + + // The minimum alignment guaranteed by the architecture. This value is used to + // add fast paths for low alignment values. In practice, the alignment is a + // constant at the call site and the branch will be optimized out. + #[cfg(all(any(target_arch = "arm", + target_arch = "mips", + target_arch = "powerpc")))] + const MIN_ALIGN: usize = 8; + #[cfg(all(any(target_arch = "x86", + target_arch = "x86_64", + target_arch = "aarch64", + target_arch = "powerpc64", + target_arch = "mips64", + target_arch = "s390x")))] + const MIN_ALIGN: usize = 16; + + // MALLOCX_ALIGN(a) macro + fn mallocx_align(a: usize) -> c_int { + a.trailing_zeros() as c_int + } + + fn align_to_flags(align: usize) -> c_int { + if align <= MIN_ALIGN { + 0 + } else { + mallocx_align(align) + } + } + + #[no_mangle] + pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 { + let flags = align_to_flags(align); + unsafe { mallocx(size as size_t, flags) as *mut u8 } + } + + #[no_mangle] + pub extern "C" fn __rust_reallocate(ptr: *mut u8, + _old_size: usize, + size: usize, + align: usize) + -> *mut u8 { + let flags = align_to_flags(align); + unsafe { rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8 } + } + + #[no_mangle] + pub extern "C" fn __rust_reallocate_inplace(ptr: *mut u8, + _old_size: usize, + size: usize, + align: usize) + -> usize { + let flags = align_to_flags(align); + unsafe { xallocx(ptr as *mut c_void, size as size_t, 0, flags) as usize } + } -fn align_to_flags(align: usize) -> c_int { - if align <= MIN_ALIGN { + #[no_mangle] + pub extern "C" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) { + let flags = align_to_flags(align); + unsafe { sdallocx(ptr as *mut c_void, old_size as size_t, flags) } + } + + #[no_mangle] + pub extern "C" fn __rust_usable_size(size: usize, align: usize) -> usize { + let flags = align_to_flags(align); + unsafe { nallocx(size as size_t, flags) as usize } + } + + // These symbols are used by jemalloc on android but the really old android + // we're building on doesn't have them defined, so just make sure the symbols + // are available. 
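+    // The stub below does nothing and simply reports success (returns 0);
+    // jemalloc only needs the symbol to resolve at link time.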
+ #[no_mangle] + #[cfg(target_os = "android")] + pub extern "C" fn pthread_atfork(_prefork: *mut u8, + _postfork_parent: *mut u8, + _postfork_child: *mut u8) + -> i32 { 0 - } else { - mallocx_align(align) } } -#[no_mangle] -pub extern "C" fn __rust_allocate(size: usize, align: usize) -> *mut u8 { - let flags = align_to_flags(align); - unsafe { mallocx(size as size_t, flags) as *mut u8 } -} +#[cfg(dummy_jemalloc)] +mod imp { + fn bogus() -> ! { + panic!("jemalloc is not implemented for this platform"); + } -#[no_mangle] -pub extern "C" fn __rust_reallocate(ptr: *mut u8, - _old_size: usize, - size: usize, - align: usize) - -> *mut u8 { - let flags = align_to_flags(align); - unsafe { rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8 } -} + #[no_mangle] + pub extern "C" fn __rust_allocate(_size: usize, _align: usize) -> *mut u8 { + bogus() + } -#[no_mangle] -pub extern "C" fn __rust_reallocate_inplace(ptr: *mut u8, - _old_size: usize, - size: usize, - align: usize) - -> usize { - let flags = align_to_flags(align); - unsafe { xallocx(ptr as *mut c_void, size as size_t, 0, flags) as usize } -} + #[no_mangle] + pub extern "C" fn __rust_reallocate(_ptr: *mut u8, + _old_size: usize, + _size: usize, + _align: usize) + -> *mut u8 { + bogus() + } -#[no_mangle] -pub extern "C" fn __rust_deallocate(ptr: *mut u8, old_size: usize, align: usize) { - let flags = align_to_flags(align); - unsafe { sdallocx(ptr as *mut c_void, old_size as size_t, flags) } -} + #[no_mangle] + pub extern "C" fn __rust_reallocate_inplace(_ptr: *mut u8, + _old_size: usize, + _size: usize, + _align: usize) + -> usize { + bogus() + } -#[no_mangle] -pub extern "C" fn __rust_usable_size(size: usize, align: usize) -> usize { - let flags = align_to_flags(align); - unsafe { nallocx(size as size_t, flags) as usize } -} + #[no_mangle] + pub extern "C" fn __rust_deallocate(_ptr: *mut u8, _old_size: usize, _align: usize) { + bogus() + } -// These symbols are used by jemalloc on android but the really old android -// we're building on doesn't have them defined, so just make sure the symbols -// are available. 
-#[no_mangle] -#[cfg(target_os = "android")] -pub extern "C" fn pthread_atfork(_prefork: *mut u8, - _postfork_parent: *mut u8, - _postfork_child: *mut u8) - -> i32 { - 0 + #[no_mangle] + pub extern "C" fn __rust_usable_size(_size: usize, _align: usize) -> usize { + bogus() + } } diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs index 01407d1acd2ec..a4fabb5a2c96d 100644 --- a/src/liballoc_system/lib.rs +++ b/src/liballoc_system/lib.rs @@ -29,7 +29,8 @@ target_arch = "mips", target_arch = "powerpc", target_arch = "powerpc64", - target_arch = "asmjs")))] + target_arch = "asmjs", + target_arch = "wasm32")))] const MIN_ALIGN: usize = 8; #[cfg(all(any(target_arch = "x86_64", target_arch = "aarch64", @@ -165,6 +166,7 @@ mod imp { fn HeapAlloc(hHeap: HANDLE, dwFlags: DWORD, dwBytes: SIZE_T) -> LPVOID; fn HeapReAlloc(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID, dwBytes: SIZE_T) -> LPVOID; fn HeapFree(hHeap: HANDLE, dwFlags: DWORD, lpMem: LPVOID) -> BOOL; + fn GetLastError() -> DWORD; } #[repr(C)] @@ -220,11 +222,7 @@ mod imp { HEAP_REALLOC_IN_PLACE_ONLY, ptr as LPVOID, size as SIZE_T) as *mut u8; - if new.is_null() { - old_size - } else { - size - } + if new.is_null() { old_size } else { size } } else { old_size } @@ -233,11 +231,11 @@ mod imp { pub unsafe fn deallocate(ptr: *mut u8, _old_size: usize, align: usize) { if align <= MIN_ALIGN { let err = HeapFree(GetProcessHeap(), 0, ptr as LPVOID); - debug_assert!(err != 0); + debug_assert!(err != 0, "Failed to free heap memory: {}", GetLastError()); } else { let header = get_header(ptr); let err = HeapFree(GetProcessHeap(), 0, header.0 as LPVOID); - debug_assert!(err != 0); + debug_assert!(err != 0, "Failed to free heap memory: {}", GetLastError()); } } diff --git a/src/libarena/lib.rs b/src/libarena/lib.rs index b299b786b35a8..6044bec2c5af7 100644 --- a/src/libarena/lib.rs +++ b/src/libarena/lib.rs @@ -15,9 +15,8 @@ //! of individual objects while the arena itself is still alive. The benefit //! of an arena is very fast allocation; just a pointer bump. //! -//! This crate has two arenas implemented: `TypedArena`, which is a simpler -//! arena but can only hold objects of a single type, and `Arena`, which is a -//! more complex, slower arena which can hold objects of any type. +//! This crate implements `TypedArena`, a simple arena that can only hold +//! objects of a single type. #![crate_name = "arena"] #![unstable(feature = "rustc_private", issue = "27812")] @@ -47,11 +46,12 @@ use std::intrinsics; use std::marker::{PhantomData, Send}; use std::mem; use std::ptr; +use std::slice; use alloc::heap; use alloc::raw_vec::RawVec; -/// A faster arena that can hold objects of only one type. +/// An arena that can hold objects of only one type. pub struct TypedArena { /// A pointer to the next object to be allocated. ptr: Cell<*mut T>, @@ -60,7 +60,7 @@ pub struct TypedArena { /// reached, a new chunk is allocated. end: Cell<*mut T>, - /// A vector arena segments. + /// A vector of arena chunks. chunks: RefCell>>, /// Marker indicating that dropping the arena causes its owned @@ -69,7 +69,7 @@ pub struct TypedArena { } struct TypedArenaChunk { - /// Pointer to the next arena segment. + /// The raw storage for the arena chunk. storage: RawVec, } @@ -117,26 +117,16 @@ impl TypedArenaChunk { const PAGE: usize = 4096; impl TypedArena { - /// Creates a new `TypedArena` with preallocated space for many objects. + /// Creates a new `TypedArena`. #[inline] pub fn new() -> TypedArena { - // Reserve at least one page. 
-        let elem_size = cmp::max(1, mem::size_of::<T>());
-        TypedArena::with_capacity(PAGE / elem_size)
-    }
-
-    /// Creates a new `TypedArena` with preallocated space for the given number of
-    /// objects.
-    #[inline]
-    pub fn with_capacity(capacity: usize) -> TypedArena<T> {
-        unsafe {
-            let chunk = TypedArenaChunk::<T>::new(cmp::max(1, capacity));
-            TypedArena {
-                ptr: Cell::new(chunk.start()),
-                end: Cell::new(chunk.end()),
-                chunks: RefCell::new(vec![chunk]),
-                _own: PhantomData,
-            }
+        TypedArena {
+            // We set both `ptr` and `end` to 0 so that the first call to
+            // alloc() will trigger a grow().
+            ptr: Cell::new(0 as *mut T),
+            end: Cell::new(0 as *mut T),
+            chunks: RefCell::new(vec![]),
+            _own: PhantomData,
         }
     }
 
@@ -144,7 +134,7 @@ impl<T> TypedArena<T> {
     #[inline]
     pub fn alloc(&self, object: T) -> &mut T {
         if self.ptr == self.end {
-            self.grow()
+            self.grow(1)
         }
 
         unsafe {
@@ -165,35 +155,79 @@ impl<T> TypedArena<T> {
         }
     }
 
+    /// Allocates a slice of objects that are copied into the `TypedArena`, returning a mutable
+    /// reference to it.
+    ///
+    /// Panics:
+    /// - Zero-sized types
+    /// - Zero-length slices
+    #[inline]
+    pub fn alloc_slice(&self, slice: &[T]) -> &mut [T]
+        where T: Copy {
+        assert!(mem::size_of::<T>() != 0);
+        assert!(slice.len() != 0);
+
+        let available_capacity_bytes = self.end.get() as usize - self.ptr.get() as usize;
+        let at_least_bytes = slice.len() * mem::size_of::<T>();
+        if available_capacity_bytes < at_least_bytes {
+            self.grow(slice.len());
+        }
+
+        unsafe {
+            let start_ptr = self.ptr.get();
+            let arena_slice = slice::from_raw_parts_mut(start_ptr, slice.len());
+            self.ptr.set(start_ptr.offset(arena_slice.len() as isize));
+            arena_slice.copy_from_slice(slice);
+            arena_slice
+        }
+    }
+
     /// Grows the arena.
     #[inline(never)]
     #[cold]
-    fn grow(&self) {
+    fn grow(&self, n: usize) {
         unsafe {
             let mut chunks = self.chunks.borrow_mut();
-            let prev_capacity = chunks.last().unwrap().storage.cap();
-            let new_capacity = prev_capacity.checked_mul(2).unwrap();
-            if chunks.last_mut().unwrap().storage.double_in_place() {
-                self.end.set(chunks.last().unwrap().end());
+            let (chunk, mut new_capacity);
+            if let Some(last_chunk) = chunks.last_mut() {
+                let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
+                let currently_used_cap = used_bytes / mem::size_of::<T>();
+                if last_chunk.storage.reserve_in_place(currently_used_cap, n) {
+                    self.end.set(last_chunk.end());
+                    return;
+                } else {
+                    // Keep doubling the last chunk's capacity until the request fits.
+                    new_capacity = last_chunk.storage.cap();
+                    loop {
+                        new_capacity = new_capacity.checked_mul(2).unwrap();
+                        if new_capacity >= currently_used_cap + n {
+                            break;
+                        }
+                    }
+                }
             } else {
-                let chunk = TypedArenaChunk::<T>::new(new_capacity);
-                self.ptr.set(chunk.start());
-                self.end.set(chunk.end());
-                chunks.push(chunk);
+                let elem_size = cmp::max(1, mem::size_of::<T>());
+                new_capacity = cmp::max(n, PAGE / elem_size);
             }
+            chunk = TypedArenaChunk::<T>::new(new_capacity);
+            self.ptr.set(chunk.start());
+            self.end.set(chunk.end());
+            chunks.push(chunk);
         }
     }
 
+    /// Clears the arena. Deallocates all but the longest chunk, which may be reused.
     pub fn clear(&mut self) {
         unsafe {
             // Clear the last chunk, which is partially filled.
             let mut chunks_borrow = self.chunks.borrow_mut();
-            let last_idx = chunks_borrow.len() - 1;
-            self.clear_last_chunk(&mut chunks_borrow[last_idx]);
-            // If `T` is ZST, code below has no effect.
- for mut chunk in chunks_borrow.drain(..last_idx) { - let cap = chunk.storage.cap(); - chunk.destroy(cap); + if let Some(mut last_chunk) = chunks_borrow.pop() { + self.clear_last_chunk(&mut last_chunk); + // If `T` is ZST, code below has no effect. + for mut chunk in chunks_borrow.drain(..) { + let cap = chunk.storage.cap(); + chunk.destroy(cap); + } + chunks_borrow.push(last_chunk); } } } @@ -230,13 +264,14 @@ impl Drop for TypedArena { unsafe { // Determine how much was filled. let mut chunks_borrow = self.chunks.borrow_mut(); - let mut last_chunk = chunks_borrow.pop().unwrap(); - // Drop the contents of the last chunk. - self.clear_last_chunk(&mut last_chunk); - // The last chunk will be dropped. Destroy all other chunks. - for chunk in chunks_borrow.iter_mut() { - let cap = chunk.storage.cap(); - chunk.destroy(cap); + if let Some(mut last_chunk) = chunks_borrow.pop() { + // Drop the contents of the last chunk. + self.clear_last_chunk(&mut last_chunk); + // The last chunk will be dropped. Destroy all other chunks. + for chunk in chunks_borrow.iter_mut() { + let cap = chunk.storage.cap(); + chunk.destroy(cap); + } } // RawVec handles deallocation of `last_chunk` and `self.chunks`. } @@ -260,6 +295,12 @@ mod tests { z: i32, } + #[test] + pub fn test_unused() { + let arena: TypedArena = TypedArena::new(); + assert!(arena.chunks.borrow().is_empty()); + } + #[test] fn test_arena_alloc_nested() { struct Inner { @@ -296,9 +337,8 @@ mod tests { let arena = Wrap(TypedArena::new()); - let result = arena.alloc_outer(|| { - Outer { inner: arena.alloc_inner(|| Inner { value: 10 }) } - }); + let result = + arena.alloc_outer(|| Outer { inner: arena.alloc_inner(|| Inner { value: 10 }) }); assert_eq!(result.inner.value, 10); } diff --git a/src/libcollections/binary_heap.rs b/src/libcollections/binary_heap.rs index 1fe921543bd4e..b4be8a43213d8 100644 --- a/src/libcollections/binary_heap.rs +++ b/src/libcollections/binary_heap.rs @@ -535,6 +535,7 @@ impl BinaryHeap { /// /// ``` /// #![feature(binary_heap_extras)] + /// #![allow(deprecated)] /// /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::new(); @@ -549,6 +550,7 @@ impl BinaryHeap { #[unstable(feature = "binary_heap_extras", reason = "needs to be audited", issue = "28147")] + #[rustc_deprecated(since = "1.13.0", reason = "use `peek_mut` instead")] pub fn push_pop(&mut self, mut item: T) -> T { match self.data.get_mut(0) { None => return item, @@ -575,6 +577,7 @@ impl BinaryHeap { /// /// ``` /// #![feature(binary_heap_extras)] + /// #![allow(deprecated)] /// /// use std::collections::BinaryHeap; /// let mut heap = BinaryHeap::new(); @@ -587,6 +590,7 @@ impl BinaryHeap { #[unstable(feature = "binary_heap_extras", reason = "needs to be audited", issue = "28147")] + #[rustc_deprecated(since = "1.13.0", reason = "use `peek_mut` instead")] pub fn replace(&mut self, mut item: T) -> Option { if !self.is_empty() { swap(&mut item, &mut self.data[0]); @@ -1029,7 +1033,7 @@ pub struct Drain<'a, T: 'a> { iter: vec::Drain<'a, T>, } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T: 'a> Iterator for Drain<'a, T> { type Item = T; @@ -1044,7 +1048,7 @@ impl<'a, T: 'a> Iterator for Drain<'a, T> { } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> { #[inline] fn next_back(&mut self) -> Option { @@ -1052,7 +1056,7 @@ impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> { } } -#[stable(feature = 
"rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {} #[unstable(feature = "fused", issue = "35602")] diff --git a/src/libcollections/borrow.rs b/src/libcollections/borrow.rs index 700f88dc0f267..8f9c357833791 100644 --- a/src/libcollections/borrow.rs +++ b/src/libcollections/borrow.rs @@ -14,7 +14,7 @@ use core::cmp::Ordering; use core::hash::{Hash, Hasher}; -use core::ops::Deref; +use core::ops::{Add, AddAssign, Deref}; use fmt; @@ -86,16 +86,29 @@ impl ToOwned for T where T: Clone { /// ``` /// use std::borrow::Cow; /// -/// # #[allow(dead_code)] /// fn abs_all(input: &mut Cow<[i32]>) { /// for i in 0..input.len() { /// let v = input[i]; /// if v < 0 { -/// // clones into a vector the first time (if not already owned) +/// // Clones into a vector if not already owned. /// input.to_mut()[i] = -v; /// } /// } /// } +/// +/// // No clone occurs because `input` doesn't need to be mutated. +/// let slice = [0, 1, 2]; +/// let mut input = Cow::from(&slice[..]); +/// abs_all(&mut input); +/// +/// // Clone occurs because `input` needs to be mutated. +/// let slice = [-1, 0, 1]; +/// let mut input = Cow::from(&slice[..]); +/// abs_all(&mut input); +/// +/// // No clone occurs because `input` is already owned. +/// let mut input = Cow::from(vec![-1, 0, 1]); +/// abs_all(&mut input); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub enum Cow<'a, B: ?Sized + 'a> @@ -270,3 +283,49 @@ impl<'a, T: ?Sized + ToOwned> AsRef for Cow<'a, T> { self } } + +#[stable(feature = "cow_add", since = "1.13.0")] +impl<'a> Add<&'a str> for Cow<'a, str> { + type Output = Cow<'a, str>; + + fn add(self, rhs: &'a str) -> Self { + if self == "" { + Cow::Borrowed(rhs) + } else if rhs == "" { + self + } else { + Cow::Owned(self.into_owned() + rhs) + } + } +} + +#[stable(feature = "cow_add", since = "1.13.0")] +impl<'a> Add> for Cow<'a, str> { + type Output = Cow<'a, str>; + + fn add(self, rhs: Cow<'a, str>) -> Self { + if self == "" { + rhs + } else if rhs == "" { + self + } else { + Cow::Owned(self.into_owned() + rhs.borrow()) + } + } +} + +#[stable(feature = "cow_add", since = "1.13.0")] +impl<'a> AddAssign<&'a str> for Cow<'a, str> { + fn add_assign(&mut self, rhs: &'a str) { + if rhs == "" { return; } + self.to_mut().push_str(rhs); + } +} + +#[stable(feature = "cow_add", since = "1.13.0")] +impl<'a> AddAssign> for Cow<'a, str> { + fn add_assign(&mut self, rhs: Cow<'a, str>) { + if rhs == "" { return; } + self.to_mut().push_str(rhs.borrow()); + } +} diff --git a/src/libcollections/btree/map.rs b/src/libcollections/btree/map.rs index 36cb5a1fd9f6d..788236c24d063 100644 --- a/src/libcollections/btree/map.rs +++ b/src/libcollections/btree/map.rs @@ -136,6 +136,7 @@ pub struct BTreeMap { length: usize, } +#[stable(feature = "btree_drop", since = "1.7.0")] impl Drop for BTreeMap { #[unsafe_destructor_blind_to_params] fn drop(&mut self) { @@ -146,6 +147,7 @@ impl Drop for BTreeMap { } } +#[stable(feature = "rust1", since = "1.0.0")] impl Clone for BTreeMap { fn clone(&self) -> BTreeMap { fn clone_subtree(node: node::NodeRef BTreeMap { } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> IntoIterator for &'a BTreeMap { type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; @@ -1134,6 +1137,7 @@ impl<'a, K: 'a, V: 'a> IntoIterator for &'a BTreeMap { } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> Iterator for Iter<'a, K, V> { type Item = (&'a K, &'a V); @@ -1154,6 +1158,7 @@ impl<'a, K: 'a, V: 
'a> Iterator for Iter<'a, K, V> { #[unstable(feature = "fused", issue = "35602")] impl<'a, K, V> FusedIterator for Iter<'a, K, V> {} +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> { fn next_back(&mut self) -> Option<(&'a K, &'a V)> { if self.length == 0 { @@ -1165,12 +1170,14 @@ impl<'a, K: 'a, V: 'a> DoubleEndedIterator for Iter<'a, K, V> { } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> ExactSizeIterator for Iter<'a, K, V> { fn len(&self) -> usize { self.length } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Iter<'a, K, V> { fn clone(&self) -> Iter<'a, K, V> { Iter { @@ -1180,6 +1187,7 @@ impl<'a, K, V> Clone for Iter<'a, K, V> { } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> IntoIterator for &'a mut BTreeMap { type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; @@ -1189,6 +1197,7 @@ impl<'a, K: 'a, V: 'a> IntoIterator for &'a mut BTreeMap { } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> { type Item = (&'a K, &'a mut V); @@ -1206,6 +1215,7 @@ impl<'a, K: 'a, V: 'a> Iterator for IterMut<'a, K, V> { } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> { fn next_back(&mut self) -> Option<(&'a K, &'a mut V)> { if self.length == 0 { @@ -1217,6 +1227,7 @@ impl<'a, K: 'a, V: 'a> DoubleEndedIterator for IterMut<'a, K, V> { } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: 'a, V: 'a> ExactSizeIterator for IterMut<'a, K, V> { fn len(&self) -> usize { self.length @@ -1226,6 +1237,7 @@ impl<'a, K: 'a, V: 'a> ExactSizeIterator for IterMut<'a, K, V> { #[unstable(feature = "fused", issue = "35602")] impl<'a, K, V> FusedIterator for IterMut<'a, K, V> {} +#[stable(feature = "rust1", since = "1.0.0")] impl IntoIterator for BTreeMap { type Item = (K, V); type IntoIter = IntoIter; @@ -1244,6 +1256,7 @@ impl IntoIterator for BTreeMap { } } +#[stable(feature = "btree_drop", since = "1.7.0")] impl Drop for IntoIter { fn drop(&mut self) { for _ in &mut *self { @@ -1260,6 +1273,7 @@ impl Drop for IntoIter { } } +#[stable(feature = "rust1", since = "1.0.0")] impl Iterator for IntoIter { type Item = (K, V); @@ -1304,6 +1318,7 @@ impl Iterator for IntoIter { } } +#[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for IntoIter { fn next_back(&mut self) -> Option<(K, V)> { if self.length == 0 { @@ -1342,6 +1357,7 @@ impl DoubleEndedIterator for IntoIter { } } +#[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IntoIter { fn len(&self) -> usize { self.length @@ -1351,6 +1367,7 @@ impl ExactSizeIterator for IntoIter { #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for IntoIter {} +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Iterator for Keys<'a, K, V> { type Item = &'a K; @@ -1363,12 +1380,14 @@ impl<'a, K, V> Iterator for Keys<'a, K, V> { } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> DoubleEndedIterator for Keys<'a, K, V> { fn next_back(&mut self) -> Option<&'a K> { self.inner.next_back().map(|(k, _)| k) } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> { fn len(&self) -> usize { self.inner.len() @@ -1378,12 +1397,14 @@ impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> { #[unstable(feature = "fused", issue = "35602")] impl<'a, K, V> FusedIterator for Keys<'a, K, V> {} 
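As a usage sketch of the `Add`/`AddAssign` impls for `Cow<'a, str>` introduced in the `libcollections/borrow.rs` hunk above: the following minimal example only restates the behaviour those impls encode (empty operands stay borrowed; a non-empty concatenation goes through an owned `String`).

```
use std::borrow::Cow;

fn main() {
    // Non-empty borrowed + &str allocates an owned `String`.
    let hello: Cow<str> = Cow::Borrowed("Hello, ");
    let greeting = hello + "world!";
    assert_eq!(greeting, "Hello, world!");

    // An empty left-hand side just borrows the right-hand side, no allocation.
    let empty: Cow<str> = Cow::Borrowed("");
    let borrowed = empty + "still borrowed";
    match borrowed {
        Cow::Borrowed(s) => assert_eq!(s, "still borrowed"),
        Cow::Owned(_) => unreachable!("an empty LHS should not allocate"),
    }

    // `+=` appends in place, switching to the owned variant when necessary.
    let mut name: Cow<str> = Cow::Borrowed("Gadget");
    name += " Man";
    assert_eq!(name, "Gadget Man");
}
```

The empty-operand checks in the impls are what keep the borrowed cases above allocation-free; only the mixed, non-empty case pays for an owned `String`.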
+#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Keys<'a, K, V> { fn clone(&self) -> Keys<'a, K, V> { Keys { inner: self.inner.clone() } } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Iterator for Values<'a, K, V> { type Item = &'a V; @@ -1396,12 +1417,14 @@ impl<'a, K, V> Iterator for Values<'a, K, V> { } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> DoubleEndedIterator for Values<'a, K, V> { fn next_back(&mut self) -> Option<&'a V> { self.inner.next_back().map(|(_, v)| v) } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> { fn len(&self) -> usize { self.inner.len() @@ -1411,6 +1434,7 @@ impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> { #[unstable(feature = "fused", issue = "35602")] impl<'a, K, V> FusedIterator for Values<'a, K, V> {} +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Values<'a, K, V> { fn clone(&self) -> Values<'a, K, V> { Values { inner: self.inner.clone() } @@ -1635,6 +1659,7 @@ impl<'a, K, V> RangeMut<'a, K, V> { } } +#[stable(feature = "rust1", since = "1.0.0")] impl FromIterator<(K, V)> for BTreeMap { fn from_iter>(iter: T) -> BTreeMap { let mut map = BTreeMap::new(); @@ -1643,6 +1668,7 @@ impl FromIterator<(K, V)> for BTreeMap { } } +#[stable(feature = "rust1", since = "1.0.0")] impl Extend<(K, V)> for BTreeMap { #[inline] fn extend>(&mut self, iter: T) { @@ -1652,12 +1678,14 @@ impl Extend<(K, V)> for BTreeMap { } } +#[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, K: Ord + Copy, V: Copy> Extend<(&'a K, &'a V)> for BTreeMap { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().map(|(&key, &value)| (key, value))); } } +#[stable(feature = "rust1", since = "1.0.0")] impl Hash for BTreeMap { fn hash(&self, state: &mut H) { for elt in self { @@ -1666,6 +1694,7 @@ impl Hash for BTreeMap { } } +#[stable(feature = "rust1", since = "1.0.0")] impl Default for BTreeMap { /// Creates an empty `BTreeMap`. 
fn default() -> BTreeMap { @@ -1673,14 +1702,17 @@ impl Default for BTreeMap { } } +#[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for BTreeMap { fn eq(&self, other: &BTreeMap) -> bool { self.len() == other.len() && self.iter().zip(other).all(|(a, b)| a == b) } } +#[stable(feature = "rust1", since = "1.0.0")] impl Eq for BTreeMap {} +#[stable(feature = "rust1", since = "1.0.0")] impl PartialOrd for BTreeMap { #[inline] fn partial_cmp(&self, other: &BTreeMap) -> Option { @@ -1688,6 +1720,7 @@ impl PartialOrd for BTreeMap { } } +#[stable(feature = "rust1", since = "1.0.0")] impl Ord for BTreeMap { #[inline] fn cmp(&self, other: &BTreeMap) -> Ordering { @@ -1695,12 +1728,14 @@ impl Ord for BTreeMap { } } +#[stable(feature = "rust1", since = "1.0.0")] impl Debug for BTreeMap { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_map().entries(self.iter()).finish() } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, K: Ord, Q: ?Sized, V> Index<&'a Q> for BTreeMap where K: Borrow, Q: Ord diff --git a/src/libcollections/btree/set.rs b/src/libcollections/btree/set.rs index fc2a7f825474d..c57266d9e3b4a 100644 --- a/src/libcollections/btree/set.rs +++ b/src/libcollections/btree/set.rs @@ -779,6 +779,7 @@ impl Debug for BTreeSet { } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Clone for Iter<'a, T> { fn clone(&self) -> Iter<'a, T> { Iter { iter: self.iter.clone() } @@ -864,6 +865,7 @@ fn cmp_opt(x: Option<&T>, y: Option<&T>, short: Ordering, long: Ordering } } +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Clone for Difference<'a, T> { fn clone(&self) -> Difference<'a, T> { Difference { @@ -901,6 +903,7 @@ impl<'a, T: Ord> Iterator for Difference<'a, T> { #[unstable(feature = "fused", issue = "35602")] impl<'a, T: Ord> FusedIterator for Difference<'a, T> {} +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Clone for SymmetricDifference<'a, T> { fn clone(&self) -> SymmetricDifference<'a, T> { SymmetricDifference { @@ -934,6 +937,7 @@ impl<'a, T: Ord> Iterator for SymmetricDifference<'a, T> { #[unstable(feature = "fused", issue = "35602")] impl<'a, T: Ord> FusedIterator for SymmetricDifference<'a, T> {} +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Clone for Intersection<'a, T> { fn clone(&self) -> Intersection<'a, T> { Intersection { @@ -977,6 +981,7 @@ impl<'a, T: Ord> Iterator for Intersection<'a, T> { #[unstable(feature = "fused", issue = "35602")] impl<'a, T: Ord> FusedIterator for Intersection<'a, T> {} +#[stable(feature = "rust1", since = "1.0.0")] impl<'a, T> Clone for Union<'a, T> { fn clone(&self) -> Union<'a, T> { Union { diff --git a/src/libcollections/enum_set.rs b/src/libcollections/enum_set.rs index 2456a04e40a3a..2d12b4ccffe01 100644 --- a/src/libcollections/enum_set.rs +++ b/src/libcollections/enum_set.rs @@ -48,7 +48,6 @@ impl Clone for EnumSet { } } -#[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for EnumSet { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { fmt.debug_set().entries(self).finish() @@ -277,7 +276,6 @@ impl FromIterator for EnumSet { } } -#[stable(feature = "rust1", since = "1.0.0")] impl<'a, E> IntoIterator for &'a EnumSet where E: CLike { type Item = E; @@ -296,7 +294,6 @@ impl Extend for EnumSet { } } -#[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, E: 'a + CLike + Copy> Extend<&'a E> for EnumSet { fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); diff --git a/src/libcollections/fmt.rs b/src/libcollections/fmt.rs index 
beb3e6b3d4e31..883417e9f4ec7 100644 --- a/src/libcollections/fmt.rs +++ b/src/libcollections/fmt.rs @@ -261,8 +261,8 @@ //! This and `writeln` are two macros which are used to emit the format string //! to a specified stream. This is used to prevent intermediate allocations of //! format strings and instead directly write the output. Under the hood, this -//! function is actually invoking the `write` function defined in this module. -//! Example usage is: +//! function is actually invoking the `write_fmt` function defined on the +//! `std::io::Write` trait. Example usage is: //! //! ``` //! # #![allow(unused_must_use)] @@ -327,7 +327,7 @@ //! format := '{' [ argument ] [ ':' format_spec ] '}' //! argument := integer | identifier //! -//! format_spec := [[fill]align][sign]['#'][0][width]['.' precision][type] +//! format_spec := [[fill]align][sign]['#']['0'][width]['.' precision][type] //! fill := character //! align := '<' | '^' | '>' //! sign := '+' | '-' diff --git a/src/libcollections/lib.rs b/src/libcollections/lib.rs index c5a921693475a..990de541b6783 100644 --- a/src/libcollections/lib.rs +++ b/src/libcollections/lib.rs @@ -52,7 +52,6 @@ #![feature(step_by)] #![feature(unicode)] #![feature(unique)] -#![cfg_attr(stage0, feature(unsafe_no_drop_flag))] #![cfg_attr(test, feature(rand, test))] #![no_std] diff --git a/src/libcollections/linked_list.rs b/src/libcollections/linked_list.rs index 690c4f4af3589..67f3708a62b91 100644 --- a/src/libcollections/linked_list.rs +++ b/src/libcollections/linked_list.rs @@ -1294,6 +1294,7 @@ mod tests { } #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn test_send() { let n = list_from(&[1, 2, 3]); thread::spawn(move || { diff --git a/src/libcollections/macros.rs b/src/libcollections/macros.rs index d6a8362d58182..3115be00a4d72 100644 --- a/src/libcollections/macros.rs +++ b/src/libcollections/macros.rs @@ -68,7 +68,9 @@ macro_rules! vec { } /// Use the syntax described in `std::fmt` to create a value of type `String`. -/// See `std::fmt` for more information. +/// See [`std::fmt`][fmt] for more information. +/// +/// [fmt]: ../std/fmt/index.html /// /// # Examples /// diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index 5cdf4ee88c00c..75796cf94bfc2 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -36,7 +36,7 @@ //! //! ## Structs //! -//! There are several structs that are useful for slices, such as `Iter`, which +//! There are several structs that are useful for slices, such as [`Iter`], which //! represents iteration over a slice. //! //! ## Trait Implementations @@ -44,9 +44,9 @@ //! There are several implementations of common traits for slices. Some examples //! include: //! -//! * `Clone` -//! * `Eq`, `Ord` - for slices whose element type are `Eq` or `Ord`. -//! * `Hash` - for slices whose element type is `Hash` +//! * [`Clone`] +//! * [`Eq`], [`Ord`] - for slices whose element type are [`Eq`] or [`Ord`]. +//! * [`Hash`] - for slices whose element type is [`Hash`]. //! //! ## Iteration //! @@ -73,12 +73,24 @@ //! the element type of the slice is `i32`, the element type of the iterator is //! `&mut i32`. //! -//! * `.iter()` and `.iter_mut()` are the explicit methods to return the default +//! * [`.iter()`] and [`.iter_mut()`] are the explicit methods to return the default //! iterators. -//! * Further methods that return iterators are `.split()`, `.splitn()`, -//! `.chunks()`, `.windows()` and more. +//! * Further methods that return iterators are [`.split()`], [`.splitn()`], +//! 
[`.chunks()`], [`.windows()`] and more. //! //! *[See also the slice primitive type](../../std/primitive.slice.html).* +//! +//! [`Clone`]: ../../std/clone/trait.Clone.html +//! [`Eq`]: ../../std/cmp/trait.Eq.html +//! [`Ord`]: ../../std/cmp/trait.Ord.html +//! [`Iter`]: struct.Iter.html +//! [`Hash`]: ../../std/hash/trait.Hash.html +//! [`.iter()`]: ../../std/primitive.slice.html#method.iter +//! [`.iter_mut()`]: ../../std/primitive.slice.html#method.iter_mut +//! [`.split()`]: ../../std/primitive.slice.html#method.split +//! [`.splitn()`]: ../../std/primitive.slice.html#method.splitn +//! [`.chunks()`]: ../../std/primitive.slice.html#method.chunks +//! [`.windows()`]: ../../std/primitive.slice.html#method.windows #![stable(feature = "rust1", since = "1.0.0")] // Many of the usings in this module are only used in the test configuration. @@ -168,7 +180,7 @@ impl [T] { core_slice::SliceExt::len(self) } - /// Returns true if the slice has a length of 0 + /// Returns true if the slice has a length of 0. /// /// # Example /// @@ -402,7 +414,7 @@ impl [T] { core_slice::SliceExt::get_unchecked_mut(self, index) } - /// Returns an raw pointer to the slice's buffer + /// Returns an raw pointer to the slice's buffer. /// /// The caller must ensure that the slice outlives the pointer this /// function returns, or else it will end up pointing to garbage. @@ -468,7 +480,7 @@ impl [T] { /// /// # Examples /// - /// ```rust + /// ``` /// let mut v = ["a", "b", "c", "d"]; /// v.swap(1, 3); /// assert!(v == ["a", "d", "c", "b"]); @@ -483,7 +495,7 @@ impl [T] { /// /// # Example /// - /// ```rust + /// ``` /// let mut v = [1, 2, 3]; /// v.reverse(); /// assert!(v == [3, 2, 1]); @@ -567,9 +579,9 @@ impl [T] { } /// Returns an iterator over `size` elements of the slice at a - /// time. The chunks are slices and do not overlap. If `size` does not divide the - /// length of the slice, then the last chunk will not have length - /// `size`. + /// time. The chunks are slices and do not overlap. If `size` does + /// not divide the length of the slice, then the last chunk will + /// not have length `size`. /// /// # Panics /// @@ -656,7 +668,7 @@ impl [T] { /// /// # Examples /// - /// ```rust + /// ``` /// let mut v = [1, 2, 3, 4, 5, 6]; /// /// // scoped to restrict the lifetime of the borrows @@ -754,7 +766,7 @@ impl [T] { } /// Returns an iterator over subslices separated by elements that match - /// `pred`, limited to returning at most `n` items. The matched element is + /// `pred`, limited to returning at most `n` items. The matched element is /// not contained in the subslices. /// /// The last element returned, if any, will contain the remainder of the @@ -781,7 +793,7 @@ impl [T] { } /// Returns an iterator over subslices separated by elements that match - /// `pred`, limited to returning at most `n` items. The matched element is + /// `pred`, limited to returning at most `n` items. The matched element is /// not contained in the subslices. /// /// The last element returned, if any, will contain the remainder of the @@ -835,7 +847,7 @@ impl [T] { /// Returns an iterator over subslices separated by elements that match /// `pred` limited to returning at most `n` items. This starts at the end of - /// the slice and works backwards. The matched element is not contained in + /// the slice and works backwards. The matched element is not contained in /// the subslices. 
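For reference, a standalone sketch (not part of this patch) of the slice iterator methods that the rewritten docs above now link to; it relies only on long-stable `std` APIs:

```
fn main() {
    let v = [1, 2, 3, 4, 5, 6, 7];

    // `chunks(3)` yields non-overlapping subslices; the last one may be shorter.
    let mut chunks = v.chunks(3);
    assert_eq!(chunks.next(), Some(&[1, 2, 3][..]));
    assert_eq!(chunks.next(), Some(&[4, 5, 6][..]));
    assert_eq!(chunks.next(), Some(&[7][..]));
    assert_eq!(chunks.next(), None);

    // `splitn(2, ..)` returns at most two subslices; the matched element is
    // dropped and the second subslice holds the remainder.
    let mut parts = v.splitn(2, |&x| x == 3);
    assert_eq!(parts.next(), Some(&[1, 2][..]));
    assert_eq!(parts.next(), Some(&[4, 5, 6, 7][..]));
    assert_eq!(parts.next(), None);
}
```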
/// /// The last element returned, if any, will contain the remainder of the @@ -922,9 +934,9 @@ impl [T] { /// /// Looks up a series of four elements. The first is found, with a /// uniquely determined position; the second and third are not - /// found; the fourth could match any position in `[1,4]`. + /// found; the fourth could match any position in `[1, 4]`. /// - /// ```rust + /// ``` /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55]; /// /// assert_eq!(s.binary_search(&13), Ok(9)); @@ -956,9 +968,9 @@ impl [T] { /// /// Looks up a series of four elements. The first is found, with a /// uniquely determined position; the second and third are not - /// found; the fourth could match any position in `[1,4]`. + /// found; the fourth could match any position in `[1, 4]`. /// - /// ```rust + /// ``` /// let s = [0, 1, 1, 1, 1, 2, 3, 5, 8, 13, 21, 34, 55]; /// /// let seek = 13; @@ -982,21 +994,23 @@ impl [T] { /// Binary search a sorted slice with a key extraction function. /// /// Assumes that the slice is sorted by the key, for instance with - /// `sort_by_key` using the same key extraction function. + /// [`sort_by_key`] using the same key extraction function. /// /// If a matching value is found then returns `Ok`, containing the /// index for the matched element; if no match is found then `Err` /// is returned, containing the index where a matching element could /// be inserted while maintaining sorted order. /// + /// [`sort_by_key`]: #method.sort_by_key + /// /// # Examples /// /// Looks up a series of four elements in a slice of pairs sorted by /// their second elements. The first is found, with a uniquely /// determined position; the second and third are not found; the - /// fourth could match any position in `[1,4]`. + /// fourth could match any position in `[1, 4]`. /// - /// ```rust + /// ``` /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1), /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13), /// (1, 21), (2, 34), (4, 55)]; @@ -1023,7 +1037,7 @@ impl [T] { /// /// # Examples /// - /// ```rust + /// ``` /// let mut v = [-5, 4, 1, -3, 2]; /// /// v.sort(); @@ -1037,7 +1051,7 @@ impl [T] { self.sort_by(|a, b| a.cmp(b)) } - /// Sorts the slice, in place, using `key` to extract a key by which to + /// Sorts the slice, in place, using `f` to extract a key by which to /// order the sort by. /// /// This sort is stable and `O(n log n)` worst-case but allocates @@ -1045,7 +1059,7 @@ impl [T] { /// /// # Examples /// - /// ```rust + /// ``` /// let mut v = [-5i32, 4, 1, -3, 2]; /// /// v.sort_by_key(|k| k.abs()); @@ -1067,7 +1081,7 @@ impl [T] { /// /// # Examples /// - /// ```rust + /// ``` /// let mut v = [5, 4, 1, 3, 2]; /// v.sort_by(|a, b| a.cmp(b)); /// assert!(v == [1, 2, 3, 4, 5]); @@ -1094,7 +1108,7 @@ impl [T] { /// /// # Example /// - /// ```rust + /// ``` /// let mut dst = [0, 0, 0]; /// let src = [1, 2, 3]; /// @@ -1116,7 +1130,7 @@ impl [T] { /// /// # Example /// - /// ```rust + /// ``` /// let mut dst = [0, 0, 0]; /// let src = [1, 2, 3]; /// @@ -1156,7 +1170,7 @@ impl [T] { /// let x = s.into_vec(); /// // `s` cannot be used anymore because it has been converted into `x`. 
/// - /// assert_eq!(x, vec!(10, 40, 30)); + /// assert_eq!(x, vec![10, 40, 30]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index 6a6b450e51863..48a74bdecbbef 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -122,7 +122,7 @@ pub struct EncodeUtf16<'a> { encoder: Utf16Encoder>, } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "encode_utf16", since = "1.8.0")] impl<'a> Iterator for EncodeUtf16<'a> { type Item = u16; @@ -697,7 +697,7 @@ impl str { /// /// Basic usage: /// - /// ```rust + /// ``` /// let bananas = "bananas"; /// /// assert!(bananas.ends_with("anas")); @@ -900,7 +900,7 @@ impl str { /// /// It does _not_ give you: /// - /// ```rust,ignore + /// ```,ignore /// assert_eq!(d, &["a", "b", "c"]); /// ``` /// @@ -1053,10 +1053,10 @@ impl str { } /// An iterator over substrings of the given string slice, separated by a - /// pattern, restricted to returning at most `count` items. + /// pattern, restricted to returning at most `n` items. /// - /// The last element returned, if any, will contain the remainder of the - /// string slice. + /// If `n` substrings are returned, the last substring (the `n`th substring) + /// will contain the remainder of the string. /// /// The pattern can be a `&str`, [`char`], or a closure that determines the /// split. @@ -1098,16 +1098,16 @@ impl str { /// assert_eq!(v, ["abc", "defXghi"]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn splitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> SplitN<'a, P> { - core_str::StrExt::splitn(self, count, pat) + pub fn splitn<'a, P: Pattern<'a>>(&'a self, n: usize, pat: P) -> SplitN<'a, P> { + core_str::StrExt::splitn(self, n, pat) } /// An iterator over substrings of this string slice, separated by a /// pattern, starting from the end of the string, restricted to returning - /// at most `count` items. + /// at most `n` items. /// - /// The last element returned, if any, will contain the remainder of the - /// string slice. + /// If `n` substrings are returned, the last substring (the `n`th substring) + /// will contain the remainder of the string. /// /// The pattern can be a `&str`, [`char`], or a closure that /// determines the split. @@ -1145,10 +1145,10 @@ impl str { /// assert_eq!(v, ["ghi", "abc1def"]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn rsplitn<'a, P: Pattern<'a>>(&'a self, count: usize, pat: P) -> RSplitN<'a, P> + pub fn rsplitn<'a, P: Pattern<'a>>(&'a self, n: usize, pat: P) -> RSplitN<'a, P> where P::Searcher: ReverseSearcher<'a> { - core_str::StrExt::rsplitn(self, count, pat) + core_str::StrExt::rsplitn(self, n, pat) } /// An iterator over the matches of a pattern within the given string @@ -1789,4 +1789,24 @@ impl str { String::from_utf8_unchecked(slice.into_vec()) } } + + /// Create a [`String`] by repeating a string `n` times. 
+ /// + /// [`String`]: string/struct.String.html + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(repeat_str)] + /// + /// assert_eq!("abc".repeat(4), String::from("abcabcabcabc")); + /// ``` + #[unstable(feature = "repeat_str", issue = "37079")] + pub fn repeat(&self, n: usize) -> String { + let mut s = String::with_capacity(self.len() * n); + s.extend((0..n).map(|_| self)); + s + } } diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 773e94f1b414e..286f06b6fe32a 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -14,24 +14,25 @@ //! [`ToString`]s, and several error types that may result from working with //! [`String`]s. //! -//! [`String`]: struct.String.html //! [`ToString`]: trait.ToString.html //! //! # Examples //! -//! There are multiple ways to create a new `String` from a string literal: +//! There are multiple ways to create a new [`String`] from a string literal: //! -//! ```rust +//! ``` //! let s = "Hello".to_string(); //! //! let s = String::from("world"); //! let s: String = "also this".into(); //! ``` //! -//! You can create a new `String` from an existing one by concatenating with +//! You can create a new [`String`] from an existing one by concatenating with //! `+`: //! -//! ```rust +//! [`String`]: struct.String.html +//! +//! ``` //! let s = "Hello".to_string(); //! //! let message = s + " world!"; @@ -40,7 +41,7 @@ //! If you have a vector of valid UTF-8 bytes, you can make a `String` out of //! it. You can do the reverse too. //! -//! ```rust +//! ``` //! let sparkle_heart = vec![240, 159, 146, 150]; //! //! // We know these bytes are valid, so we'll use `unwrap()`. @@ -134,10 +135,10 @@ use boxed::Box; /// Indexing is intended to be a constant-time operation, but UTF-8 encoding /// does not allow us to do this. Furthermore, it's not clear what sort of /// thing the index should return: a byte, a codepoint, or a grapheme cluster. -/// The [`as_bytes()`] and [`chars()`] methods return iterators over the first +/// The [`bytes()`] and [`chars()`] methods return iterators over the first /// two, respectively. 
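The `str::repeat` addition just above is gated behind the `repeat_str` feature in this patch. On stable Rust the same effect can be had with plain iterator adapters; `repeat_str` below is an illustrative helper name, not an API from the patch:

```
use std::iter;

// Stand-in for the unstable `str::repeat`: collect `n` copies of `s`
// into a single String.
fn repeat_str(s: &str, n: usize) -> String {
    iter::repeat(s).take(n).collect()
}

fn main() {
    assert_eq!(repeat_str("abc", 4), "abcabcabcabc");
    assert_eq!(repeat_str("α", 3), "ααα");
    assert_eq!(repeat_str("abc", 0), "");
}
```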
/// -/// [`as_bytes()`]: #method.as_bytes +/// [`bytes()`]: #method.bytes /// [`chars()`]: #method.chars /// /// # Deref @@ -975,7 +976,7 @@ impl String { pub fn push(&mut self, ch: char) { match ch.len_utf8() { 1 => self.vec.push(ch as u8), - _ => self.vec.extend_from_slice(ch.encode_utf8().as_slice()), + _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0;4]).as_bytes()), } } @@ -1131,10 +1132,11 @@ impl String { let len = self.len(); assert!(idx <= len); assert!(self.is_char_boundary(idx)); - let bits = ch.encode_utf8(); + let mut bits = [0; 4]; + let bits = ch.encode_utf8(&mut bits).as_bytes(); unsafe { - self.insert_bytes(idx, bits.as_slice()); + self.insert_bytes(idx, bits); } } @@ -1858,6 +1860,13 @@ impl<'a> From<&'a str> for String { } } +#[stable(feature = "string_from_cow_str", since = "1.14.0")] +impl<'a> From> for String { + fn from(s: Cow<'a, str>) -> String { + s.into_owned() + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl<'a> From<&'a str> for Cow<'a, str> { #[inline] @@ -1902,26 +1911,6 @@ impl Into> for String { } } -#[stable(feature = "stringfromchars", since = "1.12.0")] -impl<'a> From<&'a [char]> for String { - #[inline] - fn from(v: &'a [char]) -> String { - let mut s = String::with_capacity(v.len()); - for c in v { - s.push(*c); - } - s - } -} - -#[stable(feature = "stringfromchars", since = "1.12.0")] -impl From> for String { - #[inline] - fn from(v: Vec) -> String { - String::from(v.as_slice()) - } -} - #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Write for String { #[inline] diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index f8b4a92df2c5d..d94a27917e869 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -16,13 +16,13 @@ //! //! # Examples //! -//! You can explicitly create a `Vec` with `new()`: +//! You can explicitly create a [`Vec`] with [`new()`]: //! //! ``` //! let v: Vec = Vec::new(); //! ``` //! -//! ...or by using the `vec!` macro: +//! ...or by using the [`vec!`] macro: //! //! ``` //! let v: Vec = vec![]; @@ -32,7 +32,7 @@ //! let v = vec![0; 10]; // ten zeroes //! ``` //! -//! You can `push` values onto the end of a vector (which will grow the vector +//! You can [`push`] values onto the end of a vector (which will grow the vector //! as needed): //! //! ``` @@ -49,13 +49,20 @@ //! let two = v.pop(); //! ``` //! -//! Vectors also support indexing (through the `Index` and `IndexMut` traits): +//! Vectors also support indexing (through the [`Index`] and [`IndexMut`] traits): //! //! ``` //! let mut v = vec![1, 2, 3]; //! let three = v[2]; //! v[1] = v[1] + 5; //! ``` +//! +//! [`Vec`]: ../../std/vec/struct.Vec.html +//! [`new()`]: ../../std/vec/struct.Vec.html#method.new +//! [`push`]: ../../std/vec/struct.Vec.html#method.push +//! [`Index`]: ../../std/ops/trait.Index.html +//! [`IndexMut`]: ../../std/ops/trait.IndexMut.html +//! [`vec!`]: ../../std/macro.vec.html #![stable(feature = "rust1", since = "1.0.0")] @@ -79,7 +86,7 @@ use core::slice; use super::SpecExtend; use super::range::RangeArgument; -/// A contiguous growable array type, written `Vec` but pronounced 'vector.' +/// A contiguous growable array type, written `Vec` but pronounced 'vector'. 
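The `String::push` and `String::insert` changes above switch to the buffer-taking form of `char::encode_utf8`. A minimal sketch of that API, which was stabilized shortly after this patch, so this only mirrors in spirit what the patched code does internally:

```
fn main() {
    // Encode a char into a caller-provided stack buffer; no allocation.
    let mut buf = [0u8; 4];
    let encoded: &str = 'ß'.encode_utf8(&mut buf);
    assert_eq!(encoded, "ß");
    assert_eq!(encoded.len(), 2);

    // Roughly what the new `String::push` does for multi-byte characters:
    // append the encoded bytes to the underlying byte vector.
    let mut bytes = Vec::new();
    bytes.extend_from_slice(encoded.as_bytes());
    assert_eq!(String::from_utf8(bytes).unwrap(), "ß");
}
```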
/// /// # Examples /// @@ -105,7 +112,7 @@ use super::range::RangeArgument; /// assert_eq!(vec, [7, 1, 2, 3]); /// ``` /// -/// The `vec!` macro is provided to make initialization more convenient: +/// The [`vec!`] macro is provided to make initialization more convenient: /// /// ``` /// let mut vec = vec![1, 2, 3]; @@ -137,19 +144,19 @@ use super::range::RangeArgument; /// /// # Indexing /// -/// The Vec type allows to access values by index, because it implements the -/// `Index` trait. An example will be more explicit: +/// The `Vec` type allows to access values by index, because it implements the +/// [`Index`] trait. An example will be more explicit: /// /// ``` -/// let v = vec!(0, 2, 4, 6); +/// let v = vec![0, 2, 4, 6]; /// println!("{}", v[1]); // it will display '2' /// ``` /// -/// However be careful: if you try to access an index which isn't in the Vec, +/// However be careful: if you try to access an index which isn't in the `Vec`, /// your software will panic! You cannot do this: /// /// ```ignore -/// let v = vec!(0, 2, 4, 6); +/// let v = vec![0, 2, 4, 6]; /// println!("{}", v[6]); // it will panic! /// ``` /// @@ -158,15 +165,15 @@ use super::range::RangeArgument; /// /// # Slicing /// -/// A Vec can be mutable. Slices, on the other hand, are read-only objects. -/// To get a slice, use "&". Example: +/// A `Vec` can be mutable. Slices, on the other hand, are read-only objects. +/// To get a slice, use `&`. Example: /// /// ``` /// fn read_slice(slice: &[usize]) { /// // ... /// } /// -/// let v = vec!(0, 1); +/// let v = vec![0, 1]; /// read_slice(&v); /// /// // ... and that's all! @@ -175,8 +182,8 @@ use super::range::RangeArgument; /// ``` /// /// In Rust, it's more common to pass slices as arguments rather than vectors -/// when you just want to provide a read access. The same goes for String and -/// &str. +/// when you just want to provide a read access. The same goes for [`String`] and +/// [`&str`]. /// /// # Capacity and reallocation /// @@ -191,84 +198,100 @@ use super::range::RangeArgument; /// with space for 10 more elements. Pushing 10 or fewer elements onto the /// vector will not change its capacity or cause reallocation to occur. However, /// if the vector's length is increased to 11, it will have to reallocate, which -/// can be slow. For this reason, it is recommended to use `Vec::with_capacity` +/// can be slow. For this reason, it is recommended to use [`Vec::with_capacity`] /// whenever possible to specify how big the vector is expected to get. /// /// # Guarantees /// -/// Due to its incredibly fundamental nature, Vec makes a lot of guarantees +/// Due to its incredibly fundamental nature, `Vec` makes a lot of guarantees /// about its design. This ensures that it's as low-overhead as possible in /// the general case, and can be correctly manipulated in primitive ways /// by unsafe code. Note that these guarantees refer to an unqualified `Vec`. /// If additional type parameters are added (e.g. to support custom allocators), /// overriding their defaults may change the behavior. /// -/// Most fundamentally, Vec is and always will be a (pointer, capacity, length) +/// Most fundamentally, `Vec` is and always will be a (pointer, capacity, length) /// triplet. No more, no less. The order of these fields is completely /// unspecified, and you should use the appropriate methods to modify these. /// The pointer will never be null, so this type is null-pointer-optimized. /// /// However, the pointer may not actually point to allocated memory. 
In particular, -/// if you construct a Vec with capacity 0 via `Vec::new()`, `vec![]`, -/// `Vec::with_capacity(0)`, or by calling `shrink_to_fit()` on an empty Vec, it -/// will not allocate memory. Similarly, if you store zero-sized types inside -/// a Vec, it will not allocate space for them. *Note that in this case the -/// Vec may not report a `capacity()` of 0*. Vec will allocate if and only -/// if `mem::size_of::() * capacity() > 0`. In general, Vec's allocation +/// if you construct a `Vec` with capacity 0 via [`Vec::new()`], [`vec![]`][`vec!`], +/// [`Vec::with_capacity(0)`][`Vec::with_capacity`], or by calling [`shrink_to_fit()`] +/// on an empty Vec, it will not allocate memory. Similarly, if you store zero-sized +/// types inside a `Vec`, it will not allocate space for them. *Note that in this case +/// the `Vec` may not report a [`capacity()`] of 0*. `Vec` will allocate if and only +/// if [`mem::size_of::()`]` * capacity() > 0`. In general, `Vec`'s allocation /// details are subtle enough that it is strongly recommended that you only -/// free memory allocated by a Vec by creating a new Vec and dropping it. +/// free memory allocated by a `Vec` by creating a new `Vec` and dropping it. /// -/// If a Vec *has* allocated memory, then the memory it points to is on the heap +/// If a `Vec` *has* allocated memory, then the memory it points to is on the heap /// (as defined by the allocator Rust is configured to use by default), and its -/// pointer points to `len()` initialized elements in order (what you would see -/// if you coerced it to a slice), followed by `capacity() - len()` logically -/// uninitialized elements. +/// pointer points to [`len()`] initialized elements in order (what you would see +/// if you coerced it to a slice), followed by [`capacity()`]` - `[`len()`] +/// logically uninitialized elements. /// -/// Vec will never perform a "small optimization" where elements are actually +/// `Vec` will never perform a "small optimization" where elements are actually /// stored on the stack for two reasons: /// /// * It would make it more difficult for unsafe code to correctly manipulate -/// a Vec. The contents of a Vec wouldn't have a stable address if it were -/// only moved, and it would be more difficult to determine if a Vec had +/// a `Vec`. The contents of a `Vec` wouldn't have a stable address if it were +/// only moved, and it would be more difficult to determine if a `Vec` had /// actually allocated memory. /// /// * It would penalize the general case, incurring an additional branch /// on every access. /// -/// Vec will never automatically shrink itself, even if completely empty. This -/// ensures no unnecessary allocations or deallocations occur. Emptying a Vec -/// and then filling it back up to the same `len()` should incur no calls to -/// the allocator. If you wish to free up unused memory, use `shrink_to_fit`. +/// `Vec` will never automatically shrink itself, even if completely empty. This +/// ensures no unnecessary allocations or deallocations occur. Emptying a `Vec` +/// and then filling it back up to the same [`len()`] should incur no calls to +/// the allocator. If you wish to free up unused memory, use +/// [`shrink_to_fit`][`shrink_to_fit()`]. /// -/// `push` and `insert` will never (re)allocate if the reported capacity is -/// sufficient. `push` and `insert` *will* (re)allocate if `len() == capacity()`. -/// That is, the reported capacity is completely accurate, and can be relied on. 
-/// It can even be used to manually free the memory allocated by a Vec if -/// desired. Bulk insertion methods *may* reallocate, even when not necessary. +/// [`push`] and [`insert`] will never (re)allocate if the reported capacity is +/// sufficient. [`push`] and [`insert`] *will* (re)allocate if +/// [`len()`]` == `[`capacity()`]. That is, the reported capacity is completely +/// accurate, and can be relied on. It can even be used to manually free the memory +/// allocated by a `Vec` if desired. Bulk insertion methods *may* reallocate, even +/// when not necessary. /// -/// Vec does not guarantee any particular growth strategy when reallocating -/// when full, nor when `reserve` is called. The current strategy is basic +/// `Vec` does not guarantee any particular growth strategy when reallocating +/// when full, nor when [`reserve`] is called. The current strategy is basic /// and it may prove desirable to use a non-constant growth factor. Whatever -/// strategy is used will of course guarantee `O(1)` amortized `push`. +/// strategy is used will of course guarantee `O(1)` amortized [`push`]. /// -/// `vec![x; n]`, `vec![a, b, c, d]`, and `Vec::with_capacity(n)`, will all -/// produce a Vec with exactly the requested capacity. If `len() == capacity()`, -/// (as is the case for the `vec!` macro), then a `Vec` can be converted -/// to and from a `Box<[T]>` without reallocating or moving the elements. +/// `vec![x; n]`, `vec![a, b, c, d]`, and +/// [`Vec::with_capacity(n)`][`Vec::with_capacity`], will all produce a `Vec` +/// with exactly the requested capacity. If [`len()`]` == `[`capacity()`], +/// (as is the case for the [`vec!`] macro), then a `Vec` can be converted to +/// and from a [`Box<[T]>`][owned slice] without reallocating or moving the elements. /// -/// Vec will not specifically overwrite any data that is removed from it, +/// `Vec` will not specifically overwrite any data that is removed from it, /// but also won't specifically preserve it. Its uninitialized memory is /// scratch space that it may use however it wants. It will generally just do /// whatever is most efficient or otherwise easy to implement. Do not rely on -/// removed data to be erased for security purposes. Even if you drop a Vec, its -/// buffer may simply be reused by another Vec. Even if you zero a Vec's memory +/// removed data to be erased for security purposes. Even if you drop a `Vec`, its +/// buffer may simply be reused by another `Vec`. Even if you zero a `Vec`'s memory /// first, that may not actually happen because the optimizer does not consider /// this a side-effect that must be preserved. /// -/// Vec does not currently guarantee the order in which elements are dropped +/// `Vec` does not currently guarantee the order in which elements are dropped /// (the order has changed in the past, and may change again). 
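A small, self-contained check of the capacity guarantees spelled out in the rewritten docs above (not part of the patch; the pointer comparison is only there to observe that no reallocation happened):

```
fn main() {
    // `with_capacity` fixes the capacity, not the length.
    let mut v: Vec<i32> = Vec::with_capacity(10);
    assert_eq!(v.len(), 0);
    assert!(v.capacity() >= 10);

    // Pushing within the reported capacity never reallocates,
    // so the buffer address stays stable.
    let before = v.as_ptr();
    for i in 0..10 {
        v.push(i);
    }
    assert_eq!(v.as_ptr(), before);
    assert_eq!(v.len(), 10);
}
```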
/// -#[cfg_attr(stage0, unsafe_no_drop_flag)] +/// [`vec!`]: ../../std/macro.vec.html +/// [`Index`]: ../../std/ops/trait.Index.html +/// [`String`]: ../../std/string/struct.String.html +/// [`&str`]: ../../std/primitive.str.html +/// [`Vec::with_capacity`]: ../../std/vec/struct.Vec.html#method.with_capacity +/// [`Vec::new()`]: ../../std/vec/struct.Vec.html#method.new +/// [`shrink_to_fit()`]: ../../std/vec/struct.Vec.html#method.shrink_to_fit +/// [`capacity()`]: ../../std/vec/struct.Vec.html#method.capacity +/// [`mem::size_of::()`]: ../../std/mem/fn.size_of.html +/// [`len()`]: ../../std/vec/struct.Vec.html#method.len +/// [`push`]: ../../std/vec/struct.Vec.html#method.push +/// [`insert`]: ../../std/vec/struct.Vec.html#method.insert +/// [`reserve`]: ../../std/vec/struct.Vec.html#method.reserve +/// [owned slice]: ../../std/boxed/struct.Box.html #[stable(feature = "rust1", since = "1.0.0")] pub struct Vec { buf: RawVec, @@ -305,9 +328,10 @@ impl Vec { /// reallocating. If `capacity` is 0, the vector will not allocate. /// /// It is important to note that this function does not specify the *length* - /// of the returned vector, but only the *capacity*. (For an explanation of - /// the difference between length and capacity, see the main `Vec` docs - /// above, 'Capacity and reallocation'.) + /// of the returned vector, but only the *capacity*. For an explanation of + /// the difference between length and capacity, see *[Capacity and reallocation]*. + /// + /// [Capacity and reallocation]: #capacity-and-reallocation /// /// # Examples /// @@ -341,7 +365,7 @@ impl Vec { /// This is highly unsafe, due to the number of invariants that aren't /// checked: /// - /// * `ptr` needs to have been previously allocated via `String`/`Vec` + /// * `ptr` needs to have been previously allocated via [`String`]/`Vec` /// (at least, it's highly likely to be incorrect if it wasn't). /// * `length` needs to be less than or equal to `capacity`. /// * `capacity` needs to be the capacity that the pointer was allocated with. @@ -355,6 +379,8 @@ impl Vec { /// that nothing else uses the pointer after calling this /// function. /// + /// [`String`]: ../../std/string/struct.String.html + /// /// # Examples /// /// ``` @@ -471,11 +497,15 @@ impl Vec { self.buf.shrink_to_fit(self.len); } - /// Converts the vector into Box<[T]>. + /// Converts the vector into [`Box<[T]>`][owned slice]. /// /// Note that this will drop any excess capacity. Calling this and - /// converting back to a vector with `into_vec()` is equivalent to calling - /// `shrink_to_fit()`. + /// converting back to a vector with [`into_vec()`] is equivalent to calling + /// [`shrink_to_fit()`]. + /// + /// [owned slice]: ../../std/boxed/struct.Box.html + /// [`into_vec()`]: ../../std/primitive.slice.html#method.into_vec + /// [`shrink_to_fit()`]: #method.shrink_to_fit /// /// # Examples /// @@ -674,7 +704,7 @@ impl Vec { /// /// # Panics /// - /// Panics if `index` is greater than the vector's length. + /// Panics if `index` is out of bounds. /// /// # Examples /// @@ -749,7 +779,7 @@ impl Vec { /// Retains only the elements specified by the predicate. /// - /// In other words, remove all elements `e` such that `f(&e)` returns false. + /// In other words, remove all elements `e` such that `f(&e)` returns `false`. /// This method operates in place and preserves the order of the retained /// elements. /// @@ -782,6 +812,130 @@ impl Vec { } } + /// Removes consecutive elements in the vector that resolve to the same key. 
+ /// + /// If the vector is sorted, this removes all duplicates. + /// + /// # Examples + /// + /// ``` + /// #![feature(dedup_by)] + /// + /// let mut vec = vec![10, 20, 21, 30, 20]; + /// + /// vec.dedup_by_key(|i| *i / 10); + /// + /// assert_eq!(vec, [10, 20, 30, 20]); + /// ``` + #[unstable(feature = "dedup_by", reason = "recently added", issue = "37087")] + #[inline] + pub fn dedup_by_key(&mut self, mut key: F) where F: FnMut(&mut T) -> K, K: PartialEq { + self.dedup_by(|a, b| key(a) == key(b)) + } + + /// Removes consecutive elements in the vector that resolve to the same key. + /// + /// If the vector is sorted, this removes all duplicates. + /// + /// # Examples + /// + /// ``` + /// #![feature(dedup_by)] + /// use std::ascii::AsciiExt; + /// + /// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"]; + /// + /// vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b)); + /// + /// assert_eq!(vec, ["foo", "bar", "baz", "bar"]); + /// ``` + #[unstable(feature = "dedup_by", reason = "recently added", issue = "37087")] + pub fn dedup_by(&mut self, mut same_bucket: F) where F: FnMut(&mut T, &mut T) -> bool { + unsafe { + // Although we have a mutable reference to `self`, we cannot make + // *arbitrary* changes. The `same_bucket` calls could panic, so we + // must ensure that the vector is in a valid state at all time. + // + // The way that we handle this is by using swaps; we iterate + // over all the elements, swapping as we go so that at the end + // the elements we wish to keep are in the front, and those we + // wish to reject are at the back. We can then truncate the + // vector. This operation is still O(n). + // + // Example: We start in this state, where `r` represents "next + // read" and `w` represents "next_write`. + // + // r + // +---+---+---+---+---+---+ + // | 0 | 1 | 1 | 2 | 3 | 3 | + // +---+---+---+---+---+---+ + // w + // + // Comparing self[r] against self[w-1], this is not a duplicate, so + // we swap self[r] and self[w] (no effect as r==w) and then increment both + // r and w, leaving us with: + // + // r + // +---+---+---+---+---+---+ + // | 0 | 1 | 1 | 2 | 3 | 3 | + // +---+---+---+---+---+---+ + // w + // + // Comparing self[r] against self[w-1], this value is a duplicate, + // so we increment `r` but leave everything else unchanged: + // + // r + // +---+---+---+---+---+---+ + // | 0 | 1 | 1 | 2 | 3 | 3 | + // +---+---+---+---+---+---+ + // w + // + // Comparing self[r] against self[w-1], this is not a duplicate, + // so swap self[r] and self[w] and advance r and w: + // + // r + // +---+---+---+---+---+---+ + // | 0 | 1 | 2 | 1 | 3 | 3 | + // +---+---+---+---+---+---+ + // w + // + // Not a duplicate, repeat: + // + // r + // +---+---+---+---+---+---+ + // | 0 | 1 | 2 | 3 | 1 | 3 | + // +---+---+---+---+---+---+ + // w + // + // Duplicate, advance r. End of vec. Truncate to w. + + let ln = self.len(); + if ln <= 1 { + return; + } + + // Avoid bounds checks by using raw pointers. + let p = self.as_mut_ptr(); + let mut r: usize = 1; + let mut w: usize = 1; + + while r < ln { + let p_r = p.offset(r as isize); + let p_wm1 = p.offset((w - 1) as isize); + if !same_bucket(&mut *p_r, &mut *p_wm1) { + if r != w { + let p_w = p_wm1.offset(1); + mem::swap(&mut *p_r, &mut *p_w); + } + w += 1; + } + r += 1; + } + + self.truncate(w); + } + } + /// Appends an element to the back of a collection. 
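For reference, the intended behaviour of the two new `dedup_by`/`dedup_by_key` methods above, as a standalone snippet. It is written against today's Rust, where both methods and `str::eq_ignore_ascii_case` are stable; at the time of this patch the `dedup_by` feature gate and the `AsciiExt` import shown in the tests were still needed:

```
fn main() {
    // `dedup_by` keeps the first element of each run that the closure
    // considers equal; `dedup_by_key` compares derived keys instead.
    let mut words = vec!["foo", "FOO", "bar", "Bar", "bar", "baz"];
    words.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
    assert_eq!(words, ["foo", "bar", "baz"]);

    let mut nums = vec![10, 11, 12, 20, 21, 30];
    nums.dedup_by_key(|n| *n / 10);
    assert_eq!(nums, [10, 20, 30]);
}
```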
/// /// # Panics @@ -810,9 +964,11 @@ impl Vec { } } - /// Removes the last element from a vector and returns it, or `None` if it + /// Removes the last element from a vector and returns it, or [`None`] if it /// is empty. /// + /// [`None`]: ../../std/option/enum.Option.html#variant.None + /// /// # Examples /// /// ``` @@ -1156,90 +1312,9 @@ impl Vec { /// assert_eq!(vec, [1, 2, 3, 2]); /// ``` #[stable(feature = "rust1", since = "1.0.0")] + #[inline] pub fn dedup(&mut self) { - unsafe { - // Although we have a mutable reference to `self`, we cannot make - // *arbitrary* changes. The `PartialEq` comparisons could panic, so we - // must ensure that the vector is in a valid state at all time. - // - // The way that we handle this is by using swaps; we iterate - // over all the elements, swapping as we go so that at the end - // the elements we wish to keep are in the front, and those we - // wish to reject are at the back. We can then truncate the - // vector. This operation is still O(n). - // - // Example: We start in this state, where `r` represents "next - // read" and `w` represents "next_write`. - // - // r - // +---+---+---+---+---+---+ - // | 0 | 1 | 1 | 2 | 3 | 3 | - // +---+---+---+---+---+---+ - // w - // - // Comparing self[r] against self[w-1], this is not a duplicate, so - // we swap self[r] and self[w] (no effect as r==w) and then increment both - // r and w, leaving us with: - // - // r - // +---+---+---+---+---+---+ - // | 0 | 1 | 1 | 2 | 3 | 3 | - // +---+---+---+---+---+---+ - // w - // - // Comparing self[r] against self[w-1], this value is a duplicate, - // so we increment `r` but leave everything else unchanged: - // - // r - // +---+---+---+---+---+---+ - // | 0 | 1 | 1 | 2 | 3 | 3 | - // +---+---+---+---+---+---+ - // w - // - // Comparing self[r] against self[w-1], this is not a duplicate, - // so swap self[r] and self[w] and advance r and w: - // - // r - // +---+---+---+---+---+---+ - // | 0 | 1 | 2 | 1 | 3 | 3 | - // +---+---+---+---+---+---+ - // w - // - // Not a duplicate, repeat: - // - // r - // +---+---+---+---+---+---+ - // | 0 | 1 | 2 | 3 | 1 | 3 | - // +---+---+---+---+---+---+ - // w - // - // Duplicate, advance r. End of vec. Truncate to w. - - let ln = self.len(); - if ln <= 1 { - return; - } - - // Avoid bounds checks by using raw pointers. 
- let p = self.as_mut_ptr(); - let mut r: usize = 1; - let mut w: usize = 1; - - while r < ln { - let p_r = p.offset(r as isize); - let p_wm1 = p.offset((w - 1) as isize); - if *p_r != *p_wm1 { - if r != w { - let p_w = p_wm1.offset(1); - mem::swap(&mut *p_r, &mut *p_w); - } - w += 1; - } - r += 1; - } - - self.truncate(w); - } + self.dedup_by(|a, b| a == b) } } @@ -1572,7 +1647,24 @@ impl Vec { #[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, T: 'a + Copy> Extend<&'a T> for Vec { fn extend>(&mut self, iter: I) { - self.extend(iter.into_iter().cloned()); + >::extend_vec(iter, self); + } +} + +// helper trait for specialization of Vec's Extend impl +trait SpecExtendVec { + fn extend_vec(self, vec: &mut Vec); +} + +impl <'a, T: 'a + Copy, I: IntoIterator> SpecExtendVec for I { + default fn extend_vec(self, vec: &mut Vec) { + vec.extend(self.into_iter().cloned()); + } +} + +impl<'a, T: Copy> SpecExtendVec for &'a [T] { + fn extend_vec(self, vec: &mut Vec) { + vec.extend_from_slice(self); } } @@ -1705,6 +1797,13 @@ impl<'a, T: Clone> From<&'a [T]> for Vec { } } +#[stable(feature = "vec_from_cow_slice", since = "1.14.0")] +impl<'a, T> From> for Vec where [T]: ToOwned> { + fn from(s: Cow<'a, [T]>) -> Vec { + s.into_owned() + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl<'a> From<&'a str> for Vec { fn from(s: &'a str) -> Vec { @@ -1756,7 +1855,7 @@ pub struct IntoIter { end: *const T, } -#[stable(feature = "vec_intoiter_debug", since = "")] +#[stable(feature = "vec_intoiter_debug", since = "1.13.0")] impl fmt::Debug for IntoIter { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("IntoIter") @@ -1770,7 +1869,7 @@ impl IntoIter { /// /// # Examples /// - /// ```rust + /// ``` /// # #![feature(vec_into_iter_as_slice)] /// let vec = vec!['a', 'b', 'c']; /// let mut into_iter = vec.into_iter(); @@ -1789,7 +1888,7 @@ impl IntoIter { /// /// # Examples /// - /// ```rust + /// ``` /// # #![feature(vec_into_iter_as_slice)] /// let vec = vec!['a', 'b', 'c']; /// let mut into_iter = vec.into_iter(); @@ -1930,7 +2029,7 @@ unsafe impl<'a, T: Sync> Sync for Drain<'a, T> {} #[stable(feature = "drain", since = "1.6.0")] unsafe impl<'a, T: Send> Send for Drain<'a, T> {} -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T> Iterator for Drain<'a, T> { type Item = T; @@ -1944,7 +2043,7 @@ impl<'a, T> Iterator for Drain<'a, T> { } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T> DoubleEndedIterator for Drain<'a, T> { #[inline] fn next_back(&mut self) -> Option { @@ -1952,7 +2051,7 @@ impl<'a, T> DoubleEndedIterator for Drain<'a, T> { } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T> Drop for Drain<'a, T> { fn drop(&mut self) { // exhaust self first @@ -1974,7 +2073,7 @@ impl<'a, T> Drop for Drain<'a, T> { } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T> ExactSizeIterator for Drain<'a, T> {} #[unstable(feature = "fused", issue = "35602")] diff --git a/src/libcollections/vec_deque.rs b/src/libcollections/vec_deque.rs index 2e561dabb4794..5397193cab40f 100644 --- a/src/libcollections/vec_deque.rs +++ b/src/libcollections/vec_deque.rs @@ -726,33 +726,25 @@ impl VecDeque { /// ``` /// use std::collections::VecDeque; /// - /// let mut vector: VecDeque = VecDeque::new(); + /// let mut vector = VecDeque::new(); /// /// vector.push_back(0); /// 
vector.push_back(1); /// vector.push_back(2); /// - /// assert_eq!(vector.as_slices(), (&[0u32, 1, 2] as &[u32], &[] as &[u32])); + /// assert_eq!(vector.as_slices(), (&[0, 1, 2][..], &[][..])); /// /// vector.push_front(10); /// vector.push_front(9); /// - /// assert_eq!(vector.as_slices(), (&[9u32, 10] as &[u32], &[0u32, 1, 2] as &[u32])); + /// assert_eq!(vector.as_slices(), (&[9, 10][..], &[0, 1, 2][..])); /// ``` #[inline] #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn as_slices(&self) -> (&[T], &[T]) { unsafe { - let contiguous = self.is_contiguous(); let buf = self.buffer_as_slice(); - if contiguous { - let (empty, buf) = buf.split_at(0); - (&buf[self.tail..self.head], empty) - } else { - let (mid, right) = buf.split_at(self.tail); - let (left, _) = mid.split_at(self.head); - (right, left) - } + RingSlices::ring_slices(buf, self.head, self.tail) } } @@ -764,7 +756,7 @@ impl VecDeque { /// ``` /// use std::collections::VecDeque; /// - /// let mut vector: VecDeque = VecDeque::new(); + /// let mut vector = VecDeque::new(); /// /// vector.push_back(0); /// vector.push_back(1); @@ -774,26 +766,16 @@ impl VecDeque { /// /// vector.as_mut_slices().0[0] = 42; /// vector.as_mut_slices().1[0] = 24; - /// assert_eq!(vector.as_slices(), (&[42u32, 10] as &[u32], &[24u32, 1] as &[u32])); + /// assert_eq!(vector.as_slices(), (&[42, 10][..], &[24, 1][..])); /// ``` #[inline] #[stable(feature = "deque_extras_15", since = "1.5.0")] pub fn as_mut_slices(&mut self) -> (&mut [T], &mut [T]) { unsafe { - let contiguous = self.is_contiguous(); let head = self.head; let tail = self.tail; let buf = self.buffer_as_mut_slice(); - - if contiguous { - let (empty, buf) = buf.split_at_mut(0); - (&mut buf[tail..head], empty) - } else { - let (mid, right) = buf.split_at_mut(tail); - let (left, _) = mid.split_at_mut(head); - - (right, left) - } + RingSlices::ring_slices(buf, head, tail) } } @@ -1829,6 +1811,42 @@ fn wrap_index(index: usize, size: usize) -> usize { index & (size - 1) } +/// Returns the two slices that cover the VecDeque's valid range +trait RingSlices : Sized { + fn slice(self, from: usize, to: usize) -> Self; + fn split_at(self, i: usize) -> (Self, Self); + + fn ring_slices(buf: Self, head: usize, tail: usize) -> (Self, Self) { + let contiguous = tail <= head; + if contiguous { + let (empty, buf) = buf.split_at(0); + (buf.slice(tail, head), empty) + } else { + let (mid, right) = buf.split_at(tail); + let (left, _) = mid.split_at(head); + (right, left) + } + } +} + +impl<'a, T> RingSlices for &'a [T] { + fn slice(self, from: usize, to: usize) -> Self { + &self[from..to] + } + fn split_at(self, i: usize) -> (Self, Self) { + (*self).split_at(i) + } +} + +impl<'a, T> RingSlices for &'a mut [T] { + fn slice(self, from: usize, to: usize) -> Self { + &mut self[from..to] + } + fn split_at(self, i: usize) -> (Self, Self) { + (*self).split_at_mut(i) + } +} + /// Calculate the number of elements left to be read in the buffer #[inline] fn count(tail: usize, head: usize, size: usize) -> usize { @@ -1875,6 +1893,14 @@ impl<'a, T> Iterator for Iter<'a, T> { let len = count(self.tail, self.head, self.ring.len()); (len, Some(len)) } + + fn fold(self, mut accum: Acc, mut f: F) -> Acc + where F: FnMut(Acc, Self::Item) -> Acc, + { + let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); + accum = front.iter().fold(accum, &mut f); + back.iter().fold(accum, &mut f) + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -1927,6 +1953,14 @@ impl<'a, T> Iterator for IterMut<'a, T> { let 
len = count(self.tail, self.head, self.ring.len()); (len, Some(len)) } + + fn fold(self, mut accum: Acc, mut f: F) -> Acc + where F: FnMut(Acc, Self::Item) -> Acc, + { + let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); + accum = front.iter_mut().fold(accum, &mut f); + back.iter_mut().fold(accum, &mut f) + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -2002,7 +2036,7 @@ unsafe impl<'a, T: Sync> Sync for Drain<'a, T> {} #[stable(feature = "drain", since = "1.6.0")] unsafe impl<'a, T: Send> Send for Drain<'a, T> {} -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T: 'a> Drop for Drain<'a, T> { fn drop(&mut self) { for _ in self.by_ref() {} @@ -2051,7 +2085,7 @@ impl<'a, T: 'a> Drop for Drain<'a, T> { } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T: 'a> Iterator for Drain<'a, T> { type Item = T; @@ -2066,7 +2100,7 @@ impl<'a, T: 'a> Iterator for Drain<'a, T> { } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> { #[inline] fn next_back(&mut self) -> Option { @@ -2074,7 +2108,7 @@ impl<'a, T: 'a> DoubleEndedIterator for Drain<'a, T> { } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {} #[unstable(feature = "fused", issue = "35602")] diff --git a/src/libcollectionstest/binary_heap.rs b/src/libcollectionstest/binary_heap.rs index e2a57bd8d3862..9cd63d8793184 100644 --- a/src/libcollectionstest/binary_heap.rs +++ b/src/libcollectionstest/binary_heap.rs @@ -139,6 +139,7 @@ fn test_push_unique() { } #[test] +#[allow(deprecated)] fn test_push_pop() { let mut heap = BinaryHeap::from(vec![5, 5, 2, 1, 3]); assert_eq!(heap.len(), 5); @@ -153,6 +154,7 @@ fn test_push_pop() { } #[test] +#[allow(deprecated)] fn test_replace() { let mut heap = BinaryHeap::from(vec![5, 5, 2, 1, 3]); assert_eq!(heap.len(), 5); @@ -212,6 +214,7 @@ fn test_empty_peek_mut() { } #[test] +#[allow(deprecated)] fn test_empty_replace() { let mut heap = BinaryHeap::new(); assert!(heap.replace(5).is_none()); @@ -296,5 +299,7 @@ fn test_extend_specialization() { #[allow(dead_code)] fn assert_covariance() { - fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { d } + fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { + d + } } diff --git a/src/libcollectionstest/btree/map.rs b/src/libcollectionstest/btree/map.rs index 49fce68d15e54..8222da105ccad 100644 --- a/src/libcollectionstest/btree/map.rs +++ b/src/libcollectionstest/btree/map.rs @@ -533,9 +533,7 @@ create_append_test!(test_append_1700, 1700); fn rand_data(len: usize) -> Vec<(u32, u32)> { let mut rng = DeterministicRng::new(); - Vec::from_iter( - (0..len).map(|_| (rng.next(), rng.next())) - ) + Vec::from_iter((0..len).map(|_| (rng.next(), rng.next()))) } #[test] diff --git a/src/libcollectionstest/btree/mod.rs b/src/libcollectionstest/btree/mod.rs index ea43f423b7c1f..ae8b18d0c9fd9 100644 --- a/src/libcollectionstest/btree/mod.rs +++ b/src/libcollectionstest/btree/mod.rs @@ -25,7 +25,7 @@ impl DeterministicRng { x: 0x193a6754, y: 0xa8a7d469, z: 0x97830e05, - w: 0x113ba7bb + w: 0x113ba7bb, } } diff --git a/src/libcollectionstest/btree/set.rs b/src/libcollectionstest/btree/set.rs index a32e3f1a76aea..6171b8ba624cd 100644 --- a/src/libcollectionstest/btree/set.rs +++ 
b/src/libcollectionstest/btree/set.rs @@ -15,45 +15,51 @@ use super::DeterministicRng; #[test] fn test_clone_eq() { - let mut m = BTreeSet::new(); + let mut m = BTreeSet::new(); - m.insert(1); - m.insert(2); + m.insert(1); + m.insert(2); - assert!(m.clone() == m); + assert!(m.clone() == m); } #[test] fn test_hash() { - let mut x = BTreeSet::new(); - let mut y = BTreeSet::new(); + let mut x = BTreeSet::new(); + let mut y = BTreeSet::new(); - x.insert(1); - x.insert(2); - x.insert(3); + x.insert(1); + x.insert(2); + x.insert(3); - y.insert(3); - y.insert(2); - y.insert(1); + y.insert(3); + y.insert(2); + y.insert(1); - assert!(::hash(&x) == ::hash(&y)); + assert!(::hash(&x) == ::hash(&y)); } -fn check(a: &[i32], b: &[i32], expected: &[i32], f: F) where - F: FnOnce(&BTreeSet, &BTreeSet, &mut FnMut(&i32) -> bool) -> bool, +fn check(a: &[i32], b: &[i32], expected: &[i32], f: F) + where F: FnOnce(&BTreeSet, &BTreeSet, &mut FnMut(&i32) -> bool) -> bool { let mut set_a = BTreeSet::new(); let mut set_b = BTreeSet::new(); - for x in a { assert!(set_a.insert(*x)) } - for y in b { assert!(set_b.insert(*y)) } + for x in a { + assert!(set_a.insert(*x)) + } + for y in b { + assert!(set_b.insert(*y)) + } let mut i = 0; - f(&set_a, &set_b, &mut |&x| { - assert_eq!(x, expected[i]); - i += 1; - true - }); + f(&set_a, + &set_b, + &mut |&x| { + assert_eq!(x, expected[i]); + i += 1; + true + }); assert_eq!(i, expected.len()); } @@ -82,9 +88,7 @@ fn test_difference() { check_difference(&[], &[], &[]); check_difference(&[1, 12], &[], &[1, 12]); check_difference(&[], &[1, 2, 3, 9], &[]); - check_difference(&[1, 3, 5, 9, 11], - &[3, 9], - &[1, 5, 11]); + check_difference(&[1, 3, 5, 9, 11], &[3, 9], &[1, 5, 11]); check_difference(&[-5, 11, 22, 33, 40, 42], &[-12, -5, 14, 23, 34, 38, 39, 50], &[11, 22, 33, 40, 42]); @@ -245,10 +249,18 @@ fn test_recovery() { fn test_variance() { use std::collections::btree_set::{IntoIter, Iter, Range}; - fn set<'new>(v: BTreeSet<&'static str>) -> BTreeSet<&'new str> { v } - fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> { v } - fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> { v } - fn range<'a, 'new>(v: Range<'a, &'static str>) -> Range<'a, &'new str> { v } + fn set<'new>(v: BTreeSet<&'static str>) -> BTreeSet<&'new str> { + v + } + fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> { + v + } + fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> { + v + } + fn range<'a, 'new>(v: Range<'a, &'static str>) -> Range<'a, &'new str> { + v + } } #[test] @@ -277,9 +289,7 @@ fn test_append() { fn rand_data(len: usize) -> Vec { let mut rng = DeterministicRng::new(); - Vec::from_iter( - (0..len).map(|_| rng.next()) - ) + Vec::from_iter((0..len).map(|_| rng.next())) } #[test] diff --git a/src/libcollectionstest/cow_str.rs b/src/libcollectionstest/cow_str.rs new file mode 100644 index 0000000000000..82533ba077541 --- /dev/null +++ b/src/libcollectionstest/cow_str.rs @@ -0,0 +1,65 @@ +// Copyright 2012-2013-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+
+use std::borrow::Cow;
+
+// check that Cow<'a, str> implements addition
+#[test]
+fn check_cow_add() {
+    let borrowed1 = Cow::Borrowed("Hello, ");
+    let borrowed2 = Cow::Borrowed("World!");
+    let borrow_empty = Cow::Borrowed("");
+
+    let owned1: Cow<str> = Cow::Owned("Hi, ".into());
+    let owned2: Cow<str> = Cow::Owned("Rustaceans!".into());
+    let owned_empty: Cow<str> = Cow::Owned("".into());
+
+    assert_eq!("Hello, World!", borrowed1.clone() + borrowed2.clone());
+    assert_eq!("Hello, Rustaceans!", borrowed1.clone() + owned2.clone());
+
+    assert_eq!("Hi, World!", owned1.clone() + borrowed2.clone());
+    assert_eq!("Hi, Rustaceans!", owned1.clone() + owned2.clone());
+
+    if let Cow::Owned(_) = borrowed1.clone() + borrow_empty.clone() {
+        panic!("Adding empty strings to a borrow should not allocate");
+    }
+    if let Cow::Owned(_) = borrow_empty.clone() + borrowed1.clone() {
+        panic!("Adding empty strings to a borrow should not allocate");
+    }
+    if let Cow::Owned(_) = borrowed1.clone() + owned_empty.clone() {
+        panic!("Adding empty strings to a borrow should not allocate");
+    }
+    if let Cow::Owned(_) = owned_empty + borrowed1 {
+        panic!("Adding empty strings to a borrow should not allocate");
+    }
+}
+
+#[test]
+fn check_cow_add_assign() {
+    let mut borrowed1 = Cow::Borrowed("Hello, ");
+    let borrowed2 = Cow::Borrowed("World!");
+    let borrow_empty = Cow::Borrowed("");
+
+    let mut owned1: Cow<str> = Cow::Owned("Hi, ".into());
+    let owned2: Cow<str> = Cow::Owned("Rustaceans!".into());
+    let owned_empty: Cow<str> = Cow::Owned("".into());
+
+    let mut borrowed1clone = borrowed1.clone();
+    borrowed1clone += borrow_empty;
+    assert_eq!((&borrowed1clone).as_ptr(), (&borrowed1).as_ptr());
+
+    borrowed1clone += owned_empty;
+    assert_eq!((&borrowed1clone).as_ptr(), (&borrowed1).as_ptr());
+
+    owned1 += borrowed2;
+    borrowed1 += owned2;
+
+    assert_eq!("Hi, World!", owned1);
+    assert_eq!("Hello, Rustaceans!", borrowed1);
+}
diff --git a/src/libcollectionstest/lib.rs b/src/libcollectionstest/lib.rs
index 878581a4f296e..5d3e03c2dee36 100644
--- a/src/libcollectionstest/lib.rs
+++ b/src/libcollectionstest/lib.rs
@@ -16,9 +16,11 @@
 #![feature(collections)]
 #![feature(collections_bound)]
 #![feature(const_fn)]
+#![feature(dedup_by)]
 #![feature(enumset)]
 #![feature(pattern)]
 #![feature(rand)]
+#![feature(repeat_str)]
 #![feature(step_by)]
 #![feature(str_escape)]
 #![feature(str_replacen)]
@@ -31,9 +33,12 @@ extern crate collections;
 extern crate test;
 extern crate rustc_unicode;
 
-use std::hash::{Hash, Hasher, SipHasher};
+use std::hash::{Hash, Hasher};
+use std::collections::hash_map::DefaultHasher;
 
-#[cfg(test)] #[macro_use] mod bench;
+#[cfg(test)]
+#[macro_use]
+mod bench;
 
 mod binary_heap;
 mod btree;
@@ -47,7 +52,7 @@ mod vec_deque;
 mod vec;
 
 fn hash<T: Hash>(t: &T) -> u64 {
-    let mut s = SipHasher::new();
+    let mut s = DefaultHasher::new();
     t.hash(&mut s);
     s.finish()
 }
diff --git a/src/libcollectionstest/slice.rs b/src/libcollectionstest/slice.rs
index 5b341ab62d097..a6230ef471cec 100644
--- a/src/libcollectionstest/slice.rs
+++ b/src/libcollectionstest/slice.rs
@@ -315,47 +315,6 @@ fn test_clear() {
     // If the unsafe block didn't drop things properly, we blow up here.
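A usage sketch of the `Cow<str>` addition exercised by the tests above, written against the now-stable `Add`/`AddAssign` impls; `matches!` is only used for brevity:

```
use std::borrow::Cow;

fn main() {
    let hello: Cow<str> = Cow::Borrowed("Hello, ");
    let world: Cow<str> = Cow::Borrowed("World!");

    // Concatenating two non-empty pieces has to allocate an owned String.
    let greeting = hello.clone() + world;
    assert_eq!(greeting, "Hello, World!");
    assert!(matches!(greeting, Cow::Owned(_)));

    // Appending an empty string leaves a borrowed Cow borrowed.
    let unchanged = hello + Cow::Borrowed("");
    assert!(matches!(unchanged, Cow::Borrowed(_)));
}
```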
} -#[test] -fn test_dedup() { - fn case(a: Vec, b: Vec) { - let mut v = a; - v.dedup(); - assert_eq!(v, b); - } - case(vec![], vec![]); - case(vec![1], vec![1]); - case(vec![1, 1], vec![1]); - case(vec![1, 2, 3], vec![1, 2, 3]); - case(vec![1, 1, 2, 3], vec![1, 2, 3]); - case(vec![1, 2, 2, 3], vec![1, 2, 3]); - case(vec![1, 2, 3, 3], vec![1, 2, 3]); - case(vec![1, 1, 2, 2, 2, 3, 3], vec![1, 2, 3]); -} - -#[test] -fn test_dedup_unique() { - let mut v0: Vec> = vec![box 1, box 1, box 2, box 3]; - v0.dedup(); - let mut v1: Vec> = vec![box 1, box 2, box 2, box 3]; - v1.dedup(); - let mut v2: Vec> = vec![box 1, box 2, box 3, box 3]; - v2.dedup(); - // If the boxed pointers were leaked or otherwise misused, valgrind - // and/or rt should raise errors. -} - -#[test] -fn test_dedup_shared() { - let mut v0: Vec> = vec![box 1, box 1, box 2, box 3]; - v0.dedup(); - let mut v1: Vec> = vec![box 1, box 2, box 2, box 3]; - v1.dedup(); - let mut v2: Vec> = vec![box 1, box 2, box 3, box 3]; - v2.dedup(); - // If the pointers were leaked or otherwise misused, valgrind and/or - // rt should raise errors. -} - #[test] fn test_retain() { let mut v = vec![1, 2, 3, 4, 5]; @@ -461,12 +420,12 @@ fn test_sort_stability() { // number this element is, i.e. the second elements // will occur in sorted order. let mut v: Vec<_> = (0..len) - .map(|_| { - let n = thread_rng().gen::() % 10; - counts[n] += 1; - (n, counts[n]) - }) - .collect(); + .map(|_| { + let n = thread_rng().gen::() % 10; + counts[n] += 1; + (n, counts[n]) + }) + .collect(); // only sort on the first element, so an unstable sort // may mix up the counts. @@ -1116,6 +1075,7 @@ fn test_box_slice_clone() { } #[test] +#[cfg_attr(target_os = "emscripten", ignore)] fn test_box_slice_clone_panics() { use std::sync::Arc; use std::sync::atomic::{AtomicUsize, Ordering}; @@ -1156,13 +1116,13 @@ fn test_box_slice_clone_panics() { }; spawn(move || { - // When xs is dropped, +5. - let xs = vec![canary.clone(), canary.clone(), canary.clone(), panic, canary] - .into_boxed_slice(); + // When xs is dropped, +5. + let xs = vec![canary.clone(), canary.clone(), canary.clone(), panic, canary] + .into_boxed_slice(); - // When panic is cloned, +3. - xs.clone(); - }) + // When panic is cloned, +3. 
+ xs.clone(); + }) .join() .unwrap_err(); @@ -1414,8 +1374,8 @@ mod bench { let mut rng = thread_rng(); b.iter(|| { let mut v = rng.gen_iter::() - .take(5) - .collect::>(); + .take(5) + .collect::>(); v.sort(); }); b.bytes = 5 * mem::size_of::() as u64; @@ -1426,8 +1386,8 @@ mod bench { let mut rng = thread_rng(); b.iter(|| { let mut v = rng.gen_iter::() - .take(100) - .collect::>(); + .take(100) + .collect::>(); v.sort(); }); b.bytes = 100 * mem::size_of::() as u64; @@ -1438,8 +1398,8 @@ mod bench { let mut rng = thread_rng(); b.iter(|| { let mut v = rng.gen_iter::() - .take(10000) - .collect::>(); + .take(10000) + .collect::>(); v.sort(); }); b.bytes = 10000 * mem::size_of::() as u64; diff --git a/src/libcollectionstest/str.rs b/src/libcollectionstest/str.rs index 62e164a569aa6..cc56bbf4890aa 100644 --- a/src/libcollectionstest/str.rs +++ b/src/libcollectionstest/str.rs @@ -786,9 +786,9 @@ fn test_rev_iterator() { #[test] fn test_chars_decoding() { + let mut bytes = [0; 4]; for c in (0..0x110000).filter_map(::std::char::from_u32) { - let bytes = c.encode_utf8(); - let s = ::std::str::from_utf8(bytes.as_slice()).unwrap(); + let s = c.encode_utf8(&mut bytes); if Some(c) != s.chars().next() { panic!("character {:x}={} does not decode correctly", c as u32, c); } @@ -797,9 +797,9 @@ fn test_chars_decoding() { #[test] fn test_chars_rev_decoding() { + let mut bytes = [0; 4]; for c in (0..0x110000).filter_map(::std::char::from_u32) { - let bytes = c.encode_utf8(); - let s = ::std::str::from_utf8(bytes.as_slice()).unwrap(); + let s = c.encode_utf8(&mut bytes); if Some(c) != s.chars().rev().next() { panic!("character {:x}={} does not decode correctly", c as u32, c); } @@ -1286,6 +1286,13 @@ fn test_cow_from() { } } +#[test] +fn test_repeat() { + assert_eq!("".repeat(3), ""); + assert_eq!("abc".repeat(0), ""); + assert_eq!("α".repeat(3), "ααα"); +} + mod pattern { use std::str::pattern::Pattern; use std::str::pattern::{Searcher, ReverseSearcher}; diff --git a/src/libcollectionstest/string.rs b/src/libcollectionstest/string.rs index 1652fb5a88d80..98de33bdaa8e1 100644 --- a/src/libcollectionstest/string.rs +++ b/src/libcollectionstest/string.rs @@ -35,6 +35,12 @@ fn test_from_str() { assert_eq!(owned.as_ref().map(|s| &**s), Some("string")); } +#[test] +fn test_from_cow_str() { + assert_eq!(String::from(Cow::Borrowed("string")), "string"); + assert_eq!(String::from(Cow::Owned(String::from("string"))), "string"); +} + #[test] fn test_unsized_to_string() { let s: &str = "abc"; diff --git a/src/libcollectionstest/vec.rs b/src/libcollectionstest/vec.rs index ee2b898d5c2c2..3bc1321d75653 100644 --- a/src/libcollectionstest/vec.rs +++ b/src/libcollectionstest/vec.rs @@ -8,6 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
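The `From<Cow<str>>` for `String` and `From<Cow<[T]>>` for `Vec<T>` impls added earlier in this patch (and tested below) turn a maybe-borrowed value into an owned one via `into_owned`. A sketch under that assumption; `shout` is a made-up helper, not an API from the patch:

```
use std::borrow::Cow;

// Hypothetical helper: borrows when no change is needed, allocates otherwise.
fn shout(input: &str) -> Cow<str> {
    if input.ends_with('!') {
        Cow::Borrowed(input)
    } else {
        Cow::Owned(format!("{}!", input))
    }
}

fn main() {
    // `String::from(Cow<str>)` collapses either variant into an owned String.
    assert_eq!(String::from(shout("hi!")), "hi!");
    assert_eq!(String::from(shout("hi")), "hi!");

    // Same idea for Vec<T>.
    let borrowed: Cow<[i32]> = Cow::Borrowed(&[1, 2, 3]);
    assert_eq!(Vec::from(borrowed), vec![1, 2, 3]);
}
```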
+use std::ascii::AsciiExt; use std::borrow::Cow; use std::iter::{FromIterator, repeat}; use std::mem::size_of; @@ -213,6 +214,60 @@ fn test_retain() { assert_eq!(vec, [2, 4]); } +#[test] +fn test_dedup() { + fn case(a: Vec, b: Vec) { + let mut v = a; + v.dedup(); + assert_eq!(v, b); + } + case(vec![], vec![]); + case(vec![1], vec![1]); + case(vec![1, 1], vec![1]); + case(vec![1, 2, 3], vec![1, 2, 3]); + case(vec![1, 1, 2, 3], vec![1, 2, 3]); + case(vec![1, 2, 2, 3], vec![1, 2, 3]); + case(vec![1, 2, 3, 3], vec![1, 2, 3]); + case(vec![1, 1, 2, 2, 2, 3, 3], vec![1, 2, 3]); +} + +#[test] +fn test_dedup_by_key() { + fn case(a: Vec, b: Vec) { + let mut v = a; + v.dedup_by_key(|i| *i / 10); + assert_eq!(v, b); + } + case(vec![], vec![]); + case(vec![10], vec![10]); + case(vec![10, 11], vec![10]); + case(vec![10, 20, 30], vec![10, 20, 30]); + case(vec![10, 11, 20, 30], vec![10, 20, 30]); + case(vec![10, 20, 21, 30], vec![10, 20, 30]); + case(vec![10, 20, 30, 31], vec![10, 20, 30]); + case(vec![10, 11, 20, 21, 22, 30, 31], vec![10, 20, 30]); +} + +#[test] +fn test_dedup_by() { + let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"]; + vec.dedup_by(|a, b| a.eq_ignore_ascii_case(b)); + + assert_eq!(vec, ["foo", "bar", "baz", "bar"]); +} + +#[test] +fn test_dedup_unique() { + let mut v0: Vec> = vec![box 1, box 1, box 2, box 3]; + v0.dedup(); + let mut v1: Vec> = vec![box 1, box 2, box 2, box 3]; + v1.dedup(); + let mut v2: Vec> = vec![box 1, box 2, box 3, box 3]; + v2.dedup(); + // If the boxed pointers were leaked or otherwise misused, valgrind + // and/or rt should raise errors. +} + #[test] fn zero_sized_values() { let mut v = Vec::new(); @@ -271,22 +326,22 @@ fn test_zip_unzip() { #[test] fn test_vec_truncate_drop() { - static mut drops: u32 = 0; + static mut DROPS: u32 = 0; struct Elem(i32); impl Drop for Elem { fn drop(&mut self) { unsafe { - drops += 1; + DROPS += 1; } } } let mut v = vec![Elem(1), Elem(2), Elem(3), Elem(4), Elem(5)]; - assert_eq!(unsafe { drops }, 0); + assert_eq!(unsafe { DROPS }, 0); v.truncate(3); - assert_eq!(unsafe { drops }, 2); + assert_eq!(unsafe { DROPS }, 2); v.truncate(0); - assert_eq!(unsafe { drops }, 5); + assert_eq!(unsafe { DROPS }, 5); } #[test] @@ -542,10 +597,22 @@ fn test_cow_from() { } } +#[test] +fn test_from_cow() { + let borrowed: &[_] = &["borrowed", "(slice)"]; + let owned = vec!["owned", "(vec)"]; + assert_eq!(Vec::from(Cow::Borrowed(borrowed)), vec!["borrowed", "(slice)"]); + assert_eq!(Vec::from(Cow::Owned(owned)), vec!["owned", "(vec)"]); +} + #[allow(dead_code)] fn assert_covariance() { - fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { d } - fn into_iter<'new>(i: IntoIter<&'static str>) -> IntoIter<&'new str> { i } + fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { + d + } + fn into_iter<'new>(i: IntoIter<&'static str>) -> IntoIter<&'new str> { + i + } } #[bench] diff --git a/src/libcollectionstest/vec_deque.rs b/src/libcollectionstest/vec_deque.rs index 5e8633a974824..f1ea85a6c5bed 100644 --- a/src/libcollectionstest/vec_deque.rs +++ b/src/libcollectionstest/vec_deque.rs @@ -686,21 +686,21 @@ fn test_show() { assert_eq!(format!("{:?}", ringbuf), "[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]"); let ringbuf: VecDeque<_> = vec!["just", "one", "test", "more"] - .iter() - .cloned() - .collect(); + .iter() + .cloned() + .collect(); assert_eq!(format!("{:?}", ringbuf), "[\"just\", \"one\", \"test\", \"more\"]"); } #[test] fn test_drop() { - static mut drops: i32 = 0; + static mut DROPS: i32 = 0; struct Elem; 
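
The new `test_dedup_by_key` and `test_dedup_by` tests above exercise the `dedup_by_key`/`dedup_by` additions to `Vec`. A small standalone sketch of the behaviour they check, assuming a toolchain where these methods are available (they were unstable behind the `dedup_by` feature at the time of this change); note that `eq_ignore_ascii_case` on `&str` comes from `std::ascii::AsciiExt` on older compilers (hence the `use` added in the hunk) and is an inherent method on newer ones:

```rust
fn main() {
    // dedup_by_key removes consecutive elements that map to the same key.
    let mut v = vec![10, 11, 20, 21, 22, 30, 31];
    v.dedup_by_key(|i| *i / 10);
    assert_eq!(v, [10, 20, 30]);

    // dedup_by removes consecutive elements that the closure reports as
    // equal, keeping only the first of each run. Non-consecutive
    // duplicates (the trailing "bar") survive.
    let mut words = vec!["foo", "bar", "Bar", "baz", "bar"];
    words.dedup_by(|a, b| a.eq_ignore_ascii_case(b));
    assert_eq!(words, ["foo", "bar", "baz", "bar"]);
}
```
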
impl Drop for Elem { fn drop(&mut self) { unsafe { - drops += 1; + DROPS += 1; } } } @@ -712,17 +712,17 @@ fn test_drop() { ring.push_front(Elem); drop(ring); - assert_eq!(unsafe { drops }, 4); + assert_eq!(unsafe { DROPS }, 4); } #[test] fn test_drop_with_pop() { - static mut drops: i32 = 0; + static mut DROPS: i32 = 0; struct Elem; impl Drop for Elem { fn drop(&mut self) { unsafe { - drops += 1; + DROPS += 1; } } } @@ -735,20 +735,20 @@ fn test_drop_with_pop() { drop(ring.pop_back()); drop(ring.pop_front()); - assert_eq!(unsafe { drops }, 2); + assert_eq!(unsafe { DROPS }, 2); drop(ring); - assert_eq!(unsafe { drops }, 4); + assert_eq!(unsafe { DROPS }, 4); } #[test] fn test_drop_clear() { - static mut drops: i32 = 0; + static mut DROPS: i32 = 0; struct Elem; impl Drop for Elem { fn drop(&mut self) { unsafe { - drops += 1; + DROPS += 1; } } } @@ -759,10 +759,10 @@ fn test_drop_clear() { ring.push_back(Elem); ring.push_front(Elem); ring.clear(); - assert_eq!(unsafe { drops }, 4); + assert_eq!(unsafe { DROPS }, 4); drop(ring); - assert_eq!(unsafe { drops }, 4); + assert_eq!(unsafe { DROPS }, 4); } #[test] @@ -1003,5 +1003,7 @@ fn test_contains() { #[allow(dead_code)] fn assert_covariance() { - fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { d } + fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { + d + } } diff --git a/src/libcompiler_builtins/build.rs b/src/libcompiler_builtins/build.rs index 09c400b52bc83..acbd39bb1630c 100644 --- a/src/libcompiler_builtins/build.rs +++ b/src/libcompiler_builtins/build.rs @@ -72,7 +72,13 @@ impl Sources { } fn main() { - let target = env::var("TARGET").unwrap(); + let target = env::var("TARGET").expect("TARGET was not set"); + + // Emscripten's runtime includes all the builtins + if target.contains("emscripten") { + return; + } + let cfg = &mut gcc::Config::new(); if target.contains("msvc") { diff --git a/src/libcompiler_builtins/lib.rs b/src/libcompiler_builtins/lib.rs index fbcf5204d2537..4a703b3da68f6 100644 --- a/src/libcompiler_builtins/lib.rs +++ b/src/libcompiler_builtins/lib.rs @@ -8,9 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![cfg_attr(not(stage0), feature(compiler_builtins))] +#![feature(compiler_builtins)] #![no_std] -#![cfg_attr(not(stage0), compiler_builtins)] +#![compiler_builtins] #![unstable(feature = "compiler_builtins_lib", reason = "internal implementation detail of rustc right now", issue = "0")] diff --git a/src/libcore/any.rs b/src/libcore/any.rs index a3018a46eea22..eb0636e8576be 100644 --- a/src/libcore/any.rs +++ b/src/libcore/any.rs @@ -73,7 +73,6 @@ use fmt; use intrinsics; -use marker::Reflect; /////////////////////////////////////////////////////////////////////////////// // Any trait @@ -86,7 +85,7 @@ use marker::Reflect; /// /// [mod]: index.html #[stable(feature = "rust1", since = "1.0.0")] -pub trait Any: Reflect + 'static { +pub trait Any: 'static { /// Gets the `TypeId` of `self`. 
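
The `libcompiler_builtins/build.rs` hunk above replaces a bare `unwrap` with a descriptive `expect` and skips compilation entirely for Emscripten, whose runtime already ships the builtins. A stripped-down sketch of that build-script pattern (the trailing comment stands in for the real `gcc::Config` setup, which is omitted here):

```rust
// build.rs (sketch): read TARGET with a descriptive panic message and bail
// out early for targets whose runtime already provides the builtins.
use std::env;

fn main() {
    let target = env::var("TARGET").expect("TARGET was not set");

    // Emscripten's runtime includes all the builtins, so there is nothing
    // for this build script to compile.
    if target.contains("emscripten") {
        return;
    }

    // ... for every other target, configure the C toolchain (the real
    // build script uses `gcc::Config`) and compile the C sources ...
}
```
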
/// /// # Examples @@ -112,7 +111,7 @@ pub trait Any: Reflect + 'static { } #[stable(feature = "rust1", since = "1.0.0")] -impl Any for T { +impl Any for T { fn get_type_id(&self) -> TypeId { TypeId::of::() } } @@ -352,12 +351,10 @@ impl TypeId { /// # Examples /// /// ``` - /// #![feature(get_type_id)] - /// /// use std::any::{Any, TypeId}; /// - /// fn is_string(s: &Any) -> bool { - /// TypeId::of::() == s.get_type_id() + /// fn is_string(_s: &T) -> bool { + /// TypeId::of::() == TypeId::of::() /// } /// /// fn main() { @@ -366,7 +363,7 @@ impl TypeId { /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn of() -> TypeId { + pub fn of() -> TypeId { TypeId { t: unsafe { intrinsics::type_id::() }, } diff --git a/src/libcore/array.rs b/src/libcore/array.rs index 9866a39619a82..37bd57034a7b6 100644 --- a/src/libcore/array.rs +++ b/src/libcore/array.rs @@ -93,6 +93,7 @@ macro_rules! __impl_slice_eq2 { macro_rules! array_impls { ($($N:expr)+) => { $( + #[stable(feature = "rust1", since = "1.0.0")] impl AsRef<[T]> for [T; $N] { #[inline] fn as_ref(&self) -> &[T] { @@ -100,6 +101,7 @@ macro_rules! array_impls { } } + #[stable(feature = "rust1", since = "1.0.0")] impl AsMut<[T]> for [T; $N] { #[inline] fn as_mut(&mut self) -> &mut [T] { diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs index 51221f1b9b9e9..64a7a8c5ef785 100644 --- a/src/libcore/cell.rs +++ b/src/libcore/cell.rs @@ -175,7 +175,7 @@ use cmp::Ordering; use fmt::{self, Debug, Display}; -use marker::{PhantomData, Unsize}; +use marker::Unsize; use ops::{Deref, DerefMut, CoerceUnsized}; /// A mutable memory location that admits only `Copy` data. @@ -403,40 +403,40 @@ pub enum BorrowState { } /// An error returned by [`RefCell::try_borrow`](struct.RefCell.html#method.try_borrow). -#[unstable(feature = "try_borrow", issue = "35070")] -pub struct BorrowError<'a, T: 'a + ?Sized> { - marker: PhantomData<&'a RefCell>, +#[stable(feature = "try_borrow", since = "1.13.0")] +pub struct BorrowError { + _private: (), } -#[unstable(feature = "try_borrow", issue = "35070")] -impl<'a, T: ?Sized> Debug for BorrowError<'a, T> { +#[stable(feature = "try_borrow", since = "1.13.0")] +impl Debug for BorrowError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("BorrowError").finish() } } -#[unstable(feature = "try_borrow", issue = "35070")] -impl<'a, T: ?Sized> Display for BorrowError<'a, T> { +#[stable(feature = "try_borrow", since = "1.13.0")] +impl Display for BorrowError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt("already mutably borrowed", f) } } /// An error returned by [`RefCell::try_borrow_mut`](struct.RefCell.html#method.try_borrow_mut). 
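
The `libcore/any.rs` hunk above drops the `Reflect` bound, so `Any` and `TypeId::of` only require `'static`, and the updated doc example compares `TypeId`s directly instead of calling the unstable `get_type_id`. A minimal sketch of that pattern, including an unsized `T` for illustration:

```rust
use std::any::{Any, TypeId};

// Compare `TypeId`s directly rather than going through `get_type_id`;
// `T: ?Sized` means this also works for types like `str`.
fn is_string<T: ?Sized + Any>(_: &T) -> bool {
    TypeId::of::<String>() == TypeId::of::<T>()
}

fn main() {
    assert!(!is_string(&0i32));
    assert!(is_string(&String::from("cookie monster")));
    assert!(!is_string("a plain str is not a String")); // T = str (unsized)
}
```
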
-#[unstable(feature = "try_borrow", issue = "35070")] -pub struct BorrowMutError<'a, T: 'a + ?Sized> { - marker: PhantomData<&'a RefCell>, +#[stable(feature = "try_borrow", since = "1.13.0")] +pub struct BorrowMutError { + _private: (), } -#[unstable(feature = "try_borrow", issue = "35070")] -impl<'a, T: ?Sized> Debug for BorrowMutError<'a, T> { +#[stable(feature = "try_borrow", since = "1.13.0")] +impl Debug for BorrowMutError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("BorrowMutError").finish() } } -#[unstable(feature = "try_borrow", issue = "35070")] -impl<'a, T: ?Sized> Display for BorrowMutError<'a, T> { +#[stable(feature = "try_borrow", since = "1.13.0")] +impl Display for BorrowMutError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt("already borrowed", f) } @@ -573,8 +573,6 @@ impl RefCell { /// # Examples /// /// ``` - /// #![feature(try_borrow)] - /// /// use std::cell::RefCell; /// /// let c = RefCell::new(5); @@ -589,15 +587,15 @@ impl RefCell { /// assert!(c.try_borrow().is_ok()); /// } /// ``` - #[unstable(feature = "try_borrow", issue = "35070")] + #[stable(feature = "try_borrow", since = "1.13.0")] #[inline] - pub fn try_borrow(&self) -> Result, BorrowError> { + pub fn try_borrow(&self) -> Result, BorrowError> { match BorrowRef::new(&self.borrow) { Some(b) => Ok(Ref { value: unsafe { &*self.value.get() }, borrow: b, }), - None => Err(BorrowError { marker: PhantomData }), + None => Err(BorrowError { _private: () }), } } @@ -654,8 +652,6 @@ impl RefCell { /// # Examples /// /// ``` - /// #![feature(try_borrow)] - /// /// use std::cell::RefCell; /// /// let c = RefCell::new(5); @@ -667,15 +663,15 @@ impl RefCell { /// /// assert!(c.try_borrow_mut().is_ok()); /// ``` - #[unstable(feature = "try_borrow", issue = "35070")] + #[stable(feature = "try_borrow", since = "1.13.0")] #[inline] - pub fn try_borrow_mut(&self) -> Result, BorrowMutError> { + pub fn try_borrow_mut(&self) -> Result, BorrowMutError> { match BorrowRefMut::new(&self.borrow) { Some(b) => Ok(RefMut { value: unsafe { &mut *self.value.get() }, borrow: b, }), - None => Err(BorrowMutError { marker: PhantomData }), + None => Err(BorrowMutError { _private: () }), } } diff --git a/src/libcore/char.rs b/src/libcore/char.rs index ad492c81bd38a..26d28049a474d 100644 --- a/src/libcore/char.rs +++ b/src/libcore/char.rs @@ -18,6 +18,7 @@ use char_private::is_printable; use convert::TryFrom; use fmt; +use slice; use iter::FusedIterator; use mem::transmute; @@ -327,9 +328,9 @@ pub trait CharExt { #[stable(feature = "core", since = "1.6.0")] fn len_utf16(self) -> usize; #[unstable(feature = "unicode", issue = "27784")] - fn encode_utf8(self) -> EncodeUtf8; + fn encode_utf8(self, dst: &mut [u8]) -> &mut str; #[unstable(feature = "unicode", issue = "27784")] - fn encode_utf16(self) -> EncodeUtf16; + fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16]; } #[stable(feature = "core", since = "1.6.0")] @@ -419,47 +420,59 @@ impl CharExt for char { } #[inline] - fn encode_utf8(self) -> EncodeUtf8 { + fn encode_utf8(self, dst: &mut [u8]) -> &mut str { let code = self as u32; - let mut buf = [0; 4]; - let pos = if code < MAX_ONE_B { - buf[3] = code as u8; - 3 - } else if code < MAX_TWO_B { - buf[2] = (code >> 6 & 0x1F) as u8 | TAG_TWO_B; - buf[3] = (code & 0x3F) as u8 | TAG_CONT; - 2 - } else if code < MAX_THREE_B { - buf[1] = (code >> 12 & 0x0F) as u8 | TAG_THREE_B; - buf[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT; - buf[3] = (code & 0x3F) as u8 | TAG_CONT; - 1 - } else { - buf[0] = (code 
>> 18 & 0x07) as u8 | TAG_FOUR_B; - buf[1] = (code >> 12 & 0x3F) as u8 | TAG_CONT; - buf[2] = (code >> 6 & 0x3F) as u8 | TAG_CONT; - buf[3] = (code & 0x3F) as u8 | TAG_CONT; - 0 - }; - EncodeUtf8 { buf: buf, pos: pos } + unsafe { + let len = + if code < MAX_ONE_B && !dst.is_empty() { + *dst.get_unchecked_mut(0) = code as u8; + 1 + } else if code < MAX_TWO_B && dst.len() >= 2 { + *dst.get_unchecked_mut(0) = (code >> 6 & 0x1F) as u8 | TAG_TWO_B; + *dst.get_unchecked_mut(1) = (code & 0x3F) as u8 | TAG_CONT; + 2 + } else if code < MAX_THREE_B && dst.len() >= 3 { + *dst.get_unchecked_mut(0) = (code >> 12 & 0x0F) as u8 | TAG_THREE_B; + *dst.get_unchecked_mut(1) = (code >> 6 & 0x3F) as u8 | TAG_CONT; + *dst.get_unchecked_mut(2) = (code & 0x3F) as u8 | TAG_CONT; + 3 + } else if dst.len() >= 4 { + *dst.get_unchecked_mut(0) = (code >> 18 & 0x07) as u8 | TAG_FOUR_B; + *dst.get_unchecked_mut(1) = (code >> 12 & 0x3F) as u8 | TAG_CONT; + *dst.get_unchecked_mut(2) = (code >> 6 & 0x3F) as u8 | TAG_CONT; + *dst.get_unchecked_mut(3) = (code & 0x3F) as u8 | TAG_CONT; + 4 + } else { + panic!("encode_utf8: need {} bytes to encode U+{:X}, but the buffer has {}", + from_u32_unchecked(code).len_utf8(), + code, + dst.len()) + }; + transmute(slice::from_raw_parts_mut(dst.as_mut_ptr(), len)) + } } #[inline] - fn encode_utf16(self) -> EncodeUtf16 { - let mut buf = [0; 2]; + fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16] { let mut code = self as u32; - let pos = if (code & 0xFFFF) == code { - // The BMP falls through (assuming non-surrogate, as it should) - buf[1] = code as u16; - 1 - } else { - // Supplementary planes break into surrogates. - code -= 0x1_0000; - buf[0] = 0xD800 | ((code >> 10) as u16); - buf[1] = 0xDC00 | ((code as u16) & 0x3FF); - 0 - }; - EncodeUtf16 { buf: buf, pos: pos } + unsafe { + if (code & 0xFFFF) == code && !dst.is_empty() { + // The BMP falls through (assuming non-surrogate, as it should) + *dst.get_unchecked_mut(0) = code as u16; + slice::from_raw_parts_mut(dst.as_mut_ptr(), 1) + } else if dst.len() >= 2 { + // Supplementary planes break into surrogates. + code -= 0x1_0000; + *dst.get_unchecked_mut(0) = 0xD800 | ((code >> 10) as u16); + *dst.get_unchecked_mut(1) = 0xDC00 | ((code as u16) & 0x3FF); + slice::from_raw_parts_mut(dst.as_mut_ptr(), 2) + } else { + panic!("encode_utf16: need {} units to encode U+{:X}, but the buffer has {}", + from_u32_unchecked(code).len_utf16(), + code, + dst.len()) + } + } } } @@ -702,88 +715,7 @@ impl ExactSizeIterator for EscapeDebug { } #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for EscapeDebug {} -/// An iterator over `u8` entries represending the UTF-8 encoding of a `char` -/// value. -/// -/// Constructed via the `.encode_utf8()` method on `char`. -#[unstable(feature = "unicode", issue = "27784")] -#[derive(Debug)] -pub struct EncodeUtf8 { - buf: [u8; 4], - pos: usize, -} - -impl EncodeUtf8 { - /// Returns the remaining bytes of this iterator as a slice. - #[unstable(feature = "unicode", issue = "27784")] - pub fn as_slice(&self) -> &[u8] { - &self.buf[self.pos..] 
- } -} - -#[unstable(feature = "unicode", issue = "27784")] -impl Iterator for EncodeUtf8 { - type Item = u8; - - fn next(&mut self) -> Option { - if self.pos == self.buf.len() { - None - } else { - let ret = Some(self.buf[self.pos]); - self.pos += 1; - ret - } - } - - fn size_hint(&self) -> (usize, Option) { - self.as_slice().iter().size_hint() - } -} - -#[unstable(feature = "fused", issue = "35602")] -impl FusedIterator for EncodeUtf8 {} - -/// An iterator over `u16` entries represending the UTF-16 encoding of a `char` -/// value. -/// -/// Constructed via the `.encode_utf16()` method on `char`. -#[unstable(feature = "unicode", issue = "27784")] -#[derive(Debug)] -pub struct EncodeUtf16 { - buf: [u16; 2], - pos: usize, -} - -impl EncodeUtf16 { - /// Returns the remaining bytes of this iterator as a slice. - #[unstable(feature = "unicode", issue = "27784")] - pub fn as_slice(&self) -> &[u16] { - &self.buf[self.pos..] - } -} - - -#[unstable(feature = "unicode", issue = "27784")] -impl Iterator for EncodeUtf16 { - type Item = u16; - - fn next(&mut self) -> Option { - if self.pos == self.buf.len() { - None - } else { - let ret = Some(self.buf[self.pos]); - self.pos += 1; - ret - } - } - - fn size_hint(&self) -> (usize, Option) { - self.as_slice().iter().size_hint() - } -} -#[unstable(feature = "fused", issue = "35602")] -impl FusedIterator for EncodeUtf16 {} /// An iterator over an iterator of bytes of the characters the bytes represent /// as UTF-8 @@ -800,7 +732,7 @@ pub fn decode_utf8>(i: I) -> DecodeUtf8 /// `::next` returns this for an invalid input sequence. #[unstable(feature = "decode_utf8", issue = "33906")] -#[derive(PartialEq, Debug)] +#[derive(PartialEq, Eq, Debug)] pub struct InvalidSequence(()); #[unstable(feature = "decode_utf8", issue = "33906")] diff --git a/src/libcore/clone.rs b/src/libcore/clone.rs index 0b800cacfc19a..d72b18ae345ce 100644 --- a/src/libcore/clone.rs +++ b/src/libcore/clone.rs @@ -129,13 +129,6 @@ pub struct AssertParamIsClone { _field: ::marker::PhantomData reason = "deriving hack, should not be public", issue = "0")] pub struct AssertParamIsCopy { _field: ::marker::PhantomData } -#[cfg(stage0)] -#[doc(hidden)] -#[inline(always)] -#[unstable(feature = "derive_clone_copy", - reason = "deriving hack, should not be public", - issue = "0")] -pub fn assert_receiver_is_clone(_: &T) {} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T: ?Sized> Clone for &'a T { diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs index 31920fcb0f8c4..0daf658a0f42d 100644 --- a/src/libcore/cmp.rs +++ b/src/libcore/cmp.rs @@ -10,10 +10,13 @@ //! Functionality for ordering and comparison. //! -//! This module defines both `PartialOrd` and `PartialEq` traits which are used +//! This module defines both [`PartialOrd`] and [`PartialEq`] traits which are used //! by the compiler to implement comparison operators. Rust programs may -//! implement `PartialOrd` to overload the `<`, `<=`, `>`, and `>=` operators, -//! and may implement `PartialEq` to overload the `==` and `!=` operators. +//! implement [`PartialOrd`] to overload the `<`, `<=`, `>`, and `>=` operators, +//! and may implement [`PartialEq`] to overload the `==` and `!=` operators. +//! +//! [`PartialOrd`]: trait.PartialOrd.html +//! [`PartialEq`]: trait.PartialEq.html //! //! # Examples //! @@ -777,24 +780,24 @@ mod impls { ord_impl! { char usize u8 u16 u32 u64 isize i8 i16 i32 i64 } - #[unstable(feature = "never_type", issue = "35121")] + #[unstable(feature = "never_type_impls", issue = "35121")] impl PartialEq for ! 
{ fn eq(&self, _: &!) -> bool { *self } } - #[unstable(feature = "never_type", issue = "35121")] + #[unstable(feature = "never_type_impls", issue = "35121")] impl Eq for ! {} - #[unstable(feature = "never_type", issue = "35121")] + #[unstable(feature = "never_type_impls", issue = "35121")] impl PartialOrd for ! { fn partial_cmp(&self, _: &!) -> Option { *self } } - #[unstable(feature = "never_type", issue = "35121")] + #[unstable(feature = "never_type_impls", issue = "35121")] impl Ord for ! { fn cmp(&self, _: &!) -> Ordering { *self diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index 5191cd7601064..5f16a4f2435f8 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -92,6 +92,22 @@ pub trait AsRef { /// [`Option`]: ../../std/option/enum.Option.html /// [`Result`]: ../../std/result/enum.Result.html /// +/// # Examples +/// +/// [`Box`] implements `AsMut`: +/// +/// [`Box`]: ../../std/boxed/struct.Box.html +/// +/// ``` +/// fn add_one>(num: &mut T) { +/// *num.as_mut() += 1; +/// } +/// +/// let mut boxed_num = Box::new(0); +/// add_one(&mut boxed_num); +/// assert_eq!(*boxed_num, 1); +/// ``` +/// /// # Generic Impls /// /// - `AsMut` auto-dereferences if the inner type is a reference or a mutable diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 66ef92928eb06..2d75a8ec420b1 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -97,9 +97,7 @@ pub trait Write { /// This function will return an instance of `Error` on error. #[stable(feature = "fmt_write_char", since = "1.1.0")] fn write_char(&mut self, c: char) -> Result { - self.write_str(unsafe { - str::from_utf8_unchecked(c.encode_utf8().as_slice()) - }) + self.write_str(c.encode_utf8(&mut [0; 4])) } /// Glue for usage of the `write!` macro with implementors of this trait. @@ -272,10 +270,14 @@ impl<'a> Arguments<'a> { /// safely be done so, so no constructors are given and the fields are private /// to prevent modification. /// -/// The `format_args!` macro will safely create an instance of this structure +/// The [`format_args!`] macro will safely create an instance of this structure /// and pass it to a function or closure, passed as the first argument. The -/// macro validates the format string at compile-time so usage of the `write` -/// and `format` functions can be safely performed. +/// macro validates the format string at compile-time so usage of the [`write`] +/// and [`format`] functions can be safely performed. +/// +/// [`format_args!`]: ../../std/macro.format_args.html +/// [`format`]: ../../std/fmt/fn.format.html +/// [`write`]: ../../std/fmt/fn.write.html #[stable(feature = "rust1", since = "1.0.0")] #[derive(Copy, Clone)] pub struct Arguments<'a> { @@ -792,7 +794,7 @@ pub trait UpperExp { /// assert_eq!(output, "Hello world!"); /// ``` /// -/// Please note that using [`write!`][write_macro] might be preferrable. Example: +/// Please note that using [`write!`] might be preferrable. 
Example: /// /// ``` /// use std::fmt::Write; @@ -803,7 +805,7 @@ pub trait UpperExp { /// assert_eq!(output, "Hello world!"); /// ``` /// -/// [write_macro]: ../../std/macro.write!.html +/// [`write!`]: ../../std/macro.write.html #[stable(feature = "rust1", since = "1.0.0")] pub fn write(output: &mut Write, args: Arguments) -> Result { let mut formatter = Formatter { @@ -920,9 +922,7 @@ impl<'a> Formatter<'a> { // Writes the sign if it exists, and then the prefix if it was requested let write_prefix = |f: &mut Formatter| { if let Some(c) = sign { - f.buf.write_str(unsafe { - str::from_utf8_unchecked(c.encode_utf8().as_slice()) - })?; + f.buf.write_str(c.encode_utf8(&mut [0; 4]))?; } if prefixed { f.buf.write_str(prefix) } else { Ok(()) } @@ -1028,10 +1028,8 @@ impl<'a> Formatter<'a> { rt::v1::Alignment::Center => (padding / 2, (padding + 1) / 2), }; - let fill = self.fill.encode_utf8(); - let fill = unsafe { - str::from_utf8_unchecked(fill.as_slice()) - }; + let mut fill = [0; 4]; + let fill = self.fill.encode_utf8(&mut fill); for _ in 0..pre_pad { self.buf.write_str(fill)?; @@ -1358,14 +1356,14 @@ macro_rules! fmt_refs { fmt_refs! { Debug, Display, Octal, Binary, LowerHex, UpperHex, LowerExp, UpperExp } -#[unstable(feature = "never_type", issue = "35121")] +#[unstable(feature = "never_type_impls", issue = "35121")] impl Debug for ! { fn fmt(&self, _: &mut Formatter) -> Result { *self } } -#[unstable(feature = "never_type", issue = "35121")] +#[unstable(feature = "never_type_impls", issue = "35121")] impl Display for ! { fn fmt(&self, _: &mut Formatter) -> Result { *self @@ -1431,9 +1429,7 @@ impl Display for char { if f.width.is_none() && f.precision.is_none() { f.write_char(*self) } else { - f.pad(unsafe { - str::from_utf8_unchecked(self.encode_utf8().as_slice()) - }) + f.pad(self.encode_utf8(&mut [0; 4])) } } } @@ -1570,11 +1566,11 @@ floating! { f64 } // Implementation of Display/Debug for various core types #[stable(feature = "rust1", since = "1.0.0")] -impl Debug for *const T { +impl Debug for *const T { fn fmt(&self, f: &mut Formatter) -> Result { Pointer::fmt(self, f) } } #[stable(feature = "rust1", since = "1.0.0")] -impl Debug for *mut T { +impl Debug for *mut T { fn fmt(&self, f: &mut Formatter) -> Result { Pointer::fmt(self, f) } } diff --git a/src/libcore/fmt/rt/v1.rs b/src/libcore/fmt/rt/v1.rs index 6b31e040622cd..ec7add9c3759f 100644 --- a/src/libcore/fmt/rt/v1.rs +++ b/src/libcore/fmt/rt/v1.rs @@ -31,7 +31,7 @@ pub struct FormatSpec { } /// Possible alignments that can be requested as part of a formatting directive. -#[derive(Copy, Clone, PartialEq)] +#[derive(Copy, Clone, PartialEq, Eq)] pub enum Alignment { /// Indication that contents should be left-aligned. Left, diff --git a/src/libcore/hash/mod.rs b/src/libcore/hash/mod.rs index 081f0c14ec30c..ac36cbaace7a8 100644 --- a/src/libcore/hash/mod.rs +++ b/src/libcore/hash/mod.rs @@ -38,7 +38,9 @@ //! ``` //! //! If you need more control over how a value is hashed, you need to implement -//! the `Hash` trait: +//! the [`Hash`] trait: +//! +//! [`Hash`]: trait.Hash.html //! //! ```rust //! use std::hash::{Hash, Hasher, SipHasher}; @@ -76,9 +78,11 @@ use marker; use mem; #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub use self::sip::SipHasher; #[unstable(feature = "sip_hash_13", issue = "29754")] +#[allow(deprecated)] pub use self::sip::{SipHasher13, SipHasher24}; mod sip; @@ -88,7 +92,7 @@ mod sip; /// The `H` type parameter is an abstract hash state that is used by the `Hash` /// to compute the hash. 
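
The `fmt` hunks above rebuild `write_char` and the padding path on top of the buffer-based `encode_utf8(&mut [0; 4])`, and clean up the `write!`/`format_args!` documentation links. Observable behaviour is unchanged; a small usage sketch of the `fmt::Write` surface involved:

```rust
use std::fmt::Write;

fn main() {
    let mut out = String::new();
    // `write!` works with any `fmt::Write` implementor; `String` is one.
    write!(out, "{:>6}", "hi").unwrap(); // right-align within 6 columns
    // `write_char` is the single-character path that now encodes through a
    // stack buffer rather than an iterator.
    out.write_char('!').unwrap();
    assert_eq!(out, "    hi!");
}
```
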
/// -/// If you are also implementing `Eq`, there is an additional property that +/// If you are also implementing [`Eq`], there is an additional property that /// is important: /// /// ```text @@ -96,13 +100,13 @@ mod sip; /// ``` /// /// In other words, if two keys are equal, their hashes should also be equal. -/// `HashMap` and `HashSet` both rely on this behavior. +/// [`HashMap`] and [`HashSet`] both rely on this behavior. /// /// ## Derivable /// /// This trait can be used with `#[derive]` if all fields implement `Hash`. /// When `derive`d, the resulting hash will be the combination of the values -/// from calling `.hash()` on each field. +/// from calling [`.hash()`] on each field. /// /// ## How can I implement `Hash`? /// @@ -125,6 +129,11 @@ mod sip; /// } /// } /// ``` +/// +/// [`Eq`]: ../../std/cmp/trait.Eq.html +/// [`HashMap`]: ../../std/collections/struct.HashMap.html +/// [`HashSet`]: ../../std/collections/struct.HashSet.html +/// [`.hash()`]: #tymethod.hash #[stable(feature = "rust1", since = "1.0.0")] pub trait Hash { /// Feeds this value into the state given, updating the hasher as necessary. @@ -149,35 +158,35 @@ pub trait Hasher { #[stable(feature = "rust1", since = "1.0.0")] fn finish(&self) -> u64; - /// Writes some data into this `Hasher` + /// Writes some data into this `Hasher`. #[stable(feature = "rust1", since = "1.0.0")] fn write(&mut self, bytes: &[u8]); - /// Write a single `u8` into this hasher + /// Write a single `u8` into this hasher. #[inline] #[stable(feature = "hasher_write", since = "1.3.0")] fn write_u8(&mut self, i: u8) { self.write(&[i]) } - /// Write a single `u16` into this hasher. + /// Writes a single `u16` into this hasher. #[inline] #[stable(feature = "hasher_write", since = "1.3.0")] fn write_u16(&mut self, i: u16) { self.write(&unsafe { mem::transmute::<_, [u8; 2]>(i) }) } - /// Write a single `u32` into this hasher. + /// Writes a single `u32` into this hasher. #[inline] #[stable(feature = "hasher_write", since = "1.3.0")] fn write_u32(&mut self, i: u32) { self.write(&unsafe { mem::transmute::<_, [u8; 4]>(i) }) } - /// Write a single `u64` into this hasher. + /// Writes a single `u64` into this hasher. #[inline] #[stable(feature = "hasher_write", since = "1.3.0")] fn write_u64(&mut self, i: u64) { self.write(&unsafe { mem::transmute::<_, [u8; 8]>(i) }) } - /// Write a single `usize` into this hasher. + /// Writes a single `usize` into this hasher. #[inline] #[stable(feature = "hasher_write", since = "1.3.0")] fn write_usize(&mut self, i: usize) { @@ -187,31 +196,31 @@ pub trait Hasher { self.write(bytes); } - /// Write a single `i8` into this hasher. + /// Writes a single `i8` into this hasher. #[inline] #[stable(feature = "hasher_write", since = "1.3.0")] fn write_i8(&mut self, i: i8) { self.write_u8(i as u8) } - /// Write a single `i16` into this hasher. + /// Writes a single `i16` into this hasher. #[inline] #[stable(feature = "hasher_write", since = "1.3.0")] fn write_i16(&mut self, i: i16) { self.write_u16(i as u16) } - /// Write a single `i32` into this hasher. + /// Writes a single `i32` into this hasher. #[inline] #[stable(feature = "hasher_write", since = "1.3.0")] fn write_i32(&mut self, i: i32) { self.write_u32(i as u32) } - /// Write a single `i64` into this hasher. + /// Writes a single `i64` into this hasher. #[inline] #[stable(feature = "hasher_write", since = "1.3.0")] fn write_i64(&mut self, i: i64) { self.write_u64(i as u64) } - /// Write a single `isize` into this hasher. + /// Writes a single `isize` into this hasher. 
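
The reworded `Hasher` docs above describe how the `write_*` helpers all funnel data into `write`. A toy illustration of that relationship, using a deliberately trivial hasher that is only for demonstration and not suitable for real use:

```rust
use std::hash::{Hash, Hasher};

// A toy Hasher: XOR-folds bytes into a u64. Only `write` and `finish` need
// to be provided; every `write_*` helper has a default that calls `write`.
#[derive(Default)]
struct XorHasher(u64);

impl Hasher for XorHasher {
    fn write(&mut self, bytes: &[u8]) {
        for &b in bytes {
            self.0 = self.0.rotate_left(8) ^ b as u64;
        }
    }
    fn finish(&self) -> u64 {
        self.0
    }
}

fn main() {
    let mut h = XorHasher::default();
    42u32.hash(&mut h);   // ends up in `write_u32`, then `write`
    "hello".hash(&mut h); // writes the bytes plus a delimiter byte
    println!("hash = {:#x}", h.finish());
}
```
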
#[inline] #[stable(feature = "hasher_write", since = "1.3.0")] fn write_isize(&mut self, i: isize) { diff --git a/src/libcore/hash/sip.rs b/src/libcore/hash/sip.rs index dc53683d6337c..6b3ab64dfd88c 100644 --- a/src/libcore/hash/sip.rs +++ b/src/libcore/hash/sip.rs @@ -10,13 +10,21 @@ //! An implementation of SipHash. +#![allow(deprecated)] + use marker::PhantomData; use ptr; +use cmp; +use mem; /// An implementation of SipHash 1-3. /// +/// This is currently the default hashing function used by standard library +/// (eg. `collections::HashMap` uses it by default). +/// /// See: https://131002.net/siphash/ #[unstable(feature = "sip_hash_13", issue = "34767")] +#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")] #[derive(Debug, Clone, Default)] pub struct SipHasher13 { hasher: Hasher, @@ -26,6 +34,7 @@ pub struct SipHasher13 { /// /// See: https://131002.net/siphash/ #[unstable(feature = "sip_hash_13", issue = "34767")] +#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")] #[derive(Debug, Clone, Default)] pub struct SipHasher24 { hasher: Hasher, @@ -35,9 +44,6 @@ pub struct SipHasher24 { /// /// See: https://131002.net/siphash/ /// -/// This is currently the default hashing function used by standard library -/// (eg. `collections::HashMap` uses it by default). -/// /// SipHash is a general-purpose hashing function: it runs at a good /// speed (competitive with Spooky and City) and permits strong _keyed_ /// hashing. This lets you key your hashtables from a strong RNG, such as @@ -47,6 +53,7 @@ pub struct SipHasher24 { /// it is not intended for cryptographic purposes. As such, all /// cryptographic uses of this implementation are _strongly discouraged_. #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")] #[derive(Debug, Clone, Default)] pub struct SipHasher(SipHasher24); @@ -73,69 +80,67 @@ struct State { v3: u64, } -// sadly, these macro definitions can't appear later, -// because they're needed in the following defs; -// this design could be improved. - -macro_rules! u8to64_le { - ($buf:expr, $i:expr) => - ($buf[0+$i] as u64 | - ($buf[1+$i] as u64) << 8 | - ($buf[2+$i] as u64) << 16 | - ($buf[3+$i] as u64) << 24 | - ($buf[4+$i] as u64) << 32 | - ($buf[5+$i] as u64) << 40 | - ($buf[6+$i] as u64) << 48 | - ($buf[7+$i] as u64) << 56); - ($buf:expr, $i:expr, $len:expr) => +macro_rules! compress { + ($state:expr) => ({ + compress!($state.v0, $state.v1, $state.v2, $state.v3) + }); + ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => ({ - let mut t = 0; - let mut out = 0; - while t < $len { - out |= ($buf[t+$i] as u64) << t*8; - t += 1; - } - out + $v0 = $v0.wrapping_add($v1); $v1 = $v1.rotate_left(13); $v1 ^= $v0; + $v0 = $v0.rotate_left(32); + $v2 = $v2.wrapping_add($v3); $v3 = $v3.rotate_left(16); $v3 ^= $v2; + $v0 = $v0.wrapping_add($v3); $v3 = $v3.rotate_left(21); $v3 ^= $v0; + $v2 = $v2.wrapping_add($v1); $v1 = $v1.rotate_left(17); $v1 ^= $v2; + $v2 = $v2.rotate_left(32); }); } -/// Load a full u64 word from a byte stream, in LE order. Use +/// Load an integer of the desired type from a byte stream, in LE order. Uses /// `copy_nonoverlapping` to let the compiler generate the most efficient way -/// to load u64 from a possibly unaligned address. +/// to load it from a possibly unaligned address. 
/// -/// Unsafe because: unchecked indexing at i..i+8 -#[inline] -unsafe fn load_u64_le(buf: &[u8], i: usize) -> u64 { - debug_assert!(i + 8 <= buf.len()); - let mut data = 0u64; - ptr::copy_nonoverlapping(buf.get_unchecked(i), &mut data as *mut _ as *mut u8, 8); - data.to_le() -} - -macro_rules! rotl { - ($x:expr, $b:expr) => - (($x << $b) | ($x >> (64_i32.wrapping_sub($b)))) -} - -macro_rules! compress { - ($state:expr) => ({ - compress!($state.v0, $state.v1, $state.v2, $state.v3) - }); - ($v0:expr, $v1:expr, $v2:expr, $v3:expr) => +/// Unsafe because: unchecked indexing at i..i+size_of(int_ty) +macro_rules! load_int_le { + ($buf:expr, $i:expr, $int_ty:ident) => ({ - $v0 = $v0.wrapping_add($v1); $v1 = rotl!($v1, 13); $v1 ^= $v0; - $v0 = rotl!($v0, 32); - $v2 = $v2.wrapping_add($v3); $v3 = rotl!($v3, 16); $v3 ^= $v2; - $v0 = $v0.wrapping_add($v3); $v3 = rotl!($v3, 21); $v3 ^= $v0; - $v2 = $v2.wrapping_add($v1); $v1 = rotl!($v1, 17); $v1 ^= $v2; - $v2 = rotl!($v2, 32); + debug_assert!($i + mem::size_of::<$int_ty>() <= $buf.len()); + let mut data = 0 as $int_ty; + ptr::copy_nonoverlapping($buf.get_unchecked($i), + &mut data as *mut _ as *mut u8, + mem::size_of::<$int_ty>()); + data.to_le() }); } +/// Load an u64 using up to 7 bytes of a byte slice. +/// +/// Unsafe because: unchecked indexing at start..start+len +#[inline] +unsafe fn u8to64_le(buf: &[u8], start: usize, len: usize) -> u64 { + debug_assert!(len < 8); + let mut i = 0; // current byte index (from LSB) in the output u64 + let mut out = 0; + if i + 3 < len { + out = load_int_le!(buf, start + i, u32) as u64; + i += 4; + } + if i + 1 < len { + out |= (load_int_le!(buf, start + i, u16) as u64) << (i * 8); + i += 2 + } + if i < len { + out |= (*buf.get_unchecked(start + i) as u64) << (i * 8); + i += 1; + } + debug_assert_eq!(i, len); + out +} + impl SipHasher { /// Creates a new `SipHasher` with the two initial keys set to 0. #[inline] #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")] pub fn new() -> SipHasher { SipHasher::new_with_keys(0, 0) } @@ -143,16 +148,17 @@ impl SipHasher { /// Creates a `SipHasher` that is keyed off the provided keys. #[inline] #[stable(feature = "rust1", since = "1.0.0")] + #[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")] pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher { SipHasher(SipHasher24::new_with_keys(key0, key1)) } } - impl SipHasher13 { /// Creates a new `SipHasher13` with the two initial keys set to 0. #[inline] #[unstable(feature = "sip_hash_13", issue = "34767")] + #[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")] pub fn new() -> SipHasher13 { SipHasher13::new_with_keys(0, 0) } @@ -160,6 +166,7 @@ impl SipHasher13 { /// Creates a `SipHasher13` that is keyed off the provided keys. #[inline] #[unstable(feature = "sip_hash_13", issue = "34767")] + #[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")] pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher13 { SipHasher13 { hasher: Hasher::new_with_keys(key0, key1) @@ -171,6 +178,7 @@ impl SipHasher24 { /// Creates a new `SipHasher24` with the two initial keys set to 0. #[inline] #[unstable(feature = "sip_hash_13", issue = "34767")] + #[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")] pub fn new() -> SipHasher24 { SipHasher24::new_with_keys(0, 0) } @@ -178,6 +186,7 @@ impl SipHasher24 { /// Creates a `SipHasher24` that is keyed off the provided keys. 
#[inline] #[unstable(feature = "sip_hash_13", issue = "34767")] + #[rustc_deprecated(since = "1.13.0", reason = "use `DefaultHasher` instead")] pub fn new_with_keys(key0: u64, key1: u64) -> SipHasher24 { SipHasher24 { hasher: Hasher::new_with_keys(key0, key1) @@ -215,6 +224,37 @@ impl Hasher { self.state.v3 = self.k1 ^ 0x7465646279746573; self.ntail = 0; } + + // Specialized write function that is only valid for buffers with len <= 8. + // It's used to force inlining of write_u8 and write_usize, those would normally be inlined + // except for composite types (that includes slices and str hashing because of delimiter). + // Without this extra push the compiler is very reluctant to inline delimiter writes, + // degrading performance substantially for the most common use cases. + #[inline(always)] + fn short_write(&mut self, msg: &[u8]) { + debug_assert!(msg.len() <= 8); + let length = msg.len(); + self.length += length; + + let needed = 8 - self.ntail; + let fill = cmp::min(length, needed); + if fill == 8 { + self.tail = unsafe { load_int_le!(msg, 0, u64) }; + } else { + self.tail |= unsafe { u8to64_le(msg, 0, fill) } << (8 * self.ntail); + if length < needed { + self.ntail += length; + return; + } + } + self.state.v3 ^= self.tail; + S::c_rounds(&mut self.state); + self.state.v0 ^= self.tail; + + // Buffered tail is now flushed, process new input. + self.ntail = length - needed; + self.tail = unsafe { u8to64_le(msg, needed, self.ntail) }; + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -257,6 +297,21 @@ impl super::Hasher for SipHasher24 { } impl super::Hasher for Hasher { + // see short_write comment for explanation + #[inline] + fn write_usize(&mut self, i: usize) { + let bytes = unsafe { + ::slice::from_raw_parts(&i as *const usize as *const u8, mem::size_of::()) + }; + self.short_write(bytes); + } + + // see short_write comment for explanation + #[inline] + fn write_u8(&mut self, i: u8) { + self.short_write(&[i]); + } + #[inline] fn write(&mut self, msg: &[u8]) { let length = msg.len(); @@ -266,19 +321,16 @@ impl super::Hasher for Hasher { if self.ntail != 0 { needed = 8 - self.ntail; + self.tail |= unsafe { u8to64_le(msg, 0, cmp::min(length, needed)) } << 8 * self.ntail; if length < needed { - self.tail |= u8to64_le!(msg, 0, length) << 8 * self.ntail; self.ntail += length; return + } else { + self.state.v3 ^= self.tail; + S::c_rounds(&mut self.state); + self.state.v0 ^= self.tail; + self.ntail = 0; } - - let m = self.tail | u8to64_le!(msg, 0, needed) << 8 * self.ntail; - - self.state.v3 ^= m; - S::c_rounds(&mut self.state); - self.state.v0 ^= m; - - self.ntail = 0; } // Buffered tail is now flushed, process new input. @@ -287,7 +339,7 @@ impl super::Hasher for Hasher { let mut i = needed; while i < len - left { - let mi = unsafe { load_u64_le(msg, i) }; + let mi = unsafe { load_int_le!(msg, i, u64) }; self.state.v3 ^= mi; S::c_rounds(&mut self.state); @@ -296,7 +348,7 @@ impl super::Hasher for Hasher { i += 8; } - self.tail = u8to64_le!(msg, i, left); + self.tail = unsafe { u8to64_le(msg, i, left) }; self.ntail = left; } diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 22abe7a99b187..e844a158484af 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -194,14 +194,12 @@ extern "rust-intrinsic" { /// own, or if it does not enable any significant optimizations. pub fn assume(b: bool); - #[cfg(not(stage0))] /// Hints to the compiler that branch condition is likely to be true. /// Returns the value passed to it. 
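
The `sip.rs` hunk above deprecates `SipHasher`, `SipHasher13`, and `SipHasher24` in favour of `DefaultHasher`, which lives in `std::collections::hash_map` and hides the concrete algorithm. A short sketch of the replacement the deprecation notes point to:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// `DefaultHasher` gives the same hasher `HashMap` uses by default, without
// committing to a particular SipHash variant in user code.
fn hash_one<T: Hash>(value: &T) -> u64 {
    let mut hasher = DefaultHasher::new();
    value.hash(&mut hasher);
    hasher.finish()
}

fn main() {
    let a = hash_one(&"hello");
    let b = hash_one(&"hello");
    assert_eq!(a, b); // equal inputs hash equally within one program run
}
```
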
/// /// Any use other than with `if` statements will probably not have an effect. pub fn likely(b: bool) -> bool; - #[cfg(not(stage0))] /// Hints to the compiler that branch condition is likely to be false. /// Returns the value passed to it. /// @@ -596,6 +594,19 @@ extern "rust-intrinsic" { /// Invokes memset on the specified pointer, setting `count * size_of::()` /// bytes of memory starting at `dst` to `val`. + /// + /// # Examples + /// + /// ``` + /// use std::ptr; + /// + /// let mut vec = vec![0; 4]; + /// unsafe { + /// let vec_ptr = vec.as_mut_ptr(); + /// ptr::write_bytes(vec_ptr, b'a', 2); + /// } + /// assert_eq!(vec, [b'a', b'a', 0, 0]); + /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn write_bytes(dst: *mut T, val: u8, count: usize); diff --git a/src/libcore/iter/iterator.rs b/src/libcore/iter/iterator.rs index 0e74bbe9c2600..5a12f5db19ddb 100644 --- a/src/libcore/iter/iterator.rs +++ b/src/libcore/iter/iterator.rs @@ -195,13 +195,9 @@ pub trait Iterator { last } - /// Consumes the `n` first elements of the iterator, then returns the - /// `next()` one. + /// Returns the `n`th element of the iterator. /// - /// This method will evaluate the iterator `n` times, discarding those elements. - /// After it does so, it will call [`next()`] and return its value. - /// - /// [`next()`]: #tymethod.next + /// Note that all preceding elements will be consumed (i.e. discarded). /// /// Like most indexing operations, the count starts from zero, so `nth(0)` /// returns the first value, `nth(1)` the second, and so on. diff --git a/src/libcore/iter/mod.rs b/src/libcore/iter/mod.rs index b1d3ab1d1febc..df4f5e5c57643 100644 --- a/src/libcore/iter/mod.rs +++ b/src/libcore/iter/mod.rs @@ -386,7 +386,7 @@ pub struct Cloned { it: I, } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "iter_cloned", since = "1.1.0")] impl<'a, I, T: 'a> Iterator for Cloned where I: Iterator, T: Clone { @@ -399,9 +399,15 @@ impl<'a, I, T: 'a> Iterator for Cloned fn size_hint(&self) -> (usize, Option) { self.it.size_hint() } + + fn fold(self, init: Acc, mut f: F) -> Acc + where F: FnMut(Acc, Self::Item) -> Acc, + { + self.it.fold(init, move |acc, elt| f(acc, elt.clone())) + } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "iter_cloned", since = "1.1.0")] impl<'a, I, T: 'a> DoubleEndedIterator for Cloned where I: DoubleEndedIterator, T: Clone { @@ -410,7 +416,7 @@ impl<'a, I, T: 'a> DoubleEndedIterator for Cloned } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "iter_cloned", since = "1.1.0")] impl<'a, I, T: 'a> ExactSizeIterator for Cloned where I: ExactSizeIterator, T: Clone {} @@ -420,6 +426,18 @@ impl<'a, I, T: 'a> FusedIterator for Cloned where I: FusedIterator, T: Clone {} +#[doc(hidden)] +unsafe impl<'a, I, T: 'a> TrustedRandomAccess for Cloned + where I: TrustedRandomAccess, T: Clone +{ + unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item { + self.it.get_unchecked(i).clone() + } + + #[inline] + fn may_have_side_effect() -> bool { true } +} + /// An iterator that repeats endlessly. /// /// This `struct` is created by the [`cycle()`] method on [`Iterator`]. 
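
The rewritten `Iterator::nth` documentation above emphasizes that the preceding elements are consumed, not merely skipped. A small check of that behaviour:

```rust
fn main() {
    let xs = [10, 20, 30, 40];
    let mut iter = xs.iter();

    // nth(2) discards the first two elements and returns the third.
    assert_eq!(iter.nth(2), Some(&30));

    // Everything up to and including the returned element is gone.
    assert_eq!(iter.next(), Some(&40));
    assert_eq!(iter.next(), None);
}
```
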
See its @@ -532,6 +550,25 @@ impl Iterator for Chain where } } + fn fold(self, init: Acc, mut f: F) -> Acc + where F: FnMut(Acc, Self::Item) -> Acc, + { + let mut accum = init; + match self.state { + ChainState::Both | ChainState::Front => { + accum = self.a.fold(accum, &mut f); + } + _ => { } + } + match self.state { + ChainState::Both | ChainState::Back => { + accum = self.b.fold(accum, &mut f); + } + _ => { } + } + accum + } + #[inline] fn nth(&mut self, mut n: usize) -> Option { match self.state { @@ -643,7 +680,9 @@ impl FusedIterator for Chain pub struct Zip { a: A, b: B, - spec: <(A, B) as ZipImplData>::Data, + // index and len are only used by the specialized version of zip + index: usize, + len: usize, } #[stable(feature = "rust1", since = "1.0.0")] @@ -685,17 +724,6 @@ trait ZipImpl { B: DoubleEndedIterator + ExactSizeIterator; } -// Zip specialization data members -#[doc(hidden)] -trait ZipImplData { - type Data: 'static + Clone + Default + fmt::Debug; -} - -#[doc(hidden)] -impl ZipImplData for T { - default type Data = (); -} - // General Zip impl #[doc(hidden)] impl ZipImpl for Zip @@ -706,7 +734,8 @@ impl ZipImpl for Zip Zip { a: a, b: b, - spec: Default::default(), // unused + index: 0, // unused + len: 0, // unused } } @@ -759,20 +788,6 @@ impl ZipImpl for Zip } } -#[doc(hidden)] -#[derive(Default, Debug, Clone)] -struct ZipImplFields { - index: usize, - len: usize, -} - -#[doc(hidden)] -impl ZipImplData for (A, B) - where A: TrustedRandomAccess, B: TrustedRandomAccess -{ - type Data = ZipImplFields; -} - #[doc(hidden)] impl ZipImpl for Zip where A: TrustedRandomAccess, B: TrustedRandomAccess @@ -782,21 +797,26 @@ impl ZipImpl for Zip Zip { a: a, b: b, - spec: ZipImplFields { - index: 0, - len: len, - } + index: 0, + len: len, } } #[inline] fn next(&mut self) -> Option<(A::Item, B::Item)> { - if self.spec.index < self.spec.len { - let i = self.spec.index; - self.spec.index += 1; + if self.index < self.len { + let i = self.index; + self.index += 1; unsafe { Some((self.a.get_unchecked(i), self.b.get_unchecked(i))) } + } else if A::may_have_side_effect() && self.index < self.a.len() { + // match the base implementation's potential side effects + unsafe { + self.a.get_unchecked(self.index); + } + self.index += 1; + None } else { None } @@ -804,7 +824,7 @@ impl ZipImpl for Zip #[inline] fn size_hint(&self) -> (usize, Option) { - let len = self.spec.len - self.spec.index; + let len = self.len - self.index; (len, Some(len)) } @@ -813,9 +833,26 @@ impl ZipImpl for Zip where A: DoubleEndedIterator + ExactSizeIterator, B: DoubleEndedIterator + ExactSizeIterator { - if self.spec.index < self.spec.len { - self.spec.len -= 1; - let i = self.spec.len; + // Adjust a, b to equal length + if A::may_have_side_effect() { + let sz = self.a.len(); + if sz > self.len { + for _ in 0..sz - cmp::max(self.len, self.index) { + self.a.next_back(); + } + } + } + if B::may_have_side_effect() { + let sz = self.b.len(); + if sz > self.len { + for _ in 0..sz - self.len { + self.b.next_back(); + } + } + } + if self.index < self.len { + self.len -= 1; + let i = self.len; unsafe { Some((self.a.get_unchecked(i), self.b.get_unchecked(i))) } @@ -838,6 +875,9 @@ unsafe impl TrustedRandomAccess for Zip (self.a.get_unchecked(i), self.b.get_unchecked(i)) } + fn may_have_side_effect() -> bool { + A::may_have_side_effect() || B::may_have_side_effect() + } } #[unstable(feature = "fused", issue = "35602")] @@ -924,6 +964,13 @@ impl Iterator for Map where F: FnMut(I::Item) -> B { fn size_hint(&self) -> (usize, Option) { 
self.iter.size_hint() } + + fn fold(self, init: Acc, mut g: G) -> Acc + where G: FnMut(Acc, Self::Item) -> Acc, + { + let mut f = self.f; + self.iter.fold(init, move |acc, elt| g(acc, f(elt))) + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -944,6 +991,18 @@ impl ExactSizeIterator for Map impl FusedIterator for Map where F: FnMut(I::Item) -> B {} +#[doc(hidden)] +unsafe impl TrustedRandomAccess for Map + where I: TrustedRandomAccess, + F: FnMut(I::Item) -> B, +{ + unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item { + (self.f)(self.iter.get_unchecked(i)) + } + #[inline] + fn may_have_side_effect() -> bool { true } +} + /// An iterator that filters the elements of `iter` with `predicate`. /// /// This `struct` is created by the [`filter()`] method on [`Iterator`]. See its @@ -1159,6 +1218,10 @@ unsafe impl TrustedRandomAccess for Enumerate unsafe fn get_unchecked(&mut self, i: usize) -> (usize, I::Item) { (self.count + i, self.iter.get_unchecked(i)) } + + fn may_have_side_effect() -> bool { + I::may_have_side_effect() + } } #[unstable(feature = "fused", issue = "35602")] @@ -1788,6 +1851,10 @@ unsafe impl TrustedRandomAccess for Fuse unsafe fn get_unchecked(&mut self, i: usize) -> I::Item { self.iter.get_unchecked(i) } + + fn may_have_side_effect() -> bool { + I::may_have_side_effect() + } } #[unstable(feature = "fused", issue = "35602")] diff --git a/src/libcore/iter/range.rs b/src/libcore/iter/range.rs index 66d05d81d80cd..eaa3d50c88ade 100644 --- a/src/libcore/iter/range.rs +++ b/src/libcore/iter/range.rs @@ -328,7 +328,8 @@ impl ops::RangeInclusive { } } -#[stable(feature = "rust1", since = "1.0.0")] +#[unstable(feature = "step_by", reason = "recent addition", + issue = "27741")] impl Iterator for StepBy> where A: Clone, for<'a> &'a A: Add<&'a A, Output = A> @@ -352,7 +353,8 @@ impl Iterator for StepBy> where impl FusedIterator for StepBy> where A: Clone, for<'a> &'a A: Add<&'a A, Output = A> {} -#[stable(feature = "rust1", since = "1.0.0")] +#[unstable(feature = "step_by", reason = "recent addition", + issue = "27741")] impl Iterator for StepBy> { type Item = A; @@ -466,7 +468,11 @@ macro_rules! range_exact_iter_impl { ($($t:ty)*) => ($( #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for ops::Range<$t> { } + )*) +} +macro_rules! range_incl_exact_iter_impl { + ($($t:ty)*) => ($( #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] @@ -500,9 +506,12 @@ impl Iterator for ops::Range where } } -// Ranges of u64 and i64 are excluded because they cannot guarantee having -// a length <= usize::MAX, which is required by ExactSizeIterator. +// These macros generate `ExactSizeIterator` impls for various range types. +// Range<{u,i}64> and RangeInclusive<{u,i}{32,64,size}> are excluded +// because they cannot guarantee having a length <= usize::MAX, which is +// required by ExactSizeIterator. range_exact_iter_impl!(usize u8 u16 u32 isize i8 i16 i32); +range_incl_exact_iter_impl!(u8 u16 i8 i16); #[stable(feature = "rust1", since = "1.0.0")] impl DoubleEndedIterator for ops::Range where diff --git a/src/libcore/iter_private.rs b/src/libcore/iter_private.rs index 83eeef31ab054..bc1aaa09f3dbd 100644 --- a/src/libcore/iter_private.rs +++ b/src/libcore/iter_private.rs @@ -14,6 +14,7 @@ /// # Safety /// /// The iterator's .len() and size_hint() must be exact. +/// `.len()` must be cheap to call. 
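
The iterator hunks above add `fold` overrides to `Cloned`, `Chain`, and `Map` so that folding a chain of adapters is driven by the inner iterator's own loop. This is purely an internal change; the sketch below is not the library's implementation, just a check that the observable result matches a hand-written loop:

```rust
fn main() {
    let data = vec![1, 2, 3, 4];

    // map + fold now runs as one internal loop, but the result is the same.
    let folded: i32 = data.iter().map(|x| x * 2).fold(0, |acc, x| acc + x);

    let mut by_hand = 0;
    for x in &data {
        by_hand += x * 2;
    }
    assert_eq!(folded, by_hand);

    // Chain::fold folds the front iterator, then the back one.
    let chained: i32 = (1..3).chain(3..5).fold(0, |acc, x| acc + x);
    assert_eq!(chained, 1 + 2 + 3 + 4);
}
```
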
/// /// .get_unchecked() must return distinct mutable references for distinct /// indices (if applicable), and must return a valid reference if index is in @@ -21,5 +22,7 @@ #[doc(hidden)] pub unsafe trait TrustedRandomAccess : ExactSizeIterator { unsafe fn get_unchecked(&mut self, i: usize) -> Self::Item; + /// Return `true` if getting an iterator element may have + /// side effects. Remember to take inner iterators into account. + fn may_have_side_effect() -> bool; } - diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index 1ae4cf8e5ef5f..28101d21fc25e 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -89,7 +89,7 @@ #![feature(specialization)] #![feature(staged_api)] #![feature(unboxed_closures)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #![feature(never_type)] #![feature(prelude_import)] diff --git a/src/libcore/macros.rs b/src/libcore/macros.rs index f29a49dd5fe1a..e6c3f549ec8e6 100644 --- a/src/libcore/macros.rs +++ b/src/libcore/macros.rs @@ -119,6 +119,44 @@ macro_rules! assert_eq { }); } +/// Asserts that two expressions are not equal to each other. +/// +/// On panic, this macro will print the values of the expressions with their +/// debug representations. +/// +/// # Examples +/// +/// ``` +/// let a = 3; +/// let b = 2; +/// assert_ne!(a, b); +/// ``` +#[macro_export] +#[stable(feature = "assert_ne", since = "1.12.0")] +macro_rules! assert_ne { + ($left:expr , $right:expr) => ({ + match (&$left, &$right) { + (left_val, right_val) => { + if *left_val == *right_val { + panic!("assertion failed: `(left != right)` \ + (left: `{:?}`, right: `{:?}`)", left_val, right_val) + } + } + } + }); + ($left:expr , $right:expr, $($arg:tt)*) => ({ + match (&($left), &($right)) { + (left_val, right_val) => { + if *left_val == *right_val { + panic!("assertion failed: `(left != right)` \ + (left: `{:?}`, right: `{:?}`): {}", left_val, right_val, + format_args!($($arg)*)) + } + } + } + }); +} + /// Ensure that a boolean expression is `true` at runtime. /// /// This will invoke the `panic!` macro if the provided expression cannot be @@ -189,9 +227,37 @@ macro_rules! debug_assert_eq { ($($arg:tt)*) => (if cfg!(debug_assertions) { assert_eq!($($arg)*); }) } +/// Asserts that two expressions are not equal to each other. +/// +/// On panic, this macro will print the values of the expressions with their +/// debug representations. +/// +/// Unlike `assert_ne!`, `debug_assert_ne!` statements are only enabled in non +/// optimized builds by default. An optimized build will omit all +/// `debug_assert_ne!` statements unless `-C debug-assertions` is passed to the +/// compiler. This makes `debug_assert_ne!` useful for checks that are too +/// expensive to be present in a release build but may be helpful during +/// development. +/// +/// # Examples +/// +/// ``` +/// let a = 3; +/// let b = 2; +/// debug_assert_ne!(a, b); +/// ``` +#[macro_export] +#[stable(feature = "assert_ne", since = "1.12.0")] +macro_rules! debug_assert_ne { + ($($arg:tt)*) => (if cfg!(debug_assertions) { assert_ne!($($arg)*); }) +} + /// Helper macro for reducing boilerplate code for matching `Result` together /// with converting downstream errors. /// +/// Prefer using `?` syntax to `try!`. `?` is built in to the language and is +/// more succinct than `try!`. It is the standard method for error propagation. +/// /// `try!` matches the given `Result`. In case of the `Ok` variant, the /// expression has the value of the wrapped value. /// @@ -443,3 +509,143 @@ macro_rules! 
unreachable { macro_rules! unimplemented { () => (panic!("not yet implemented")) } + +/// Built-in macros to the compiler itself. +/// +/// These macros do not have any corresponding definition with a `macro_rules!` +/// macro, but are documented here. Their implementations can be found hardcoded +/// into libsyntax itself. +/// +/// For more information, see documentation for `std`'s macros. +#[cfg(dox)] +pub mod builtin { + /// The core macro for formatted string creation & output. + /// + /// For more information, see the documentation for [`std::format_args!`]. + /// + /// [`std::format_args!`]: ../std/macro.format_args.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! format_args { ($fmt:expr, $($args:tt)*) => ({ + /* compiler built-in */ + }) } + + /// Inspect an environment variable at compile time. + /// + /// For more information, see the documentation for [`std::env!`]. + /// + /// [`std::env!`]: ../std/macro.env.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! env { ($name:expr) => ({ /* compiler built-in */ }) } + + /// Optionally inspect an environment variable at compile time. + /// + /// For more information, see the documentation for [`std::option_env!`]. + /// + /// [`std::option_env!`]: ../std/macro.option_env.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! option_env { ($name:expr) => ({ /* compiler built-in */ }) } + + /// Concatenate identifiers into one identifier. + /// + /// For more information, see the documentation for [`std::concat_idents!`]. + /// + /// [`std::concat_idents!`]: ../std/macro.concat_idents.html + #[unstable(feature = "concat_idents_macro", issue = "29599")] + #[macro_export] + macro_rules! concat_idents { + ($($e:ident),*) => ({ /* compiler built-in */ }) + } + + /// Concatenates literals into a static string slice. + /// + /// For more information, see the documentation for [`std::concat!`]. + /// + /// [`std::concat!`]: ../std/macro.concat.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! concat { ($($e:expr),*) => ({ /* compiler built-in */ }) } + + /// A macro which expands to the line number on which it was invoked. + /// + /// For more information, see the documentation for [`std::line!`]. + /// + /// [`std::line!`]: ../std/macro.line.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! line { () => ({ /* compiler built-in */ }) } + + /// A macro which expands to the column number on which it was invoked. + /// + /// For more information, see the documentation for [`std::column!`]. + /// + /// [`std::column!`]: ../std/macro.column.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! column { () => ({ /* compiler built-in */ }) } + + /// A macro which expands to the file name from which it was invoked. + /// + /// For more information, see the documentation for [`std::file!`]. + /// + /// [`std::file!`]: ../std/macro.file.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! file { () => ({ /* compiler built-in */ }) } + + /// A macro which stringifies its argument. + /// + /// For more information, see the documentation for [`std::stringify!`]. + /// + /// [`std::stringify!`]: ../std/macro.stringify.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! stringify { ($t:tt) => ({ /* compiler built-in */ }) } + + /// Includes a utf8-encoded file as a string. 
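
The `macros.rs` hunk above adds `assert_ne!` and `debug_assert_ne!`, mirroring the existing `assert_eq!`/`debug_assert_eq!` pair. A brief usage sketch covering both arms of the macro:

```rust
fn main() {
    let a = 3;
    let b = 2;

    // Two-expression form, analogous to `assert_eq!`.
    assert_ne!(a, b);

    // Optional custom message, forwarded to `panic!` on failure.
    assert_ne!(a, b, "a and b were both {}", a);

    // Compiled out of optimized builds unless `-C debug-assertions` is set.
    debug_assert_ne!(a, b);
}
```
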
+ /// + /// For more information, see the documentation for [`std::include_str!`]. + /// + /// [`std::include_str!`]: ../std/macro.include_str.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! include_str { ($file:expr) => ({ /* compiler built-in */ }) } + + /// Includes a file as a reference to a byte array. + /// + /// For more information, see the documentation for [`std::include_bytes!`]. + /// + /// [`std::include_bytes!`]: ../std/macro.include_bytes.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! include_bytes { ($file:expr) => ({ /* compiler built-in */ }) } + + /// Expands to a string that represents the current module path. + /// + /// For more information, see the documentation for [`std::module_path!`]. + /// + /// [`std::module_path!`]: ../std/macro.module_path.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! module_path { () => ({ /* compiler built-in */ }) } + + /// Boolean evaluation of configuration flags. + /// + /// For more information, see the documentation for [`std::cfg!`]. + /// + /// [`std::cfg!`]: ../std/macro.cfg.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! cfg { ($($cfg:tt)*) => ({ /* compiler built-in */ }) } + + /// Parse a file as an expression or an item according to the context. + /// + /// For more information, see the documentation for [`std::include!`]. + /// + /// [`std::include!`]: ../std/macro.include.html + #[stable(feature = "rust1", since = "1.0.0")] + #[macro_export] + macro_rules! include { ($file:expr) => ({ /* compiler built-in */ }) } +} diff --git a/src/libcore/marker.rs b/src/libcore/marker.rs index c22c9f0d1c717..03d8af1563d6d 100644 --- a/src/libcore/marker.rs +++ b/src/libcore/marker.rs @@ -8,11 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! Primitive traits and marker types representing basic 'kinds' of types. +//! Primitive traits and types representing basic properties of types. //! //! Rust types can be classified in various useful ways according to -//! intrinsic properties of the type. These classifications, often called -//! 'kinds', are represented as traits. +//! their intrinsic properties. These classifications are represented +//! as traits. #![stable(feature = "rust1", since = "1.0.0")] @@ -22,7 +22,21 @@ use hash::Hasher; /// Types that can be transferred across thread boundaries. /// -/// This trait is automatically derived when the compiler determines it's appropriate. +/// This trait is automatically implemented when the compiler determines it's +/// appropriate. +/// +/// An example of a non-`Send` type is the reference-counting pointer +/// [`rc::Rc`][rc]. If two threads attempt to clone `Rc`s that point to the same +/// reference-counted value, they might try to update the reference count at the +/// same time, which is [undefined behavior][ub] because `Rc` doesn't use atomic +/// operations. Its cousin [`sync::Arc`][arc] does use atomic operations (incurring +/// some overhead) and thus is `Send`. +/// +/// See [the Nomicon](../../nomicon/send-and-sync.html) for more details. 
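As a rough sketch of the `Send` distinction described above (not part of the patch): an `Arc` can be moved into a spawned thread, while the same code with `Rc` is rejected at compile time because `Rc` is not `Send`.

```rust
use std::rc::Rc;
use std::sync::Arc;
use std::thread;

fn main() {
    let shared = Arc::new(vec![1, 2, 3]);
    let worker = {
        let shared = shared.clone();
        // `Arc<Vec<i32>>` is `Send`, so it may cross the thread boundary.
        thread::spawn(move || println!("sum = {}", shared.iter().sum::<i32>()))
    };
    worker.join().unwrap();

    let local = Rc::new(vec![1, 2, 3]);
    println!("local vector has {} items", local.len());
    // The equivalent spawn with `Rc` does not compile, because
    // `Rc<Vec<i32>>` is not `Send`:
    // thread::spawn(move || println!("{}", local.len()));
}
```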
+/// +/// [rc]: ../../std/rc/struct.Rc.html +/// [arc]: ../../std/sync/struct.Arc.html +/// [ub]: ../../reference.html#behavior-considered-undefined #[stable(feature = "rust1", since = "1.0.0")] #[lang = "send"] #[rustc_on_unimplemented = "`{Self}` cannot be sent between threads safely"] @@ -38,10 +52,10 @@ impl !Send for *const T { } #[stable(feature = "rust1", since = "1.0.0")] impl !Send for *mut T { } -/// Types with a constant size known at compile-time. +/// Types with a constant size known at compile time. /// -/// All type parameters which can be bounded have an implicit bound of `Sized`. The special syntax -/// `?Sized` can be used to remove this bound if it is not appropriate. +/// All type parameters have an implicit bound of `Sized`. The special syntax +/// `?Sized` can be used to remove this bound if it's not appropriate. /// /// ``` /// # #![allow(dead_code)] @@ -51,6 +65,26 @@ impl !Send for *mut T { } /// // struct FooUse(Foo<[i32]>); // error: Sized is not implemented for [i32] /// struct BarUse(Bar<[i32]>); // OK /// ``` +/// +/// The one exception is the implicit `Self` type of a trait, which does not +/// get an implicit `Sized` bound. This is because a `Sized` bound prevents +/// the trait from being used to form a [trait object]: +/// +/// ``` +/// # #![allow(unused_variables)] +/// trait Foo { } +/// trait Bar: Sized { } +/// +/// struct Impl; +/// impl Foo for Impl { } +/// impl Bar for Impl { } +/// +/// let x: &Foo = &Impl; // OK +/// // let y: &Bar = &Impl; // error: the trait `Bar` cannot +/// // be made into an object +/// ``` +/// +/// [trait object]: ../../book/trait-objects.html #[stable(feature = "rust1", since = "1.0.0")] #[lang = "sized"] #[rustc_on_unimplemented = "`{Self}` does not have a constant size known at compile-time"] @@ -59,14 +93,27 @@ pub trait Sized { // Empty. } -/// Types that can be "unsized" to a dynamically sized type. +/// Types that can be "unsized" to a dynamically-sized type. +/// +/// For example, the sized array type `[i8; 2]` implements `Unsize<[i8]>` and +/// `Unsize`. +/// +/// All implementations of `Unsize` are provided automatically by the compiler. +/// +/// `Unsize` is used along with [`ops::CoerceUnsized`][coerceunsized] to allow +/// "user-defined" containers such as [`rc::Rc`][rc] to contain dynamically-sized +/// types. See the [DST coercion RFC][RFC982] for more details. +/// +/// [coerceunsized]: ../ops/trait.CoerceUnsized.html +/// [rc]: ../../std/rc/struct.Rc.html +/// [RFC982]: https://github.com/rust-lang/rfcs/blob/master/text/0982-dst-coercion.md #[unstable(feature = "unsize", issue = "27732")] #[lang="unsize"] pub trait Unsize { // Empty. } -/// Types that can be copied by simply copying bits (i.e. `memcpy`). +/// Types whose values can be duplicated simply by copying bits. /// /// By default, variable bindings have 'move semantics.' In other /// words: @@ -87,7 +134,8 @@ pub trait Unsize { /// However, if a type implements `Copy`, it instead has 'copy semantics': /// /// ``` -/// // we can just derive a `Copy` implementation +/// // We can derive a `Copy` implementation. `Clone` is also required, as it's +/// // a supertrait of `Copy`. /// #[derive(Debug, Copy, Clone)] /// struct Foo; /// @@ -100,13 +148,59 @@ pub trait Unsize { /// println!("{:?}", x); // A-OK! /// ``` /// -/// It's important to note that in these two examples, the only difference is if you are allowed to -/// access `x` after the assignment: a move is also a bitwise copy under the hood. 
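As an aside on the `Sized` section above (not part of the patch), this is what the `?Sized` opt-out looks like in practice; `describe` is a made-up function for illustration.

```rust
use std::fmt::Debug;

// The implicit `Sized` bound would reject `str` and `[i32]`; `?Sized`
// lifts it, provided the value stays behind a pointer such as `&T`.
fn describe<T: Debug + ?Sized>(value: &T) {
    println!("{:?}", value);
}

fn main() {
    describe("a string slice"); // T = str
    describe(&[1, 2, 3][..]);   // T = [i32]
    describe(&42);              // sized types still work
}
```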
+/// It's important to note that in these two examples, the only difference is whether you +/// are allowed to access `x` after the assignment. Under the hood, both a copy and a move +/// can result in bits being copied in memory, although this is sometimes optimized away. +/// +/// ## How can I implement `Copy`? +/// +/// There are two ways to implement `Copy` on your type. The simplest is to use `derive`: +/// +/// ``` +/// #[derive(Copy, Clone)] +/// struct MyStruct; +/// ``` +/// +/// You can also implement `Copy` and `Clone` manually: +/// +/// ``` +/// struct MyStruct; +/// +/// impl Copy for MyStruct { } +/// +/// impl Clone for MyStruct { +/// fn clone(&self) -> MyStruct { +/// *self +/// } +/// } +/// ``` +/// +/// There is a small difference between the two: the `derive` strategy will also place a `Copy` +/// bound on type parameters, which isn't always desired. +/// +/// ## What's the difference between `Copy` and `Clone`? +/// +/// Copies happen implicitly, for example as part of an assignment `y = x`. The behavior of +/// `Copy` is not overloadable; it is always a simple bit-wise copy. +/// +/// Cloning is an explicit action, `x.clone()`. The implementation of [`Clone`][clone] can +/// provide any type-specific behavior necessary to duplicate values safely. For example, +/// the implementation of `Clone` for [`String`][string] needs to copy the pointed-to string +/// buffer in the heap. A simple bitwise copy of `String` values would merely copy the +/// pointer, leading to a double free down the line. For this reason, `String` is `Clone` +/// but not `Copy`. +/// +/// `Clone` is a supertrait of `Copy`, so everything which is `Copy` must also implement +/// `Clone`. If a type is `Copy` then its `Clone` implementation need only return `*self` +/// (see the example above). +/// +/// [clone]: ../clone/trait.Clone.html +/// [string]: ../../std/string/struct.String.html /// /// ## When can my type be `Copy`? /// /// A type can implement `Copy` if all of its components implement `Copy`. For example, this -/// `struct` can be `Copy`: +/// struct can be `Copy`: /// /// ``` /// # #[allow(dead_code)] @@ -116,7 +210,8 @@ pub trait Unsize { /// } /// ``` /// -/// A `struct` can be `Copy`, and `i32` is `Copy`, so therefore, `Point` is eligible to be `Copy`. +/// A struct can be `Copy`, and `i32` is `Copy`, therefore `Point` is eligible to be `Copy`. +/// By contrast, consider /// /// ``` /// # #![allow(dead_code)] @@ -126,57 +221,35 @@ pub trait Unsize { /// } /// ``` /// -/// The `PointList` `struct` cannot implement `Copy`, because [`Vec`] is not `Copy`. If we +/// The struct `PointList` cannot implement `Copy`, because [`Vec`] is not `Copy`. If we /// attempt to derive a `Copy` implementation, we'll get an error: /// /// ```text /// the trait `Copy` may not be implemented for this type; field `points` does not implement `Copy` /// ``` /// -/// ## When can my type _not_ be `Copy`? +/// ## When *can't* my type be `Copy`? /// /// Some types can't be copied safely. For example, copying `&mut T` would create an aliased -/// mutable reference, and copying [`String`] would result in two attempts to free the same buffer. +/// mutable reference. Copying [`String`] would duplicate responsibility for managing the `String`'s +/// buffer, leading to a double free. /// /// Generalizing the latter case, any type implementing [`Drop`] can't be `Copy`, because it's /// managing some resource besides its own [`size_of::()`] bytes. /// -/// ## What if I derive `Copy` on a type that can't? 
-/// -/// If you try to derive `Copy` on a struct or enum, you will get a compile-time error. -/// Specifically, with structs you'll get [E0204](https://doc.rust-lang.org/error-index.html#E0204) -/// and with enums you'll get [E0205](https://doc.rust-lang.org/error-index.html#E0205). -/// -/// ## When should my type be `Copy`? -/// -/// Generally speaking, if your type _can_ implement `Copy`, it should. There's one important thing -/// to consider though: if you think your type may _not_ be able to implement `Copy` in the future, -/// then it might be prudent to not implement `Copy`. This is because removing `Copy` is a breaking -/// change: that second example would fail to compile if we made `Foo` non-`Copy`. +/// If you try to implement `Copy` on a struct or enum containing non-`Copy` data, you will get a +/// compile-time error. Specifically, with structs you'll get [E0204] and with enums you'll get +/// [E0205]. /// -/// ## Derivable +/// [E0204]: https://doc.rust-lang.org/error-index.html#E0204 +/// [E0205]: https://doc.rust-lang.org/error-index.html#E0205 /// -/// This trait can be used with `#[derive]` if all of its components implement `Copy` and the type. +/// ## When *should* my type be `Copy`? /// -/// ## How can I implement `Copy`? -/// -/// There are two ways to implement `Copy` on your type: -/// -/// ``` -/// #[derive(Copy, Clone)] -/// struct MyStruct; -/// ``` -/// -/// and -/// -/// ``` -/// struct MyStruct; -/// impl Copy for MyStruct {} -/// impl Clone for MyStruct { fn clone(&self) -> MyStruct { *self } } -/// ``` -/// -/// There is a small difference between the two: the `derive` strategy will also place a `Copy` -/// bound on type parameters, which isn't always desired. +/// Generally speaking, if your type _can_ implement `Copy`, it should. Keep in mind, though, +/// that implementing `Copy` is part of the public API of your type. If the type might become +/// non-`Copy` in the future, it could be prudent to omit the `Copy` implementation now, to +/// avoid a breaking API change. /// /// [`Vec`]: ../../std/vec/struct.Vec.html /// [`String`]: ../../std/string/struct.String.html @@ -188,64 +261,74 @@ pub trait Copy : Clone { // Empty. } -/// Types that can be safely shared between threads when aliased. +/// Types for which it is safe to share references between threads. +/// +/// This trait is automatically implemented when the compiler determines +/// it's appropriate. /// /// The precise definition is: a type `T` is `Sync` if `&T` is -/// thread-safe. In other words, there is no possibility of data races -/// when passing `&T` references between threads. -/// -/// As one would expect, primitive types like [`u8`] and [`f64`] are all -/// `Sync`, and so are simple aggregate types containing them (like -/// tuples, structs and enums). More instances of basic `Sync` types -/// include "immutable" types like `&T` and those with simple -/// inherited mutability, such as [`Box`], [`Vec`] and most other -/// collection types. (Generic parameters need to be `Sync` for their -/// container to be `Sync`.) -/// -/// A somewhat surprising consequence of the definition is `&mut T` is -/// `Sync` (if `T` is `Sync`) even though it seems that it might -/// provide unsynchronized mutation. The trick is a mutable reference -/// stored in an aliasable reference (that is, `& &mut T`) becomes -/// read-only, as if it were a `& &T`, hence there is no risk of a data -/// race. +/// [`Send`][send]. 
In other words, if there is no possibility of +/// [undefined behavior][ub] (including data races) when passing +/// `&T` references between threads. +/// +/// As one would expect, primitive types like [`u8`][u8] and [`f64`][f64] +/// are all `Sync`, and so are simple aggregate types containing them, +/// like tuples, structs and enums. More examples of basic `Sync` +/// types include "immutable" types like `&T`, and those with simple +/// inherited mutability, such as [`Box`][box], [`Vec`][vec] and +/// most other collection types. (Generic parameters need to be `Sync` +/// for their container to be `Sync`.) +/// +/// A somewhat surprising consequence of the definition is that `&mut T` +/// is `Sync` (if `T` is `Sync`) even though it seems like that might +/// provide unsynchronized mutation. The trick is that a mutable +/// reference behind a shared reference (that is, `& &mut T`) +/// becomes read-only, as if it were a `& &T`. Hence there is no risk +/// of a data race. /// /// Types that are not `Sync` are those that have "interior -/// mutability" in a non-thread-safe way, such as [`Cell`] and [`RefCell`] -/// in [`std::cell`]. These types allow for mutation of their contents -/// even when in an immutable, aliasable slot, e.g. the contents of -/// [`&Cell`][`Cell`] can be [`.set`], and do not ensure data races are -/// impossible, hence they cannot be `Sync`. A higher level example -/// of a non-`Sync` type is the reference counted pointer -/// [`std::rc::Rc`][`Rc`], because any reference [`&Rc`][`Rc`] can clone a new -/// reference, which modifies the reference counts in a non-atomic -/// way. -/// -/// For cases when one does need thread-safe interior mutability, -/// types like the atomics in [`std::sync`][`sync`] and [`Mutex`] / [`RwLock`] in -/// the [`sync`] crate do ensure that any mutation cannot cause data -/// races. Hence these types are `Sync`. -/// -/// Any types with interior mutability must also use the [`std::cell::UnsafeCell`] -/// wrapper around the value(s) which can be mutated when behind a `&` -/// reference; not doing this is undefined behavior (for example, -/// [`transmute`]-ing from `&T` to `&mut T` is invalid). +/// mutability" in a non-thread-safe form, such as [`cell::Cell`][cell] +/// and [`cell::RefCell`][refcell]. These types allow for mutation of +/// their contents even through an immutable, shared reference. For +/// example the `set` method on `Cell` takes `&self`, so it requires +/// only a shared reference `&Cell`. The method performs no +/// synchronization, thus `Cell` cannot be `Sync`. /// -/// This trait is automatically derived when the compiler determines it's appropriate. +/// Another example of a non-`Sync` type is the reference-counting +/// pointer [`rc::Rc`][rc]. Given any reference `&Rc`, you can clone +/// a new `Rc`, modifying the reference counts in a non-atomic way. 
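To make the `Sync` rules above concrete, here is an illustrative sketch (not part of the patch): a `Mutex` can be shared between threads through an `Arc`, while putting a `Cell` in the same position would be rejected because `Cell<T>` is not `Sync`.

```rust
use std::sync::{Arc, Mutex};
use std::thread;

fn main() {
    // `Mutex<i32>` is `Sync`, so shared references to it may live on
    // several threads at once; every mutation goes through the lock.
    let counter = Arc::new(Mutex::new(0));
    let handles: Vec<_> = (0..4)
        .map(|_| {
            let counter = counter.clone();
            thread::spawn(move || {
                *counter.lock().unwrap() += 1;
            })
        })
        .collect();
    for handle in handles {
        handle.join().unwrap();
    }
    assert_eq!(*counter.lock().unwrap(), 4);

    // Swapping the `Mutex<i32>` for a `Cell<i32>` would not compile:
    // `Cell<i32>` is not `Sync`, so `Arc<Cell<i32>>` is not `Send`.
}
```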
///
-/// [`u8`]: ../../std/primitive.u8.html
-/// [`f64`]: ../../std/primitive.f64.html
-/// [`Vec`]: ../../std/vec/struct.Vec.html
-/// [`Box`]: ../../std/boxed/struct.Box.html
-/// [`Cell`]: ../../std/cell/struct.Cell.html
-/// [`RefCell`]: ../../std/cell/struct.RefCell.html
-/// [`std::cell`]: ../../std/cell/index.html
-/// [`.set`]: ../../std/cell/struct.Cell.html#method.set
-/// [`Rc`]: ../../std/rc/struct.Rc.html
-/// [`sync`]: ../../std/sync/index.html
-/// [`Mutex`]: ../../std/sync/struct.Mutex.html
-/// [`RwLock`]: ../../std/sync/struct.RwLock.html
-/// [`std::cell::UnsafeCell`]: ../../std/cell/struct.UnsafeCell.html
-/// [`transmute`]: ../../std/mem/fn.transmute.html
+/// For cases when one does need thread-safe interior mutability,
+/// Rust provides [atomic data types], as well as explicit locking via
+/// [`sync::Mutex`][mutex] and [`sync::RwLock`][rwlock]. These types
+/// ensure that any mutation cannot cause data races, hence the types
+/// are `Sync`. Likewise, [`sync::Arc`][arc] provides a thread-safe
+/// analogue of `Rc`.
+///
+/// Any types with interior mutability must also use the
+/// [`cell::UnsafeCell`][unsafecell] wrapper around the value(s) which
+/// can be mutated through a shared reference. Failing to do this is
+/// [undefined behavior][ub]. For example, [`transmute`][transmute]-ing
+/// from `&T` to `&mut T` is invalid.
+///
+/// See [the Nomicon](../../nomicon/send-and-sync.html) for more
+/// details about `Sync`.
+///
+/// [send]: trait.Send.html
+/// [u8]: ../../std/primitive.u8.html
+/// [f64]: ../../std/primitive.f64.html
+/// [box]: ../../std/boxed/struct.Box.html
+/// [vec]: ../../std/vec/struct.Vec.html
+/// [cell]: ../cell/struct.Cell.html
+/// [refcell]: ../cell/struct.RefCell.html
+/// [rc]: ../../std/rc/struct.Rc.html
+/// [arc]: ../../std/sync/struct.Arc.html
+/// [atomic data types]: ../sync/atomic/index.html
+/// [mutex]: ../../std/sync/struct.Mutex.html
+/// [rwlock]: ../../std/sync/struct.RwLock.html
+/// [unsafecell]: ../cell/struct.UnsafeCell.html
+/// [ub]: ../../reference.html#behavior-considered-undefined
+/// [transmute]: ../../std/mem/fn.transmute.html
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "sync"]
#[rustc_on_unimplemented = "`{Self}` cannot be shared between threads safely"]
@@ -314,29 +397,30 @@ macro_rules! impls{
)
}
-/// `PhantomData<T>` allows you to describe that a type acts as if it stores a value of type `T`,
-/// even though it does not. This allows you to inform the compiler about certain safety properties
-/// of your code.
+/// Zero-sized type used to mark things that "act like" they own a `T`.
///
-/// For a more in-depth explanation of how to use `PhantomData`, please see [the Nomicon].
+/// Adding a `PhantomData<T>` field to your type tells the compiler that your
+/// type acts as though it stores a value of type `T`, even though it doesn't
+/// really. This information is used when computing certain safety properties.
///
-/// [the Nomicon]: ../../nomicon/phantom-data.html
+/// For a more in-depth explanation of how to use `PhantomData`, please see
+/// [the Nomicon](../../nomicon/phantom-data.html).
///
/// # A ghastly note 👻👻👻
///
-/// Though they both have scary names, `PhantomData` and 'phantom types' are related, but not
-/// identical. Phantom types are a more general concept that don't require `PhantomData` to
-/// implement, but `PhantomData` is the most common way to implement them in a correct manner.
+/// Though they both have scary names, `PhantomData` and 'phantom types' are +/// related, but not identical. A phantom type parameter is simply a type +/// parameter which is never used. In Rust, this often causes the compiler to +/// complain, and the solution is to add a "dummy" use by way of `PhantomData`. /// /// # Examples /// -/// ## Unused lifetime parameter +/// ## Unused lifetime parameters /// -/// Perhaps the most common time that `PhantomData` is required is -/// with a struct that has an unused lifetime parameter, typically as -/// part of some unsafe code. For example, here is a struct `Slice` -/// that has two pointers of type `*const T`, presumably pointing into -/// an array somewhere: +/// Perhaps the most common use case for `PhantomData` is a struct that has an +/// unused lifetime parameter, typically as part of some unsafe code. For +/// example, here is a struct `Slice` that has two pointers of type `*const T`, +/// presumably pointing into an array somewhere: /// /// ```ignore /// struct Slice<'a, T> { @@ -350,7 +434,7 @@ macro_rules! impls{ /// intent is not expressed in the code, since there are no uses of /// the lifetime `'a` and hence it is not clear what data it applies /// to. We can correct this by telling the compiler to act *as if* the -/// `Slice` struct contained a borrowed reference `&'a T`: +/// `Slice` struct contained a reference `&'a T`: /// /// ``` /// use std::marker::PhantomData; @@ -359,29 +443,53 @@ macro_rules! impls{ /// struct Slice<'a, T: 'a> { /// start: *const T, /// end: *const T, -/// phantom: PhantomData<&'a T> +/// phantom: PhantomData<&'a T>, /// } /// ``` /// -/// This also in turn requires that we annotate `T:'a`, indicating -/// that `T` is a type that can be borrowed for the lifetime `'a`. +/// This also in turn requires the annotation `T: 'a`, indicating +/// that any references in `T` are valid over the lifetime `'a`. +/// +/// When initializing a `Slice` you simply provide the value +/// `PhantomData` for the field `phantom`: +/// +/// ``` +/// # #![allow(dead_code)] +/// # use std::marker::PhantomData; +/// # struct Slice<'a, T: 'a> { +/// # start: *const T, +/// # end: *const T, +/// # phantom: PhantomData<&'a T>, +/// # } +/// fn borrow_vec<'a, T>(vec: &'a Vec) -> Slice<'a, T> { +/// let ptr = vec.as_ptr(); +/// Slice { +/// start: ptr, +/// end: unsafe { ptr.offset(vec.len() as isize) }, +/// phantom: PhantomData, +/// } +/// } +/// ``` /// /// ## Unused type parameters /// -/// It sometimes happens that there are unused type parameters that +/// It sometimes happens that you have unused type parameters which /// indicate what type of data a struct is "tied" to, even though that /// data is not actually found in the struct itself. Here is an -/// example where this arises when handling external resources over a -/// foreign function interface. `PhantomData` can prevent -/// mismatches by enforcing types in the method implementations: +/// example where this arises with [FFI]. The foreign interface uses +/// handles of type `*mut ()` to refer to Rust values of different +/// types. We track the Rust type using a phantom type parameter on +/// the struct `ExternalResource` which wraps a handle. 
+/// +/// [FFI]: ../../book/ffi.html /// /// ``` /// # #![allow(dead_code)] -/// # trait ResType { fn foo(&self); } +/// # trait ResType { } /// # struct ParamType; /// # mod foreign_lib { -/// # pub fn new(_: usize) -> *mut () { 42 as *mut () } -/// # pub fn do_stuff(_: *mut (), _: usize) {} +/// # pub fn new(_: usize) -> *mut () { 42 as *mut () } +/// # pub fn do_stuff(_: *mut (), _: usize) {} /// # } /// # fn convert_params(_: ParamType) -> usize { 42 } /// use std::marker::PhantomData; @@ -408,21 +516,20 @@ macro_rules! impls{ /// } /// ``` /// -/// ## Indicating ownership +/// ## Ownership and the drop check /// -/// Adding a field of type `PhantomData` also indicates that your -/// struct owns data of type `T`. This in turn implies that when your -/// struct is dropped, it may in turn drop one or more instances of -/// the type `T`, though that may not be apparent from the other -/// structure of the type itself. This is commonly necessary if the -/// structure is using a raw pointer like `*mut T` whose referent -/// may be dropped when the type is dropped, as a `*mut T` is -/// otherwise not treated as owned. +/// Adding a field of type `PhantomData` indicates that your +/// type owns data of type `T`. This in turn implies that when your +/// type is dropped, it may drop one or more instances of the type +/// `T`. This has bearing on the Rust compiler's [drop check] +/// analysis. /// /// If your struct does not in fact *own* the data of type `T`, it is /// better to use a reference type, like `PhantomData<&'a T>` /// (ideally) or `PhantomData<*const T>` (if no lifetime applies), so /// as not to indicate ownership. +/// +/// [drop check]: ../../nomicon/dropck.html #[lang = "phantom_data"] #[stable(feature = "rust1", since = "1.0.0")] pub struct PhantomData; @@ -438,10 +545,13 @@ mod impls { /// Types that can be reflected over. /// -/// This trait is implemented for all types. Its purpose is to ensure -/// that when you write a generic function that will employ -/// reflection, that must be reflected (no pun intended) in the -/// generic bounds of that function. Here is an example: +/// By "reflection" we mean use of the [`Any`][any] trait, or related +/// machinery such as [`TypeId`][typeid]. +/// +/// `Reflect` is implemented for all types. Its purpose is to ensure +/// that when you write a generic function that will employ reflection, +/// that must be reflected (no pun intended) in the generic bounds of +/// that function. /// /// ``` /// #![feature(reflect_marker)] @@ -455,25 +565,29 @@ mod impls { /// } /// ``` /// -/// Without the declaration `T: Reflect`, `foo` would not type check -/// (note: as a matter of style, it would be preferable to write -/// `T: Any`, because `T: Any` implies `T: Reflect` and `T: 'static`, but -/// we use `Reflect` here to show how it works). The `Reflect` bound -/// thus serves to alert `foo`'s caller to the fact that `foo` may -/// behave differently depending on whether `T = u32` or not. In -/// particular, thanks to the `Reflect` bound, callers know that a -/// function declared like `fn bar(...)` will always act in -/// precisely the same way no matter what type `T` is supplied, -/// because there are no bounds declared on `T`. (The ability for a -/// caller to reason about what a function may do based solely on what -/// generic bounds are declared is often called the ["parametricity -/// property"][1].) -/// -/// [1]: http://en.wikipedia.org/wiki/Parametricity +/// Without the bound `T: Reflect`, `foo` would not typecheck. 
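Returning to the ownership and drop-check use of `PhantomData` described above, here is a minimal sketch (not part of the patch; `MyBox` is a made-up type): a raw-pointer wrapper that uses `PhantomData<T>` to tell the compiler it owns a `T`.

```rust
use std::marker::PhantomData;

// Only a raw pointer appears in the struct, so the `PhantomData<T>` field
// is what tells the compiler (and the drop checker) that `MyBox<T>` owns
// a value of type `T`.
struct MyBox<T> {
    ptr: *mut T,
    _owns: PhantomData<T>,
}

impl<T> MyBox<T> {
    fn new(value: T) -> MyBox<T> {
        MyBox {
            ptr: Box::into_raw(Box::new(value)),
            _owns: PhantomData,
        }
    }
}

impl<T> Drop for MyBox<T> {
    fn drop(&mut self) {
        // Rebuild the `Box` so the `T` is dropped and its memory freed.
        unsafe { drop(Box::from_raw(self.ptr)); }
    }
}

fn main() {
    let b = MyBox::new(String::from("owned"));
    drop(b); // the `String` is dropped exactly once
}
```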
(As +/// a matter of style, it would be preferable to write `T: Any`, +/// because `T: Any` implies `T: Reflect` and `T: 'static`, but we +/// use `Reflect` here for illustrative purposes.) +/// +/// The `Reflect` bound serves to alert `foo`'s caller to the +/// fact that `foo` may behave differently depending on whether +/// `T` is `u32` or not. The ability for a caller to reason about what +/// a function may do based solely on what generic bounds are declared +/// is often called the "[parametricity property][param]". Despite the +/// use of `Reflect`, Rust lacks true parametricity because a generic +/// function can, at the very least, call [`mem::size_of`][size_of] +/// without employing any trait bounds whatsoever. +/// +/// [any]: ../any/trait.Any.html +/// [typeid]: ../any/struct.TypeId.html +/// [param]: http://en.wikipedia.org/wiki/Parametricity +/// [size_of]: ../mem/fn.size_of.html #[rustc_reflect_like] #[unstable(feature = "reflect_marker", reason = "requires RFC and more experience", issue = "27749")] +#[rustc_deprecated(since = "1.14.0", reason = "Specialization makes parametricity impossible")] #[rustc_on_unimplemented = "`{Self}` does not implement `Any`; \ ensure all type parameters are bounded by `Any`"] pub trait Reflect {} @@ -481,4 +595,6 @@ pub trait Reflect {} #[unstable(feature = "reflect_marker", reason = "requires RFC and more experience", issue = "27749")] +#[rustc_deprecated(since = "1.14.0", reason = "Specialization makes parametricity impossible")] +#[allow(deprecated)] impl Reflect for .. { } diff --git a/src/libcore/mem.rs b/src/libcore/mem.rs index d3b8a60b79776..e0aa25724c1f9 100644 --- a/src/libcore/mem.rs +++ b/src/libcore/mem.rs @@ -15,7 +15,12 @@ #![stable(feature = "rust1", since = "1.0.0")] +use clone; +use cmp; +use fmt; +use hash; use intrinsics; +use marker::{Copy, PhantomData, Sized}; use ptr; #[stable(feature = "rust1", since = "1.0.0")] @@ -647,3 +652,80 @@ pub fn drop(_x: T) { } pub unsafe fn transmute_copy(src: &T) -> U { ptr::read(src as *const T as *const U) } + +/// Opaque type representing the discriminant of an enum. +/// +/// See the `discriminant` function in this module for more information. +#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +pub struct Discriminant(u64, PhantomData<*const T>); + +// N.B. These trait implementations cannot be derived because we don't want any bounds on T. 
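The `N.B.` comment above is the usual reason for writing such impls by hand: `#[derive]` places a `T: Trait` bound on every type parameter, which `Discriminant<T>` must avoid. A rough illustration (not part of the patch; `Tag` and `NotClone` are made up):

```rust
use std::marker::PhantomData;

struct NotClone;

// `#[derive(Clone)]` would generate `impl<T: Clone> Clone for Tag<T>`,
// so `Tag<NotClone>` could not be cloned even though no `T` is stored.
struct Tag<T>(u64, PhantomData<T>);

// A manual impl drops the unwanted `T: Clone` bound.
impl<T> Clone for Tag<T> {
    fn clone(&self) -> Tag<T> {
        Tag(self.0, PhantomData)
    }
}

fn main() {
    let tag: Tag<NotClone> = Tag(7, PhantomData);
    assert_eq!(tag.clone().0, 7);
}
```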
+ +#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +impl Copy for Discriminant {} + +#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +impl clone::Clone for Discriminant { + fn clone(&self) -> Self { + *self + } +} + +#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +impl cmp::PartialEq for Discriminant { + fn eq(&self, rhs: &Self) -> bool { + self.0 == rhs.0 + } +} + +#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +impl cmp::Eq for Discriminant {} + +#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +impl hash::Hash for Discriminant { + fn hash(&self, state: &mut H) { + self.0.hash(state); + } +} + +#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +impl fmt::Debug for Discriminant { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + fmt.debug_tuple("Discriminant") + .field(&self.0) + .finish() + } +} + +/// Returns a value uniquely identifying the enum variant in `v`. +/// +/// If `T` is not an enum, calling this function will not result in undefined behavior, but the +/// return value is unspecified. +/// +/// # Stability +/// +/// The discriminant of an enum variant may change if the enum definition changes. A discriminant +/// of some variant will not change between compilations with the same compiler. +/// +/// # Examples +/// +/// This can be used to compare enums that carry data, while disregarding +/// the actual data: +/// +/// ``` +/// #![feature(discriminant_value)] +/// use std::mem; +/// +/// enum Foo { A(&'static str), B(i32), C(i32) } +/// +/// assert!(mem::discriminant(&Foo::A("bar")) == mem::discriminant(&Foo::A("baz"))); +/// assert!(mem::discriminant(&Foo::B(1)) == mem::discriminant(&Foo::B(2))); +/// assert!(mem::discriminant(&Foo::B(3)) != mem::discriminant(&Foo::C(3))); +/// ``` +#[unstable(feature = "discriminant_value", reason = "recently added, follows RFC", issue = "24263")] +pub fn discriminant(v: &T) -> Discriminant { + unsafe { + Discriminant(intrinsics::discriminant_value(v), PhantomData) + } +} + diff --git a/src/libcore/num/bignum.rs b/src/libcore/num/bignum.rs index 1ca550c67463c..a1f4630c304bf 100644 --- a/src/libcore/num/bignum.rs +++ b/src/libcore/num/bignum.rs @@ -34,19 +34,22 @@ use intrinsics; pub trait FullOps: Sized { /// Returns `(carry', v')` such that `carry' * 2^W + v' = self + other + carry`, /// where `W` is the number of bits in `Self`. - fn full_add(self, other: Self, carry: bool) -> (bool /*carry*/, Self); + fn full_add(self, other: Self, carry: bool) -> (bool /* carry */, Self); /// Returns `(carry', v')` such that `carry' * 2^W + v' = self * other + carry`, /// where `W` is the number of bits in `Self`. - fn full_mul(self, other: Self, carry: Self) -> (Self /*carry*/, Self); + fn full_mul(self, other: Self, carry: Self) -> (Self /* carry */, Self); /// Returns `(carry', v')` such that `carry' * 2^W + v' = self * other + other2 + carry`, /// where `W` is the number of bits in `Self`. - fn full_mul_add(self, other: Self, other2: Self, carry: Self) -> (Self /*carry*/, Self); + fn full_mul_add(self, other: Self, other2: Self, carry: Self) -> (Self /* carry */, Self); /// Returns `(quo, rem)` such that `borrow * 2^W + self = quo * other + rem` /// and `0 <= rem < other`, where `W` is the number of bits in `Self`. 
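To make the `full_div_rem` contract just quoted concrete, here is an illustrative computation for a single `u8` digit (`W = 8`), done with plain widening arithmetic rather than the `FullOps` trait itself (not part of the patch):

```rust
fn main() {
    // Contract: borrow * 2^W + self = quo * other + rem, with 0 <= rem < other.
    let (borrow, digit, other): (u8, u8, u8) = (1, 44, 100);
    let wide = ((borrow as u16) << 8) | digit as u16; // 1 * 256 + 44 = 300
    let (quo, rem) = (wide / other as u16, wide % other as u16);
    assert_eq!((quo, rem), (3, 0));
    // The defining identity holds.
    assert_eq!(quo * other as u16 + rem, wide);
}
```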
- fn full_div_rem(self, other: Self, borrow: Self) -> (Self /*quotient*/, Self /*remainder*/); + fn full_div_rem(self, + other: Self, + borrow: Self) + -> (Self /* quotient */, Self /* remainder */); } macro_rules! impl_full_ops { @@ -100,11 +103,7 @@ impl_full_ops! { /// Table of powers of 5 representable in digits. Specifically, the largest {u8, u16, u32} value /// that's a power of five, plus the corresponding exponent. Used in `mul_pow5`. -const SMALL_POW5: [(u64, usize); 3] = [ - (125, 3), - (15625, 6), - (1_220_703_125, 13), -]; +const SMALL_POW5: [(u64, usize); 3] = [(125, 3), (15625, 6), (1_220_703_125, 13)]; macro_rules! define_bignum { ($name:ident: type=$ty:ty, n=$n:expr) => ( diff --git a/src/libcore/num/dec2flt/mod.rs b/src/libcore/num/dec2flt/mod.rs index cd40e399ab95e..eee3e9250fe81 100644 --- a/src/libcore/num/dec2flt/mod.rs +++ b/src/libcore/num/dec2flt/mod.rs @@ -155,13 +155,13 @@ from_str_float_impl!(f64); /// [`FromStr`]: ../str/trait.FromStr.html /// [`f32`]: ../../std/primitive.f32.html /// [`f64`]: ../../std/primitive.f64.html -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Eq)] #[stable(feature = "rust1", since = "1.0.0")] pub struct ParseFloatError { kind: FloatErrorKind } -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Eq)] enum FloatErrorKind { Empty, Invalid, diff --git a/src/libcore/num/diy_float.rs b/src/libcore/num/diy_float.rs index 7c369ee3b3bd7..11eea753f93f9 100644 --- a/src/libcore/num/diy_float.rs +++ b/src/libcore/num/diy_float.rs @@ -49,12 +49,30 @@ impl Fp { pub fn normalize(&self) -> Fp { let mut f = self.f; let mut e = self.e; - if f >> (64 - 32) == 0 { f <<= 32; e -= 32; } - if f >> (64 - 16) == 0 { f <<= 16; e -= 16; } - if f >> (64 - 8) == 0 { f <<= 8; e -= 8; } - if f >> (64 - 4) == 0 { f <<= 4; e -= 4; } - if f >> (64 - 2) == 0 { f <<= 2; e -= 2; } - if f >> (64 - 1) == 0 { f <<= 1; e -= 1; } + if f >> (64 - 32) == 0 { + f <<= 32; + e -= 32; + } + if f >> (64 - 16) == 0 { + f <<= 16; + e -= 16; + } + if f >> (64 - 8) == 0 { + f <<= 8; + e -= 8; + } + if f >> (64 - 4) == 0 { + f <<= 4; + e -= 4; + } + if f >> (64 - 2) == 0 { + f <<= 2; + e -= 2; + } + if f >> (64 - 1) == 0 { + f <<= 1; + e -= 1; + } debug_assert!(f >= (1 >> 63)); Fp { f: f, e: e } } @@ -66,6 +84,9 @@ impl Fp { assert!(edelta >= 0); let edelta = edelta as usize; assert_eq!(self.f << edelta >> edelta, self.f); - Fp { f: self.f << edelta, e: e } + Fp { + f: self.f << edelta, + e: e, + } } } diff --git a/src/libcore/num/f32.rs b/src/libcore/num/f32.rs index 07b05f91f489f..4527d46a27d8a 100644 --- a/src/libcore/num/f32.rs +++ b/src/libcore/num/f32.rs @@ -61,13 +61,13 @@ pub const MAX_10_EXP: i32 = 38; /// Not a Number (NaN). #[stable(feature = "rust1", since = "1.0.0")] -pub const NAN: f32 = 0.0_f32/0.0_f32; +pub const NAN: f32 = 0.0_f32 / 0.0_f32; /// Infinity (∞). #[stable(feature = "rust1", since = "1.0.0")] -pub const INFINITY: f32 = 1.0_f32/0.0_f32; +pub const INFINITY: f32 = 1.0_f32 / 0.0_f32; /// Negative infinity (-∞). #[stable(feature = "rust1", since = "1.0.0")] -pub const NEG_INFINITY: f32 = -1.0_f32/0.0_f32; +pub const NEG_INFINITY: f32 = -1.0_f32 / 0.0_f32; /// Basic mathematical constants. 
#[stable(feature = "rust1", since = "1.0.0")] @@ -144,26 +144,40 @@ pub mod consts { issue = "32110")] impl Float for f32 { #[inline] - fn nan() -> f32 { NAN } + fn nan() -> f32 { + NAN + } #[inline] - fn infinity() -> f32 { INFINITY } + fn infinity() -> f32 { + INFINITY + } #[inline] - fn neg_infinity() -> f32 { NEG_INFINITY } + fn neg_infinity() -> f32 { + NEG_INFINITY + } #[inline] - fn zero() -> f32 { 0.0 } + fn zero() -> f32 { + 0.0 + } #[inline] - fn neg_zero() -> f32 { -0.0 } + fn neg_zero() -> f32 { + -0.0 + } #[inline] - fn one() -> f32 { 1.0 } + fn one() -> f32 { + 1.0 + } /// Returns `true` if the number is NaN. #[inline] - fn is_nan(self) -> bool { self != self } + fn is_nan(self) -> bool { + self != self + } /// Returns `true` if the number is infinite. #[inline] @@ -192,11 +206,11 @@ impl Float for f32 { let bits: u32 = unsafe { mem::transmute(self) }; match (bits & MAN_MASK, bits & EXP_MASK) { - (0, 0) => Fp::Zero, - (_, 0) => Fp::Subnormal, + (0, 0) => Fp::Zero, + (_, 0) => Fp::Subnormal, (0, EXP_MASK) => Fp::Infinite, (_, EXP_MASK) => Fp::Nan, - _ => Fp::Normal, + _ => Fp::Normal, } } @@ -252,7 +266,9 @@ impl Float for f32 { /// Returns the reciprocal (multiplicative inverse) of the number. #[inline] - fn recip(self) -> f32 { 1.0 / self } + fn recip(self) -> f32 { + 1.0 / self + } #[inline] fn powi(self, n: i32) -> f32 { @@ -261,7 +277,9 @@ impl Float for f32 { /// Converts to degrees, assuming the number is in radians. #[inline] - fn to_degrees(self) -> f32 { self * (180.0f32 / consts::PI) } + fn to_degrees(self) -> f32 { + self * (180.0f32 / consts::PI) + } /// Converts to radians, assuming the number is in degrees. #[inline] diff --git a/src/libcore/num/f64.rs b/src/libcore/num/f64.rs index 82a09e599e027..991a856834948 100644 --- a/src/libcore/num/f64.rs +++ b/src/libcore/num/f64.rs @@ -61,13 +61,13 @@ pub const MAX_10_EXP: i32 = 308; /// Not a Number (NaN). #[stable(feature = "rust1", since = "1.0.0")] -pub const NAN: f64 = 0.0_f64/0.0_f64; +pub const NAN: f64 = 0.0_f64 / 0.0_f64; /// Infinity (∞). #[stable(feature = "rust1", since = "1.0.0")] -pub const INFINITY: f64 = 1.0_f64/0.0_f64; +pub const INFINITY: f64 = 1.0_f64 / 0.0_f64; /// Negative infinity (-∞). #[stable(feature = "rust1", since = "1.0.0")] -pub const NEG_INFINITY: f64 = -1.0_f64/0.0_f64; +pub const NEG_INFINITY: f64 = -1.0_f64 / 0.0_f64; /// Basic mathematical constants. #[stable(feature = "rust1", since = "1.0.0")] @@ -144,26 +144,40 @@ pub mod consts { issue = "32110")] impl Float for f64 { #[inline] - fn nan() -> f64 { NAN } + fn nan() -> f64 { + NAN + } #[inline] - fn infinity() -> f64 { INFINITY } + fn infinity() -> f64 { + INFINITY + } #[inline] - fn neg_infinity() -> f64 { NEG_INFINITY } + fn neg_infinity() -> f64 { + NEG_INFINITY + } #[inline] - fn zero() -> f64 { 0.0 } + fn zero() -> f64 { + 0.0 + } #[inline] - fn neg_zero() -> f64 { -0.0 } + fn neg_zero() -> f64 { + -0.0 + } #[inline] - fn one() -> f64 { 1.0 } + fn one() -> f64 { + 1.0 + } /// Returns `true` if the number is NaN. #[inline] - fn is_nan(self) -> bool { self != self } + fn is_nan(self) -> bool { + self != self + } /// Returns `true` if the number is infinite. 
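The `self != self` body shown above works because IEEE 754 makes NaN compare unequal to every value, including itself; a quick illustration (not part of the patch):

```rust
fn main() {
    let nan = std::f32::NAN;
    // NaN is the only value for which `x != x` holds.
    assert!(nan != nan);
    assert!(nan.is_nan());

    let ordinary = 1.5_f32;
    assert!(ordinary == ordinary);
    assert!(!ordinary.is_nan());
}
```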
#[inline] @@ -192,11 +206,11 @@ impl Float for f64 { let bits: u64 = unsafe { mem::transmute(self) }; match (bits & MAN_MASK, bits & EXP_MASK) { - (0, 0) => Fp::Zero, - (_, 0) => Fp::Subnormal, + (0, 0) => Fp::Zero, + (_, 0) => Fp::Subnormal, (0, EXP_MASK) => Fp::Infinite, (_, EXP_MASK) => Fp::Nan, - _ => Fp::Normal, + _ => Fp::Normal, } } @@ -252,7 +266,9 @@ impl Float for f64 { /// Returns the reciprocal (multiplicative inverse) of the number. #[inline] - fn recip(self) -> f64 { 1.0 / self } + fn recip(self) -> f64 { + 1.0 / self + } #[inline] fn powi(self, n: i32) -> f64 { @@ -261,7 +277,9 @@ impl Float for f64 { /// Converts to degrees, assuming the number is in radians. #[inline] - fn to_degrees(self) -> f64 { self * (180.0f64 / consts::PI) } + fn to_degrees(self) -> f64 { + self * (180.0f64 / consts::PI) + } /// Converts to radians, assuming the number is in degrees. #[inline] diff --git a/src/libcore/num/flt2dec/decoder.rs b/src/libcore/num/flt2dec/decoder.rs index 276667e44aae1..72529d3da01d1 100644 --- a/src/libcore/num/flt2dec/decoder.rs +++ b/src/libcore/num/flt2dec/decoder.rs @@ -21,7 +21,7 @@ use num::dec2flt::rawfp::RawFloat; /// - Any number from `(mant - minus) * 2^exp` to `(mant + plus) * 2^exp` will /// round to the original value. The range is inclusive only when /// `inclusive` is true. -#[derive(Copy, Clone, Debug, PartialEq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] pub struct Decoded { /// The scaled mantissa. pub mant: u64, @@ -38,7 +38,7 @@ pub struct Decoded { } /// Decoded unsigned value. -#[derive(Copy, Clone, Debug, PartialEq)] +#[derive(Copy, Clone, Debug, PartialEq, Eq)] pub enum FullDecoded { /// Not-a-number. Nan, diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs index 29ee29eb3eb7f..a4529909e83ef 100644 --- a/src/libcore/num/mod.rs +++ b/src/libcore/num/mod.rs @@ -43,7 +43,8 @@ use str::FromStr; /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Hash)] -pub struct Wrapping(#[stable(feature = "rust1", since = "1.0.0")] pub T); +pub struct Wrapping(#[stable(feature = "rust1", since = "1.0.0")] + pub T); #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for Wrapping { @@ -516,11 +517,10 @@ macro_rules! int_impl { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn checked_div(self, other: Self) -> Option { - if other == 0 { + if other == 0 || (self == Self::min_value() && other == -1) { None } else { - let (a, b) = self.overflowing_div(other); - if b {None} else {Some(a)} + Some(unsafe { intrinsics::unchecked_div(self, other) }) } } @@ -541,11 +541,10 @@ macro_rules! int_impl { #[stable(feature = "wrapping", since = "1.7.0")] #[inline] pub fn checked_rem(self, other: Self) -> Option { - if other == 0 { + if other == 0 || (self == Self::min_value() && other == -1) { None } else { - let (a, b) = self.overflowing_rem(other); - if b {None} else {Some(a)} + Some(unsafe { intrinsics::unchecked_rem(self, other) }) } } @@ -613,14 +612,12 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// # #![feature(no_panic_abs)] - /// /// use std::i32; /// /// assert_eq!((-5i32).checked_abs(), Some(5)); /// assert_eq!(i32::MIN.checked_abs(), None); /// ``` - #[unstable(feature = "no_panic_abs", issue = "35057")] + #[stable(feature = "no_panic_abs", since = "1.13.0")] #[inline] pub fn checked_abs(self) -> Option { if self.is_negative() { @@ -895,14 +892,12 @@ macro_rules! 
int_impl { /// Basic usage: /// /// ``` - /// # #![feature(no_panic_abs)] - /// /// assert_eq!(100i8.wrapping_abs(), 100); /// assert_eq!((-100i8).wrapping_abs(), 100); /// assert_eq!((-128i8).wrapping_abs(), -128); /// assert_eq!((-128i8).wrapping_abs() as u8, 128); /// ``` - #[unstable(feature = "no_panic_abs", issue = "35057")] + #[stable(feature = "no_panic_abs", since = "1.13.0")] #[inline(always)] pub fn wrapping_abs(self) -> Self { if self.is_negative() { @@ -1133,13 +1128,11 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// # #![feature(no_panic_abs)] - /// /// assert_eq!(10i8.overflowing_abs(), (10,false)); /// assert_eq!((-10i8).overflowing_abs(), (10,false)); /// assert_eq!((-128i8).overflowing_abs(), (-128,true)); /// ``` - #[unstable(feature = "no_panic_abs", issue = "35057")] + #[stable(feature = "no_panic_abs", since = "1.13.0")] #[inline] pub fn overflowing_abs(self) -> (Self, bool) { if self.is_negative() { @@ -1694,7 +1687,7 @@ macro_rules! uint_impl { pub fn checked_div(self, other: Self) -> Option { match other { 0 => None, - other => Some(self / other), + other => Some(unsafe { intrinsics::unchecked_div(self, other) }), } } @@ -1715,7 +1708,7 @@ macro_rules! uint_impl { if other == 0 { None } else { - Some(self % other) + Some(unsafe { intrinsics::unchecked_rem(self, other) }) } } @@ -2211,25 +2204,21 @@ macro_rules! uint_impl { let mut base = self; let mut acc = 1; - let mut prev_base = self; - let mut base_oflo = false; - while exp > 0 { + while exp > 1 { if (exp & 1) == 1 { - if base_oflo { - // ensure overflow occurs in the same manner it - // would have otherwise (i.e. signal any exception - // it would have otherwise). - acc = acc * (prev_base * prev_base); - } else { - acc = acc * base; - } + acc = acc * base; } - prev_base = base; - let (new_base, new_base_oflo) = base.overflowing_mul(base); - base = new_base; - base_oflo = new_base_oflo; exp /= 2; + base = base * base; + } + + // Deal with the final bit of the exponent separately, since + // squaring the base afterwards is not necessary and may cause a + // needless overflow. + if exp == 1 { + acc = acc * base; } + acc } @@ -2405,7 +2394,7 @@ impl usize { /// assert_eq!(nan.classify(), FpCategory::Nan); /// assert_eq!(sub.classify(), FpCategory::Subnormal); /// ``` -#[derive(Copy, Clone, PartialEq, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub enum FpCategory { /// "Not a Number", often obtained by dividing by zero. @@ -2414,7 +2403,7 @@ pub enum FpCategory { /// Positive or negative infinity. #[stable(feature = "rust1", since = "1.0.0")] - Infinite , + Infinite, /// Positive or negative zero. #[stable(feature = "rust1", since = "1.0.0")] @@ -2580,7 +2569,7 @@ impl fmt::Display for TryFromIntError { macro_rules! same_sign_from_int_impl { ($storage:ty, $target:ty, $($source:ty),*) => {$( - #[stable(feature = "rust1", since = "1.0.0")] + #[unstable(feature = "try_from", issue = "33417")] impl TryFrom<$source> for $target { type Err = TryFromIntError; @@ -2610,7 +2599,7 @@ same_sign_from_int_impl!(i64, isize, i8, i16, i32, i64, isize); macro_rules! cross_sign_from_int_impl { ($unsigned:ty, $($signed:ty),*) => {$( - #[stable(feature = "rust1", since = "1.0.0")] + #[unstable(feature = "try_from", issue = "33417")] impl TryFrom<$unsigned> for $signed { type Err = TryFromIntError; @@ -2624,7 +2613,7 @@ macro_rules! 
cross_sign_from_int_impl { } } - #[stable(feature = "rust1", since = "1.0.0")] + #[unstable(feature = "try_from", issue = "33417")] impl TryFrom<$signed> for $unsigned { type Err = TryFromIntError; @@ -2674,8 +2663,7 @@ macro_rules! doit { } doit! { i8 i16 i32 i64 isize u8 u16 u32 u64 usize } -fn from_str_radix(src: &str, radix: u32) - -> Result { +fn from_str_radix(src: &str, radix: u32) -> Result { use self::IntErrorKind::*; use self::ParseIntError as PIE; @@ -2698,7 +2686,7 @@ fn from_str_radix(src: &str, radix: u32) let (is_positive, digits) = match src[0] { b'+' => (true, &src[1..]), b'-' if is_signed_ty => (false, &src[1..]), - _ => (true, src) + _ => (true, src), }; if digits.is_empty() { @@ -2748,11 +2736,13 @@ fn from_str_radix(src: &str, radix: u32) /// on the primitive integer types, such as [`i8::from_str_radix()`]. /// /// [`i8::from_str_radix()`]: ../../std/primitive.i8.html#method.from_str_radix -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Eq)] #[stable(feature = "rust1", since = "1.0.0")] -pub struct ParseIntError { kind: IntErrorKind } +pub struct ParseIntError { + kind: IntErrorKind, +} -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Eq)] enum IntErrorKind { Empty, InvalidDigit, diff --git a/src/libcore/num/wrapping.rs b/src/libcore/num/wrapping.rs index 4857817e84e4f..d35c451ac2604 100644 --- a/src/libcore/num/wrapping.rs +++ b/src/libcore/num/wrapping.rs @@ -28,7 +28,7 @@ macro_rules! sh_impl_signed { } } - #[stable(feature = "wrapping_impls", since = "1.7.0")] + #[stable(feature = "op_assign_traits", since = "1.8.0")] impl ShlAssign<$f> for Wrapping<$t> { #[inline(always)] fn shl_assign(&mut self, other: $f) { @@ -50,7 +50,7 @@ macro_rules! sh_impl_signed { } } - #[stable(feature = "wrapping_impls", since = "1.7.0")] + #[stable(feature = "op_assign_traits", since = "1.8.0")] impl ShrAssign<$f> for Wrapping<$t> { #[inline(always)] fn shr_assign(&mut self, other: $f) { @@ -72,7 +72,7 @@ macro_rules! sh_impl_unsigned { } } - #[stable(feature = "wrapping_impls", since = "1.7.0")] + #[stable(feature = "op_assign_traits", since = "1.8.0")] impl ShlAssign<$f> for Wrapping<$t> { #[inline(always)] fn shl_assign(&mut self, other: $f) { @@ -90,7 +90,7 @@ macro_rules! sh_impl_unsigned { } } - #[stable(feature = "wrapping_impls", since = "1.7.0")] + #[stable(feature = "op_assign_traits", since = "1.8.0")] impl ShrAssign<$f> for Wrapping<$t> { #[inline(always)] fn shr_assign(&mut self, other: $f) { @@ -310,13 +310,13 @@ mod shift_max { pub const isize: u32 = super::i64; } - pub const i8: u32 = (1 << 3) - 1; + pub const i8: u32 = (1 << 3) - 1; pub const i16: u32 = (1 << 4) - 1; pub const i32: u32 = (1 << 5) - 1; pub const i64: u32 = (1 << 6) - 1; pub use self::platform::isize; - pub const u8: u32 = i8; + pub const u8: u32 = i8; pub const u16: u32 = i16; pub const u32: u32 = i32; pub const u64: u32 = i64; diff --git a/src/libcore/ops.rs b/src/libcore/ops.rs index 85a52da332db5..62aa57bbbbf05 100644 --- a/src/libcore/ops.rs +++ b/src/libcore/ops.rs @@ -14,7 +14,7 @@ //! //! Some of these traits are imported by the prelude, so they are available in //! every Rust program. Only operators backed by traits can be overloaded. For -//! example, the addition operator (`+`) can be overloaded through the `Add` +//! example, the addition operator (`+`) can be overloaded through the [`Add`] //! trait, but since the assignment operator (`=`) has no backing trait, there //! is no way of overloading its semantics. 
Additionally, this module does not //! provide any mechanism to create new operators. If traitless overloading or @@ -30,17 +30,18 @@ //! contexts involving built-in types, this is usually not a problem. //! However, using these operators in generic code, requires some //! attention if values have to be reused as opposed to letting the operators -//! consume them. One option is to occasionally use `clone()`. +//! consume them. One option is to occasionally use [`clone()`]. //! Another option is to rely on the types involved providing additional //! operator implementations for references. For example, for a user-defined //! type `T` which is supposed to support addition, it is probably a good -//! idea to have both `T` and `&T` implement the traits `Add` and `Add<&T>` -//! so that generic code can be written without unnecessary cloning. +//! idea to have both `T` and `&T` implement the traits [`Add`][`Add`] and +//! [`Add<&T>`][`Add`] so that generic code can be written without unnecessary +//! cloning. //! //! # Examples //! -//! This example creates a `Point` struct that implements `Add` and `Sub`, and -//! then demonstrates adding and subtracting two `Point`s. +//! This example creates a `Point` struct that implements [`Add`] and [`Sub`], +//! and then demonstrates adding and subtracting two `Point`s. //! //! ```rust //! use std::ops::{Add, Sub}; @@ -75,18 +76,14 @@ //! See the documentation for each trait for an example implementation. //! //! The [`Fn`], [`FnMut`], and [`FnOnce`] traits are implemented by types that can be -//! invoked like functions. Note that `Fn` takes `&self`, `FnMut` takes `&mut -//! self` and `FnOnce` takes `self`. These correspond to the three kinds of +//! invoked like functions. Note that [`Fn`] takes `&self`, [`FnMut`] takes `&mut +//! self` and [`FnOnce`] takes `self`. These correspond to the three kinds of //! methods that can be invoked on an instance: call-by-reference, //! call-by-mutable-reference, and call-by-value. The most common use of these //! traits is to act as bounds to higher-level functions that take functions or //! closures as arguments. //! -//! [`Fn`]: trait.Fn.html -//! [`FnMut`]: trait.FnMut.html -//! [`FnOnce`]: trait.FnOnce.html -//! -//! Taking a `Fn` as a parameter: +//! Taking a [`Fn`] as a parameter: //! //! ```rust //! fn call_with_one(func: F) -> usize @@ -99,7 +96,7 @@ //! assert_eq!(call_with_one(double), 2); //! ``` //! -//! Taking a `FnMut` as a parameter: +//! Taking a [`FnMut`] as a parameter: //! //! ```rust //! fn do_twice(mut func: F) @@ -118,7 +115,7 @@ //! assert_eq!(x, 5); //! ``` //! -//! Taking a `FnOnce` as a parameter: +//! Taking a [`FnOnce`] as a parameter: //! //! ```rust //! fn consume_with_relish(func: F) @@ -140,6 +137,13 @@ //! //! // `consume_and_return_x` can no longer be invoked at this point //! ``` +//! +//! [`Fn`]: trait.Fn.html +//! [`FnMut`]: trait.FnMut.html +//! [`FnOnce`]: trait.FnOnce.html +//! [`Add`]: trait.Add.html +//! [`Sub`]: trait.Sub.html +//! [`clone()`]: ../clone/trait.Clone.html#tymethod.clone #![stable(feature = "rust1", since = "1.0.0")] @@ -1641,7 +1645,7 @@ rem_assign_impl! { usize u8 u16 u32 u64 isize i8 i16 i32 i64 f32 f64 } #[lang = "bitand_assign"] #[stable(feature = "op_assign_traits", since = "1.8.0")] pub trait BitAndAssign { - /// The method for the `&` operator + /// The method for the `&=` operator #[stable(feature = "op_assign_traits", since = "1.8.0")] fn bitand_assign(&mut self, Rhs); } @@ -1873,12 +1877,20 @@ macro_rules! shr_assign_impl_all { shr_assign_impl_all! 
{ u8 u16 u32 u64 usize i8 i16 i32 i64 isize } /// The `Index` trait is used to specify the functionality of indexing operations -/// like `arr[idx]` when used in an immutable context. +/// like `container[index]` when used in an immutable context. +/// +/// `container[index]` is actually syntactic sugar for `*container.index(index)`, +/// but only when used as an immutable value. If a mutable value is requested, +/// [`IndexMut`] is used instead. This allows nice things such as +/// `let value = v[index]` if `value` implements [`Copy`]. +/// +/// [`IndexMut`]: ../../std/ops/trait.IndexMut.html +/// [`Copy`]: ../../std/marker/trait.Copy.html /// /// # Examples /// -/// This example implements `Index` on a read-only `NucleotideCount` container, -/// enabling individual counts to be retrieved with index syntax. +/// The following example implements `Index` on a read-only `NucleotideCount` +/// container, enabling individual counts to be retrieved with index syntax. /// /// ``` /// use std::ops::Index; @@ -1924,50 +1936,91 @@ pub trait Index { #[stable(feature = "rust1", since = "1.0.0")] type Output: ?Sized; - /// The method for the indexing (`Foo[Bar]`) operation + /// The method for the indexing (`container[index]`) operation #[stable(feature = "rust1", since = "1.0.0")] fn index(&self, index: Idx) -> &Self::Output; } /// The `IndexMut` trait is used to specify the functionality of indexing -/// operations like `arr[idx]`, when used in a mutable context. +/// operations like `container[index]` when used in a mutable context. +/// +/// `container[index]` is actually syntactic sugar for +/// `*container.index_mut(index)`, but only when used as a mutable value. If +/// an immutable value is requested, the [`Index`] trait is used instead. This +/// allows nice things such as `v[index] = value` if `value` implements [`Copy`]. +/// +/// [`Index`]: ../../std/ops/trait.Index.html +/// [`Copy`]: ../../std/marker/trait.Copy.html /// /// # Examples /// -/// A trivial implementation of `IndexMut`. When `Foo[Bar]` happens, it ends up -/// calling `index_mut`, and therefore, `main` prints `Indexing!`. +/// A very simple implementation of a `Balance` struct that has two sides, where +/// each can be indexed mutably and immutably. 
/// /// ``` -/// use std::ops::{Index, IndexMut}; +/// use std::ops::{Index,IndexMut}; /// -/// #[derive(Copy, Clone)] -/// struct Foo; -/// struct Bar; +/// #[derive(Debug)] +/// enum Side { +/// Left, +/// Right, +/// } +/// +/// #[derive(Debug, PartialEq)] +/// enum Weight { +/// Kilogram(f32), +/// Pound(f32), +/// } /// -/// impl Index for Foo { -/// type Output = Foo; +/// struct Balance { +/// pub left: Weight, +/// pub right:Weight, +/// } +/// +/// impl Index for Balance { +/// type Output = Weight; /// -/// fn index<'a>(&'a self, _index: Bar) -> &'a Foo { -/// self +/// fn index<'a>(&'a self, index: Side) -> &'a Weight { +/// println!("Accessing {:?}-side of balance immutably", index); +/// match index { +/// Side::Left => &self.left, +/// Side::Right => &self.right, +/// } /// } /// } /// -/// impl IndexMut for Foo { -/// fn index_mut<'a>(&'a mut self, _index: Bar) -> &'a mut Foo { -/// println!("Indexing!"); -/// self +/// impl IndexMut for Balance { +/// fn index_mut<'a>(&'a mut self, index: Side) -> &'a mut Weight { +/// println!("Accessing {:?}-side of balance mutably", index); +/// match index { +/// Side::Left => &mut self.left, +/// Side::Right => &mut self.right, +/// } /// } /// } /// /// fn main() { -/// &mut Foo[Bar]; +/// let mut balance = Balance { +/// right: Weight::Kilogram(2.5), +/// left: Weight::Pound(1.5), +/// }; +/// +/// // In this case balance[Side::Right] is sugar for +/// // *balance.index(Side::Right), since we are only reading +/// // balance[Side::Right], not writing it. +/// assert_eq!(balance[Side::Right],Weight::Kilogram(2.5)); +/// +/// // However in this case balance[Side::Left] is sugar for +/// // *balance.index_mut(Side::Left), since we are writing +/// // balance[Side::Left]. +/// balance[Side::Left] = Weight::Kilogram(3.0); /// } /// ``` #[lang = "index_mut"] #[rustc_on_unimplemented = "the type `{Self}` cannot be mutably indexed by `{Idx}`"] #[stable(feature = "rust1", since = "1.0.0")] pub trait IndexMut: Index { - /// The method for the indexing (`Foo[Bar]`) operation + /// The method for the mutable indexing (`container[index]`) operation #[stable(feature = "rust1", since = "1.0.0")] fn index_mut(&mut self, index: Idx) -> &mut Self::Output; } diff --git a/src/libcore/option.rs b/src/libcore/option.rs index b9fb2dc90c728..a74979911d34d 100644 --- a/src/libcore/option.rs +++ b/src/libcore/option.rs @@ -296,16 +296,14 @@ impl Option { /// Moves the value `v` out of the `Option` if it is `Some(v)`. /// - /// # Panics - /// - /// Panics if the self value equals `None`. - /// - /// # Safety note - /// /// In general, because this function may panic, its use is discouraged. /// Instead, prefer to use pattern matching and handle the `None` /// case explicitly. /// + /// # Panics + /// + /// Panics if the self value equals `None`. 
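As a brief aside (not part of the patch), the alternatives recommended above for handling the `None` case look like this in practice:

```rust
fn main() {
    let maybe_port: Option<u16> = None;

    // Pattern matching handles the `None` case explicitly...
    let port = match maybe_port {
        Some(p) => p,
        None => 8080,
    };
    assert_eq!(port, 8080);

    // ...and `unwrap_or` expresses the same fallback more compactly,
    // whereas calling `unwrap()` here would panic.
    assert_eq!(maybe_port.unwrap_or(8080), 8080);
}
```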
+ /// /// # Examples /// /// ``` @@ -916,12 +914,12 @@ impl> FromIterator> for Option { /// ``` /// use std::u16; /// - /// let v = vec!(1, 2); + /// let v = vec![1, 2]; /// let res: Option> = v.iter().map(|&x: &u16| /// if x == u16::MAX { None } /// else { Some(x + 1) } /// ).collect(); - /// assert!(res == Some(vec!(2, 3))); + /// assert!(res == Some(vec![2, 3])); /// ``` #[inline] fn from_iter>>(iter: I) -> Option { diff --git a/src/libcore/prelude/v1.rs b/src/libcore/prelude/v1.rs index 75db6fceab9b7..3fa6a97d4cd15 100644 --- a/src/libcore/prelude/v1.rs +++ b/src/libcore/prelude/v1.rs @@ -18,36 +18,50 @@ // Reexported core operators #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use marker::{Copy, Send, Sized, Sync}; +#[doc(no_inline)] +pub use marker::{Copy, Send, Sized, Sync}; #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use ops::{Drop, Fn, FnMut, FnOnce}; +#[doc(no_inline)] +pub use ops::{Drop, Fn, FnMut, FnOnce}; // Reexported functions #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use mem::drop; +#[doc(no_inline)] +pub use mem::drop; // Reexported types and traits #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use clone::Clone; +#[doc(no_inline)] +pub use clone::Clone; #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use cmp::{PartialEq, PartialOrd, Eq, Ord}; +#[doc(no_inline)] +pub use cmp::{PartialEq, PartialOrd, Eq, Ord}; #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use convert::{AsRef, AsMut, Into, From}; +#[doc(no_inline)] +pub use convert::{AsRef, AsMut, Into, From}; #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use default::Default; +#[doc(no_inline)] +pub use default::Default; #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use iter::{Iterator, Extend, IntoIterator}; +#[doc(no_inline)] +pub use iter::{Iterator, Extend, IntoIterator}; #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use iter::{DoubleEndedIterator, ExactSizeIterator}; +#[doc(no_inline)] +pub use iter::{DoubleEndedIterator, ExactSizeIterator}; #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use option::Option::{self, Some, None}; +#[doc(no_inline)] +pub use option::Option::{self, Some, None}; #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use result::Result::{self, Ok, Err}; +#[doc(no_inline)] +pub use result::Result::{self, Ok, Err}; // Reexported extension traits for primitive types #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use slice::SliceExt; +#[doc(no_inline)] +pub use slice::SliceExt; #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use str::StrExt; +#[doc(no_inline)] +pub use str::StrExt; #[stable(feature = "core_prelude", since = "1.4.0")] -#[doc(no_inline)] pub use char::CharExt; +#[doc(no_inline)] +pub use char::CharExt; diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index 69682652a6a51..f0510422a07d7 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -761,7 +761,7 @@ impl Deref for Unique { } } -#[stable(feature = "rust1", since = "1.0.0")] +#[unstable(feature = "unique", issue = "27730")] impl fmt::Pointer for Unique { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fmt::Pointer::fmt(&*self.pointer, f) diff --git a/src/libcore/result.rs b/src/libcore/result.rs index 96845259299be..3d34f62006785 100644 
--- a/src/libcore/result.rs +++ b/src/libcore/result.rs @@ -792,6 +792,44 @@ impl Result { } } +impl Result { + /// Returns the contained value or a default. + /// + /// Consumes the `self` argument and, if `Ok`, returns the contained + /// value; otherwise, if `Err`, returns the default value for that + /// type. + /// + /// # Examples + /// + /// Convert a string to an integer, turning poorly-formed strings + /// into 0 (the default value for integers). [`parse`] converts + /// a string to any other type that implements [`FromStr`], returning an + /// `Err` on error. + /// + /// ``` + /// #![feature(result_unwrap_or_default)] + /// + /// let good_year_from_input = "1909"; + /// let bad_year_from_input = "190blarg"; + /// let good_year = good_year_from_input.parse().unwrap_or_default(); + /// let bad_year = bad_year_from_input.parse().unwrap_or_default(); + /// + /// assert_eq!(1909, good_year); + /// assert_eq!(0, bad_year); + /// ``` + /// + /// [`parse`]: ../../std/primitive.str.html#method.parse + /// [`FromStr`]: ../../std/str/trait.FromStr.html + #[inline] + #[unstable(feature = "result_unwrap_or_default", issue = "0")] + pub fn unwrap_or_default(self) -> T { + match self { + Ok(x) => x, + Err(_) => Default::default(), + } + } +} + // This is a separate function to reduce the code size of the methods #[inline(never)] #[cold] @@ -977,12 +1015,12 @@ impl> FromIterator> for Result { /// ``` /// use std::u32; /// - /// let v = vec!(1, 2); + /// let v = vec![1, 2]; /// let res: Result, &'static str> = v.iter().map(|&x: &u32| /// if x == u32::MAX { Err("Overflow!") } /// else { Ok(x + 1) } /// ).collect(); - /// assert!(res == Ok(vec!(2, 3))); + /// assert!(res == Ok(vec![2, 3])); /// ``` #[inline] fn from_iter>>(iter: I) -> Result { @@ -1008,6 +1046,11 @@ impl> FromIterator> for Result { None => None, } } + + fn size_hint(&self) -> (usize, Option) { + let (_min, max) = self.iter.size_hint(); + (0, max) + } } let mut adapter = Adapter { iter: iter.into_iter(), err: None }; diff --git a/src/libcore/slice.rs b/src/libcore/slice.rs index d1df56905df24..31be404ba905a 100644 --- a/src/libcore/slice.rs +++ b/src/libcore/slice.rs @@ -1968,6 +1968,7 @@ unsafe impl<'a, T> TrustedRandomAccess for Iter<'a, T> { unsafe fn get_unchecked(&mut self, i: usize) -> &'a T { &*self.ptr.offset(i as isize) } + fn may_have_side_effect() -> bool { false } } #[doc(hidden)] @@ -1975,4 +1976,5 @@ unsafe impl<'a, T> TrustedRandomAccess for IterMut<'a, T> { unsafe fn get_unchecked(&mut self, i: usize) -> &'a mut T { &mut *self.ptr.offset(i as isize) } + fn may_have_side_effect() -> bool { false } } diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index 1f1ae6f12ab45..d63d2d64fe101 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -101,7 +101,7 @@ impl FromStr for bool { } /// An error returned when parsing a `bool` from a string fails. -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Eq)] #[stable(feature = "rust1", since = "1.0.0")] pub struct ParseBoolError { _priv: () } diff --git a/src/libcore/sync/atomic.rs b/src/libcore/sync/atomic.rs index f5f37be52de6e..657f7e7992fee 100644 --- a/src/libcore/sync/atomic.rs +++ b/src/libcore/sync/atomic.rs @@ -95,7 +95,7 @@ pub struct AtomicBool { #[cfg(target_has_atomic = "8")] #[stable(feature = "rust1", since = "1.0.0")] impl Default for AtomicBool { - /// Creates an `AtomicBool` initialised as false. + /// Creates an `AtomicBool` initialized to `false`.
fn default() -> Self { Self::new(false) } @@ -277,7 +277,9 @@ impl AtomicBool { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn store(&self, val: bool, order: Ordering) { - unsafe { atomic_store(self.v.get(), val as u8, order); } + unsafe { + atomic_store(self.v.get(), val as u8, order); + } } /// Stores a value into the bool, returning the old value. @@ -366,9 +368,11 @@ impl AtomicBool { current: bool, new: bool, success: Ordering, - failure: Ordering) -> Result { - match unsafe { atomic_compare_exchange(self.v.get(), current as u8, new as u8, - success, failure) } { + failure: Ordering) + -> Result { + match unsafe { + atomic_compare_exchange(self.v.get(), current as u8, new as u8, success, failure) + } { Ok(x) => Ok(x != 0), Err(x) => Err(x != 0), } @@ -409,9 +413,11 @@ impl AtomicBool { current: bool, new: bool, success: Ordering, - failure: Ordering) -> Result { - match unsafe { atomic_compare_exchange_weak(self.v.get(), current as u8, new as u8, - success, failure) } { + failure: Ordering) + -> Result { + match unsafe { + atomic_compare_exchange_weak(self.v.get(), current as u8, new as u8, success, failure) + } { Ok(x) => Ok(x != 0), Err(x) => Err(x != 0), } @@ -632,9 +638,7 @@ impl AtomicPtr { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn load(&self, order: Ordering) -> *mut T { - unsafe { - atomic_load(self.p.get() as *mut usize, order) as *mut T - } + unsafe { atomic_load(self.p.get() as *mut usize, order) as *mut T } } /// Stores a value into the pointer. @@ -660,7 +664,9 @@ impl AtomicPtr { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn store(&self, ptr: *mut T, order: Ordering) { - unsafe { atomic_store(self.p.get() as *mut usize, ptr as usize, order); } + unsafe { + atomic_store(self.p.get() as *mut usize, ptr as usize, order); + } } /// Stores a value into the pointer, returning the old value. 
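The hunks above and below only re-wrap the `compare_exchange` and `compare_exchange_weak` signatures and re-indent their bodies; behaviour is unchanged. As a reference point for the signature being reformatted, here is a minimal usage sketch (not part of the patch, shown only to illustrate the success/failure orderings):

```rust
use std::sync::atomic::{AtomicBool, Ordering};

fn main() {
    let flag = AtomicBool::new(false);

    // The value is currently `false`, so the exchange succeeds: `true` is
    // stored and the previous value comes back as `Ok(false)`.
    assert_eq!(flag.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire),
               Ok(false));

    // The value is now `true`, so the exchange fails: nothing is stored and
    // the actual value comes back as `Err(true)`.
    assert_eq!(flag.compare_exchange(false, true, Ordering::AcqRel, Ordering::Acquire),
               Err(true));
}
```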
@@ -745,7 +751,8 @@ impl AtomicPtr { current: *mut T, new: *mut T, success: Ordering, - failure: Ordering) -> Result<*mut T, *mut T> { + failure: Ordering) + -> Result<*mut T, *mut T> { unsafe { let res = atomic_compare_exchange(self.p.get() as *mut usize, current as usize, @@ -794,7 +801,8 @@ impl AtomicPtr { current: *mut T, new: *mut T, success: Ordering, - failure: Ordering) -> Result<*mut T, *mut T> { + failure: Ordering) + -> Result<*mut T, *mut T> { unsafe { let res = atomic_compare_exchange_weak(self.p.get() as *mut usize, current as usize, @@ -1266,9 +1274,9 @@ fn strongest_failure_ordering(order: Ordering) -> Ordering { match order { Release => Relaxed, Relaxed => Relaxed, - SeqCst => SeqCst, + SeqCst => SeqCst, Acquire => Acquire, - AcqRel => Acquire, + AcqRel => Acquire, } } @@ -1277,9 +1285,9 @@ unsafe fn atomic_store(dst: *mut T, val: T, order: Ordering) { match order { Release => intrinsics::atomic_store_rel(dst, val), Relaxed => intrinsics::atomic_store_relaxed(dst, val), - SeqCst => intrinsics::atomic_store(dst, val), + SeqCst => intrinsics::atomic_store(dst, val), Acquire => panic!("there is no such thing as an acquire store"), - AcqRel => panic!("there is no such thing as an acquire/release store"), + AcqRel => panic!("there is no such thing as an acquire/release store"), } } @@ -1288,9 +1296,9 @@ unsafe fn atomic_load(dst: *const T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_load_acq(dst), Relaxed => intrinsics::atomic_load_relaxed(dst), - SeqCst => intrinsics::atomic_load(dst), + SeqCst => intrinsics::atomic_load(dst), Release => panic!("there is no such thing as a release load"), - AcqRel => panic!("there is no such thing as an acquire/release load"), + AcqRel => panic!("there is no such thing as an acquire/release load"), } } @@ -1299,9 +1307,9 @@ unsafe fn atomic_swap(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_xchg_acq(dst, val), Release => intrinsics::atomic_xchg_rel(dst, val), - AcqRel => intrinsics::atomic_xchg_acqrel(dst, val), + AcqRel => intrinsics::atomic_xchg_acqrel(dst, val), Relaxed => intrinsics::atomic_xchg_relaxed(dst, val), - SeqCst => intrinsics::atomic_xchg(dst, val) + SeqCst => intrinsics::atomic_xchg(dst, val), } } @@ -1311,9 +1319,9 @@ unsafe fn atomic_add(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_xadd_acq(dst, val), Release => intrinsics::atomic_xadd_rel(dst, val), - AcqRel => intrinsics::atomic_xadd_acqrel(dst, val), + AcqRel => intrinsics::atomic_xadd_acqrel(dst, val), Relaxed => intrinsics::atomic_xadd_relaxed(dst, val), - SeqCst => intrinsics::atomic_xadd(dst, val) + SeqCst => intrinsics::atomic_xadd(dst, val), } } @@ -1323,9 +1331,9 @@ unsafe fn atomic_sub(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_xsub_acq(dst, val), Release => intrinsics::atomic_xsub_rel(dst, val), - AcqRel => intrinsics::atomic_xsub_acqrel(dst, val), + AcqRel => intrinsics::atomic_xsub_acqrel(dst, val), Relaxed => intrinsics::atomic_xsub_relaxed(dst, val), - SeqCst => intrinsics::atomic_xsub(dst, val) + SeqCst => intrinsics::atomic_xsub(dst, val), } } @@ -1334,26 +1342,23 @@ unsafe fn atomic_compare_exchange(dst: *mut T, old: T, new: T, success: Ordering, - failure: Ordering) -> Result { + failure: Ordering) + -> Result { let (val, ok) = match (success, failure) { (Acquire, Acquire) => intrinsics::atomic_cxchg_acq(dst, old, new), (Release, Relaxed) => intrinsics::atomic_cxchg_rel(dst, old, new), - (AcqRel, 
Acquire) => intrinsics::atomic_cxchg_acqrel(dst, old, new), + (AcqRel, Acquire) => intrinsics::atomic_cxchg_acqrel(dst, old, new), (Relaxed, Relaxed) => intrinsics::atomic_cxchg_relaxed(dst, old, new), - (SeqCst, SeqCst) => intrinsics::atomic_cxchg(dst, old, new), + (SeqCst, SeqCst) => intrinsics::atomic_cxchg(dst, old, new), (Acquire, Relaxed) => intrinsics::atomic_cxchg_acq_failrelaxed(dst, old, new), - (AcqRel, Relaxed) => intrinsics::atomic_cxchg_acqrel_failrelaxed(dst, old, new), - (SeqCst, Relaxed) => intrinsics::atomic_cxchg_failrelaxed(dst, old, new), - (SeqCst, Acquire) => intrinsics::atomic_cxchg_failacq(dst, old, new), + (AcqRel, Relaxed) => intrinsics::atomic_cxchg_acqrel_failrelaxed(dst, old, new), + (SeqCst, Relaxed) => intrinsics::atomic_cxchg_failrelaxed(dst, old, new), + (SeqCst, Acquire) => intrinsics::atomic_cxchg_failacq(dst, old, new), (_, AcqRel) => panic!("there is no such thing as an acquire/release failure ordering"), (_, Release) => panic!("there is no such thing as a release failure ordering"), _ => panic!("a failure ordering can't be stronger than a success ordering"), }; - if ok { - Ok(val) - } else { - Err(val) - } + if ok { Ok(val) } else { Err(val) } } #[inline] @@ -1361,26 +1366,23 @@ unsafe fn atomic_compare_exchange_weak(dst: *mut T, old: T, new: T, success: Ordering, - failure: Ordering) -> Result { + failure: Ordering) + -> Result { let (val, ok) = match (success, failure) { (Acquire, Acquire) => intrinsics::atomic_cxchgweak_acq(dst, old, new), (Release, Relaxed) => intrinsics::atomic_cxchgweak_rel(dst, old, new), - (AcqRel, Acquire) => intrinsics::atomic_cxchgweak_acqrel(dst, old, new), + (AcqRel, Acquire) => intrinsics::atomic_cxchgweak_acqrel(dst, old, new), (Relaxed, Relaxed) => intrinsics::atomic_cxchgweak_relaxed(dst, old, new), - (SeqCst, SeqCst) => intrinsics::atomic_cxchgweak(dst, old, new), + (SeqCst, SeqCst) => intrinsics::atomic_cxchgweak(dst, old, new), (Acquire, Relaxed) => intrinsics::atomic_cxchgweak_acq_failrelaxed(dst, old, new), - (AcqRel, Relaxed) => intrinsics::atomic_cxchgweak_acqrel_failrelaxed(dst, old, new), - (SeqCst, Relaxed) => intrinsics::atomic_cxchgweak_failrelaxed(dst, old, new), - (SeqCst, Acquire) => intrinsics::atomic_cxchgweak_failacq(dst, old, new), + (AcqRel, Relaxed) => intrinsics::atomic_cxchgweak_acqrel_failrelaxed(dst, old, new), + (SeqCst, Relaxed) => intrinsics::atomic_cxchgweak_failrelaxed(dst, old, new), + (SeqCst, Acquire) => intrinsics::atomic_cxchgweak_failacq(dst, old, new), (_, AcqRel) => panic!("there is no such thing as an acquire/release failure ordering"), (_, Release) => panic!("there is no such thing as a release failure ordering"), _ => panic!("a failure ordering can't be stronger than a success ordering"), }; - if ok { - Ok(val) - } else { - Err(val) - } + if ok { Ok(val) } else { Err(val) } } #[inline] @@ -1388,9 +1390,9 @@ unsafe fn atomic_and(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_and_acq(dst, val), Release => intrinsics::atomic_and_rel(dst, val), - AcqRel => intrinsics::atomic_and_acqrel(dst, val), + AcqRel => intrinsics::atomic_and_acqrel(dst, val), Relaxed => intrinsics::atomic_and_relaxed(dst, val), - SeqCst => intrinsics::atomic_and(dst, val) + SeqCst => intrinsics::atomic_and(dst, val), } } @@ -1399,9 +1401,9 @@ unsafe fn atomic_or(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_or_acq(dst, val), Release => intrinsics::atomic_or_rel(dst, val), - AcqRel => intrinsics::atomic_or_acqrel(dst, val), + 
AcqRel => intrinsics::atomic_or_acqrel(dst, val), Relaxed => intrinsics::atomic_or_relaxed(dst, val), - SeqCst => intrinsics::atomic_or(dst, val) + SeqCst => intrinsics::atomic_or(dst, val), } } @@ -1410,9 +1412,9 @@ unsafe fn atomic_xor(dst: *mut T, val: T, order: Ordering) -> T { match order { Acquire => intrinsics::atomic_xor_acq(dst, val), Release => intrinsics::atomic_xor_rel(dst, val), - AcqRel => intrinsics::atomic_xor_acqrel(dst, val), + AcqRel => intrinsics::atomic_xor_acqrel(dst, val), Relaxed => intrinsics::atomic_xor_relaxed(dst, val), - SeqCst => intrinsics::atomic_xor(dst, val) + SeqCst => intrinsics::atomic_xor(dst, val), } } @@ -1443,9 +1445,9 @@ pub fn fence(order: Ordering) { match order { Acquire => intrinsics::atomic_fence_acq(), Release => intrinsics::atomic_fence_rel(), - AcqRel => intrinsics::atomic_fence_acqrel(), - SeqCst => intrinsics::atomic_fence(), - Relaxed => panic!("there is no such thing as a relaxed fence") + AcqRel => intrinsics::atomic_fence_acqrel(), + SeqCst => intrinsics::atomic_fence(), + Relaxed => panic!("there is no such thing as a relaxed fence"), } } } diff --git a/src/libcoretest/char.rs b/src/libcoretest/char.rs index 199437a431eee..7da0b6902f271 100644 --- a/src/libcoretest/char.rs +++ b/src/libcoretest/char.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use std::char; +use std::{char,str}; use std::convert::TryFrom; #[test] @@ -248,10 +248,12 @@ fn test_escape_unicode() { #[test] fn test_encode_utf8() { fn check(input: char, expect: &[u8]) { - assert_eq!(input.encode_utf8().as_slice(), expect); - for (a, b) in input.encode_utf8().zip(expect) { - assert_eq!(a, *b); - } + let mut buf = [0; 4]; + let ptr = buf.as_ptr(); + let s = input.encode_utf8(&mut buf); + assert_eq!(s.as_ptr() as usize, ptr as usize); + assert!(str::from_utf8(s.as_bytes()).is_ok()); + assert_eq!(s.as_bytes(), expect); } check('x', &[0x78]); @@ -263,10 +265,11 @@ fn test_encode_utf8() { #[test] fn test_encode_utf16() { fn check(input: char, expect: &[u16]) { - assert_eq!(input.encode_utf16().as_slice(), expect); - for (a, b) in input.encode_utf16().zip(expect) { - assert_eq!(a, *b); - } + let mut buf = [0; 2]; + let ptr = buf.as_mut_ptr(); + let b = input.encode_utf16(&mut buf); + assert_eq!(b.as_mut_ptr() as usize, ptr as usize); + assert_eq!(b, expect); } check('x', &[0x0078]); diff --git a/src/libcoretest/hash/sip.rs b/src/libcoretest/hash/sip.rs index a5e6005545bd7..fa3bfdea42df8 100644 --- a/src/libcoretest/hash/sip.rs +++ b/src/libcoretest/hash/sip.rs @@ -7,10 +7,14 @@ // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. 
+ +#![allow(deprecated)] + use test::{Bencher, black_box}; use core::hash::{Hash, Hasher}; use core::hash::{SipHasher, SipHasher13, SipHasher24}; +use core::{slice, mem}; // Hash just the bytes of the slice, without length prefix struct Bytes<'a>(&'a [u8]); @@ -324,6 +328,26 @@ fn test_hash_no_concat_alias() { assert!(hash(&v) != hash(&w)); } +#[test] +fn test_write_short_works() { + let test_usize = 0xd0c0b0a0usize; + let mut h1 = SipHasher24::new(); + h1.write_usize(test_usize); + h1.write(b"bytes"); + h1.write(b"string"); + h1.write_u8(0xFFu8); + h1.write_u8(0x01u8); + let mut h2 = SipHasher24::new(); + h2.write(unsafe { + slice::from_raw_parts(&test_usize as *const _ as *const u8, + mem::size_of::()) + }); + h2.write(b"bytes"); + h2.write(b"string"); + h2.write(&[0xFFu8, 0x01u8]); + assert_eq!(h1.finish(), h2.finish()); +} + #[bench] fn bench_str_under_8_bytes(b: &mut Bencher) { let s = "foo"; diff --git a/src/libcoretest/iter.rs b/src/libcoretest/iter.rs index 27eb25537f31b..58b6444ef88cd 100644 --- a/src/libcoretest/iter.rs +++ b/src/libcoretest/iter.rs @@ -985,6 +985,18 @@ fn test_empty() { assert_eq!(it.next(), None); } +#[test] +fn test_chain_fold() { + let xs = [1, 2, 3]; + let ys = [1, 2, 0]; + + let mut iter = xs.iter().chain(&ys); + iter.next(); + let mut result = Vec::new(); + iter.fold((), |(), &elt| result.push(elt)); + assert_eq!(&[2, 3, 1, 2, 0], &result[..]); +} + #[bench] fn bench_rposition(b: &mut Bencher) { let it: Vec = (0..300).collect(); diff --git a/src/libcoretest/lib.rs b/src/libcoretest/lib.rs index aad0d2fedaa10..b8c01e570f509 100644 --- a/src/libcoretest/lib.rs +++ b/src/libcoretest/lib.rs @@ -35,6 +35,7 @@ #![feature(iter_max_by)] #![feature(iter_min_by)] #![feature(ordering_chaining)] +#![feature(result_unwrap_or_default)] extern crate core; extern crate test; diff --git a/src/libcoretest/num/flt2dec/estimator.rs b/src/libcoretest/num/flt2dec/estimator.rs index 857aae72c8a5b..0bca616ea9abc 100644 --- a/src/libcoretest/num/flt2dec/estimator.rs +++ b/src/libcoretest/num/flt2dec/estimator.rs @@ -8,6 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// FIXME https://github.com/kripken/emscripten/issues/4563 +// NB we have to actually not compile this test to avoid +// an undefined symbol error +#![cfg(not(target_os = "emscripten"))] + use core::num::flt2dec::estimator::*; #[test] diff --git a/src/libcoretest/result.rs b/src/libcoretest/result.rs index 6e9f653dcd8ac..bc2cd8bbfc651 100644 --- a/src/libcoretest/result.rs +++ b/src/libcoretest/result.rs @@ -183,3 +183,9 @@ pub fn test_iter_mut() { } assert_eq!(err, Err("error")); } + +#[test] +pub fn test_unwrap_or_default() { + assert_eq!(op1().unwrap_or_default(), 666); + assert_eq!(op2().unwrap_or_default(), 0); +} diff --git a/src/libflate/lib.rs b/src/libflate/lib.rs index 63913f2878c20..3c608ef9c9268 100644 --- a/src/libflate/lib.rs +++ b/src/libflate/lib.rs @@ -94,11 +94,14 @@ extern "C" { -> *mut c_void; } -const LZ_NORM: c_int = 0x80; // LZ with 128 probes, "normal" -const TINFL_FLAG_PARSE_ZLIB_HEADER: c_int = 0x1; // parse zlib header and adler32 checksum -const TDEFL_WRITE_ZLIB_HEADER: c_int = 0x01000; // write zlib header and adler32 checksum +const LZ_FAST: c_int = 0x01; // LZ with 1 probe, "fast" +const TDEFL_GREEDY_PARSING_FLAG: c_int = 0x04000; // fast greedy parsing instead of lazy parsing -fn deflate_bytes_internal(bytes: &[u8], flags: c_int) -> Bytes { +/// Compress a buffer without writing any sort of header on the output. 
Fast +/// compression is used because it is almost twice as fast as default +/// compression and the compression ratio is only marginally worse. +pub fn deflate_bytes(bytes: &[u8]) -> Bytes { + let flags = LZ_FAST | TDEFL_GREEDY_PARSING_FLAG; unsafe { let mut outsz: size_t = 0; let res = tdefl_compress_mem_to_heap(bytes.as_ptr() as *const _, @@ -113,17 +116,9 @@ fn deflate_bytes_internal(bytes: &[u8], flags: c_int) -> Bytes { } } -/// Compress a buffer, without writing any sort of header on the output. -pub fn deflate_bytes(bytes: &[u8]) -> Bytes { - deflate_bytes_internal(bytes, LZ_NORM) -} - -/// Compress a buffer, using a header that zlib can understand. -pub fn deflate_bytes_zlib(bytes: &[u8]) -> Bytes { - deflate_bytes_internal(bytes, LZ_NORM | TDEFL_WRITE_ZLIB_HEADER) -} - -fn inflate_bytes_internal(bytes: &[u8], flags: c_int) -> Result { +/// Decompress a buffer without parsing any sort of header on the input. +pub fn inflate_bytes(bytes: &[u8]) -> Result { + let flags = 0; unsafe { let mut outsz: size_t = 0; let res = tinfl_decompress_mem_to_heap(bytes.as_ptr() as *const _, @@ -141,16 +136,6 @@ fn inflate_bytes_internal(bytes: &[u8], flags: c_int) -> Result { } } -/// Decompress a buffer, without parsing any sort of header on the input. -pub fn inflate_bytes(bytes: &[u8]) -> Result { - inflate_bytes_internal(bytes, 0) -} - -/// Decompress a buffer that starts with a zlib header. -pub fn inflate_bytes_zlib(bytes: &[u8]) -> Result { - inflate_bytes_internal(bytes, TINFL_FLAG_PARSE_ZLIB_HEADER) -} - #[cfg(test)] mod tests { #![allow(deprecated)] diff --git a/src/libgetopts/lib.rs b/src/libgetopts/lib.rs index 42200795bb3a5..4d2f1b999a2ae 100644 --- a/src/libgetopts/lib.rs +++ b/src/libgetopts/lib.rs @@ -1610,8 +1610,8 @@ Options: #[test] fn test_args_with_equals() { - let args = vec!("--one".to_string(), "A=B".to_string(), - "--two=C=D".to_string()); + let args = vec!["--one".to_string(), "A=B".to_string(), + "--two=C=D".to_string()]; let opts = vec![optopt("o", "one", "One", "INFO"), optopt("t", "two", "Two", "INFO")]; let matches = &match getopts(&args, &opts) { diff --git a/src/libgraphviz/lib.rs b/src/libgraphviz/lib.rs index 74cc498a7df23..03057af4a843b 100644 --- a/src/libgraphviz/lib.rs +++ b/src/libgraphviz/lib.rs @@ -58,7 +58,7 @@ //! struct Edges(Vec); //! //! pub fn render_to(output: &mut W) { -//! let edges = Edges(vec!((0,1), (0,2), (1,3), (2,3), (3,4), (4,4))); +//! let edges = Edges(vec![(0,1), (0,2), (1,3), (2,3), (3,4), (4,4)]); //! dot::render(&edges, output).unwrap() //! } //! @@ -164,8 +164,8 @@ //! struct Graph { nodes: Vec<&'static str>, edges: Vec<(usize,usize)> } //! //! pub fn render_to(output: &mut W) { -//! let nodes = vec!("{x,y}","{x}","{y}","{}"); -//! let edges = vec!((0,1), (0,2), (1,3), (2,3)); +//! let nodes = vec!["{x,y}","{x}","{y}","{}"]; +//! let edges = vec![(0,1), (0,2), (1,3), (2,3)]; //! let graph = Graph { nodes: nodes, edges: edges }; //! //! dot::render(&graph, output).unwrap() @@ -226,8 +226,8 @@ //! struct Graph { nodes: Vec<&'static str>, edges: Vec<(usize,usize)> } //! //! pub fn render_to(output: &mut W) { -//! let nodes = vec!("{x,y}","{x}","{y}","{}"); -//! let edges = vec!((0,1), (0,2), (1,3), (2,3)); +//! let nodes = vec!["{x,y}","{x}","{y}","{}"]; +//! let edges = vec![(0,1), (0,2), (1,3), (2,3)]; //! let graph = Graph { nodes: nodes, edges: edges }; //! //! 
dot::render(&graph, output).unwrap() @@ -295,7 +295,7 @@ #![cfg_attr(not(stage0), deny(warnings))] #![feature(str_escape)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] use self::LabelText::*; diff --git a/src/liblibc b/src/liblibc index d4f6a19c55a03..7d9b71f0971f8 160000 --- a/src/liblibc +++ b/src/liblibc @@ -1 +1 @@ -Subproject commit d4f6a19c55a03e3f9f6fb7377911b37ed807eb6c +Subproject commit 7d9b71f0971f8fa196d864d7071f216a59036d6e diff --git a/src/liblog/directive.rs b/src/liblog/directive.rs index f1ebf16737831..eb50d6e6135ef 100644 --- a/src/liblog/directive.rs +++ b/src/liblog/directive.rs @@ -22,12 +22,12 @@ pub const LOG_LEVEL_NAMES: [&'static str; 5] = ["ERROR", "WARN", "INFO", "DEBUG" /// Parse an individual log level that is either a number or a symbolic log level fn parse_log_level(level: &str) -> Option { level.parse::() - .ok() - .or_else(|| { - let pos = LOG_LEVEL_NAMES.iter().position(|&name| name.eq_ignore_ascii_case(level)); - pos.map(|p| p as u32 + 1) - }) - .map(|p| cmp::min(p, ::MAX_LOG_LEVEL)) + .ok() + .or_else(|| { + let pos = LOG_LEVEL_NAMES.iter().position(|&name| name.eq_ignore_ascii_case(level)); + pos.map(|p| p as u32 + 1) + }) + .map(|p| cmp::min(p, ::MAX_LOG_LEVEL)) } /// Parse a logging specification string (e.g: "crate1,crate2::mod3,crate3::x=1/foo") @@ -52,32 +52,31 @@ pub fn parse_logging_spec(spec: &str) -> (Vec, Option) { continue; } let mut parts = s.split('='); - let (log_level, name) = match (parts.next(), - parts.next().map(|s| s.trim()), - parts.next()) { - (Some(part0), None, None) => { - // if the single argument is a log-level string or number, - // treat that as a global fallback - match parse_log_level(part0) { - Some(num) => (num, None), - None => (::MAX_LOG_LEVEL, Some(part0)), + let (log_level, name) = + match (parts.next(), parts.next().map(|s| s.trim()), parts.next()) { + (Some(part0), None, None) => { + // if the single argument is a log-level string or number, + // treat that as a global fallback + match parse_log_level(part0) { + Some(num) => (num, None), + None => (::MAX_LOG_LEVEL, Some(part0)), + } } - } - (Some(part0), Some(""), None) => (::MAX_LOG_LEVEL, Some(part0)), - (Some(part0), Some(part1), None) => { - match parse_log_level(part1) { - Some(num) => (num, Some(part0)), - _ => { - println!("warning: invalid logging spec '{}', ignoring it", part1); - continue; + (Some(part0), Some(""), None) => (::MAX_LOG_LEVEL, Some(part0)), + (Some(part0), Some(part1), None) => { + match parse_log_level(part1) { + Some(num) => (num, Some(part0)), + _ => { + println!("warning: invalid logging spec '{}', ignoring it", part1); + continue; + } } } - } - _ => { - println!("warning: invalid logging spec '{}', ignoring it", s); - continue; - } - }; + _ => { + println!("warning: invalid logging spec '{}', ignoring it", s); + continue; + } + }; dirs.push(LogDirective { name: name.map(str::to_owned), level: log_level, diff --git a/src/libpanic_unwind/Cargo.lock b/src/libpanic_unwind/Cargo.lock deleted file mode 100644 index 20d826d4a470e..0000000000000 --- a/src/libpanic_unwind/Cargo.lock +++ /dev/null @@ -1,27 +0,0 @@ -[root] -name = "panic_unwind" -version = "0.0.0" -dependencies = [ - "alloc 0.0.0", - "core 0.0.0", - "libc 0.0.0", -] - -[[package]] -name = "alloc" -version = "0.0.0" -dependencies = [ - "core 0.0.0", -] - -[[package]] -name = "core" -version = "0.0.0" - -[[package]] -name = "libc" -version = "0.0.0" -dependencies = [ - "core 0.0.0", -] - diff --git a/src/libpanic_unwind/dwarf/eh.rs 
b/src/libpanic_unwind/dwarf/eh.rs index 2284a9bbb73e1..e7994f4e0ef0a 100644 --- a/src/libpanic_unwind/dwarf/eh.rs +++ b/src/libpanic_unwind/dwarf/eh.rs @@ -116,7 +116,7 @@ pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext) -> EHAction { // The "IP" is an index into the call-site table, with two exceptions: // -1 means 'no-action', and 0 means 'terminate'. match ip as isize { - -1 => return EHAction::None, + -1 => return EHAction::None, 0 => return EHAction::Terminate, _ => (), } @@ -182,12 +182,8 @@ unsafe fn read_encoded_pointer(reader: &mut DwarfReader, assert!(context.func_start != 0); context.func_start } - DW_EH_PE_textrel => { - (*context.get_text_start)() - } - DW_EH_PE_datarel => { - (*context.get_data_start)() - } + DW_EH_PE_textrel => (*context.get_text_start)(), + DW_EH_PE_datarel => (*context.get_data_start)(), _ => panic!(), }; diff --git a/src/libpanic_unwind/emcc.rs b/src/libpanic_unwind/emcc.rs new file mode 100644 index 0000000000000..0e48e37c92358 --- /dev/null +++ b/src/libpanic_unwind/emcc.rs @@ -0,0 +1,75 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Unwinding for emscripten +//! +//! Whereas Rust's usual unwinding implementation for Unix platforms +//! calls into the libunwind APIs directly, on emscripten we instead +//! call into the C++ unwinding APIs. This is just an expedience since +//! emscripten's runtime always implements those APIs and does not +//! implement libunwind. + +#![allow(private_no_mangle_fns)] + +use core::any::Any; +use core::ptr; +use alloc::boxed::Box; +use libc::{self, c_int}; +use unwind as uw; +use core::mem; + +pub fn payload() -> *mut u8 { + ptr::null_mut() +} + +pub unsafe fn cleanup(ptr: *mut u8) -> Box { + assert!(!ptr.is_null()); + let ex = ptr::read(ptr as *mut _); + __cxa_free_exception(ptr as *mut _); + ex +} + +pub unsafe fn panic(data: Box) -> u32 { + let sz = mem::size_of_val(&data); + let exception = __cxa_allocate_exception(sz); + if exception == ptr::null_mut() { + return uw::_URC_FATAL_PHASE1_ERROR as u32; + } + let exception = exception as *mut Box; + ptr::write(exception, data); + __cxa_throw(exception as *mut _, ptr::null_mut(), ptr::null_mut()); + + unreachable!() +} + +#[lang = "eh_personality"] +#[no_mangle] +unsafe extern "C" fn rust_eh_personality(version: c_int, + actions: uw::_Unwind_Action, + exception_class: uw::_Unwind_Exception_Class, + exception_object: *mut uw::_Unwind_Exception, + context: *mut uw::_Unwind_Context) + -> uw::_Unwind_Reason_Code { + __gxx_personality_v0(version, actions, exception_class, exception_object, context) +} + +extern "C" { + fn __cxa_allocate_exception(thrown_size: libc::size_t) -> *mut libc::c_void; + fn __cxa_free_exception(thrown_exception: *mut libc::c_void); + fn __cxa_throw(thrown_exception: *mut libc::c_void, + tinfo: *mut libc::c_void, + dest: *mut libc::c_void); + fn __gxx_personality_v0(version: c_int, + actions: uw::_Unwind_Action, + exception_class: uw::_Unwind_Exception_Class, + exception_object: *mut uw::_Unwind_Exception, + context: *mut uw::_Unwind_Context) + -> uw::_Unwind_Reason_Code; +} diff --git a/src/libpanic_unwind/gcc.rs b/src/libpanic_unwind/gcc.rs index 33b24fbaa2659..f0f19be3366bd 100644 --- a/src/libpanic_unwind/gcc.rs +++ 
b/src/libpanic_unwind/gcc.rs @@ -156,14 +156,16 @@ unsafe extern "C" fn rust_eh_personality(version: c_int, let eh_action = find_eh_action(context); if actions as i32 & uw::_UA_SEARCH_PHASE as i32 != 0 { match eh_action { - EHAction::None | EHAction::Cleanup(_) => return uw::_URC_CONTINUE_UNWIND, + EHAction::None | + EHAction::Cleanup(_) => return uw::_URC_CONTINUE_UNWIND, EHAction::Catch(_) => return uw::_URC_HANDLER_FOUND, EHAction::Terminate => return uw::_URC_FATAL_PHASE1_ERROR, } } else { match eh_action { EHAction::None => return uw::_URC_CONTINUE_UNWIND, - EHAction::Cleanup(lpad) | EHAction::Catch(lpad) => { + EHAction::Cleanup(lpad) | + EHAction::Catch(lpad) => { uw::_Unwind_SetGR(context, UNWIND_DATA_REG.0, exception_object as uintptr_t); uw::_Unwind_SetGR(context, UNWIND_DATA_REG.1, 0); uw::_Unwind_SetIP(context, lpad); @@ -182,7 +184,7 @@ unsafe extern "C" fn rust_eh_personality(version: c_int, unsafe extern "C" fn rust_eh_personality(state: uw::_Unwind_State, exception_object: *mut uw::_Unwind_Exception, context: *mut uw::_Unwind_Context) - -> uw::_Unwind_Reason_Code { + -> uw::_Unwind_Reason_Code { let state = state as c_int; let action = state & uw::_US_ACTION_MASK as c_int; let search_phase = if action == uw::_US_VIRTUAL_UNWIND_FRAME as c_int { @@ -191,7 +193,7 @@ unsafe extern "C" fn rust_eh_personality(state: uw::_Unwind_State, // we want to continue unwinding the stack, otherwise all our backtraces // would end at __rust_try if state & uw::_US_FORCE_UNWIND as c_int != 0 { - return continue_unwind(exception_object, context) + return continue_unwind(exception_object, context); } true } else if action == uw::_US_UNWIND_FRAME_STARTING as c_int { @@ -207,7 +209,9 @@ unsafe extern "C" fn rust_eh_personality(state: uw::_Unwind_State, // To preserve signatures of functions like _Unwind_GetLanguageSpecificData(), which // take only the context pointer, GCC personality routines stash a pointer to exception_object // in the context, using location reserved for ARM's "scratch register" (r12). - uw::_Unwind_SetGR(context, uw::UNWIND_POINTER_REG, exception_object as uw::_Unwind_Ptr); + uw::_Unwind_SetGR(context, + uw::UNWIND_POINTER_REG, + exception_object as uw::_Unwind_Ptr); // ...A more principled approach would be to provide the full definition of ARM's // _Unwind_Context in our libunwind bindings and fetch the required data from there directly, // bypassing DWARF compatibility functions. @@ -223,7 +227,8 @@ unsafe extern "C" fn rust_eh_personality(state: uw::_Unwind_State, } else { match eh_action { EHAction::None => return continue_unwind(exception_object, context), - EHAction::Cleanup(lpad) | EHAction::Catch(lpad) => { + EHAction::Cleanup(lpad) | + EHAction::Catch(lpad) => { uw::_Unwind_SetGR(context, UNWIND_DATA_REG.0, exception_object as uintptr_t); uw::_Unwind_SetGR(context, UNWIND_DATA_REG.1, 0); uw::_Unwind_SetIP(context, lpad); @@ -247,8 +252,8 @@ unsafe extern "C" fn rust_eh_personality(state: uw::_Unwind_State, // defined in libgcc extern "C" { fn __gnu_unwind_frame(exception_object: *mut uw::_Unwind_Exception, - context: *mut uw::_Unwind_Context) - -> uw::_Unwind_Reason_Code; + context: *mut uw::_Unwind_Context) + -> uw::_Unwind_Reason_Code; } } @@ -287,7 +292,7 @@ unsafe extern "C" fn rust_eh_unwind_resume(panic_ctx: *mut u8) -> ! { // to actively register their unwind info sections via unwinder API. // // This module defines two symbols which are referenced and called from -// rsbegin.rs to reigster our information with the GCC runtime. 
The +// rsbegin.rs to register our information with the GCC runtime. The // implementation of stack unwinding is (for now) deferred to libgcc_eh, however // Rust crates use these Rust-specific entry points to avoid potential clashes // with any GCC runtime. diff --git a/src/libpanic_unwind/lib.rs b/src/libpanic_unwind/lib.rs index 11dd9befe0a82..ff483fa823e0c 100644 --- a/src/libpanic_unwind/lib.rs +++ b/src/libpanic_unwind/lib.rs @@ -68,10 +68,16 @@ mod imp; mod imp; // i686-pc-windows-gnu and all others -#[cfg(any(unix, all(windows, target_arch = "x86", target_env = "gnu")))] +#[cfg(any(all(unix, not(target_os = "emscripten")), + all(windows, target_arch = "x86", target_env = "gnu")))] #[path = "gcc.rs"] mod imp; +// emscripten +#[cfg(target_os = "emscripten")] +#[path = "emcc.rs"] +mod imp; + mod dwarf; mod windows; diff --git a/src/libpanic_unwind/seh64_gnu.rs b/src/libpanic_unwind/seh64_gnu.rs index e6d3920b29cb0..d4906b556b31a 100644 --- a/src/libpanic_unwind/seh64_gnu.rs +++ b/src/libpanic_unwind/seh64_gnu.rs @@ -129,7 +129,8 @@ unsafe fn find_landing_pad(dc: &c::DISPATCHER_CONTEXT) -> Option { }; match find_eh_action(dc.HandlerData, &eh_ctx) { EHAction::None => None, - EHAction::Cleanup(lpad) | EHAction::Catch(lpad) => Some(lpad), + EHAction::Cleanup(lpad) | + EHAction::Catch(lpad) => Some(lpad), EHAction::Terminate => intrinsics::abort(), } } diff --git a/src/libproc_macro/Cargo.toml b/src/libproc_macro/Cargo.toml index 99fb1d65cda90..7ce65d0fe4dbc 100644 --- a/src/libproc_macro/Cargo.toml +++ b/src/libproc_macro/Cargo.toml @@ -4,12 +4,8 @@ name = "proc_macro" version = "0.0.0" [lib] -name = "proc_macro" path = "lib.rs" crate-type = ["dylib"] [dependencies] -log = { path = "../liblog" } -rustc_plugin = { path = "../librustc_plugin" } syntax = { path = "../libsyntax" } -syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/libproc_macro/lib.rs b/src/libproc_macro/lib.rs index 9e25cb88e015c..1d2c64d6d938a 100644 --- a/src/libproc_macro/lib.rs +++ b/src/libproc_macro/lib.rs @@ -8,130 +8,158 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! # Proc_Macro +//! A support library for macro authors when defining new macros. //! -//! A library for procedural macro writers. +//! This library, provided by the standard distribution, provides the types +//! consumed in the interfaces of procedurally defined macro definitions. +//! Currently the primary use of this crate is to provide the ability to define +//! new custom derive modes through `#[proc_macro_derive]`. //! -//! ## Usage -//! This package provides the `qquote!` macro for syntax creation, and the prelude -//! (at libproc_macro::prelude) provides a number of operations: -//! - `concat`, for concatenating two TokenStreams. -//! - `ident_eq`, for checking if two identifiers are equal regardless of syntax context. -//! - `str_to_token_ident`, for converting an `&str` into a Token. -//! - `keyword_to_token_delim`, for converting a `parse::token::keywords::Keyword` into a -//! Token. -//! - `build_delimited`, for creating a new TokenStream from an existing one and a delimiter -//! by wrapping the TokenStream in the delimiter. -//! - `build_bracket_delimited`, `build_brace_delimited`, and `build_paren_delimited`, for -//! easing the above. -//! - `build_empty_args`, which returns a TokenStream containing `()`. -//! - `lex`, which takes an `&str` and returns the TokenStream it represents. +//! Added recently as part of [RFC 1681] this crate is currently *unstable* and +//! 
requires the `#![feature(proc_macro_lib)]` directive to use. //! -//! The `qquote!` macro also imports `syntax::ext::proc_macro_shim::prelude::*`, so you -//! will need to `extern crate syntax` for usage. (This is a temporary solution until more -//! of the external API in libproc_macro is stabilized to support the token construction -//! operations that the qausiquoter relies on.) The shim file also provides additional -//! operations, such as `build_block_emitter` (as used in the `cond` example below). -//! -//! ## TokenStreams -//! -//! TokenStreams serve as the basis of the macro system. They are, in essence, vectors of -//! TokenTrees, where indexing treats delimited values as a single term. That is, the term -//! `even(a+c) && even(b)` will be indexibly encoded as `even | (a+c) | even | (b)` where, -//! in reality, `(a+c)` is actually a decorated pointer to `a | + | c`. -//! -//! If a user has a TokenStream that is a single, delimited value, they can use -//! `maybe_delimited` to destruct it and receive the internal vector as a new TokenStream -//! as: -//! ``` -//! `(a+c)`.maybe_delimited() ~> Some(a | + | c)` -//! ``` -//! -//! Check the TokenStream documentation for more information; the structure also provides -//! cheap concatenation and slicing. -//! -//! ## Quasiquotation -//! -//! The quasiquoter creates output that, when run, constructs the tokenstream specified as -//! input. For example, `qquote!(5 + 5)` will produce a program, that, when run, will -//! construct the TokenStream `5 | + | 5`. -//! -//! ### Unquoting -//! -//! Unquoting is currently done as `unquote`, and works by taking the single next -//! TokenTree in the TokenStream as the unquoted term. Ergonomically, `unquote(foo)` works -//! fine, but `unquote foo` is also supported. -//! -//! A simple example might be: -//! -//!``` -//!fn double(tmp: TokenStream) -> TokenStream { -//! qquote!(unquote(tmp) * 2) -//!} -//!``` -//! -//! ### Large Example: Implementing Scheme's `cond` -//! -//! Below is the full implementation of Scheme's `cond` operator. -//! -//! ``` -//! fn cond_rec(input: TokenStream) -> TokenStream { -//! if input.is_empty() { return quote!(); } -//! -//! let next = input.slice(0..1); -//! let rest = input.slice_from(1..); -//! -//! let clause : TokenStream = match next.maybe_delimited() { -//! Some(ts) => ts, -//! _ => panic!("Invalid input"), -//! }; -//! -//! // clause is ([test]) [rhs] -//! if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) } -//! -//! let test: TokenStream = clause.slice(0..1); -//! let rhs: TokenStream = clause.slice_from(1..); -//! -//! if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() { -//! quote!({unquote(rhs)}) -//! } else { -//! quote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) -//! } -//! } -//! ``` +//! [RFC 1681]: https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md //! +//! Note that this crate is intentionally very bare-bones currently. The main +//! type, `TokenStream`, only supports `fmt::Display` and `FromStr` +//! implementations, indicating that it can only go to and come from a string. +//! This functionality is intended to be expanded over time as more surface +//! area for macro authors is stabilized. 
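The rewritten crate docs above describe `TokenStream` purely in terms of its `fmt::Display` and `FromStr` impls and its role as the input and output of `#[proc_macro_derive]` definitions. A minimal sketch of a derive-style function written against just that surface might look like the following; the `derive_hello`, `Hello`, and `Example` names are illustrative assumptions rather than anything in this patch, and the feature gate is the one named in the docs above:

```rust
#![feature(proc_macro_lib)]

extern crate proc_macro;

use proc_macro::TokenStream;

// A custom-derive body is a function from TokenStream to TokenStream, as
// registered through `register_custom_derive` further down in this file.
pub fn derive_hello(input: TokenStream) -> TokenStream {
    // Display: render the input items back to source text.
    let source = input.to_string();

    // FromStr: parse freshly generated items into a new TokenStream.
    // `Hello` and `Example` are hypothetical names used only for this sketch.
    let generated = format!("{}\n\nimpl Hello for Example {{ }}", source);
    generated.parse().expect("generated source should parse as items")
}
```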
#![crate_name = "proc_macro"] -#![unstable(feature = "rustc_private", issue = "27812")] -#![feature(plugin_registrar)] -#![crate_type = "dylib"] +#![unstable(feature = "proc_macro_lib", issue = "27812")] #![crate_type = "rlib"] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/")] +#![crate_type = "dylib"] #![cfg_attr(not(stage0), deny(warnings))] +#![deny(missing_docs)] -#![feature(staged_api)] -#![feature(rustc_diagnostic_macros)] #![feature(rustc_private)] +#![feature(staged_api)] +#![feature(lang_items)] -extern crate rustc_plugin; extern crate syntax; -extern crate syntax_pos; -#[macro_use] extern crate log; -mod qquote; -pub mod build; -pub mod parse; -pub mod prelude; -use qquote::qquote; +use std::fmt; +use std::str::FromStr; + +use syntax::ast; +use syntax::parse; +use syntax::ptr::P; + +/// The main type provided by this crate, representing an abstract stream of +/// tokens. +/// +/// This is both the input and output of `#[proc_macro_derive]` definitions. +/// Currently it's required to be a list of valid Rust items, but this +/// restriction may be lifted in the future. +/// +/// The API of this type is intentionally bare-bones, but it'll be expanded over +/// time! +pub struct TokenStream { + inner: Vec>, +} + +/// Error returned from `TokenStream::from_str`. +#[derive(Debug)] +pub struct LexError { + _inner: (), +} + +/// Permanently unstable internal implementation details of this crate. This +/// should not be used. +/// +/// These methods are used by the rest of the compiler to generate instances of +/// `TokenStream` to hand to macro definitions, as well as consume the output. +/// +/// Note that this module is also intentionally separate from the rest of the +/// crate. This allows the `#[unstable]` directive below to naturally apply to +/// all of the contents. +#[unstable(feature = "proc_macro_internals", issue = "27812")] +#[doc(hidden)] +pub mod __internal { + use std::cell::Cell; + + use syntax::ast; + use syntax::ptr::P; + use syntax::parse::ParseSess; + use super::TokenStream; + + pub fn new_token_stream(item: P) -> TokenStream { + TokenStream { inner: vec![item] } + } -use rustc_plugin::Registry; + pub fn token_stream_items(stream: TokenStream) -> Vec> { + stream.inner + } -// ____________________________________________________________________________________________ -// Main macro definition + pub trait Registry { + fn register_custom_derive(&mut self, + trait_name: &str, + expand: fn(TokenStream) -> TokenStream); + } + + // Emulate scoped_thread_local!() here essentially + thread_local! 
{ + static CURRENT_SESS: Cell<*const ParseSess> = Cell::new(0 as *const _); + } + + pub fn set_parse_sess(sess: &ParseSess, f: F) -> R + where F: FnOnce() -> R + { + struct Reset { prev: *const ParseSess } + + impl Drop for Reset { + fn drop(&mut self) { + CURRENT_SESS.with(|p| p.set(self.prev)); + } + } + + CURRENT_SESS.with(|p| { + let _reset = Reset { prev: p.get() }; + p.set(sess); + f() + }) + } + + pub fn with_parse_sess(f: F) -> R + where F: FnOnce(&ParseSess) -> R + { + let p = CURRENT_SESS.with(|p| p.get()); + assert!(!p.is_null()); + f(unsafe { &*p }) + } +} + +impl FromStr for TokenStream { + type Err = LexError; + + fn from_str(src: &str) -> Result { + __internal::with_parse_sess(|sess| { + let src = src.to_string(); + let name = "".to_string(); + let mut parser = parse::new_parser_from_source_str(sess, name, src); + let mut ret = TokenStream { inner: Vec::new() }; + loop { + match parser.parse_item() { + Ok(Some(item)) => ret.inner.push(item), + Ok(None) => return Ok(ret), + Err(mut err) => { + err.cancel(); + return Err(LexError { _inner: () }) + } + } + } + }) + } +} -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_macro("qquote", qquote); +impl fmt::Display for TokenStream { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + for item in self.inner.iter() { + let item = syntax::print::pprust::item_to_string(item); + try!(f.write_str(&item)); + try!(f.write_str("\n")); + } + Ok(()) + } } diff --git a/src/libproc_macro_plugin/Cargo.toml b/src/libproc_macro_plugin/Cargo.toml new file mode 100644 index 0000000000000..4bc3f488d3280 --- /dev/null +++ b/src/libproc_macro_plugin/Cargo.toml @@ -0,0 +1,15 @@ +[package] +authors = ["The Rust Project Developers"] +name = "proc_macro_plugin" +version = "0.0.0" + +[lib] +path = "lib.rs" +crate-type = ["dylib"] + +[dependencies] +log = { path = "../liblog" } +rustc_plugin = { path = "../librustc_plugin" } +syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } +proc_macro_tokens = { path = "../libproc_macro_tokens" } diff --git a/src/libproc_macro_plugin/lib.rs b/src/libproc_macro_plugin/lib.rs new file mode 100644 index 0000000000000..c45762bfb6e71 --- /dev/null +++ b/src/libproc_macro_plugin/lib.rs @@ -0,0 +1,107 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! # Proc_Macro +//! +//! A library for procedural macro writers. +//! +//! ## Usage +//! This crate provides the `qquote!` macro for syntax creation. +//! +//! The `qquote!` macro imports `syntax::ext::proc_macro_shim::prelude::*`, so you +//! will need to `extern crate syntax` for usage. (This is a temporary solution until more +//! of the external API in libproc_macro_tokens is stabilized to support the token construction +//! operations that the qausiquoter relies on.) The shim file also provides additional +//! operations, such as `build_block_emitter` (as used in the `cond` example below). +//! +//! ## Quasiquotation +//! +//! The quasiquoter creates output that, when run, constructs the tokenstream specified as +//! input. For example, `qquote!(5 + 5)` will produce a program, that, when run, will +//! construct the TokenStream `5 | + | 5`. +//! +//! ### Unquoting +//! +//! 
Unquoting is currently done as `unquote`, and works by taking the single next +//! TokenTree in the TokenStream as the unquoted term. Ergonomically, `unquote(foo)` works +//! fine, but `unquote foo` is also supported. +//! +//! A simple example might be: +//! +//!``` +//!fn double(tmp: TokenStream) -> TokenStream { +//! qquote!(unquote(tmp) * 2) +//!} +//!``` +//! +//! ### Large Example: Implementing Scheme's `cond` +//! +//! Below is the full implementation of Scheme's `cond` operator. +//! +//! ``` +//! fn cond_rec(input: TokenStream) -> TokenStream { +//! if input.is_empty() { return quote!(); } +//! +//! let next = input.slice(0..1); +//! let rest = input.slice_from(1..); +//! +//! let clause : TokenStream = match next.maybe_delimited() { +//! Some(ts) => ts, +//! _ => panic!("Invalid input"), +//! }; +//! +//! // clause is ([test]) [rhs] +//! if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) } +//! +//! let test: TokenStream = clause.slice(0..1); +//! let rhs: TokenStream = clause.slice_from(1..); +//! +//! if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() { +//! quote!({unquote(rhs)}) +//! } else { +//! quote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } }) +//! } +//! } +//! ``` +//! + +#![crate_name = "proc_macro_plugin"] +#![unstable(feature = "rustc_private", issue = "27812")] +#![feature(plugin_registrar)] +#![crate_type = "dylib"] +#![crate_type = "rlib"] +#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/")] +#![cfg_attr(not(stage0), deny(warnings))] + +#![feature(staged_api)] +#![feature(rustc_diagnostic_macros)] +#![feature(rustc_private)] + +extern crate rustc_plugin; +extern crate syntax; +extern crate syntax_pos; +extern crate proc_macro_tokens; +#[macro_use] extern crate log; + +mod qquote; + +use qquote::qquote; + +use rustc_plugin::Registry; + +// ____________________________________________________________________________________________ +// Main macro definition + +#[plugin_registrar] +pub fn plugin_registrar(reg: &mut Registry) { + reg.register_macro("qquote", qquote); +} diff --git a/src/libproc_macro/qquote.rs b/src/libproc_macro_plugin/qquote.rs similarity index 98% rename from src/libproc_macro/qquote.rs rename to src/libproc_macro_plugin/qquote.rs index 67d0c77b00d83..e5a3abc2ea982 100644 --- a/src/libproc_macro/qquote.rs +++ b/src/libproc_macro_plugin/qquote.rs @@ -24,12 +24,9 @@ //! TokenStream that resembles the output syntax. //! 
-extern crate rustc_plugin; -extern crate syntax; -extern crate syntax_pos; +use proc_macro_tokens::build::*; +use proc_macro_tokens::parse::lex; -use build::*; -use parse::lex; use qquote::int_build::*; use syntax::ast::Ident; @@ -51,7 +48,7 @@ pub fn qquote<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) let output = qquoter(cx, TokenStream::from_tts(tts.clone().to_owned())); debug!("\nQQ out: {}\n", pprust::tts_to_string(&output.to_tts()[..])); let imports = concat(lex("use syntax::ext::proc_macro_shim::prelude::*;"), - lex("use proc_macro::prelude::*;")); + lex("use proc_macro_tokens::prelude::*;")); build_block_emitter(cx, sp, build_brace_delimited(concat(imports, output))) } @@ -219,7 +216,7 @@ fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec) -> (Bindings, T let sep = build_delim_tok(qdl.delim); - pushes.push(build_mod_call(vec![str_to_ident("proc_macro"), + pushes.push(build_mod_call(vec![str_to_ident("proc_macro_tokens"), str_to_ident("build"), str_to_ident("build_delimited")], concat(from_tokens(vec![Token::Ident(new_id)]), @@ -264,11 +261,8 @@ fn is_qquote(id: Ident) -> bool { } mod int_build { - extern crate syntax; - extern crate syntax_pos; - - use parse::*; - use build::*; + use proc_macro_tokens::build::*; + use proc_macro_tokens::parse::*; use syntax::ast::{self, Ident}; use syntax::codemap::{DUMMY_SP}; diff --git a/src/librustc_macro/Cargo.toml b/src/libproc_macro_tokens/Cargo.toml similarity index 62% rename from src/librustc_macro/Cargo.toml rename to src/libproc_macro_tokens/Cargo.toml index 6b3ee21d9aceb..2b66d56759f35 100644 --- a/src/librustc_macro/Cargo.toml +++ b/src/libproc_macro_tokens/Cargo.toml @@ -1,12 +1,13 @@ [package] authors = ["The Rust Project Developers"] -name = "rustc_macro" +name = "proc_macro_tokens" version = "0.0.0" [lib] -name = "rustc_macro" path = "lib.rs" crate-type = ["dylib"] [dependencies] syntax = { path = "../libsyntax" } +syntax_pos = { path = "../libsyntax_pos" } +log = { path = "../liblog" } diff --git a/src/libproc_macro/build.rs b/src/libproc_macro_tokens/build.rs similarity index 100% rename from src/libproc_macro/build.rs rename to src/libproc_macro_tokens/build.rs diff --git a/src/libproc_macro_tokens/lib.rs b/src/libproc_macro_tokens/lib.rs new file mode 100644 index 0000000000000..3bfa2fbb29fbd --- /dev/null +++ b/src/libproc_macro_tokens/lib.rs @@ -0,0 +1,66 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! # Proc_Macro +//! +//! A library for procedural macro writers. +//! +//! ## Usage +//! This crate provides the prelude (at libproc_macro_tokens::prelude), which +//! provides a number of operations: +//! - `concat`, for concatenating two TokenStreams. +//! - `ident_eq`, for checking if two identifiers are equal regardless of syntax context. +//! - `str_to_token_ident`, for converting an `&str` into a Token. +//! - `keyword_to_token_delim`, for converting a `parse::token::keywords::Keyword` into a +//! Token. +//! - `build_delimited`, for creating a new TokenStream from an existing one and a delimiter +//! by wrapping the TokenStream in the delimiter. +//! - `build_bracket_delimited`, `build_brace_delimited`, and `build_paren_delimited`, for +//! easing the above. +//! 
- `build_empty_args`, which returns a TokenStream containing `()`. +//! - `lex`, which takes an `&str` and returns the TokenStream it represents. +//! +//! ## TokenStreams +//! +//! TokenStreams serve as the basis of the macro system. They are, in essence, vectors of +//! TokenTrees, where indexing treats delimited values as a single term. That is, the term +//! `even(a+c) && even(b)` will be indexibly encoded as `even | (a+c) | even | (b)` where, +//! in reality, `(a+c)` is actually a decorated pointer to `a | + | c`. +//! +//! If a user has a TokenStream that is a single, delimited value, they can use +//! `maybe_delimited` to destruct it and receive the internal vector as a new TokenStream +//! as: +//! ``` +//! `(a+c)`.maybe_delimited() ~> Some(a | + | c)` +//! ``` +//! +//! Check the TokenStream documentation for more information; the structure also provides +//! cheap concatenation and slicing. +//! + +#![crate_name = "proc_macro_tokens"] +#![unstable(feature = "rustc_private", issue = "27812")] +#![crate_type = "dylib"] +#![crate_type = "rlib"] +#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/")] +#![cfg_attr(not(stage0), deny(warnings))] + +#![feature(staged_api)] +#![feature(rustc_private)] + +extern crate syntax; +extern crate syntax_pos; +#[macro_use] extern crate log; + +pub mod build; +pub mod parse; +pub mod prelude; diff --git a/src/libproc_macro/parse.rs b/src/libproc_macro_tokens/parse.rs similarity index 100% rename from src/libproc_macro/parse.rs rename to src/libproc_macro_tokens/parse.rs diff --git a/src/libproc_macro/prelude.rs b/src/libproc_macro_tokens/prelude.rs similarity index 100% rename from src/libproc_macro/prelude.rs rename to src/libproc_macro_tokens/prelude.rs diff --git a/src/librand/chacha.rs b/src/librand/chacha.rs index 5fba44a1c38f1..7dc0d19e6a615 100644 --- a/src/librand/chacha.rs +++ b/src/librand/chacha.rs @@ -217,8 +217,8 @@ mod tests { let mut ra: ChaChaRng = SeedableRng::from_seed(&*s); let mut rb: ChaChaRng = SeedableRng::from_seed(&*s); assert!(ra.gen_ascii_chars() - .take(100) - .eq(rb.gen_ascii_chars().take(100))); + .take(100) + .eq(rb.gen_ascii_chars().take(100))); } #[test] @@ -227,8 +227,8 @@ mod tests { let mut ra: ChaChaRng = SeedableRng::from_seed(seed); let mut rb: ChaChaRng = SeedableRng::from_seed(seed); assert!(ra.gen_ascii_chars() - .take(100) - .eq(rb.gen_ascii_chars().take(100))); + .take(100) + .eq(rb.gen_ascii_chars().take(100))); } #[test] @@ -253,17 +253,17 @@ mod tests { let v = (0..16).map(|_| ra.next_u32()).collect::>(); assert_eq!(v, - vec!(0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653, + vec![0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653, 0xb819d2bd, 0x1aed8da0, 0xccef36a8, 0xc70d778b, 0x7c5941da, 0x8d485751, 0x3fe02477, 0x374ad8b8, - 0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2)); + 0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2]); let v = (0..16).map(|_| ra.next_u32()).collect::>(); assert_eq!(v, - vec!(0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73, + vec![0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73, 0xa0290fcb, 0x6965e348, 0x3e53c612, 0xed7aee32, 0x7621b729, 0x434ee69c, 0xb03371d5, 0xd539d874, - 0x281fed31, 0x45fb0a51, 0x1f0ae1ac, 0x6f4d794b)); + 0x281fed31, 0x45fb0a51, 0x1f0ae1ac, 0x6f4d794b]); let seed: &[_] = &[0, 1, 2, 3, 4, 5, 6, 7]; @@ -280,10 +280,10 @@ mod tests { } assert_eq!(v, - vec!(0xf225c81a, 0x6ab1be57, 0x04d42951, 0x70858036, + 
vec![0xf225c81a, 0x6ab1be57, 0x04d42951, 0x70858036, 0x49884684, 0x64efec72, 0x4be2d186, 0x3615b384, 0x11cfa18e, 0xd3c50049, 0x75c775f6, 0x434c6530, - 0x2c5bad8f, 0x898881dc, 0x5f1c86d9, 0xc1f8e7f4)); + 0x2c5bad8f, 0x898881dc, 0x5f1c86d9, 0xc1f8e7f4]); } #[test] diff --git a/src/librand/distributions/mod.rs b/src/librand/distributions/mod.rs index 36c9f783ff5e0..41175c81df891 100644 --- a/src/librand/distributions/mod.rs +++ b/src/librand/distributions/mod.rs @@ -237,18 +237,10 @@ fn ziggurat(rng: &mut R, // u is either U(-1, 1) or U(0, 1) depending on if this is a // symmetric distribution or not. - let u = if symmetric { - 2.0 * f - 1.0 - } else { - f - }; + let u = if symmetric { 2.0 * f - 1.0 } else { f }; let x = u * x_tab[i]; - let test_x = if symmetric { - x.abs() - } else { - x - }; + let test_x = if symmetric { x.abs() } else { x }; // algebraically equivalent to |u| < x_tab[i+1]/x_tab[i] (or u < x_tab[i+1]/x_tab[i]) if test_x < x_tab[i + 1] { @@ -320,37 +312,37 @@ mod tests { }} } - t!(vec!(Weighted { weight: 1, item: 10 }), + t!(vec![Weighted { weight: 1, item: 10 }], [10]); // skip some - t!(vec!(Weighted { weight: 0, item: 20 }, + t!(vec![Weighted { weight: 0, item: 20 }, Weighted { weight: 2, item: 21 }, Weighted { weight: 0, item: 22 }, - Weighted { weight: 1, item: 23 }), + Weighted { weight: 1, item: 23 }], [21, 21, 23]); // different weights - t!(vec!(Weighted { weight: 4, item: 30 }, - Weighted { weight: 3, item: 31 }), + t!(vec![Weighted { weight: 4, item: 30 }, + Weighted { weight: 3, item: 31 }], [30, 30, 30, 30, 31, 31, 31]); // check that we're binary searching // correctly with some vectors of odd // length. - t!(vec!(Weighted { weight: 1, item: 40 }, + t!(vec![Weighted { weight: 1, item: 40 }, Weighted { weight: 1, item: 41 }, Weighted { weight: 1, item: 42 }, Weighted { weight: 1, item: 43 }, - Weighted { weight: 1, item: 44 }), + Weighted { weight: 1, item: 44 }], [40, 41, 42, 43, 44]); - t!(vec!(Weighted { weight: 1, item: 50 }, + t!(vec![Weighted { weight: 1, item: 50 }, Weighted { weight: 1, item: 51 }, Weighted { weight: 1, item: 52 }, Weighted { weight: 1, item: 53 }, Weighted { weight: 1, item: 54 }, Weighted { weight: 1, item: 55 }, - Weighted { weight: 1, item: 56 }), + Weighted { weight: 1, item: 56 }], [50, 51, 52, 53, 54, 55, 56]); } diff --git a/src/librand/isaac.rs b/src/librand/isaac.rs index e8cc7b5cc2dac..69d5015f18140 100644 --- a/src/librand/isaac.rs +++ b/src/librand/isaac.rs @@ -599,8 +599,8 @@ mod tests { let mut ra: IsaacRng = SeedableRng::from_seed(&s[..]); let mut rb: IsaacRng = SeedableRng::from_seed(&s[..]); assert!(ra.gen_ascii_chars() - .take(100) - .eq(rb.gen_ascii_chars().take(100))); + .take(100) + .eq(rb.gen_ascii_chars().take(100))); } #[test] fn test_rng_64_rand_seeded() { @@ -608,8 +608,8 @@ mod tests { let mut ra: Isaac64Rng = SeedableRng::from_seed(&s[..]); let mut rb: Isaac64Rng = SeedableRng::from_seed(&s[..]); assert!(ra.gen_ascii_chars() - .take(100) - .eq(rb.gen_ascii_chars().take(100))); + .take(100) + .eq(rb.gen_ascii_chars().take(100))); } #[test] @@ -618,8 +618,8 @@ mod tests { let mut ra: IsaacRng = SeedableRng::from_seed(seed); let mut rb: IsaacRng = SeedableRng::from_seed(seed); assert!(ra.gen_ascii_chars() - .take(100) - .eq(rb.gen_ascii_chars().take(100))); + .take(100) + .eq(rb.gen_ascii_chars().take(100))); } #[test] fn test_rng_64_seeded() { @@ -627,8 +627,8 @@ mod tests { let mut ra: Isaac64Rng = SeedableRng::from_seed(seed); let mut rb: Isaac64Rng = SeedableRng::from_seed(seed); assert!(ra.gen_ascii_chars() 
- .take(100) - .eq(rb.gen_ascii_chars().take(100))); + .take(100) + .eq(rb.gen_ascii_chars().take(100))); } #[test] @@ -662,8 +662,8 @@ mod tests { // Regression test that isaac is actually using the above vector let v = (0..10).map(|_| ra.next_u32()).collect::>(); assert_eq!(v, - vec!(2558573138, 873787463, 263499565, 2103644246, 3595684709, - 4203127393, 264982119, 2765226902, 2737944514, 3900253796)); + vec![2558573138, 873787463, 263499565, 2103644246, 3595684709, + 4203127393, 264982119, 2765226902, 2737944514, 3900253796]); let seed: &[_] = &[12345, 67890, 54321, 9876]; let mut rb: IsaacRng = SeedableRng::from_seed(seed); @@ -674,8 +674,8 @@ mod tests { let v = (0..10).map(|_| rb.next_u32()).collect::>(); assert_eq!(v, - vec!(3676831399, 3183332890, 2834741178, 3854698763, 2717568474, - 1576568959, 3507990155, 179069555, 141456972, 2478885421)); + vec![3676831399, 3183332890, 2834741178, 3854698763, 2717568474, + 1576568959, 3507990155, 179069555, 141456972, 2478885421]); } #[test] #[rustfmt_skip] @@ -685,10 +685,10 @@ mod tests { // Regression test that isaac is actually using the above vector let v = (0..10).map(|_| ra.next_u64()).collect::>(); assert_eq!(v, - vec!(547121783600835980, 14377643087320773276, 17351601304698403469, + vec![547121783600835980, 14377643087320773276, 17351601304698403469, 1238879483818134882, 11952566807690396487, 13970131091560099343, 4469761996653280935, 15552757044682284409, 6860251611068737823, - 13722198873481261842)); + 13722198873481261842]); let seed: &[_] = &[12345, 67890, 54321, 9876]; let mut rb: Isaac64Rng = SeedableRng::from_seed(seed); @@ -699,10 +699,10 @@ mod tests { let v = (0..10).map(|_| rb.next_u64()).collect::>(); assert_eq!(v, - vec!(18143823860592706164, 8491801882678285927, 2699425367717515619, + vec![18143823860592706164, 8491801882678285927, 2699425367717515619, 17196852593171130876, 2606123525235546165, 15790932315217671084, 596345674630742204, 9947027391921273664, 11788097613744130851, - 10391409374914919106)); + 10391409374914919106]); } diff --git a/src/librand/rand_impls.rs b/src/librand/rand_impls.rs index 41ad089ecd235..b0d824da3ab47 100644 --- a/src/librand/rand_impls.rs +++ b/src/librand/rand_impls.rs @@ -203,10 +203,6 @@ tuple_impl!{A, B, C, D, E, F, G, H, I, J, K, L} impl Rand for Option { #[inline] fn rand(rng: &mut R) -> Option { - if rng.gen() { - Some(rng.gen()) - } else { - None - } + if rng.gen() { Some(rng.gen()) } else { None } } } diff --git a/src/librand/reseeding.rs b/src/librand/reseeding.rs index 48395c12fafeb..b8a65842e2ff5 100644 --- a/src/librand/reseeding.rs +++ b/src/librand/reseeding.rs @@ -138,7 +138,7 @@ mod tests { } } impl Default for Counter { - /// Constructs a `Counter` with initial value zero. + /// Constructs a `Counter` with initial value zero. 
fn default() -> Counter { Counter { i: 0 } } @@ -169,8 +169,8 @@ mod tests { let mut ra: MyRng = SeedableRng::from_seed((ReseedWithDefault, 2)); let mut rb: MyRng = SeedableRng::from_seed((ReseedWithDefault, 2)); assert!(ra.gen_ascii_chars() - .take(100) - .eq(rb.gen_ascii_chars().take(100))); + .take(100) + .eq(rb.gen_ascii_chars().take(100))); } #[test] diff --git a/src/librbml/Cargo.toml b/src/librbml/Cargo.toml deleted file mode 100644 index ab89ac2b7a1eb..0000000000000 --- a/src/librbml/Cargo.toml +++ /dev/null @@ -1,13 +0,0 @@ -[package] -authors = ["The Rust Project Developers"] -name = "rbml" -version = "0.0.0" - -[lib] -name = "rbml" -path = "lib.rs" -crate-type = ["dylib"] - -[dependencies] -log = { path = "../liblog" } -serialize = { path = "../libserialize" } diff --git a/src/librbml/lib.rs b/src/librbml/lib.rs deleted file mode 100644 index 5a8a52f7dfc6e..0000000000000 --- a/src/librbml/lib.rs +++ /dev/null @@ -1,1609 +0,0 @@ -// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Really Bad Markup Language (rbml) is an internal serialization format of rustc. -//! This is not intended to be used by users. -//! -//! Originally based on the Extensible Binary Markup Language -//! (ebml; http://www.matroska.org/technical/specs/rfc/index.html), -//! it is now a separate format tuned for the rust object metadata. -//! -//! # Encoding -//! -//! RBML document consists of the tag, length and data. -//! The encoded data can contain multiple RBML documents concatenated. -//! -//! **Tags** are a hint for the following data. -//! Tags are a number from 0x000 to 0xfff, where 0xf0 through 0xff is reserved. -//! Tags less than 0xf0 are encoded in one literal byte. -//! Tags greater than 0xff are encoded in two big-endian bytes, -//! where the tag number is ORed with 0xf000. (E.g. tag 0x123 = `f1 23`) -//! -//! **Lengths** encode the length of the following data. -//! It is a variable-length unsigned isize, and one of the following forms: -//! -//! - `80` through `fe` for lengths up to 0x7e; -//! - `40 ff` through `7f ff` for lengths up to 0x3fff; -//! - `20 40 00` through `3f ff ff` for lengths up to 0x1fffff; -//! - `10 20 00 00` through `1f ff ff ff` for lengths up to 0xfffffff. -//! -//! The "overlong" form is allowed so that the length can be encoded -//! without the prior knowledge of the encoded data. -//! For example, the length 0 can be represented either by `80`, `40 00`, -//! `20 00 00` or `10 00 00 00`. -//! The encoder tries to minimize the length if possible. -//! Also, some predefined tags listed below are so commonly used that -//! their lengths are omitted ("implicit length"). -//! -//! **Data** can be either binary bytes or zero or more nested RBML documents. -//! Nested documents cannot overflow, and should be entirely contained -//! within a parent document. -//! -//! # Predefined Tags -//! -//! Most RBML tags are defined by the application. -//! (For the rust object metadata, see also `rustc::metadata::common`.) -//! RBML itself does define a set of predefined tags however, -//! intended for the auto-serialization implementation. -//! -//! Predefined tags with an implicit length: -//! -//! - `U8` (`00`): 1-byte unsigned integer. -//! 
- `U16` (`01`): 2-byte big endian unsigned integer. -//! - `U32` (`02`): 4-byte big endian unsigned integer. -//! - `U64` (`03`): 8-byte big endian unsigned integer. -//! Any of `U*` tags can be used to encode primitive unsigned integer types, -//! as long as it is no greater than the actual size. -//! For example, `u8` can only be represented via the `U8` tag. -//! -//! - `I8` (`04`): 1-byte signed integer. -//! - `I16` (`05`): 2-byte big endian signed integer. -//! - `I32` (`06`): 4-byte big endian signed integer. -//! - `I64` (`07`): 8-byte big endian signed integer. -//! Similar to `U*` tags. Always uses two's complement encoding. -//! -//! - `Bool` (`08`): 1-byte boolean value, `00` for false and `01` for true. -//! -//! - `Char` (`09`): 4-byte big endian Unicode scalar value. -//! Surrogate pairs or out-of-bound values are invalid. -//! -//! - `F32` (`0a`): 4-byte big endian unsigned integer representing -//! IEEE 754 binary32 floating-point format. -//! - `F64` (`0b`): 8-byte big endian unsigned integer representing -//! IEEE 754 binary64 floating-point format. -//! -//! - `Sub8` (`0c`): 1-byte unsigned integer for supplementary information. -//! - `Sub32` (`0d`): 4-byte unsigned integer for supplementary information. -//! Those two tags normally occur as the first subdocument of certain tags, -//! namely `Enum`, `Vec` and `Map`, to provide a variant or size information. -//! They can be used interchangeably. -//! -//! Predefined tags with an explicit length: -//! -//! - `Str` (`10`): A UTF-8-encoded string. -//! -//! - `Enum` (`11`): An enum. -//! The first subdocument should be `Sub*` tags with a variant ID. -//! Subsequent subdocuments, if any, encode variant arguments. -//! -//! - `Vec` (`12`): A vector (sequence). -//! - `VecElt` (`13`): A vector element. -//! The first subdocument should be `Sub*` tags with the number of elements. -//! Subsequent subdocuments should be `VecElt` tag per each element. -//! -//! - `Map` (`14`): A map (associated array). -//! - `MapKey` (`15`): A key part of the map entry. -//! - `MapVal` (`16`): A value part of the map entry. -//! The first subdocument should be `Sub*` tags with the number of entries. -//! Subsequent subdocuments should be an alternating sequence of -//! `MapKey` and `MapVal` tags per each entry. -//! -//! - `Opaque` (`17`): An opaque, custom-format tag. -//! Used to wrap ordinary custom tags or data in the auto-serialized context. -//! Rustc typically uses this to encode type information. -//! -//! First 0x20 tags are reserved by RBML; custom tags start at 0x20. 
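The length ("vuint") scheme described in the deleted RBML documentation above is small enough to sketch standalone. The following illustrative re-implementation is not part of the patch; it mirrors the byte patterns listed above and the thresholds used by the `writer::write_vuint` routine removed further down:

```rust
/// Minimal sketch of the RBML length ("vuint") encoding described above.
/// The high bits of the first byte say how many bytes the length occupies
/// (1 to 4); the remaining bits hold the value, big-endian.
fn encode_len(n: u32) -> Option<Vec<u8>> {
    if n < 0x7f {
        Some(vec![0x80 | n as u8])
    } else if n < 0x4000 {
        Some(vec![0x40 | (n >> 8) as u8, n as u8])
    } else if n < 0x20_0000 {
        Some(vec![0x20 | (n >> 16) as u8, (n >> 8) as u8, n as u8])
    } else if n < 0x1000_0000 {
        Some(vec![0x10 | (n >> 24) as u8, (n >> 16) as u8, (n >> 8) as u8, n as u8])
    } else {
        None // does not fit the four-byte form
    }
}

fn main() {
    assert_eq!(encode_len(0).unwrap(), [0x80]);            // shortest form of zero
    assert_eq!(encode_len(0x7e).unwrap(), [0xfe]);         // largest one-byte length
    assert_eq!(encode_len(0x3fff).unwrap(), [0x7f, 0xff]); // largest two-byte length
}
```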
- -#![crate_name = "rbml"] -#![unstable(feature = "rustc_private", issue = "27812")] -#![crate_type = "rlib"] -#![crate_type = "dylib"] -#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", - html_favicon_url = "https://doc.rust-lang.org/favicon.ico", - html_root_url = "https://doc.rust-lang.org/nightly/", - html_playground_url = "https://play.rust-lang.org/", - test(attr(deny(warnings))))] -#![cfg_attr(not(stage0), deny(warnings))] - -#![feature(rustc_private)] -#![feature(staged_api)] -#![feature(question_mark)] - -#![cfg_attr(test, feature(test))] - -extern crate serialize; - -#[cfg(test)] -extern crate serialize as rustc_serialize; // Used by RustcEncodable - -#[macro_use] -extern crate log; - -#[cfg(test)] -extern crate test; - -pub mod opaque; -pub mod leb128; - -pub use self::EbmlEncoderTag::*; -pub use self::Error::*; - -use std::str; -use std::fmt; - -/// Common data structures -#[derive(Clone, Copy)] -pub struct Doc<'a> { - pub data: &'a [u8], - pub start: usize, - pub end: usize, -} - -impl<'doc> Doc<'doc> { - pub fn new(data: &'doc [u8]) -> Doc<'doc> { - Doc { - data: data, - start: 0, - end: data.len(), - } - } - - pub fn get(&self, tag: usize) -> Doc<'doc> { - reader::get_doc(*self, tag) - } - - pub fn is_empty(&self) -> bool { - self.start == self.end - } - - pub fn as_str(&self) -> &'doc str { - str::from_utf8(&self.data[self.start..self.end]).unwrap() - } - - pub fn to_string(&self) -> String { - self.as_str().to_string() - } -} - -pub struct TaggedDoc<'a> { - tag: usize, - pub doc: Doc<'a>, -} - -#[derive(Copy, Clone, Debug)] -pub enum EbmlEncoderTag { - // tags 00..1f are reserved for auto-serialization. - // first NUM_IMPLICIT_TAGS tags are implicitly sized and lengths are not encoded. - EsU8 = 0x00, // + 1 byte - EsU16 = 0x01, // + 2 bytes - EsU32 = 0x02, // + 4 bytes - EsU64 = 0x03, // + 8 bytes - EsI8 = 0x04, // + 1 byte - EsI16 = 0x05, // + 2 bytes - EsI32 = 0x06, // + 4 bytes - EsI64 = 0x07, // + 8 bytes - EsBool = 0x08, // + 1 byte - EsChar = 0x09, // + 4 bytes - EsF32 = 0x0a, // + 4 bytes - EsF64 = 0x0b, // + 8 bytes - EsSub8 = 0x0c, // + 1 byte - EsSub32 = 0x0d, // + 4 bytes - // 0x0e and 0x0f are reserved - EsStr = 0x10, - EsEnum = 0x11, // encodes the variant id as the first EsSub* - EsVec = 0x12, // encodes the # of elements as the first EsSub* - EsVecElt = 0x13, - EsMap = 0x14, // encodes the # of pairs as the first EsSub* - EsMapKey = 0x15, - EsMapVal = 0x16, - EsOpaque = 0x17, -} - -const NUM_TAGS: usize = 0x1000; -const NUM_IMPLICIT_TAGS: usize = 0x0e; - -#[cfg_attr(rustfmt, rustfmt_skip)] -static TAG_IMPLICIT_LEN: [i8; NUM_IMPLICIT_TAGS] = [ - 1, 2, 4, 8, // EsU* - 1, 2, 4, 8, // ESI* - 1, // EsBool - 4, // EsChar - 4, 8, // EsF* - 1, 4, // EsSub* -]; - -#[derive(Debug)] -pub enum Error { - IntTooBig(usize), - InvalidTag(usize), - Expected(String), - IoError(std::io::Error), - ApplicationError(String), -} - -impl fmt::Display for Error { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - // FIXME: this should be a more useful display form - fmt::Debug::fmt(self, f) - } -} -// -------------------------------------- - -pub mod reader { - use std::char; - - use std::isize; - use std::mem::transmute; - - use serialize; - - use super::opaque; - use super::{ApplicationError, EsVec, EsMap, EsEnum, EsSub8, EsSub32, EsVecElt, EsMapKey, - EsU64, EsU32, EsU16, EsU8, EsI64, EsI32, EsI16, EsI8, EsBool, EsF64, EsF32, - EsChar, EsStr, EsMapVal, EsOpaque, EbmlEncoderTag, Doc, TaggedDoc, Error, - IntTooBig, InvalidTag, Expected, 
NUM_IMPLICIT_TAGS, TAG_IMPLICIT_LEN}; - - pub type DecodeResult = Result; - // rbml reading - - macro_rules! try_or { - ($e:expr, $r:expr) => ( - match $e { - Ok(e) => e, - Err(e) => { - debug!("ignored error: {:?}", e); - return $r - } - } - ) - } - - #[derive(Copy, Clone)] - pub struct Res { - pub val: usize, - pub next: usize, - } - - pub fn tag_at(data: &[u8], start: usize) -> DecodeResult { - let v = data[start] as usize; - if v < 0xf0 { - Ok(Res { - val: v, - next: start + 1, - }) - } else if v > 0xf0 { - Ok(Res { - val: ((v & 0xf) << 8) | data[start + 1] as usize, - next: start + 2, - }) - } else { - // every tag starting with byte 0xf0 is an overlong form, which is prohibited. - Err(InvalidTag(v)) - } - } - - #[inline(never)] - fn vuint_at_slow(data: &[u8], start: usize) -> DecodeResult { - let a = data[start]; - if a & 0x80 != 0 { - return Ok(Res { - val: (a & 0x7f) as usize, - next: start + 1, - }); - } - if a & 0x40 != 0 { - return Ok(Res { - val: ((a & 0x3f) as usize) << 8 | (data[start + 1] as usize), - next: start + 2, - }); - } - if a & 0x20 != 0 { - return Ok(Res { - val: ((a & 0x1f) as usize) << 16 | (data[start + 1] as usize) << 8 | - (data[start + 2] as usize), - next: start + 3, - }); - } - if a & 0x10 != 0 { - return Ok(Res { - val: ((a & 0x0f) as usize) << 24 | (data[start + 1] as usize) << 16 | - (data[start + 2] as usize) << 8 | - (data[start + 3] as usize), - next: start + 4, - }); - } - Err(IntTooBig(a as usize)) - } - - pub fn vuint_at(data: &[u8], start: usize) -> DecodeResult { - if data.len() - start < 4 { - return vuint_at_slow(data, start); - } - - // Lookup table for parsing EBML Element IDs as per - // http://ebml.sourceforge.net/specs/ The Element IDs are parsed by - // reading a big endian u32 positioned at data[start]. Using the four - // most significant bits of the u32 we lookup in the table below how - // the element ID should be derived from it. - // - // The table stores tuples (shift, mask) where shift is the number the - // u32 should be right shifted with and mask is the value the right - // shifted value should be masked with. If for example the most - // significant bit is set this means it's a class A ID and the u32 - // should be right shifted with 24 and masked with 0x7f. Therefore we - // store (24, 0x7f) at index 0x8 - 0xF (four bit numbers where the most - // significant bit is set). - // - // By storing the number of shifts and masks in a table instead of - // checking in order if the most significant bit is set, the second - // most significant bit is set etc. we can replace up to three - // "and+branch" with a single table lookup which gives us a measured - // speedup of around 2x on x86_64. 
- static SHIFT_MASK_TABLE: [(usize, u32); 16] = [(0, 0x0), - (0, 0x0fffffff), - (8, 0x1fffff), - (8, 0x1fffff), - (16, 0x3fff), - (16, 0x3fff), - (16, 0x3fff), - (16, 0x3fff), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f), - (24, 0x7f)]; - - unsafe { - let ptr = data.as_ptr().offset(start as isize) as *const u32; - let val = u32::from_be(*ptr); - - let i = (val >> 28) as usize; - let (shift, mask) = SHIFT_MASK_TABLE[i]; - Ok(Res { - val: ((val >> shift) & mask) as usize, - next: start + ((32 - shift) >> 3), - }) - } - } - - pub fn tag_len_at(data: &[u8], tag: Res) -> DecodeResult { - if tag.val < NUM_IMPLICIT_TAGS && TAG_IMPLICIT_LEN[tag.val] >= 0 { - Ok(Res { - val: TAG_IMPLICIT_LEN[tag.val] as usize, - next: tag.next, - }) - } else { - vuint_at(data, tag.next) - } - } - - pub fn doc_at<'a>(data: &'a [u8], start: usize) -> DecodeResult> { - let elt_tag = tag_at(data, start)?; - let elt_size = tag_len_at(data, elt_tag)?; - let end = elt_size.next + elt_size.val; - Ok(TaggedDoc { - tag: elt_tag.val, - doc: Doc { - data: data, - start: elt_size.next, - end: end, - }, - }) - } - - pub fn maybe_get_doc<'a>(d: Doc<'a>, tg: usize) -> Option> { - let mut pos = d.start; - while pos < d.end { - let elt_tag = try_or!(tag_at(d.data, pos), None); - let elt_size = try_or!(tag_len_at(d.data, elt_tag), None); - pos = elt_size.next + elt_size.val; - if elt_tag.val == tg { - return Some(Doc { - data: d.data, - start: elt_size.next, - end: pos, - }); - } - } - None - } - - pub fn get_doc<'a>(d: Doc<'a>, tg: usize) -> Doc<'a> { - match maybe_get_doc(d, tg) { - Some(d) => d, - None => { - error!("failed to find block with tag {:?}", tg); - panic!(); - } - } - } - - pub fn docs<'a>(d: Doc<'a>) -> DocsIterator<'a> { - DocsIterator { d: d } - } - - pub struct DocsIterator<'a> { - d: Doc<'a>, - } - - impl<'a> Iterator for DocsIterator<'a> { - type Item = (usize, Doc<'a>); - - fn next(&mut self) -> Option<(usize, Doc<'a>)> { - if self.d.start >= self.d.end { - return None; - } - - let elt_tag = try_or!(tag_at(self.d.data, self.d.start), { - self.d.start = self.d.end; - None - }); - let elt_size = try_or!(tag_len_at(self.d.data, elt_tag), { - self.d.start = self.d.end; - None - }); - - let end = elt_size.next + elt_size.val; - let doc = Doc { - data: self.d.data, - start: elt_size.next, - end: end, - }; - - self.d.start = end; - return Some((elt_tag.val, doc)); - } - } - - pub fn tagged_docs<'a>(d: Doc<'a>, tag: usize) -> TaggedDocsIterator<'a> { - TaggedDocsIterator { - iter: docs(d), - tag: tag, - } - } - - pub struct TaggedDocsIterator<'a> { - iter: DocsIterator<'a>, - tag: usize, - } - - impl<'a> Iterator for TaggedDocsIterator<'a> { - type Item = Doc<'a>; - - fn next(&mut self) -> Option> { - while let Some((tag, doc)) = self.iter.next() { - if tag == self.tag { - return Some(doc); - } - } - None - } - } - - pub fn with_doc_data(d: Doc, f: F) -> T - where F: FnOnce(&[u8]) -> T - { - f(&d.data[d.start..d.end]) - } - - pub fn doc_as_u8(d: Doc) -> u8 { - assert_eq!(d.end, d.start + 1); - d.data[d.start] - } - - pub fn doc_as_u64(d: Doc) -> u64 { - if d.end >= 8 { - // For performance, we read 8 big-endian bytes, - // and mask off the junk if there is any. This - // obviously won't work on the first 8 bytes - // of a file - we will fall of the start - // of the page and segfault. 
- - let mut b = [0; 8]; - b.copy_from_slice(&d.data[d.end - 8..d.end]); - let data = unsafe { (*(b.as_ptr() as *const u64)).to_be() }; - let len = d.end - d.start; - if len < 8 { - data & ((1 << (len * 8)) - 1) - } else { - data - } - } else { - let mut result = 0; - for b in &d.data[d.start..d.end] { - result = (result << 8) + (*b as u64); - } - result - } - } - - #[inline] - pub fn doc_as_u16(d: Doc) -> u16 { - doc_as_u64(d) as u16 - } - #[inline] - pub fn doc_as_u32(d: Doc) -> u32 { - doc_as_u64(d) as u32 - } - - #[inline] - pub fn doc_as_i8(d: Doc) -> i8 { - doc_as_u8(d) as i8 - } - #[inline] - pub fn doc_as_i16(d: Doc) -> i16 { - doc_as_u16(d) as i16 - } - #[inline] - pub fn doc_as_i32(d: Doc) -> i32 { - doc_as_u32(d) as i32 - } - #[inline] - pub fn doc_as_i64(d: Doc) -> i64 { - doc_as_u64(d) as i64 - } - - pub struct Decoder<'a> { - parent: Doc<'a>, - pos: usize, - } - - impl<'doc> Decoder<'doc> { - pub fn new(d: Doc<'doc>) -> Decoder<'doc> { - Decoder { - parent: d, - pos: d.start, - } - } - - fn next_doc(&mut self, exp_tag: EbmlEncoderTag) -> DecodeResult> { - debug!(". next_doc(exp_tag={:?})", exp_tag); - if self.pos >= self.parent.end { - return Err(Expected(format!("no more documents in current node!"))); - } - let TaggedDoc { tag: r_tag, doc: r_doc } = doc_at(self.parent.data, self.pos)?; - debug!("self.parent={:?}-{:?} self.pos={:?} r_tag={:?} r_doc={:?}-{:?}", - self.parent.start, - self.parent.end, - self.pos, - r_tag, - r_doc.start, - r_doc.end); - if r_tag != (exp_tag as usize) { - return Err(Expected(format!("expected EBML doc with tag {:?} but found tag {:?}", - exp_tag, - r_tag))); - } - if r_doc.end > self.parent.end { - return Err(Expected(format!("invalid EBML, child extends to {:#x}, parent to \ - {:#x}", - r_doc.end, - self.parent.end))); - } - self.pos = r_doc.end; - Ok(r_doc) - } - - fn push_doc(&mut self, exp_tag: EbmlEncoderTag, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - let d = self.next_doc(exp_tag)?; - let old_parent = self.parent; - let old_pos = self.pos; - self.parent = d; - self.pos = d.start; - let r = f(self)?; - self.parent = old_parent; - self.pos = old_pos; - Ok(r) - } - - fn _next_sub(&mut self) -> DecodeResult { - // empty vector/map optimization - if self.parent.is_empty() { - return Ok(0); - } - - let TaggedDoc { tag: r_tag, doc: r_doc } = doc_at(self.parent.data, self.pos)?; - let r = if r_tag == (EsSub8 as usize) { - doc_as_u8(r_doc) as usize - } else if r_tag == (EsSub32 as usize) { - doc_as_u32(r_doc) as usize - } else { - return Err(Expected(format!("expected EBML doc with tag {:?} or {:?} but found \ - tag {:?}", - EsSub8, - EsSub32, - r_tag))); - }; - if r_doc.end > self.parent.end { - return Err(Expected(format!("invalid EBML, child extends to {:#x}, parent to \ - {:#x}", - r_doc.end, - self.parent.end))); - } - self.pos = r_doc.end; - debug!("_next_sub result={:?}", r); - Ok(r) - } - - // variable-length unsigned integer with different tags. - // `first_tag` should be a tag for u8 or i8. - // `last_tag` should be the largest allowed integer tag with the matching signedness. - // all tags between them should be valid, in the order of u8, u16, u32 and u64. 
- fn _next_int(&mut self, - first_tag: EbmlEncoderTag, - last_tag: EbmlEncoderTag) - -> DecodeResult { - if self.pos >= self.parent.end { - return Err(Expected(format!("no more documents in current node!"))); - } - - let TaggedDoc { tag: r_tag, doc: r_doc } = doc_at(self.parent.data, self.pos)?; - let r = if first_tag as usize <= r_tag && r_tag <= last_tag as usize { - match r_tag - first_tag as usize { - 0 => doc_as_u8(r_doc) as u64, - 1 => doc_as_u16(r_doc) as u64, - 2 => doc_as_u32(r_doc) as u64, - 3 => doc_as_u64(r_doc), - _ => unreachable!(), - } - } else { - return Err(Expected(format!("expected EBML doc with tag {:?} through {:?} but \ - found tag {:?}", - first_tag, - last_tag, - r_tag))); - }; - if r_doc.end > self.parent.end { - return Err(Expected(format!("invalid EBML, child extends to {:#x}, parent to \ - {:#x}", - r_doc.end, - self.parent.end))); - } - self.pos = r_doc.end; - debug!("_next_int({:?}, {:?}) result={:?}", first_tag, last_tag, r); - Ok(r) - } - - pub fn read_opaque(&mut self, op: F) -> DecodeResult - where F: FnOnce(&mut opaque::Decoder, Doc) -> DecodeResult - { - let doc = self.next_doc(EsOpaque)?; - - let result = { - let mut opaque_decoder = opaque::Decoder::new(doc.data, doc.start); - op(&mut opaque_decoder, doc)? - }; - - Ok(result) - } - - pub fn position(&self) -> usize { - self.pos - } - - pub fn advance(&mut self, bytes: usize) { - self.pos += bytes; - } - } - - impl<'doc> serialize::Decoder for Decoder<'doc> { - type Error = Error; - fn read_nil(&mut self) -> DecodeResult<()> { - Ok(()) - } - - fn read_u64(&mut self) -> DecodeResult { - self._next_int(EsU8, EsU64) - } - fn read_u32(&mut self) -> DecodeResult { - Ok(self._next_int(EsU8, EsU32)? as u32) - } - fn read_u16(&mut self) -> DecodeResult { - Ok(self._next_int(EsU8, EsU16)? as u16) - } - fn read_u8(&mut self) -> DecodeResult { - Ok(doc_as_u8(self.next_doc(EsU8)?)) - } - fn read_usize(&mut self) -> DecodeResult { - let v = self._next_int(EsU8, EsU64)?; - if v > (::std::usize::MAX as u64) { - Err(IntTooBig(v as usize)) - } else { - Ok(v as usize) - } - } - - fn read_i64(&mut self) -> DecodeResult { - Ok(self._next_int(EsI8, EsI64)? as i64) - } - fn read_i32(&mut self) -> DecodeResult { - Ok(self._next_int(EsI8, EsI32)? as i32) - } - fn read_i16(&mut self) -> DecodeResult { - Ok(self._next_int(EsI8, EsI16)? as i16) - } - fn read_i8(&mut self) -> DecodeResult { - Ok(doc_as_u8(self.next_doc(EsI8)?) as i8) - } - fn read_isize(&mut self) -> DecodeResult { - let v = self._next_int(EsI8, EsI64)? as i64; - if v > (isize::MAX as i64) || v < (isize::MIN as i64) { - debug!("FIXME \\#6122: Removing this makes this function miscompile"); - Err(IntTooBig(v as usize)) - } else { - Ok(v as isize) - } - } - - fn read_bool(&mut self) -> DecodeResult { - Ok(doc_as_u8(self.next_doc(EsBool)?) 
!= 0) - } - - fn read_f64(&mut self) -> DecodeResult { - let bits = doc_as_u64(self.next_doc(EsF64)?); - Ok(unsafe { transmute(bits) }) - } - fn read_f32(&mut self) -> DecodeResult { - let bits = doc_as_u32(self.next_doc(EsF32)?); - Ok(unsafe { transmute(bits) }) - } - fn read_char(&mut self) -> DecodeResult { - Ok(char::from_u32(doc_as_u32(self.next_doc(EsChar)?)).unwrap()) - } - fn read_str(&mut self) -> DecodeResult { - Ok(self.next_doc(EsStr)?.to_string()) - } - - // Compound types: - fn read_enum(&mut self, name: &str, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_enum({})", name); - - let doc = self.next_doc(EsEnum)?; - - let (old_parent, old_pos) = (self.parent, self.pos); - self.parent = doc; - self.pos = self.parent.start; - - let result = f(self)?; - - self.parent = old_parent; - self.pos = old_pos; - Ok(result) - } - - fn read_enum_variant(&mut self, _: &[&str], mut f: F) -> DecodeResult - where F: FnMut(&mut Decoder<'doc>, usize) -> DecodeResult - { - debug!("read_enum_variant()"); - let idx = self._next_sub()?; - debug!(" idx={}", idx); - - f(self, idx) - } - - fn read_enum_variant_arg(&mut self, idx: usize, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_enum_variant_arg(idx={})", idx); - f(self) - } - - fn read_enum_struct_variant(&mut self, _: &[&str], mut f: F) -> DecodeResult - where F: FnMut(&mut Decoder<'doc>, usize) -> DecodeResult - { - debug!("read_enum_struct_variant()"); - let idx = self._next_sub()?; - debug!(" idx={}", idx); - - f(self, idx) - } - - fn read_enum_struct_variant_field(&mut self, - name: &str, - idx: usize, - f: F) - -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_enum_struct_variant_arg(name={}, idx={})", name, idx); - f(self) - } - - fn read_struct(&mut self, name: &str, _: usize, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_struct(name={})", name); - f(self) - } - - fn read_struct_field(&mut self, name: &str, idx: usize, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_struct_field(name={}, idx={})", name, idx); - f(self) - } - - fn read_tuple(&mut self, tuple_len: usize, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_tuple()"); - self.read_seq(move |d, len| { - if len == tuple_len { - f(d) - } else { - Err(Expected(format!("Expected tuple of length `{}`, found tuple of length \ - `{}`", - tuple_len, - len))) - } - }) - } - - fn read_tuple_arg(&mut self, idx: usize, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_tuple_arg(idx={})", idx); - self.read_seq_elt(idx, f) - } - - fn read_tuple_struct(&mut self, name: &str, len: usize, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_tuple_struct(name={})", name); - self.read_tuple(len, f) - } - - fn read_tuple_struct_arg(&mut self, idx: usize, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_tuple_struct_arg(idx={})", idx); - self.read_tuple_arg(idx, f) - } - - fn read_option(&mut self, mut f: F) -> DecodeResult - where F: FnMut(&mut Decoder<'doc>, bool) -> DecodeResult - { - debug!("read_option()"); - self.read_enum("Option", move |this| { - this.read_enum_variant(&["None", "Some"], move |this, idx| { - match idx { - 0 => f(this, false), - 1 => f(this, true), - _ => 
Err(Expected(format!("Expected None or Some"))), - } - }) - }) - } - - fn read_seq(&mut self, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>, usize) -> DecodeResult - { - debug!("read_seq()"); - self.push_doc(EsVec, move |d| { - let len = d._next_sub()?; - debug!(" len={}", len); - f(d, len) - }) - } - - fn read_seq_elt(&mut self, idx: usize, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_seq_elt(idx={})", idx); - self.push_doc(EsVecElt, f) - } - - fn read_map(&mut self, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>, usize) -> DecodeResult - { - debug!("read_map()"); - self.push_doc(EsMap, move |d| { - let len = d._next_sub()?; - debug!(" len={}", len); - f(d, len) - }) - } - - fn read_map_elt_key(&mut self, idx: usize, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_map_elt_key(idx={})", idx); - self.push_doc(EsMapKey, f) - } - - fn read_map_elt_val(&mut self, idx: usize, f: F) -> DecodeResult - where F: FnOnce(&mut Decoder<'doc>) -> DecodeResult - { - debug!("read_map_elt_val(idx={})", idx); - self.push_doc(EsMapVal, f) - } - - fn error(&mut self, err: &str) -> Error { - ApplicationError(err.to_string()) - } - } -} - -pub mod writer { - use std::mem; - use std::io::prelude::*; - use std::io::{self, SeekFrom, Cursor}; - - use super::opaque; - use super::{EsVec, EsMap, EsEnum, EsSub8, EsSub32, EsVecElt, EsMapKey, EsU64, EsU32, EsU16, - EsU8, EsI64, EsI32, EsI16, EsI8, EsBool, EsF64, EsF32, EsChar, EsStr, EsMapVal, - EsOpaque, NUM_IMPLICIT_TAGS, NUM_TAGS}; - - use serialize; - - - pub type EncodeResult = io::Result<()>; - - // rbml writing - pub struct Encoder<'a> { - pub writer: &'a mut Cursor>, - size_positions: Vec, - relax_limit: u64, // do not move encoded bytes before this position - } - - fn write_tag(w: &mut W, n: usize) -> EncodeResult { - if n < 0xf0 { - w.write_all(&[n as u8]) - } else if 0x100 <= n && n < NUM_TAGS { - w.write_all(&[0xf0 | (n >> 8) as u8, n as u8]) - } else { - Err(io::Error::new(io::ErrorKind::Other, &format!("invalid tag: {}", n)[..])) - } - } - - fn write_sized_vuint(w: &mut W, n: usize, size: usize) -> EncodeResult { - match size { - 1 => w.write_all(&[0x80 | (n as u8)]), - 2 => w.write_all(&[0x40 | ((n >> 8) as u8), n as u8]), - 3 => w.write_all(&[0x20 | ((n >> 16) as u8), (n >> 8) as u8, n as u8]), - 4 => w.write_all(&[0x10 | ((n >> 24) as u8), (n >> 16) as u8, (n >> 8) as u8, n as u8]), - _ => Err(io::Error::new(io::ErrorKind::Other, &format!("isize too big: {}", n)[..])), - } - } - - pub fn write_vuint(w: &mut W, n: usize) -> EncodeResult { - if n < 0x7f { - return write_sized_vuint(w, n, 1); - } - if n < 0x4000 { - return write_sized_vuint(w, n, 2); - } - if n < 0x200000 { - return write_sized_vuint(w, n, 3); - } - if n < 0x10000000 { - return write_sized_vuint(w, n, 4); - } - Err(io::Error::new(io::ErrorKind::Other, &format!("isize too big: {}", n)[..])) - } - - impl<'a> Encoder<'a> { - pub fn new(w: &'a mut Cursor>) -> Encoder<'a> { - Encoder { - writer: w, - size_positions: vec![], - relax_limit: 0, - } - } - - pub fn start_tag(&mut self, tag_id: usize) -> EncodeResult { - debug!("Start tag {:?}", tag_id); - assert!(tag_id >= NUM_IMPLICIT_TAGS); - - // Write the enum ID: - write_tag(self.writer, tag_id)?; - - // Write a placeholder four-byte size. 
- let cur_pos = self.writer.seek(SeekFrom::Current(0))?; - self.size_positions.push(cur_pos); - let zeroes: &[u8] = &[0, 0, 0, 0]; - self.writer.write_all(zeroes) - } - - pub fn end_tag(&mut self) -> EncodeResult { - let last_size_pos = self.size_positions.pop().unwrap(); - let cur_pos = self.writer.seek(SeekFrom::Current(0))?; - self.writer.seek(SeekFrom::Start(last_size_pos))?; - let size = (cur_pos - last_size_pos - 4) as usize; - - // relax the size encoding for small tags (bigger tags are costly to move). - // we should never try to move the stable positions, however. - const RELAX_MAX_SIZE: usize = 0x100; - if size <= RELAX_MAX_SIZE && last_size_pos >= self.relax_limit { - // we can't alter the buffer in place, so have a temporary buffer - let mut buf = [0u8; RELAX_MAX_SIZE]; - { - let last_size_pos = last_size_pos as usize; - let data = &self.writer.get_ref()[last_size_pos + 4..cur_pos as usize]; - buf[..size].copy_from_slice(data); - } - - // overwrite the size and data and continue - write_vuint(self.writer, size)?; - self.writer.write_all(&buf[..size])?; - } else { - // overwrite the size with an overlong encoding and skip past the data - write_sized_vuint(self.writer, size, 4)?; - self.writer.seek(SeekFrom::Start(cur_pos))?; - } - - debug!("End tag (size = {:?})", size); - Ok(()) - } - - pub fn wr_tag(&mut self, tag_id: usize, blk: F) -> EncodeResult - where F: FnOnce() -> EncodeResult - { - self.start_tag(tag_id)?; - blk()?; - self.end_tag() - } - - pub fn wr_tagged_bytes(&mut self, tag_id: usize, b: &[u8]) -> EncodeResult { - assert!(tag_id >= NUM_IMPLICIT_TAGS); - write_tag(self.writer, tag_id)?; - write_vuint(self.writer, b.len())?; - self.writer.write_all(b) - } - - pub fn wr_tagged_u64(&mut self, tag_id: usize, v: u64) -> EncodeResult { - let bytes: [u8; 8] = unsafe { mem::transmute(v.to_be()) }; - // tagged integers are emitted in big-endian, with no - // leading zeros. 
- let leading_zero_bytes = v.leading_zeros() / 8; - self.wr_tagged_bytes(tag_id, &bytes[leading_zero_bytes as usize..]) - } - - #[inline] - pub fn wr_tagged_u32(&mut self, tag_id: usize, v: u32) -> EncodeResult { - self.wr_tagged_u64(tag_id, v as u64) - } - - #[inline] - pub fn wr_tagged_u16(&mut self, tag_id: usize, v: u16) -> EncodeResult { - self.wr_tagged_u64(tag_id, v as u64) - } - - #[inline] - pub fn wr_tagged_u8(&mut self, tag_id: usize, v: u8) -> EncodeResult { - self.wr_tagged_bytes(tag_id, &[v]) - } - - #[inline] - pub fn wr_tagged_i64(&mut self, tag_id: usize, v: i64) -> EncodeResult { - self.wr_tagged_u64(tag_id, v as u64) - } - - #[inline] - pub fn wr_tagged_i32(&mut self, tag_id: usize, v: i32) -> EncodeResult { - self.wr_tagged_u32(tag_id, v as u32) - } - - #[inline] - pub fn wr_tagged_i16(&mut self, tag_id: usize, v: i16) -> EncodeResult { - self.wr_tagged_u16(tag_id, v as u16) - } - - #[inline] - pub fn wr_tagged_i8(&mut self, tag_id: usize, v: i8) -> EncodeResult { - self.wr_tagged_bytes(tag_id, &[v as u8]) - } - - pub fn wr_tagged_str(&mut self, tag_id: usize, v: &str) -> EncodeResult { - self.wr_tagged_bytes(tag_id, v.as_bytes()) - } - - // for auto-serialization - fn wr_tagged_raw_bytes(&mut self, tag_id: usize, b: &[u8]) -> EncodeResult { - write_tag(self.writer, tag_id)?; - self.writer.write_all(b) - } - - fn wr_tagged_raw_u64(&mut self, tag_id: usize, v: u64) -> EncodeResult { - let bytes: [u8; 8] = unsafe { mem::transmute(v.to_be()) }; - self.wr_tagged_raw_bytes(tag_id, &bytes) - } - - fn wr_tagged_raw_u32(&mut self, tag_id: usize, v: u32) -> EncodeResult { - let bytes: [u8; 4] = unsafe { mem::transmute(v.to_be()) }; - self.wr_tagged_raw_bytes(tag_id, &bytes) - } - - fn wr_tagged_raw_u16(&mut self, tag_id: usize, v: u16) -> EncodeResult { - let bytes: [u8; 2] = unsafe { mem::transmute(v.to_be()) }; - self.wr_tagged_raw_bytes(tag_id, &bytes) - } - - fn wr_tagged_raw_u8(&mut self, tag_id: usize, v: u8) -> EncodeResult { - self.wr_tagged_raw_bytes(tag_id, &[v]) - } - - fn wr_tagged_raw_i64(&mut self, tag_id: usize, v: i64) -> EncodeResult { - self.wr_tagged_raw_u64(tag_id, v as u64) - } - - fn wr_tagged_raw_i32(&mut self, tag_id: usize, v: i32) -> EncodeResult { - self.wr_tagged_raw_u32(tag_id, v as u32) - } - - fn wr_tagged_raw_i16(&mut self, tag_id: usize, v: i16) -> EncodeResult { - self.wr_tagged_raw_u16(tag_id, v as u16) - } - - fn wr_tagged_raw_i8(&mut self, tag_id: usize, v: i8) -> EncodeResult { - self.wr_tagged_raw_bytes(tag_id, &[v as u8]) - } - - pub fn wr_bytes(&mut self, b: &[u8]) -> EncodeResult { - debug!("Write {:?} bytes", b.len()); - self.writer.write_all(b) - } - - pub fn wr_str(&mut self, s: &str) -> EncodeResult { - debug!("Write str: {:?}", s); - self.writer.write_all(s.as_bytes()) - } - - /// Returns the current position while marking it stable, i.e. - /// generated bytes so far wouldn't be affected by relaxation. 
- pub fn mark_stable_position(&mut self) -> u64 { - let pos = self.writer.seek(SeekFrom::Current(0)).unwrap(); - if self.relax_limit < pos { - self.relax_limit = pos; - } - pos - } - } - - impl<'a> Encoder<'a> { - // used internally to emit things like the vector length and so on - fn _emit_tagged_sub(&mut self, v: usize) -> EncodeResult { - if v as u8 as usize == v { - self.wr_tagged_raw_u8(EsSub8 as usize, v as u8) - } else if v as u32 as usize == v { - self.wr_tagged_raw_u32(EsSub32 as usize, v as u32) - } else { - Err(io::Error::new(io::ErrorKind::Other, - &format!("length or variant id too big: {}", v)[..])) - } - } - - pub fn emit_opaque(&mut self, f: F) -> EncodeResult - where F: FnOnce(&mut opaque::Encoder) -> EncodeResult - { - self.start_tag(EsOpaque as usize)?; - - { - let mut opaque_encoder = opaque::Encoder::new(self.writer); - f(&mut opaque_encoder)?; - } - - self.mark_stable_position(); - self.end_tag() - } - } - - impl<'a> serialize::Encoder for Encoder<'a> { - type Error = io::Error; - - fn emit_nil(&mut self) -> EncodeResult { - Ok(()) - } - - fn emit_usize(&mut self, v: usize) -> EncodeResult { - self.emit_u64(v as u64) - } - fn emit_u64(&mut self, v: u64) -> EncodeResult { - if v as u32 as u64 == v { - self.emit_u32(v as u32) - } else { - self.wr_tagged_raw_u64(EsU64 as usize, v) - } - } - fn emit_u32(&mut self, v: u32) -> EncodeResult { - if v as u16 as u32 == v { - self.emit_u16(v as u16) - } else { - self.wr_tagged_raw_u32(EsU32 as usize, v) - } - } - fn emit_u16(&mut self, v: u16) -> EncodeResult { - if v as u8 as u16 == v { - self.emit_u8(v as u8) - } else { - self.wr_tagged_raw_u16(EsU16 as usize, v) - } - } - fn emit_u8(&mut self, v: u8) -> EncodeResult { - self.wr_tagged_raw_u8(EsU8 as usize, v) - } - - fn emit_isize(&mut self, v: isize) -> EncodeResult { - self.emit_i64(v as i64) - } - fn emit_i64(&mut self, v: i64) -> EncodeResult { - if v as i32 as i64 == v { - self.emit_i32(v as i32) - } else { - self.wr_tagged_raw_i64(EsI64 as usize, v) - } - } - fn emit_i32(&mut self, v: i32) -> EncodeResult { - if v as i16 as i32 == v { - self.emit_i16(v as i16) - } else { - self.wr_tagged_raw_i32(EsI32 as usize, v) - } - } - fn emit_i16(&mut self, v: i16) -> EncodeResult { - if v as i8 as i16 == v { - self.emit_i8(v as i8) - } else { - self.wr_tagged_raw_i16(EsI16 as usize, v) - } - } - fn emit_i8(&mut self, v: i8) -> EncodeResult { - self.wr_tagged_raw_i8(EsI8 as usize, v) - } - - fn emit_bool(&mut self, v: bool) -> EncodeResult { - self.wr_tagged_raw_u8(EsBool as usize, v as u8) - } - - fn emit_f64(&mut self, v: f64) -> EncodeResult { - let bits = unsafe { mem::transmute(v) }; - self.wr_tagged_raw_u64(EsF64 as usize, bits) - } - fn emit_f32(&mut self, v: f32) -> EncodeResult { - let bits = unsafe { mem::transmute(v) }; - self.wr_tagged_raw_u32(EsF32 as usize, bits) - } - fn emit_char(&mut self, v: char) -> EncodeResult { - self.wr_tagged_raw_u32(EsChar as usize, v as u32) - } - - fn emit_str(&mut self, v: &str) -> EncodeResult { - self.wr_tagged_str(EsStr as usize, v) - } - - fn emit_enum(&mut self, _name: &str, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.start_tag(EsEnum as usize)?; - f(self)?; - self.end_tag() - } - - fn emit_enum_variant(&mut self, _: &str, v_id: usize, _: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self._emit_tagged_sub(v_id)?; - f(self) - } - - fn emit_enum_variant_arg(&mut self, _: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - 
{ - f(self) - } - - fn emit_enum_struct_variant(&mut self, - v_name: &str, - v_id: usize, - cnt: usize, - f: F) - -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_enum_variant(v_name, v_id, cnt, f) - } - - fn emit_enum_struct_variant_field(&mut self, _: &str, idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_enum_variant_arg(idx, f) - } - - fn emit_struct(&mut self, _: &str, _len: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - f(self) - } - - fn emit_struct_field(&mut self, _name: &str, _: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - f(self) - } - - fn emit_tuple(&mut self, len: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_seq(len, f) - } - fn emit_tuple_arg(&mut self, idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_seq_elt(idx, f) - } - - fn emit_tuple_struct(&mut self, _: &str, len: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_seq(len, f) - } - fn emit_tuple_struct_arg(&mut self, idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_seq_elt(idx, f) - } - - fn emit_option(&mut self, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_enum("Option", f) - } - fn emit_option_none(&mut self) -> EncodeResult { - self.emit_enum_variant("None", 0, 0, |_| Ok(())) - } - fn emit_option_some(&mut self, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - - self.emit_enum_variant("Some", 1, 1, f) - } - - fn emit_seq(&mut self, len: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - if len == 0 { - // empty vector optimization - return self.wr_tagged_bytes(EsVec as usize, &[]); - } - - self.start_tag(EsVec as usize)?; - self._emit_tagged_sub(len)?; - f(self)?; - self.end_tag() - } - - fn emit_seq_elt(&mut self, _idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - - self.start_tag(EsVecElt as usize)?; - f(self)?; - self.end_tag() - } - - fn emit_map(&mut self, len: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - if len == 0 { - // empty map optimization - return self.wr_tagged_bytes(EsMap as usize, &[]); - } - - self.start_tag(EsMap as usize)?; - self._emit_tagged_sub(len)?; - f(self)?; - self.end_tag() - } - - fn emit_map_elt_key(&mut self, _idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - - self.start_tag(EsMapKey as usize)?; - f(self)?; - self.end_tag() - } - - fn emit_map_elt_val(&mut self, _idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.start_tag(EsMapVal as usize)?; - f(self)?; - self.end_tag() - } - } -} - -// ___________________________________________________________________________ -// Testing - -#[cfg(test)] -mod tests { - use super::{Doc, reader, writer}; - - use serialize::{Encodable, Decodable}; - - use std::io::Cursor; - - #[test] - fn test_vuint_at() { - let data = &[ - 0x80, - 0xff, - 0x40, 0x00, - 0x7f, 0xff, - 0x20, 0x00, 0x00, - 0x3f, 0xff, 0xff, - 0x10, 0x00, 0x00, 0x00, - 0x1f, 0xff, 0xff, 0xff - ]; - - let mut res: reader::Res; - - // Class A - res = reader::vuint_at(data, 0).unwrap(); - assert_eq!(res.val, 0); - assert_eq!(res.next, 1); - res 
= reader::vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, (1 << 7) - 1); - assert_eq!(res.next, 2); - - // Class B - res = reader::vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, 0); - assert_eq!(res.next, 4); - res = reader::vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, (1 << 14) - 1); - assert_eq!(res.next, 6); - - // Class C - res = reader::vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, 0); - assert_eq!(res.next, 9); - res = reader::vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, (1 << 21) - 1); - assert_eq!(res.next, 12); - - // Class D - res = reader::vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, 0); - assert_eq!(res.next, 16); - res = reader::vuint_at(data, res.next).unwrap(); - assert_eq!(res.val, (1 << 28) - 1); - assert_eq!(res.next, 20); - } - - #[test] - fn test_option_int() { - fn test_v(v: Option) { - debug!("v == {:?}", v); - let mut wr = Cursor::new(Vec::new()); - { - let mut rbml_w = writer::Encoder::new(&mut wr); - let _ = v.encode(&mut rbml_w); - } - let rbml_doc = Doc::new(wr.get_ref()); - let mut deser = reader::Decoder::new(rbml_doc); - let v1 = Decodable::decode(&mut deser).unwrap(); - debug!("v1 == {:?}", v1); - assert_eq!(v, v1); - } - - test_v(Some(22)); - test_v(None); - test_v(Some(3)); - } -} - -#[cfg(test)] -mod bench { - #![allow(non_snake_case)] - use test::Bencher; - use super::reader; - - #[bench] - pub fn vuint_at_A_aligned(b: &mut Bencher) { - let data = (0..4 * 100) - .map(|i| { - match i % 2 { - 0 => 0x80, - _ => i as u8, - } - }) - .collect::>(); - let mut sum = 0; - b.iter(|| { - let mut i = 0; - while i < data.len() { - sum += reader::vuint_at(&data, i).unwrap().val; - i += 4; - } - }); - } - - #[bench] - pub fn vuint_at_A_unaligned(b: &mut Bencher) { - let data = (0..4 * 100 + 1) - .map(|i| { - match i % 2 { - 1 => 0x80, - _ => i as u8, - } - }) - .collect::>(); - let mut sum = 0; - b.iter(|| { - let mut i = 1; - while i < data.len() { - sum += reader::vuint_at(&data, i).unwrap().val; - i += 4; - } - }); - } - - #[bench] - pub fn vuint_at_D_aligned(b: &mut Bencher) { - let data = (0..4 * 100) - .map(|i| { - match i % 4 { - 0 => 0x10, - 3 => i as u8, - _ => 0, - } - }) - .collect::>(); - let mut sum = 0; - b.iter(|| { - let mut i = 0; - while i < data.len() { - sum += reader::vuint_at(&data, i).unwrap().val; - i += 4; - } - }); - } - - #[bench] - pub fn vuint_at_D_unaligned(b: &mut Bencher) { - let data = (0..4 * 100 + 1) - .map(|i| { - match i % 4 { - 1 => 0x10, - 0 => i as u8, - _ => 0, - } - }) - .collect::>(); - let mut sum = 0; - b.iter(|| { - let mut i = 1; - while i < data.len() { - sum += reader::vuint_at(&data, i).unwrap().val; - i += 4; - } - }); - } -} diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index aaef8e8423cbe..578ef68b00386 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -14,7 +14,6 @@ flate = { path = "../libflate" } fmt_macros = { path = "../libfmt_macros" } graphviz = { path = "../libgraphviz" } log = { path = "../liblog" } -rbml = { path = "../librbml" } rustc_back = { path = "../librustc_back" } rustc_bitflags = { path = "../librustc_bitflags" } rustc_const_math = { path = "../librustc_const_math" } diff --git a/src/librustc/cfg/construct.rs b/src/librustc/cfg/construct.rs index 25a73226473b7..1b2976b7435d8 100644 --- a/src/librustc/cfg/construct.rs +++ b/src/librustc/cfg/construct.rs @@ -126,7 +126,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.add_ast_node(pat.id, &[pats_exit]) } - PatKind::Vec(ref pre, ref vec, ref post) => { 
+ PatKind::Slice(ref pre, ref vec, ref post) => { let pre_exit = self.pats_all(pre.iter(), pred); let vec_exit = self.pats_all(vec.iter(), pre_exit); let post_exit = self.pats_all(post.iter(), vec_exit); @@ -298,7 +298,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { self.add_unreachable_node() } - hir::ExprVec(ref elems) => { + hir::ExprArray(ref elems) => { self.straightline(expr, pred, elems.iter().map(|e| &**e)) } @@ -536,7 +536,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { fn add_contained_edge(&mut self, source: CFGIndex, target: CFGIndex) { - let data = CFGEdgeData {exiting_scopes: vec!() }; + let data = CFGEdgeData {exiting_scopes: vec![] }; self.graph.add_edge(source, target, data); } @@ -545,7 +545,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { from_index: CFGIndex, to_loop: LoopScope, to_index: CFGIndex) { - let mut data = CFGEdgeData {exiting_scopes: vec!() }; + let mut data = CFGEdgeData {exiting_scopes: vec![] }; let mut scope = self.tcx.region_maps.node_extent(from_expr.id); let target_scope = self.tcx.region_maps.node_extent(to_loop.loop_id); while scope != target_scope { @@ -559,7 +559,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { _from_expr: &hir::Expr, from_index: CFGIndex) { let mut data = CFGEdgeData { - exiting_scopes: vec!(), + exiting_scopes: vec![], }; for &LoopScope { loop_id: id, .. } in self.loop_scopes.iter().rev() { data.exiting_scopes.push(id); diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 18179027c25de..e99ffa95ed63c 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -51,7 +51,6 @@ pub enum DepNode { WorkProduct(Arc), // Represents different phases in the compiler. - CrateReader, CollectLanguageItems, CheckStaticRecursion, ResolveLifetimes, @@ -103,18 +102,13 @@ pub enum DepNode { // table in the tcx (or elsewhere) maps to one of these // nodes. Often we map multiple tables to the same node if there // is no point in distinguishing them (e.g., both the type and - // predicates for an item wind up in `ItemSignature`). Other - // times, such as `ImplItems` vs `TraitItemDefIds`, tables which - // might be mergable are kept distinct because the sets of def-ids - // to which they apply are disjoint, and hence we might as well - // have distinct labels for easier debugging. + // predicates for an item wind up in `ItemSignature`). ImplOrTraitItems(D), ItemSignature(D), FieldTy(D), SizedConstraint(D), - TraitItemDefIds(D), + ImplOrTraitItemDefIds(D), InherentImpls(D), - ImplItems(D), // The set of impls for a given trait. 
Ultimately, it would be // nice to get more fine-grained here (e.g., to include a @@ -162,9 +156,8 @@ impl DepNode { ImplOrTraitItems, ItemSignature, FieldTy, - TraitItemDefIds, + ImplOrTraitItemDefIds, InherentImpls, - ImplItems, TraitImpls, ReprHints, } @@ -177,7 +170,6 @@ impl DepNode { match *self { Krate => Some(Krate), - CrateReader => Some(CrateReader), CollectLanguageItems => Some(CollectLanguageItems), CheckStaticRecursion => Some(CheckStaticRecursion), ResolveLifetimes => Some(ResolveLifetimes), @@ -231,9 +223,8 @@ impl DepNode { ItemSignature(ref d) => op(d).map(ItemSignature), FieldTy(ref d) => op(d).map(FieldTy), SizedConstraint(ref d) => op(d).map(SizedConstraint), - TraitItemDefIds(ref d) => op(d).map(TraitItemDefIds), + ImplOrTraitItemDefIds(ref d) => op(d).map(ImplOrTraitItemDefIds), InherentImpls(ref d) => op(d).map(InherentImpls), - ImplItems(ref d) => op(d).map(ImplItems), TraitImpls(ref d) => op(d).map(TraitImpls), TraitItems(ref d) => op(d).map(TraitItems), ReprHints(ref d) => op(d).map(ReprHints), diff --git a/src/librustc/dep_graph/graph.rs b/src/librustc/dep_graph/graph.rs index c42eeead69ec1..fac3586afc7b9 100644 --- a/src/librustc/dep_graph/graph.rs +++ b/src/librustc/dep_graph/graph.rs @@ -51,22 +51,15 @@ impl DepGraph { } } - /// True if we are actually building a dep-graph. If this returns false, - /// then the other methods on this `DepGraph` will have no net effect. - #[inline] - pub fn enabled(&self) -> bool { - self.data.thread.enabled() - } - pub fn query(&self) -> DepGraphQuery { self.data.thread.query() } - pub fn in_ignore<'graph>(&'graph self) -> raii::IgnoreTask<'graph> { + pub fn in_ignore<'graph>(&'graph self) -> Option> { raii::IgnoreTask::new(&self.data.thread) } - pub fn in_task<'graph>(&'graph self, key: DepNode) -> raii::DepTask<'graph> { + pub fn in_task<'graph>(&'graph self, key: DepNode) -> Option> { raii::DepTask::new(&self.data.thread, key) } @@ -85,11 +78,15 @@ impl DepGraph { } pub fn read(&self, v: DepNode) { - self.data.thread.enqueue(DepMessage::Read(v)); + if self.data.thread.is_enqueue_enabled() { + self.data.thread.enqueue(DepMessage::Read(v)); + } } pub fn write(&self, v: DepNode) { - self.data.thread.enqueue(DepMessage::Write(v)); + if self.data.thread.is_enqueue_enabled() { + self.data.thread.enqueue(DepMessage::Write(v)); + } } /// Indicates that a previous work product exists for `v`. 
This is diff --git a/src/librustc/dep_graph/raii.rs b/src/librustc/dep_graph/raii.rs index e4f572902f9e5..e39797599acfd 100644 --- a/src/librustc/dep_graph/raii.rs +++ b/src/librustc/dep_graph/raii.rs @@ -19,15 +19,21 @@ pub struct DepTask<'graph> { impl<'graph> DepTask<'graph> { pub fn new(data: &'graph DepGraphThreadData, key: DepNode) - -> DepTask<'graph> { - data.enqueue(DepMessage::PushTask(key.clone())); - DepTask { data: data, key: Some(key) } + -> Option> { + if data.is_enqueue_enabled() { + data.enqueue(DepMessage::PushTask(key.clone())); + Some(DepTask { data: data, key: Some(key) }) + } else { + None + } } } impl<'graph> Drop for DepTask<'graph> { fn drop(&mut self) { - self.data.enqueue(DepMessage::PopTask(self.key.take().unwrap())); + if self.data.is_enqueue_enabled() { + self.data.enqueue(DepMessage::PopTask(self.key.take().unwrap())); + } } } @@ -36,15 +42,21 @@ pub struct IgnoreTask<'graph> { } impl<'graph> IgnoreTask<'graph> { - pub fn new(data: &'graph DepGraphThreadData) -> IgnoreTask<'graph> { - data.enqueue(DepMessage::PushIgnore); - IgnoreTask { data: data } + pub fn new(data: &'graph DepGraphThreadData) -> Option> { + if data.is_enqueue_enabled() { + data.enqueue(DepMessage::PushIgnore); + Some(IgnoreTask { data: data }) + } else { + None + } } } impl<'graph> Drop for IgnoreTask<'graph> { fn drop(&mut self) { - self.data.enqueue(DepMessage::PopIgnore); + if self.data.is_enqueue_enabled() { + self.data.enqueue(DepMessage::PopIgnore); + } } } diff --git a/src/librustc/dep_graph/shadow.rs b/src/librustc/dep_graph/shadow.rs index 72a321425ef06..06def4bf19af3 100644 --- a/src/librustc/dep_graph/shadow.rs +++ b/src/librustc/dep_graph/shadow.rs @@ -64,6 +64,11 @@ impl ShadowGraph { } } + #[inline] + pub fn enabled(&self) -> bool { + ENABLED + } + pub fn enqueue(&self, message: &DepMessage) { if ENABLED { match self.stack.borrow_state() { diff --git a/src/librustc/dep_graph/thread.rs b/src/librustc/dep_graph/thread.rs index 90c42d66b7adf..9f755cf86e4e4 100644 --- a/src/librustc/dep_graph/thread.rs +++ b/src/librustc/dep_graph/thread.rs @@ -88,15 +88,24 @@ impl DepGraphThreadData { } } + /// True if we are actually building the full dep-graph. #[inline] - pub fn enabled(&self) -> bool { + pub fn is_fully_enabled(&self) -> bool { self.enabled } + /// True if (a) we are actually building the full dep-graph, or (b) we are + /// only enqueuing messages in order to sanity-check them (which happens + /// when debug assertions are enabled). + #[inline] + pub fn is_enqueue_enabled(&self) -> bool { + self.is_fully_enabled() || self.shadow_graph.enabled() + } + /// Sends the current batch of messages to the thread. Installs a /// new vector of messages. fn swap(&self) { - assert!(self.enabled, "should never swap if not enabled"); + assert!(self.is_fully_enabled(), "should never swap if not fully enabled"); // should be a buffer waiting for us (though of course we may // have to wait for depgraph thread to finish processing the @@ -112,7 +121,7 @@ impl DepGraphThreadData { } pub fn query(&self) -> DepGraphQuery { - assert!(self.enabled, "cannot query if dep graph construction not enabled"); + assert!(self.is_fully_enabled(), "should never query if not fully enabled"); self.enqueue(DepMessage::Query); self.swap(); self.query_in.recv().unwrap() @@ -122,9 +131,9 @@ impl DepGraphThreadData { /// the buffer is full, this may swap.) 
#[inline] pub fn enqueue(&self, message: DepMessage) { + assert!(self.is_enqueue_enabled(), "should never enqueue if not enqueue-enabled"); self.shadow_graph.enqueue(&message); - - if self.enabled { + if self.is_fully_enabled() { self.enqueue_enabled(message); } } diff --git a/src/librustc/diagnostics.rs b/src/librustc/diagnostics.rs index de68cc707ad7a..465a09505e4a7 100644 --- a/src/librustc/diagnostics.rs +++ b/src/librustc/diagnostics.rs @@ -1327,30 +1327,6 @@ let x: i32 = "I am not a number!"; // | // type `i32` assigned to variable `x` ``` - -Another situation in which this occurs is when you attempt to use the `try!` -macro inside a function that does not return a `Result`: - -```compile_fail,E0308 -use std::fs::File; - -fn main() { - let mut f = try!(File::create("foo.txt")); -} -``` - -This code gives an error like this: - -```text -:5:8: 6:42 error: mismatched types: - expected `()`, - found `core::result::Result<_, _>` - (expected (), - found enum `core::result::Result`) [E0308] -``` - -`try!` returns a `Result`, and so the function must. But `main()` has -`()` as its return type, hence the error. "##, E0309: r##" @@ -1431,6 +1407,23 @@ fn make_child<'elve>(x: &mut &'elve isize, y: &mut &'elve isize) { ``` "##, +E0317: r##" +This error occurs when an `if` expression without an `else` block is used in a +context where a type other than `()` is expected, for example a `let` +expression: + +```compile_fail,E0317 +fn main() { + let x = 5; + let a = if x == 5 { 1 }; +} +``` + +An `if` expression without an `else` block has the type `()`, so this is a type +error. To resolve it, add an `else` block having the same type as the `if` +block. +"##, + E0398: r##" In Rust 1.3, the default object lifetime bounds are expected to change, as described in RFC #1156 [1]. You are getting a warning because the compiler diff --git a/src/librustc/hir/def.rs b/src/librustc/hir/def.rs index a270be4f1dfda..8b9cee1d2f6d6 100644 --- a/src/librustc/hir/def.rs +++ b/src/librustc/hir/def.rs @@ -13,37 +13,46 @@ use util::nodemap::NodeMap; use syntax::ast; use hir; +#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub enum CtorKind { + // Constructor function automatically created by a tuple struct/variant. + Fn, + // Constructor constant automatically created by a unit struct/variant. + Const, + // Unusable name in value namespace created by a struct variant. + Fictive, +} + #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Def { - Fn(DefId), - SelfTy(Option /* trait */, Option /* impl */), + // Type namespace Mod(DefId), - ForeignMod(DefId), - Static(DefId, bool /* is_mutbl */), - Const(DefId), - AssociatedConst(DefId), - Local(DefId, // def id of variable - ast::NodeId), // node id of variable - Variant(DefId /* enum */, DefId /* variant */), + Struct(DefId), // DefId refers to NodeId of the struct itself + Union(DefId), Enum(DefId), - TyAlias(DefId), - AssociatedTy(DefId /* trait */, DefId), + Variant(DefId), Trait(DefId), + TyAlias(DefId), + AssociatedTy(DefId), PrimTy(hir::PrimTy), TyParam(DefId), - Upvar(DefId, // def id of closed over local - ast::NodeId, // node id of closed over local - usize, // index in the freevars list of the closure - ast::NodeId), // expr node that creates the closure + SelfTy(Option /* trait */, Option /* impl */), - // If Def::Struct lives in type namespace it denotes a struct item and its DefId refers - // to NodeId of the struct itself. - // If Def::Struct lives in value namespace (e.g. 
tuple struct, unit struct expressions) - // it denotes a constructor and its DefId refers to NodeId of the struct's constructor. - Struct(DefId), - Union(DefId), - Label(ast::NodeId), + // Value namespace + Fn(DefId), + Const(DefId), + Static(DefId, bool /* is_mutbl */), + StructCtor(DefId, CtorKind), // DefId refers to NodeId of the struct's constructor + VariantCtor(DefId, CtorKind), Method(DefId), + AssociatedConst(DefId), + Local(DefId), + Upvar(DefId, // def id of closed over local + usize, // index in the freevars list of the closure + ast::NodeId), // expr node that creates the closure + Label(ast::NodeId), + + // Both namespaces Err, } @@ -94,37 +103,37 @@ pub type DefMap = NodeMap; // within. pub type ExportMap = NodeMap>; -#[derive(Copy, Clone)] +#[derive(Copy, Clone, RustcEncodable, RustcDecodable)] pub struct Export { - pub name: ast::Name, // The name of the target. - pub def_id: DefId, // The definition of the target. + pub name: ast::Name, // The name of the target. + pub def: Def, // The definition of the target. } -impl Def { - pub fn var_id(&self) -> ast::NodeId { - match *self { - Def::Local(_, id) | - Def::Upvar(_, id, ..) => { - id - } - - Def::Fn(..) | Def::Mod(..) | Def::ForeignMod(..) | Def::Static(..) | - Def::Variant(..) | Def::Enum(..) | Def::TyAlias(..) | Def::AssociatedTy(..) | - Def::TyParam(..) | Def::Struct(..) | Def::Union(..) | Def::Trait(..) | - Def::Method(..) | Def::Const(..) | Def::AssociatedConst(..) | - Def::PrimTy(..) | Def::Label(..) | Def::SelfTy(..) | Def::Err => { - bug!("attempted .var_id() on invalid {:?}", self) - } +impl CtorKind { + pub fn from_ast(vdata: &ast::VariantData) -> CtorKind { + match *vdata { + ast::VariantData::Tuple(..) => CtorKind::Fn, + ast::VariantData::Unit(..) => CtorKind::Const, + ast::VariantData::Struct(..) => CtorKind::Fictive, } } + pub fn from_hir(vdata: &hir::VariantData) -> CtorKind { + match *vdata { + hir::VariantData::Tuple(..) => CtorKind::Fn, + hir::VariantData::Unit(..) => CtorKind::Const, + hir::VariantData::Struct(..) => CtorKind::Fictive, + } + } +} +impl Def { pub fn def_id(&self) -> DefId { match *self { - Def::Fn(id) | Def::Mod(id) | Def::ForeignMod(id) | Def::Static(id, _) | - Def::Variant(_, id) | Def::Enum(id) | Def::TyAlias(id) | Def::AssociatedTy(_, id) | - Def::TyParam(id) | Def::Struct(id) | Def::Union(id) | Def::Trait(id) | - Def::Method(id) | Def::Const(id) | Def::AssociatedConst(id) | - Def::Local(id, _) | Def::Upvar(id, ..) => { + Def::Fn(id) | Def::Mod(id) | Def::Static(id, _) | + Def::Variant(id) | Def::VariantCtor(id, ..) | Def::Enum(id) | Def::TyAlias(id) | + Def::AssociatedTy(id) | Def::TyParam(id) | Def::Struct(id) | Def::StructCtor(id, ..) | + Def::Union(id) | Def::Trait(id) | Def::Method(id) | Def::Const(id) | + Def::AssociatedConst(id) | Def::Local(id) | Def::Upvar(id, ..) => { id } @@ -141,13 +150,18 @@ impl Def { match *self { Def::Fn(..) => "function", Def::Mod(..) => "module", - Def::ForeignMod(..) => "foreign module", Def::Static(..) => "static", Def::Variant(..) => "variant", + Def::VariantCtor(.., CtorKind::Fn) => "tuple variant", + Def::VariantCtor(.., CtorKind::Const) => "unit variant", + Def::VariantCtor(.., CtorKind::Fictive) => "struct variant", Def::Enum(..) => "enum", - Def::TyAlias(..) => "type", + Def::TyAlias(..) => "type alias", Def::AssociatedTy(..) => "associated type", Def::Struct(..) 
=> "struct", + Def::StructCtor(.., CtorKind::Fn) => "tuple struct", + Def::StructCtor(.., CtorKind::Const) => "unit struct", + Def::StructCtor(.., CtorKind::Fictive) => bug!("impossible struct constructor"), Def::Union(..) => "union", Def::Trait(..) => "trait", Def::Method(..) => "method", diff --git a/src/librustc/hir/def_id.rs b/src/librustc/hir/def_id.rs index 16afa705e3919..399243551d651 100644 --- a/src/librustc/hir/def_id.rs +++ b/src/librustc/hir/def_id.rs @@ -8,12 +8,69 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use middle::cstore::LOCAL_CRATE; use ty; -use syntax::ast::CrateNum; + +use rustc_data_structures::indexed_vec::Idx; +use serialize::{self, Encoder, Decoder}; + use std::fmt; use std::u32; +#[derive(Clone, Copy, Eq, Ord, PartialOrd, PartialEq, Hash, Debug)] +pub struct CrateNum(u32); + +impl Idx for CrateNum { + fn new(value: usize) -> Self { + assert!(value < (u32::MAX) as usize); + CrateNum(value as u32) + } + + fn index(self) -> usize { + self.0 as usize + } +} + +/// Item definitions in the currently-compiled crate would have the CrateNum +/// LOCAL_CRATE in their DefId. +pub const LOCAL_CRATE: CrateNum = CrateNum(0); + +impl CrateNum { + pub fn new(x: usize) -> CrateNum { + assert!(x < (u32::MAX as usize)); + CrateNum(x as u32) + } + + pub fn from_u32(x: u32) -> CrateNum { + CrateNum(x) + } + + pub fn as_usize(&self) -> usize { + self.0 as usize + } + + pub fn as_u32(&self) -> u32 { + self.0 + } +} + +impl fmt::Display for CrateNum { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.0, f) + } +} + +impl serialize::UseSpecializedEncodable for CrateNum { + fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_u32(self.0) + } +} + +impl serialize::UseSpecializedDecodable for CrateNum { + fn default_decode(d: &mut D) -> Result { + d.read_u32().map(CrateNum) + } +} + /// A DefIndex is an index into the hir-map for a crate, identifying a /// particular definition. It should really be considered an interned /// shorthand for a particular DefPath. @@ -46,8 +103,7 @@ pub const CRATE_DEF_INDEX: DefIndex = DefIndex(0); /// A DefId identifies a particular *definition*, by combining a crate /// index and a def index. -#[derive(Clone, Eq, Ord, PartialOrd, PartialEq, RustcEncodable, - RustcDecodable, Hash, Copy)] +#[derive(Clone, Eq, Ord, PartialOrd, PartialEq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct DefId { pub krate: CrateNum, pub index: DefIndex, diff --git a/src/librustc/hir/fold.rs b/src/librustc/hir/fold.rs deleted file mode 100644 index 57b5599bd1d7f..0000000000000 --- a/src/librustc/hir/fold.rs +++ /dev/null @@ -1,1131 +0,0 @@ -// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! A Folder represents an HIR->HIR fold; it accepts a HIR piece, -//! and returns a piece of the same type. 
- -use hir::*; -use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, Attribute, Attribute_}; -use syntax::ast::{NestedMetaItem, NestedMetaItemKind, MetaItem, MetaItemKind}; -use hir; -use syntax_pos::Span; -use syntax::codemap::{respan, Spanned}; -use syntax::ptr::P; -use syntax::parse::token::keywords; -use syntax::util::move_map::MoveMap; - -pub trait Folder : Sized { - // Any additions to this trait should happen in form - // of a call to a public `noop_*` function that only calls - // out to the folder again, not other `noop_*` functions. - // - // This is a necessary API workaround to the problem of not - // being able to call out to the super default method - // in an overridden default method. - - fn fold_crate(&mut self, c: Crate) -> Crate { - noop_fold_crate(c, self) - } - - fn fold_meta_items(&mut self, meta_items: HirVec>) -> HirVec> { - noop_fold_meta_items(meta_items, self) - } - - fn fold_meta_list_item(&mut self, list_item: NestedMetaItem) -> NestedMetaItem { - noop_fold_meta_list_item(list_item, self) - } - - fn fold_meta_item(&mut self, meta_item: P) -> P { - noop_fold_meta_item(meta_item, self) - } - - fn fold_view_path(&mut self, view_path: P) -> P { - noop_fold_view_path(view_path, self) - } - - fn fold_foreign_item(&mut self, ni: ForeignItem) -> ForeignItem { - noop_fold_foreign_item(ni, self) - } - - fn fold_item(&mut self, i: Item) -> Item { - noop_fold_item(i, self) - } - - fn fold_item_id(&mut self, i: ItemId) -> ItemId { - noop_fold_item_id(i, self) - } - - fn fold_struct_field(&mut self, sf: StructField) -> StructField { - noop_fold_struct_field(sf, self) - } - - fn fold_item_underscore(&mut self, i: Item_) -> Item_ { - noop_fold_item_underscore(i, self) - } - - fn fold_trait_item(&mut self, i: TraitItem) -> TraitItem { - noop_fold_trait_item(i, self) - } - - fn fold_impl_item(&mut self, i: ImplItem) -> ImplItem { - noop_fold_impl_item(i, self) - } - - fn fold_fn_decl(&mut self, d: P) -> P { - noop_fold_fn_decl(d, self) - } - - fn fold_block(&mut self, b: P) -> P { - noop_fold_block(b, self) - } - - fn fold_stmt(&mut self, s: Stmt) -> Stmt { - noop_fold_stmt(s, self) - } - - fn fold_arm(&mut self, a: Arm) -> Arm { - noop_fold_arm(a, self) - } - - fn fold_pat(&mut self, p: P) -> P { - noop_fold_pat(p, self) - } - - fn fold_decl(&mut self, d: P) -> P { - noop_fold_decl(d, self) - } - - fn fold_expr(&mut self, e: P) -> P { - e.map(|e| noop_fold_expr(e, self)) - } - - fn fold_ty(&mut self, t: P) -> P { - noop_fold_ty(t, self) - } - - fn fold_ty_binding(&mut self, t: TypeBinding) -> TypeBinding { - noop_fold_ty_binding(t, self) - } - - fn fold_mod(&mut self, m: Mod) -> Mod { - noop_fold_mod(m, self) - } - - fn fold_foreign_mod(&mut self, nm: ForeignMod) -> ForeignMod { - noop_fold_foreign_mod(nm, self) - } - - fn fold_variant(&mut self, v: Variant) -> Variant { - noop_fold_variant(v, self) - } - - fn fold_name(&mut self, n: Name) -> Name { - noop_fold_name(n, self) - } - - fn fold_usize(&mut self, i: usize) -> usize { - noop_fold_usize(i, self) - } - - fn fold_path(&mut self, p: Path) -> Path { - noop_fold_path(p, self) - } - - fn fold_path_parameters(&mut self, p: PathParameters) -> PathParameters { - noop_fold_path_parameters(p, self) - } - - fn fold_angle_bracketed_parameter_data(&mut self, - p: AngleBracketedParameterData) - -> AngleBracketedParameterData { - noop_fold_angle_bracketed_parameter_data(p, self) - } - - fn fold_parenthesized_parameter_data(&mut self, - p: ParenthesizedParameterData) - -> ParenthesizedParameterData { - 
noop_fold_parenthesized_parameter_data(p, self) - } - - fn fold_local(&mut self, l: P) -> P { - noop_fold_local(l, self) - } - - fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime { - noop_fold_lifetime(l, self) - } - - fn fold_lifetime_def(&mut self, l: LifetimeDef) -> LifetimeDef { - noop_fold_lifetime_def(l, self) - } - - fn fold_attribute(&mut self, at: Attribute) -> Option { - noop_fold_attribute(at, self) - } - - fn fold_arg(&mut self, a: Arg) -> Arg { - noop_fold_arg(a, self) - } - - fn fold_generics(&mut self, generics: Generics) -> Generics { - noop_fold_generics(generics, self) - } - - fn fold_trait_ref(&mut self, p: TraitRef) -> TraitRef { - noop_fold_trait_ref(p, self) - } - - fn fold_poly_trait_ref(&mut self, p: PolyTraitRef) -> PolyTraitRef { - noop_fold_poly_trait_ref(p, self) - } - - fn fold_variant_data(&mut self, vdata: VariantData) -> VariantData { - noop_fold_variant_data(vdata, self) - } - - fn fold_lifetimes(&mut self, lts: HirVec) -> HirVec { - noop_fold_lifetimes(lts, self) - } - - fn fold_lifetime_defs(&mut self, lts: HirVec) -> HirVec { - noop_fold_lifetime_defs(lts, self) - } - - fn fold_ty_param(&mut self, tp: TyParam) -> TyParam { - noop_fold_ty_param(tp, self) - } - - fn fold_ty_params(&mut self, tps: HirVec) -> HirVec { - noop_fold_ty_params(tps, self) - } - - fn fold_opt_lifetime(&mut self, o_lt: Option) -> Option { - noop_fold_opt_lifetime(o_lt, self) - } - - fn fold_opt_bounds(&mut self, - b: Option) - -> Option { - noop_fold_opt_bounds(b, self) - } - - fn fold_bounds(&mut self, b: TyParamBounds) -> TyParamBounds { - noop_fold_bounds(b, self) - } - - fn fold_ty_param_bound(&mut self, tpb: TyParamBound) -> TyParamBound { - noop_fold_ty_param_bound(tpb, self) - } - - fn fold_mt(&mut self, mt: MutTy) -> MutTy { - noop_fold_mt(mt, self) - } - - fn fold_field(&mut self, field: Field) -> Field { - noop_fold_field(field, self) - } - - fn fold_where_clause(&mut self, where_clause: WhereClause) -> WhereClause { - noop_fold_where_clause(where_clause, self) - } - - fn fold_where_predicate(&mut self, where_predicate: WherePredicate) -> WherePredicate { - noop_fold_where_predicate(where_predicate, self) - } - - /// called for the `id` on each declaration - fn new_id(&mut self, i: NodeId) -> NodeId { - i - } - - /// called for ids that are references (e.g., ItemDef) - fn map_id(&mut self, i: NodeId) -> NodeId { - i - } - - fn new_span(&mut self, sp: Span) -> Span { - sp - } -} - -pub fn noop_fold_meta_items(meta_items: HirVec>, - fld: &mut T) - -> HirVec> { - meta_items.move_map(|x| fld.fold_meta_item(x)) -} - -pub fn noop_fold_view_path(view_path: P, fld: &mut T) -> P { - view_path.map(|Spanned { node, span }| { - Spanned { - node: match node { - ViewPathSimple(name, path) => { - ViewPathSimple(name, fld.fold_path(path)) - } - ViewPathGlob(path) => { - ViewPathGlob(fld.fold_path(path)) - } - ViewPathList(path, path_list_idents) => { - ViewPathList(fld.fold_path(path), - path_list_idents.move_map(|path_list_ident| { - Spanned { - node: PathListItem_ { - id: fld.new_id(path_list_ident.node.id), - name: path_list_ident.node.name, - rename: path_list_ident.node.rename, - }, - span: fld.new_span(path_list_ident.span), - } - })) - } - }, - span: fld.new_span(span), - } - }) -} - -pub fn fold_attrs(attrs: T, fld: &mut F) -> T - where T: Into> + From>, - F: Folder, -{ - attrs.into().move_flat_map(|x| fld.fold_attribute(x)).into() -} - -pub fn noop_fold_arm(Arm { attrs, pats, guard, body }: Arm, fld: &mut T) -> Arm { - Arm { - attrs: fold_attrs(attrs, fld), - pats: 
pats.move_map(|x| fld.fold_pat(x)), - guard: guard.map(|x| fld.fold_expr(x)), - body: fld.fold_expr(body), - } -} - -pub fn noop_fold_decl(d: P, fld: &mut T) -> P { - d.map(|Spanned { node, span }| { - match node { - DeclLocal(l) => Spanned { - node: DeclLocal(fld.fold_local(l)), - span: fld.new_span(span), - }, - DeclItem(it) => Spanned { - node: DeclItem(fld.fold_item_id(it)), - span: fld.new_span(span), - }, - } - }) -} - -pub fn noop_fold_ty_binding(b: TypeBinding, fld: &mut T) -> TypeBinding { - TypeBinding { - id: fld.new_id(b.id), - name: b.name, - ty: fld.fold_ty(b.ty), - span: fld.new_span(b.span), - } -} - -pub fn noop_fold_ty(t: P, fld: &mut T) -> P { - t.map(|Ty { id, node, span }| { - Ty { - id: fld.new_id(id), - node: match node { - TyInfer => node, - TyVec(ty) => TyVec(fld.fold_ty(ty)), - TyPtr(mt) => TyPtr(fld.fold_mt(mt)), - TyRptr(region, mt) => { - TyRptr(fld.fold_opt_lifetime(region), fld.fold_mt(mt)) - } - TyBareFn(f) => { - TyBareFn(f.map(|BareFnTy { lifetimes, unsafety, abi, decl }| { - BareFnTy { - lifetimes: fld.fold_lifetime_defs(lifetimes), - unsafety: unsafety, - abi: abi, - decl: fld.fold_fn_decl(decl), - } - })) - } - TyNever => node, - TyTup(tys) => TyTup(tys.move_map(|ty| fld.fold_ty(ty))), - TyPath(qself, path) => { - let qself = qself.map(|QSelf { ty, position }| { - QSelf { - ty: fld.fold_ty(ty), - position: position, - } - }); - TyPath(qself, fld.fold_path(path)) - } - TyObjectSum(ty, bounds) => { - TyObjectSum(fld.fold_ty(ty), fld.fold_bounds(bounds)) - } - TyFixedLengthVec(ty, e) => { - TyFixedLengthVec(fld.fold_ty(ty), fld.fold_expr(e)) - } - TyTypeof(expr) => { - TyTypeof(fld.fold_expr(expr)) - } - TyPolyTraitRef(bounds) => { - TyPolyTraitRef(bounds.move_map(|b| fld.fold_ty_param_bound(b))) - } - TyImplTrait(bounds) => { - TyImplTrait(bounds.move_map(|b| fld.fold_ty_param_bound(b))) - } - }, - span: fld.new_span(span), - } - }) -} - -pub fn noop_fold_foreign_mod(ForeignMod { abi, items }: ForeignMod, - fld: &mut T) - -> ForeignMod { - ForeignMod { - abi: abi, - items: items.move_map(|x| fld.fold_foreign_item(x)), - } -} - -pub fn noop_fold_variant(v: Variant, fld: &mut T) -> Variant { - Spanned { - node: Variant_ { - name: v.node.name, - attrs: fold_attrs(v.node.attrs, fld), - data: fld.fold_variant_data(v.node.data), - disr_expr: v.node.disr_expr.map(|e| fld.fold_expr(e)), - }, - span: fld.new_span(v.span), - } -} - -pub fn noop_fold_name(n: Name, _: &mut T) -> Name { - n -} - -pub fn noop_fold_usize(i: usize, _: &mut T) -> usize { - i -} - -pub fn noop_fold_path(Path { global, segments, span }: Path, fld: &mut T) -> Path { - Path { - global: global, - segments: segments.move_map(|PathSegment { name, parameters }| { - PathSegment { - name: fld.fold_name(name), - parameters: fld.fold_path_parameters(parameters), - } - }), - span: fld.new_span(span), - } -} - -pub fn noop_fold_path_parameters(path_parameters: PathParameters, - fld: &mut T) - -> PathParameters { - match path_parameters { - AngleBracketedParameters(data) => - AngleBracketedParameters(fld.fold_angle_bracketed_parameter_data(data)), - ParenthesizedParameters(data) => - ParenthesizedParameters(fld.fold_parenthesized_parameter_data(data)), - } -} - -pub fn noop_fold_angle_bracketed_parameter_data(data: AngleBracketedParameterData, - fld: &mut T) - -> AngleBracketedParameterData { - let AngleBracketedParameterData { lifetimes, types, bindings } = data; - AngleBracketedParameterData { - lifetimes: fld.fold_lifetimes(lifetimes), - types: types.move_map(|ty| fld.fold_ty(ty)), - bindings: 
bindings.move_map(|b| fld.fold_ty_binding(b)), - } -} - -pub fn noop_fold_parenthesized_parameter_data(data: ParenthesizedParameterData, - fld: &mut T) - -> ParenthesizedParameterData { - let ParenthesizedParameterData { inputs, output, span } = data; - ParenthesizedParameterData { - inputs: inputs.move_map(|ty| fld.fold_ty(ty)), - output: output.map(|ty| fld.fold_ty(ty)), - span: fld.new_span(span), - } -} - -pub fn noop_fold_local(l: P, fld: &mut T) -> P { - l.map(|Local { id, pat, ty, init, span, attrs }| { - Local { - id: fld.new_id(id), - ty: ty.map(|t| fld.fold_ty(t)), - pat: fld.fold_pat(pat), - init: init.map(|e| fld.fold_expr(e)), - span: fld.new_span(span), - attrs: fold_attrs(attrs, fld), - } - }) -} - -pub fn noop_fold_attribute(at: Attribute, fld: &mut T) -> Option { - let Spanned {node: Attribute_ {id, style, value, is_sugared_doc}, span} = at; - Some(Spanned { - node: Attribute_ { - id: id, - style: style, - value: fld.fold_meta_item(value), - is_sugared_doc: is_sugared_doc, - }, - span: fld.new_span(span), - }) -} - -pub fn noop_fold_meta_list_item(li: NestedMetaItem, fld: &mut T) - -> NestedMetaItem { - Spanned { - node: match li.node { - NestedMetaItemKind::MetaItem(mi) => { - NestedMetaItemKind::MetaItem(fld.fold_meta_item(mi)) - }, - NestedMetaItemKind::Literal(lit) => NestedMetaItemKind::Literal(lit) - }, - span: fld.new_span(li.span) - } -} - -pub fn noop_fold_meta_item(mi: P, fld: &mut T) -> P { - mi.map(|Spanned { node, span }| { - Spanned { - node: match node { - MetaItemKind::Word(id) => MetaItemKind::Word(id), - MetaItemKind::List(id, mis) => { - MetaItemKind::List(id, mis.move_map(|e| fld.fold_meta_list_item(e))) - } - MetaItemKind::NameValue(id, s) => MetaItemKind::NameValue(id, s), - }, - span: fld.new_span(span), - } - }) -} - -pub fn noop_fold_arg(Arg { id, pat, ty }: Arg, fld: &mut T) -> Arg { - Arg { - id: fld.new_id(id), - pat: fld.fold_pat(pat), - ty: fld.fold_ty(ty), - } -} - -pub fn noop_fold_fn_decl(decl: P, fld: &mut T) -> P { - decl.map(|FnDecl { inputs, output, variadic }| { - FnDecl { - inputs: inputs.move_map(|x| fld.fold_arg(x)), - output: match output { - Return(ty) => Return(fld.fold_ty(ty)), - DefaultReturn(span) => DefaultReturn(span), - }, - variadic: variadic, - } - }) -} - -pub fn noop_fold_ty_param_bound(tpb: TyParamBound, fld: &mut T) -> TyParamBound - where T: Folder -{ - match tpb { - TraitTyParamBound(ty, modifier) => TraitTyParamBound(fld.fold_poly_trait_ref(ty), modifier), - RegionTyParamBound(lifetime) => RegionTyParamBound(fld.fold_lifetime(lifetime)), - } -} - -pub fn noop_fold_ty_param(tp: TyParam, fld: &mut T) -> TyParam { - let TyParam {id, name, bounds, default, span} = tp; - TyParam { - id: fld.new_id(id), - name: name, - bounds: fld.fold_bounds(bounds), - default: default.map(|x| fld.fold_ty(x)), - span: span, - } -} - -pub fn noop_fold_ty_params(tps: HirVec, - fld: &mut T) - -> HirVec { - tps.move_map(|tp| fld.fold_ty_param(tp)) -} - -pub fn noop_fold_lifetime(l: Lifetime, fld: &mut T) -> Lifetime { - Lifetime { - id: fld.new_id(l.id), - name: l.name, - span: fld.new_span(l.span), - } -} - -pub fn noop_fold_lifetime_def(l: LifetimeDef, fld: &mut T) -> LifetimeDef { - LifetimeDef { - lifetime: fld.fold_lifetime(l.lifetime), - bounds: fld.fold_lifetimes(l.bounds), - } -} - -pub fn noop_fold_lifetimes(lts: HirVec, fld: &mut T) -> HirVec { - lts.move_map(|l| fld.fold_lifetime(l)) -} - -pub fn noop_fold_lifetime_defs(lts: HirVec, - fld: &mut T) - -> HirVec { - lts.move_map(|l| fld.fold_lifetime_def(l)) -} - -pub fn 
noop_fold_opt_lifetime(o_lt: Option, fld: &mut T) -> Option { - o_lt.map(|lt| fld.fold_lifetime(lt)) -} - -pub fn noop_fold_generics(Generics {ty_params, lifetimes, where_clause, span}: Generics, - fld: &mut T) - -> Generics { - Generics { - ty_params: fld.fold_ty_params(ty_params), - lifetimes: fld.fold_lifetime_defs(lifetimes), - where_clause: fld.fold_where_clause(where_clause), - span: fld.new_span(span), - } -} - -pub fn noop_fold_where_clause(WhereClause { id, predicates }: WhereClause, - fld: &mut T) - -> WhereClause { - WhereClause { - id: fld.new_id(id), - predicates: predicates.move_map(|predicate| fld.fold_where_predicate(predicate)), - } -} - -pub fn noop_fold_where_predicate(pred: WherePredicate, fld: &mut T) -> WherePredicate { - match pred { - hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate{bound_lifetimes, - bounded_ty, - bounds, - span}) => { - hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate { - bound_lifetimes: fld.fold_lifetime_defs(bound_lifetimes), - bounded_ty: fld.fold_ty(bounded_ty), - bounds: bounds.move_map(|x| fld.fold_ty_param_bound(x)), - span: fld.new_span(span), - }) - } - hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate{lifetime, - bounds, - span}) => { - hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate { - span: fld.new_span(span), - lifetime: fld.fold_lifetime(lifetime), - bounds: bounds.move_map(|bound| fld.fold_lifetime(bound)), - }) - } - hir::WherePredicate::EqPredicate(hir::WhereEqPredicate{id, - path, - ty, - span}) => { - hir::WherePredicate::EqPredicate(hir::WhereEqPredicate { - id: fld.new_id(id), - path: fld.fold_path(path), - ty: fld.fold_ty(ty), - span: fld.new_span(span), - }) - } - } -} - -pub fn noop_fold_variant_data(vdata: VariantData, fld: &mut T) -> VariantData { - match vdata { - VariantData::Struct(fields, id) => { - VariantData::Struct(fields.move_map(|f| fld.fold_struct_field(f)), - fld.new_id(id)) - } - VariantData::Tuple(fields, id) => { - VariantData::Tuple(fields.move_map(|f| fld.fold_struct_field(f)), - fld.new_id(id)) - } - VariantData::Unit(id) => VariantData::Unit(fld.new_id(id)), - } -} - -pub fn noop_fold_trait_ref(p: TraitRef, fld: &mut T) -> TraitRef { - let id = fld.new_id(p.ref_id); - let TraitRef { - path, - ref_id: _, - } = p; - hir::TraitRef { - path: fld.fold_path(path), - ref_id: id, - } -} - -pub fn noop_fold_poly_trait_ref(p: PolyTraitRef, fld: &mut T) -> PolyTraitRef { - hir::PolyTraitRef { - bound_lifetimes: fld.fold_lifetime_defs(p.bound_lifetimes), - trait_ref: fld.fold_trait_ref(p.trait_ref), - span: fld.new_span(p.span), - } -} - -pub fn noop_fold_struct_field(f: StructField, fld: &mut T) -> StructField { - StructField { - span: fld.new_span(f.span), - id: fld.new_id(f.id), - name: f.name, - vis: f.vis, - ty: fld.fold_ty(f.ty), - attrs: fold_attrs(f.attrs, fld), - } -} - -pub fn noop_fold_field(Field { name, expr, span }: Field, folder: &mut T) -> Field { - Field { - name: respan(folder.new_span(name.span), folder.fold_name(name.node)), - expr: folder.fold_expr(expr), - span: folder.new_span(span), - } -} - -pub fn noop_fold_mt(MutTy { ty, mutbl }: MutTy, folder: &mut T) -> MutTy { - MutTy { - ty: folder.fold_ty(ty), - mutbl: mutbl, - } -} - -pub fn noop_fold_opt_bounds(b: Option, - folder: &mut T) - -> Option { - b.map(|bounds| folder.fold_bounds(bounds)) -} - -fn noop_fold_bounds(bounds: TyParamBounds, folder: &mut T) -> TyParamBounds { - bounds.move_map(|bound| folder.fold_ty_param_bound(bound)) -} - -pub fn noop_fold_block(b: P, folder: &mut T) -> P 
{ - b.map(|Block { id, stmts, expr, rules, span }| { - Block { - id: folder.new_id(id), - stmts: stmts.move_map(|s| folder.fold_stmt(s)), - expr: expr.map(|x| folder.fold_expr(x)), - rules: rules, - span: folder.new_span(span), - } - }) -} - -pub fn noop_fold_item_underscore(i: Item_, folder: &mut T) -> Item_ { - match i { - ItemExternCrate(string) => ItemExternCrate(string), - ItemUse(view_path) => { - ItemUse(folder.fold_view_path(view_path)) - } - ItemStatic(t, m, e) => { - ItemStatic(folder.fold_ty(t), m, folder.fold_expr(e)) - } - ItemConst(t, e) => { - ItemConst(folder.fold_ty(t), folder.fold_expr(e)) - } - ItemFn(decl, unsafety, constness, abi, generics, body) => { - ItemFn(folder.fold_fn_decl(decl), - unsafety, - constness, - abi, - folder.fold_generics(generics), - folder.fold_block(body)) - } - ItemMod(m) => ItemMod(folder.fold_mod(m)), - ItemForeignMod(nm) => ItemForeignMod(folder.fold_foreign_mod(nm)), - ItemTy(t, generics) => { - ItemTy(folder.fold_ty(t), folder.fold_generics(generics)) - } - ItemEnum(enum_definition, generics) => { - ItemEnum(hir::EnumDef { - variants: enum_definition.variants.move_map(|x| folder.fold_variant(x)), - }, - folder.fold_generics(generics)) - } - ItemStruct(struct_def, generics) => { - let struct_def = folder.fold_variant_data(struct_def); - ItemStruct(struct_def, folder.fold_generics(generics)) - } - ItemUnion(struct_def, generics) => { - let struct_def = folder.fold_variant_data(struct_def); - ItemUnion(struct_def, folder.fold_generics(generics)) - } - ItemDefaultImpl(unsafety, ref trait_ref) => { - ItemDefaultImpl(unsafety, folder.fold_trait_ref((*trait_ref).clone())) - } - ItemImpl(unsafety, polarity, generics, ifce, ty, impl_items) => { - let new_impl_items = impl_items - .move_map(|item| folder.fold_impl_item(item)); - let ifce = match ifce { - None => None, - Some(ref trait_ref) => { - Some(folder.fold_trait_ref((*trait_ref).clone())) - } - }; - ItemImpl(unsafety, - polarity, - folder.fold_generics(generics), - ifce, - folder.fold_ty(ty), - new_impl_items) - } - ItemTrait(unsafety, generics, bounds, items) => { - let bounds = folder.fold_bounds(bounds); - let items = items.move_map(|item| folder.fold_trait_item(item)); - ItemTrait(unsafety, folder.fold_generics(generics), bounds, items) - } - } -} - -pub fn noop_fold_trait_item(i: TraitItem, - folder: &mut T) - -> TraitItem { - TraitItem { - id: folder.new_id(i.id), - name: folder.fold_name(i.name), - attrs: fold_attrs(i.attrs, folder), - node: match i.node { - ConstTraitItem(ty, default) => { - ConstTraitItem(folder.fold_ty(ty), default.map(|x| folder.fold_expr(x))) - } - MethodTraitItem(sig, body) => { - MethodTraitItem(noop_fold_method_sig(sig, folder), - body.map(|x| folder.fold_block(x))) - } - TypeTraitItem(bounds, default) => { - TypeTraitItem(folder.fold_bounds(bounds), - default.map(|x| folder.fold_ty(x))) - } - }, - span: folder.new_span(i.span), - } -} - -pub fn noop_fold_impl_item(i: ImplItem, folder: &mut T) -> ImplItem { - ImplItem { - id: folder.new_id(i.id), - name: folder.fold_name(i.name), - attrs: fold_attrs(i.attrs, folder), - vis: i.vis, - defaultness: i.defaultness, - node: match i.node { - ImplItemKind::Const(ty, expr) => { - ImplItemKind::Const(folder.fold_ty(ty), folder.fold_expr(expr)) - } - ImplItemKind::Method(sig, body) => { - ImplItemKind::Method(noop_fold_method_sig(sig, folder), folder.fold_block(body)) - } - ImplItemKind::Type(ty) => ImplItemKind::Type(folder.fold_ty(ty)), - }, - span: folder.new_span(i.span), - } -} - -pub fn noop_fold_mod(Mod { inner, 
item_ids }: Mod, folder: &mut T) -> Mod { - Mod { - inner: folder.new_span(inner), - item_ids: item_ids.move_map(|x| folder.fold_item_id(x)), - } -} - -pub fn noop_fold_crate(Crate { module, attrs, config, span, - exported_macros, items }: Crate, - folder: &mut T) - -> Crate { - let config = folder.fold_meta_items(config); - - let crate_mod = folder.fold_item(hir::Item { - name: keywords::Invalid.name(), - attrs: attrs, - id: DUMMY_NODE_ID, - vis: hir::Public, - span: span, - node: hir::ItemMod(module), - }); - - let (module, attrs, span) = match crate_mod { - hir::Item { attrs, span, node, .. } => { - match node { - hir::ItemMod(m) => (m, attrs, span), - _ => panic!("fold converted a module to not a module"), - } - } - }; - - let items = items.into_iter() - .map(|(id, item)| (id, folder.fold_item(item))) - .collect(); - - Crate { - module: module, - attrs: attrs, - config: config, - span: span, - exported_macros: exported_macros, - items: items, - } -} - -pub fn noop_fold_item_id(i: ItemId, folder: &mut T) -> ItemId { - let id = folder.map_id(i.id); - ItemId { id: id } -} - -// fold one item into one item -pub fn noop_fold_item(item: Item, folder: &mut T) -> Item { - let Item { id, name, attrs, node, vis, span } = item; - let id = folder.new_id(id); - let node = folder.fold_item_underscore(node); - - Item { - id: id, - name: folder.fold_name(name), - attrs: fold_attrs(attrs, folder), - node: node, - vis: vis, - span: folder.new_span(span), - } -} - -pub fn noop_fold_foreign_item(ni: ForeignItem, folder: &mut T) -> ForeignItem { - ForeignItem { - id: folder.new_id(ni.id), - name: folder.fold_name(ni.name), - attrs: fold_attrs(ni.attrs, folder), - node: match ni.node { - ForeignItemFn(fdec, generics) => { - ForeignItemFn(folder.fold_fn_decl(fdec), folder.fold_generics(generics)) - } - ForeignItemStatic(t, m) => { - ForeignItemStatic(folder.fold_ty(t), m) - } - }, - vis: ni.vis, - span: folder.new_span(ni.span), - } -} - -pub fn noop_fold_method_sig(sig: MethodSig, folder: &mut T) -> MethodSig { - MethodSig { - generics: folder.fold_generics(sig.generics), - abi: sig.abi, - unsafety: sig.unsafety, - constness: sig.constness, - decl: folder.fold_fn_decl(sig.decl), - } -} - -pub fn noop_fold_pat(p: P, folder: &mut T) -> P { - p.map(|Pat { id, node, span }| { - Pat { - id: folder.new_id(id), - node: match node { - PatKind::Wild => PatKind::Wild, - PatKind::Binding(binding_mode, pth1, sub) => { - PatKind::Binding(binding_mode, - Spanned { - span: folder.new_span(pth1.span), - node: folder.fold_name(pth1.node), - }, - sub.map(|x| folder.fold_pat(x))) - } - PatKind::Lit(e) => PatKind::Lit(folder.fold_expr(e)), - PatKind::TupleStruct(pth, pats, ddpos) => { - PatKind::TupleStruct(folder.fold_path(pth), - pats.move_map(|x| folder.fold_pat(x)), ddpos) - } - PatKind::Path(opt_qself, pth) => { - let opt_qself = opt_qself.map(|qself| { - QSelf { ty: folder.fold_ty(qself.ty), position: qself.position } - }); - PatKind::Path(opt_qself, folder.fold_path(pth)) - } - PatKind::Struct(pth, fields, etc) => { - let pth = folder.fold_path(pth); - let fs = fields.move_map(|f| { - Spanned { - span: folder.new_span(f.span), - node: hir::FieldPat { - name: f.node.name, - pat: folder.fold_pat(f.node.pat), - is_shorthand: f.node.is_shorthand, - }, - } - }); - PatKind::Struct(pth, fs, etc) - } - PatKind::Tuple(elts, ddpos) => { - PatKind::Tuple(elts.move_map(|x| folder.fold_pat(x)), ddpos) - } - PatKind::Box(inner) => PatKind::Box(folder.fold_pat(inner)), - PatKind::Ref(inner, mutbl) => 
PatKind::Ref(folder.fold_pat(inner), mutbl), - PatKind::Range(e1, e2) => { - PatKind::Range(folder.fold_expr(e1), folder.fold_expr(e2)) - } - PatKind::Vec(before, slice, after) => { - PatKind::Vec(before.move_map(|x| folder.fold_pat(x)), - slice.map(|x| folder.fold_pat(x)), - after.move_map(|x| folder.fold_pat(x))) - } - }, - span: folder.new_span(span), - } - }) -} - -pub fn noop_fold_expr(Expr { id, node, span, attrs }: Expr, folder: &mut T) -> Expr { - Expr { - id: folder.new_id(id), - node: match node { - ExprBox(e) => { - ExprBox(folder.fold_expr(e)) - } - ExprVec(exprs) => { - ExprVec(exprs.move_map(|x| folder.fold_expr(x))) - } - ExprRepeat(expr, count) => { - ExprRepeat(folder.fold_expr(expr), folder.fold_expr(count)) - } - ExprTup(elts) => ExprTup(elts.move_map(|x| folder.fold_expr(x))), - ExprCall(f, args) => { - ExprCall(folder.fold_expr(f), args.move_map(|x| folder.fold_expr(x))) - } - ExprMethodCall(name, tps, args) => { - ExprMethodCall(respan(folder.new_span(name.span), folder.fold_name(name.node)), - tps.move_map(|x| folder.fold_ty(x)), - args.move_map(|x| folder.fold_expr(x))) - } - ExprBinary(binop, lhs, rhs) => { - ExprBinary(binop, folder.fold_expr(lhs), folder.fold_expr(rhs)) - } - ExprUnary(binop, ohs) => { - ExprUnary(binop, folder.fold_expr(ohs)) - } - ExprLit(l) => ExprLit(l), - ExprCast(expr, ty) => { - ExprCast(folder.fold_expr(expr), folder.fold_ty(ty)) - } - ExprType(expr, ty) => { - ExprType(folder.fold_expr(expr), folder.fold_ty(ty)) - } - ExprAddrOf(m, ohs) => ExprAddrOf(m, folder.fold_expr(ohs)), - ExprIf(cond, tr, fl) => { - ExprIf(folder.fold_expr(cond), - folder.fold_block(tr), - fl.map(|x| folder.fold_expr(x))) - } - ExprWhile(cond, body, opt_name) => { - ExprWhile(folder.fold_expr(cond), - folder.fold_block(body), - opt_name.map(|label| { - respan(folder.new_span(label.span), folder.fold_name(label.node)) - })) - } - ExprLoop(body, opt_name) => { - ExprLoop(folder.fold_block(body), - opt_name.map(|label| { - respan(folder.new_span(label.span), folder.fold_name(label.node)) - })) - } - ExprMatch(expr, arms, source) => { - ExprMatch(folder.fold_expr(expr), - arms.move_map(|x| folder.fold_arm(x)), - source) - } - ExprClosure(capture_clause, decl, body, fn_decl_span) => { - ExprClosure(capture_clause, - folder.fold_fn_decl(decl), - folder.fold_block(body), - folder.new_span(fn_decl_span)) - } - ExprBlock(blk) => ExprBlock(folder.fold_block(blk)), - ExprAssign(el, er) => { - ExprAssign(folder.fold_expr(el), folder.fold_expr(er)) - } - ExprAssignOp(op, el, er) => { - ExprAssignOp(op, folder.fold_expr(el), folder.fold_expr(er)) - } - ExprField(el, name) => { - ExprField(folder.fold_expr(el), - respan(folder.new_span(name.span), folder.fold_name(name.node))) - } - ExprTupField(el, index) => { - ExprTupField(folder.fold_expr(el), - respan(folder.new_span(index.span), folder.fold_usize(index.node))) - } - ExprIndex(el, er) => { - ExprIndex(folder.fold_expr(el), folder.fold_expr(er)) - } - ExprPath(qself, path) => { - let qself = qself.map(|QSelf { ty, position }| { - QSelf { - ty: folder.fold_ty(ty), - position: position, - } - }); - ExprPath(qself, folder.fold_path(path)) - } - ExprBreak(opt_name) => ExprBreak(opt_name.map(|label| { - respan(folder.new_span(label.span), folder.fold_name(label.node)) - })), - ExprAgain(opt_name) => ExprAgain(opt_name.map(|label| { - respan(folder.new_span(label.span), folder.fold_name(label.node)) - })), - ExprRet(e) => ExprRet(e.map(|x| folder.fold_expr(x))), - ExprInlineAsm(asm, outputs, inputs) => { - ExprInlineAsm(asm, - 
outputs.move_map(|x| folder.fold_expr(x)), - inputs.move_map(|x| folder.fold_expr(x))) - } - ExprStruct(path, fields, maybe_expr) => { - ExprStruct(folder.fold_path(path), - fields.move_map(|x| folder.fold_field(x)), - maybe_expr.map(|x| folder.fold_expr(x))) - } - }, - span: folder.new_span(span), - attrs: fold_attrs(attrs, folder), - } -} - -pub fn noop_fold_stmt(stmt: Stmt, folder: &mut T) -> Stmt { - let span = folder.new_span(stmt.span); - match stmt.node { - StmtDecl(d, id) => { - let id = folder.new_id(id); - Spanned { - node: StmtDecl(folder.fold_decl(d), id), - span: span - } - } - StmtExpr(e, id) => { - let id = folder.new_id(id); - Spanned { - node: StmtExpr(folder.fold_expr(e), id), - span: span, - } - } - StmtSemi(e, id) => { - let id = folder.new_id(id); - Spanned { - node: StmtSemi(folder.fold_expr(e), id), - span: span, - } - } - } -} diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index f0caa971d9699..b1771f52da2c6 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -394,7 +394,7 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { visitor.visit_id(typ.id); match typ.node { - TyVec(ref ty) => { + TySlice(ref ty) => { visitor.visit_ty(ty) } TyPtr(ref mutable_type) => { @@ -422,7 +422,7 @@ pub fn walk_ty<'v, V: Visitor<'v>>(visitor: &mut V, typ: &'v Ty) { visitor.visit_ty(ty); walk_list!(visitor, visit_ty_param_bound, bounds); } - TyFixedLengthVec(ref ty, ref expression) => { + TyArray(ref ty, ref expression) => { visitor.visit_ty(ty); visitor.visit_expr(expression) } @@ -520,7 +520,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { visitor.visit_expr(upper_bound) } PatKind::Wild => (), - PatKind::Vec(ref prepatterns, ref slice_pattern, ref postpatterns) => { + PatKind::Slice(ref prepatterns, ref slice_pattern, ref postpatterns) => { walk_list!(visitor, visit_pat, prepatterns); walk_list!(visitor, visit_pat, slice_pattern); walk_list!(visitor, visit_pat, postpatterns); @@ -749,7 +749,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { ExprBox(ref subexpression) => { visitor.visit_expr(subexpression) } - ExprVec(ref subexpressions) => { + ExprArray(ref subexpressions) => { walk_list!(visitor, visit_expr, subexpressions); } ExprRepeat(ref element, ref count) => { @@ -881,8 +881,8 @@ pub struct IdRange { impl IdRange { pub fn max() -> IdRange { IdRange { - min: u32::MAX, - max: u32::MIN, + min: NodeId::from_u32(u32::MAX), + max: NodeId::from_u32(u32::MIN), } } @@ -896,7 +896,7 @@ impl IdRange { pub fn add(&mut self, id: NodeId) { self.min = cmp::min(self.min, id); - self.max = cmp::max(self.max, id + 1); + self.max = cmp::max(self.max, NodeId::from_u32(id.as_u32() + 1)); } } diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 80e034721d63f..620ee30c95628 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -61,7 +61,7 @@ use syntax_pos::Span; pub struct LoweringContext<'a> { crate_root: Option<&'static str>, // Use to assign ids to hir nodes that do not directly correspond to an ast node - sess: Option<&'a Session>, + sess: &'a Session, // As we walk the AST we must keep track of the current 'parent' def id (in // the form of a DefIndex) so that if we create a new node which introduces // a definition, then we can properly create the def id. @@ -81,21 +81,7 @@ pub trait Resolver { // We must keep the set of definitions up to date as we add nodes that weren't in the AST. 
// This should only return `None` during testing. - fn definitions(&mut self) -> Option<&mut Definitions>; -} - -pub struct DummyResolver; -impl Resolver for DummyResolver { - fn resolve_generated_global_path(&mut self, _path: &hir::Path, _is_value: bool) -> Def { - Def::Err - } - fn get_resolution(&mut self, _id: NodeId) -> Option { - None - } - fn record_resolution(&mut self, _id: NodeId, _def: Def) {} - fn definitions(&mut self) -> Option<&mut Definitions> { - None - } + fn definitions(&mut self) -> &mut Definitions; } pub fn lower_crate(sess: &Session, @@ -108,29 +94,14 @@ pub fn lower_crate(sess: &Session, let _ignore = sess.dep_graph.in_ignore(); LoweringContext { - crate_root: if std_inject::no_core(krate) { - None - } else if std_inject::no_std(krate) { - Some("core") - } else { - Some("std") - }, - sess: Some(sess), + crate_root: std_inject::injected_crate_name(krate), + sess: sess, parent_def: None, resolver: resolver, }.lower_crate(krate) } impl<'a> LoweringContext<'a> { - pub fn testing_context(resolver: &'a mut Resolver) -> Self { - LoweringContext { - crate_root: None, - sess: None, - parent_def: None, - resolver: resolver, - } - } - fn lower_crate(&mut self, c: &Crate) -> hir::Crate { struct ItemLowerer<'lcx, 'interner: 'lcx> { items: BTreeMap, @@ -153,7 +124,6 @@ impl<'a> LoweringContext<'a> { hir::Crate { module: self.lower_mod(&c.module), attrs: self.lower_attrs(&c.attrs), - config: c.config.clone().into(), span: c.span, exported_macros: c.exported_macros.iter().map(|m| self.lower_macro_def(m)).collect(), items: items, @@ -161,12 +131,11 @@ impl<'a> LoweringContext<'a> { } fn next_id(&self) -> NodeId { - self.sess.map(Session::next_node_id).unwrap_or(0) + self.sess.next_node_id() } fn diagnostic(&self) -> &errors::Handler { - self.sess.map(Session::diagnostic) - .unwrap_or_else(|| panic!("this lowerer cannot emit diagnostics")) + self.sess.diagnostic() } fn str_to_ident(&self, s: &'static str) -> Name { @@ -177,9 +146,9 @@ impl<'a> LoweringContext<'a> { where F: FnOnce(&mut LoweringContext) -> T { let old_def = self.parent_def; - self.parent_def = match self.resolver.definitions() { - Some(defs) => Some(defs.opt_def_index(parent_id).unwrap()), - None => old_def, + self.parent_def = { + let defs = self.resolver.definitions(); + Some(defs.opt_def_index(parent_id).unwrap()) }; let result = f(self); @@ -246,17 +215,16 @@ impl<'a> LoweringContext<'a> { } fn lower_ty(&mut self, t: &Ty) -> P { - use syntax::ast::TyKind::*; P(hir::Ty { id: t.id, node: match t.node { - Infer | ImplicitSelf => hir::TyInfer, - Vec(ref ty) => hir::TyVec(self.lower_ty(ty)), - Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)), - Rptr(ref region, ref mt) => { + TyKind::Infer | TyKind::ImplicitSelf => hir::TyInfer, + TyKind::Slice(ref ty) => hir::TySlice(self.lower_ty(ty)), + TyKind::Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)), + TyKind::Rptr(ref region, ref mt) => { hir::TyRptr(self.lower_opt_lifetime(region), self.lower_mt(mt)) } - BareFn(ref f) => { + TyKind::BareFn(ref f) => { hir::TyBareFn(P(hir::BareFnTy { lifetimes: self.lower_lifetime_defs(&f.lifetimes), unsafety: self.lower_unsafety(f.unsafety), @@ -264,12 +232,14 @@ impl<'a> LoweringContext<'a> { decl: self.lower_fn_decl(&f.decl), })) } - Never => hir::TyNever, - Tup(ref tys) => hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty)).collect()), - Paren(ref ty) => { + TyKind::Never => hir::TyNever, + TyKind::Tup(ref tys) => { + hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty)).collect()) + } + TyKind::Paren(ref ty) => { return self.lower_ty(ty); } - 
Path(ref qself, ref path) => { + TyKind::Path(ref qself, ref path) => { let qself = qself.as_ref().map(|&QSelf { ref ty, position }| { hir::QSelf { ty: self.lower_ty(ty), @@ -278,22 +248,22 @@ impl<'a> LoweringContext<'a> { }); hir::TyPath(qself, self.lower_path(path)) } - ObjectSum(ref ty, ref bounds) => { + TyKind::ObjectSum(ref ty, ref bounds) => { hir::TyObjectSum(self.lower_ty(ty), self.lower_bounds(bounds)) } - FixedLengthVec(ref ty, ref e) => { - hir::TyFixedLengthVec(self.lower_ty(ty), self.lower_expr(e)) + TyKind::Array(ref ty, ref e) => { + hir::TyArray(self.lower_ty(ty), self.lower_expr(e)) } - Typeof(ref expr) => { + TyKind::Typeof(ref expr) => { hir::TyTypeof(self.lower_expr(expr)) } - PolyTraitRef(ref bounds) => { + TyKind::PolyTraitRef(ref bounds) => { hir::TyPolyTraitRef(self.lower_bounds(bounds)) } - ImplTrait(ref bounds) => { + TyKind::ImplTrait(ref bounds) => { hir::TyImplTrait(self.lower_bounds(bounds)) } - Mac(_) => panic!("TyMac should have been expanded by now."), + TyKind::Mac(_) => panic!("TyMac should have been expanded by now."), }, span: t.span, }) @@ -415,12 +385,22 @@ impl<'a> LoweringContext<'a> { } fn lower_ty_param(&mut self, tp: &TyParam) -> hir::TyParam { + let mut name = tp.ident.name; + + // Don't expose `Self` (recovered "keyword used as ident" parse error). + // `rustc::ty` expects `Self` to be only used for a trait's `Self`. + // Instead, use gensym("Self") to create a distinct name that looks the same. + if name == token::keywords::SelfType.name() { + name = token::gensym("Self"); + } + hir::TyParam { id: tp.id, - name: tp.ident.name, + name: name, bounds: self.lower_bounds(&tp.bounds), default: tp.default.as_ref().map(|x| self.lower_ty(x)), span: tp.span, + pure_wrt_drop: tp.attrs.iter().any(|attr| attr.check_name("may_dangle")), } } @@ -440,6 +420,7 @@ impl<'a> LoweringContext<'a> { hir::LifetimeDef { lifetime: self.lower_lifetime(&l.lifetime), bounds: self.lower_lifetimes(&l.bounds), + pure_wrt_drop: l.attrs.iter().any(|attr| attr.check_name("may_dangle")), } } @@ -561,6 +542,7 @@ impl<'a> LoweringContext<'a> { name: respan(f.ident.span, f.ident.node.name), expr: self.lower_expr(&f.expr), span: f.span, + is_shorthand: f.is_shorthand, } } @@ -734,8 +716,6 @@ impl<'a> LoweringContext<'a> { id: m.id, span: m.span, imported_from: m.imported_from.map(|x| x.name), - export: m.export, - use_locally: m.use_locally, allow_internal_unstable: m.allow_internal_unstable, body: m.body.clone().into(), } @@ -869,10 +849,9 @@ impl<'a> LoweringContext<'a> { }) } PatKind::Lit(ref e) => hir::PatKind::Lit(self.lower_expr(e)), - PatKind::TupleStruct(ref pth, ref pats, ddpos) => { - hir::PatKind::TupleStruct(self.lower_path(pth), - pats.iter().map(|x| self.lower_pat(x)).collect(), - ddpos) + PatKind::TupleStruct(ref path, ref pats, ddpos) => { + hir::PatKind::TupleStruct(self.lower_path(path), + pats.iter().map(|x| self.lower_pat(x)).collect(), ddpos) } PatKind::Path(ref opt_qself, ref path) => { let opt_qself = opt_qself.as_ref().map(|qself| { @@ -906,8 +885,8 @@ impl<'a> LoweringContext<'a> { PatKind::Range(ref e1, ref e2) => { hir::PatKind::Range(self.lower_expr(e1), self.lower_expr(e2)) } - PatKind::Vec(ref before, ref slice, ref after) => { - hir::PatKind::Vec(before.iter().map(|x| self.lower_pat(x)).collect(), + PatKind::Slice(ref before, ref slice, ref after) => { + hir::PatKind::Slice(before.iter().map(|x| self.lower_pat(x)).collect(), slice.as_ref().map(|x| self.lower_pat(x)), after.iter().map(|x| self.lower_pat(x)).collect()) } @@ -1046,7 +1025,7 @@ impl<'a> 
LoweringContext<'a> { } ExprKind::Vec(ref exprs) => { - hir::ExprVec(exprs.iter().map(|x| self.lower_expr(x)).collect()) + hir::ExprArray(exprs.iter().map(|x| self.lower_expr(x)).collect()) } ExprKind::Repeat(ref expr, ref count) => { let expr = self.lower_expr(expr); @@ -1239,7 +1218,7 @@ impl<'a> LoweringContext<'a> { alignstack, dialect, expn_id, - }) => hir::ExprInlineAsm(hir::InlineAsm { + }) => hir::ExprInlineAsm(P(hir::InlineAsm { inputs: inputs.iter().map(|&(ref c, _)| c.clone()).collect(), outputs: outputs.iter() .map(|out| { @@ -1257,7 +1236,7 @@ impl<'a> LoweringContext<'a> { alignstack: alignstack, dialect: dialect, expn_id: expn_id, - }, outputs.iter().map(|out| self.lower_expr(&out.expr)).collect(), + }), outputs.iter().map(|out| self.lower_expr(&out.expr)).collect(), inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect()), ExprKind::Struct(ref path, ref fields, ref maybe_expr) => { hir::ExprStruct(self.lower_path(path), @@ -1703,6 +1682,7 @@ impl<'a> LoweringContext<'a> { }, span: span, expr: expr, + is_shorthand: false, } } @@ -1719,9 +1699,10 @@ impl<'a> LoweringContext<'a> { let expr_path = hir::ExprPath(None, self.path_ident(span, id)); let expr = self.expr(span, expr_path, ThinVec::new()); - let def = self.resolver.definitions().map(|defs| { - Def::Local(defs.local_def_id(binding), binding) - }).unwrap_or(Def::Err); + let def = { + let defs = self.resolver.definitions(); + Def::Local(defs.local_def_id(binding)) + }; self.resolver.record_resolution(expr.id, def); expr @@ -1869,11 +1850,12 @@ impl<'a> LoweringContext<'a> { let pat = self.pat(span, pat_ident); let parent_def = self.parent_def; - let def = self.resolver.definitions().map(|defs| { + let def = { + let defs = self.resolver.definitions(); let def_path_data = DefPathData::Binding(name.as_str()); let def_index = defs.create_def_with_parent(parent_def, pat.id, def_path_data); - Def::Local(DefId::local(def_index), pat.id) - }).unwrap_or(Def::Err); + Def::Local(DefId::local(def_index)) + }; self.resolver.record_resolution(pat.id, def); pat diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs index d4e1eb70ae8f9..3d9031a136e28 100644 --- a/src/librustc/hir/map/collector.rs +++ b/src/librustc/hir/map/collector.rs @@ -27,6 +27,10 @@ pub struct NodeCollector<'ast> { pub map: Vec>, /// The parent of this node pub parent_node: NodeId, + /// If true, completely ignore nested items. We set this when loading + /// HIR from metadata, since in that case we only want the HIR for + /// one specific item (and not the ones nested inside of it). 
+ pub ignore_nested_items: bool } impl<'ast> NodeCollector<'ast> { @@ -35,6 +39,7 @@ impl<'ast> NodeCollector<'ast> { krate: krate, map: vec![], parent_node: CRATE_NODE_ID, + ignore_nested_items: false }; collector.insert_entry(CRATE_NODE_ID, RootCrate); @@ -52,6 +57,7 @@ impl<'ast> NodeCollector<'ast> { krate: krate, map: map, parent_node: parent_node, + ignore_nested_items: true }; assert_eq!(parent_def_path.krate, parent_def_id.krate); @@ -63,10 +69,10 @@ impl<'ast> NodeCollector<'ast> { fn insert_entry(&mut self, id: NodeId, entry: MapEntry<'ast>) { debug!("ast_map: {:?} => {:?}", id, entry); let len = self.map.len(); - if id as usize >= len { - self.map.extend(repeat(NotPresent).take(id as usize - len + 1)); + if id.as_usize() >= len { + self.map.extend(repeat(NotPresent).take(id.as_usize() - len + 1)); } - self.map[id as usize] = entry; + self.map[id.as_usize()] = entry; } fn insert(&mut self, id: NodeId, node: Node<'ast>) { @@ -88,7 +94,9 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { /// their outer items. fn visit_nested_item(&mut self, item: ItemId) { debug!("visit_nested_item: {:?}", item); - self.visit_item(self.krate.item(item.id)) + if !self.ignore_nested_items { + self.visit_item(self.krate.item(item.id)) + } } fn visit_item(&mut self, i: &'ast Item) { diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs index 29fb19fd42152..421843a7f11d8 100644 --- a/src/librustc/hir/map/def_collector.rs +++ b/src/librustc/hir/map/def_collector.rs @@ -17,44 +17,42 @@ use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex}; use middle::cstore::InlinedItem; use syntax::ast::*; +use syntax::ext::hygiene::Mark; use syntax::visit; -use syntax::parse::token; +use syntax::parse::token::{self, keywords}; /// Creates def ids for nodes in the HIR. -pub struct DefCollector<'ast> { +pub struct DefCollector<'a> { // If we are walking HIR (c.f., AST), we need to keep a reference to the // crate. 
- hir_crate: Option<&'ast hir::Crate>, - definitions: &'ast mut Definitions, + hir_crate: Option<&'a hir::Crate>, + definitions: &'a mut Definitions, parent_def: Option, + pub visit_macro_invoc: Option<&'a mut FnMut(MacroInvocationData)>, } -impl<'ast> DefCollector<'ast> { - pub fn root(definitions: &'ast mut Definitions) -> DefCollector<'ast> { - let mut collector = DefCollector { +pub struct MacroInvocationData { + pub mark: Mark, + pub def_index: DefIndex, + pub const_integer: bool, +} + +impl<'a> DefCollector<'a> { + pub fn new(definitions: &'a mut Definitions) -> Self { + DefCollector { hir_crate: None, definitions: definitions, parent_def: None, - }; - let root = collector.create_def_with_parent(None, CRATE_NODE_ID, DefPathData::CrateRoot); - assert_eq!(root, CRATE_DEF_INDEX); - collector.parent_def = Some(root); - - collector.create_def_with_parent(Some(CRATE_DEF_INDEX), DUMMY_NODE_ID, DefPathData::Misc); - - collector + visit_macro_invoc: None, + } } pub fn extend(parent_node: NodeId, parent_def_path: DefPath, parent_def_id: DefId, - definitions: &'ast mut Definitions) - -> DefCollector<'ast> { - let mut collector = DefCollector { - hir_crate: None, - parent_def: None, - definitions: definitions, - }; + definitions: &'a mut Definitions) + -> Self { + let mut collector = DefCollector::new(definitions); assert_eq!(parent_def_path.krate, parent_def_id.krate); let root_path = Box::new(InlinedRootPath { @@ -68,17 +66,21 @@ impl<'ast> DefCollector<'ast> { collector } - pub fn walk_item(&mut self, ii: &'ast InlinedItem, krate: &'ast hir::Crate) { - self.hir_crate = Some(krate); - ii.visit(self); + pub fn collect_root(&mut self) { + let root = self.create_def_with_parent(None, CRATE_NODE_ID, DefPathData::CrateRoot); + assert_eq!(root, CRATE_DEF_INDEX); + self.parent_def = Some(root); + + self.create_def_with_parent(Some(CRATE_DEF_INDEX), DUMMY_NODE_ID, DefPathData::Misc); } - fn parent_def(&self) -> Option { - self.parent_def + pub fn walk_item(&mut self, ii: &'a InlinedItem, krate: &'a hir::Crate) { + self.hir_crate = Some(krate); + ii.visit(self); } fn create_def(&mut self, node_id: NodeId, data: DefPathData) -> DefIndex { - let parent_def = self.parent_def(); + let parent_def = self.parent_def; debug!("create_def(node_id={:?}, data={:?}, parent_def={:?})", node_id, data, parent_def); self.definitions.create_def_with_parent(parent_def, node_id, data) } @@ -91,29 +93,28 @@ impl<'ast> DefCollector<'ast> { self.definitions.create_def_with_parent(parent, node_id, data) } - fn with_parent(&mut self, parent_def: DefIndex, f: F) { + pub fn with_parent(&mut self, parent_def: DefIndex, f: F) { let parent = self.parent_def; self.parent_def = Some(parent_def); f(self); self.parent_def = parent; } - fn visit_ast_const_integer(&mut self, expr: &Expr) { - // Find the node which will be used after lowering. - if let ExprKind::Paren(ref inner) = expr.node { - return self.visit_ast_const_integer(inner); - } - - // FIXME(eddyb) Closures should have separate - // function definition IDs and expression IDs. - if let ExprKind::Closure(..) = expr.node { - return; + pub fn visit_ast_const_integer(&mut self, expr: &Expr) { + match expr.node { + // Find the node which will be used after lowering. + ExprKind::Paren(ref inner) => return self.visit_ast_const_integer(inner), + ExprKind::Mac(..) => return self.visit_macro_invoc(expr.id, true), + // FIXME(eddyb) Closures should have separate + // function definition IDs and expression IDs. + ExprKind::Closure(..) 
=> return, + _ => {} } self.create_def(expr.id, DefPathData::Initializer); } - fn visit_hir_const_integer(&mut self, expr: &'ast hir::Expr) { + fn visit_hir_const_integer(&mut self, expr: &hir::Expr) { // FIXME(eddyb) Closures should have separate // function definition IDs and expression IDs. if let hir::ExprClosure(..) = expr.node { @@ -122,9 +123,19 @@ impl<'ast> DefCollector<'ast> { self.create_def(expr.id, DefPathData::Initializer); } + + fn visit_macro_invoc(&mut self, id: NodeId, const_integer: bool) { + if let Some(ref mut visit) = self.visit_macro_invoc { + visit(MacroInvocationData { + mark: Mark::from_placeholder_id(id), + const_integer: const_integer, + def_index: self.parent_def.unwrap(), + }) + } + } } -impl<'ast> visit::Visitor for DefCollector<'ast> { +impl<'a> visit::Visitor for DefCollector<'a> { fn visit_item(&mut self, i: &Item) { debug!("visit_item: {:?}", i); @@ -136,10 +147,14 @@ impl<'ast> visit::Visitor for DefCollector<'ast> { ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Union(..) | ItemKind::Trait(..) | ItemKind::ExternCrate(..) | ItemKind::ForeignMod(..) | ItemKind::Ty(..) => DefPathData::TypeNs(i.ident.name.as_str()), + ItemKind::Mod(..) if i.ident == keywords::Invalid.ident() => { + return visit::walk_item(self, i); + } ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_str()), ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) => DefPathData::ValueNs(i.ident.name.as_str()), - ItemKind::Mac(..) => DefPathData::MacroDef(i.ident.name.as_str()), + ItemKind::Mac(..) if i.id == DUMMY_NODE_ID => return, // Scope placeholder + ItemKind::Mac(..) => return self.visit_macro_invoc(i.id, false), ItemKind::Use(..) => DefPathData::Misc, }; let def = self.create_def(i.id, def_data); @@ -205,7 +220,7 @@ impl<'ast> visit::Visitor for DefCollector<'ast> { TraitItemKind::Method(..) | TraitItemKind::Const(..) => DefPathData::ValueNs(ti.ident.name.as_str()), TraitItemKind::Type(..) => DefPathData::TypeNs(ti.ident.name.as_str()), - TraitItemKind::Macro(..) => DefPathData::MacroDef(ti.ident.name.as_str()), + TraitItemKind::Macro(..) => return self.visit_macro_invoc(ti.id, false), }; let def = self.create_def(ti.id, def_data); @@ -223,7 +238,7 @@ impl<'ast> visit::Visitor for DefCollector<'ast> { ImplItemKind::Method(..) | ImplItemKind::Const(..) => DefPathData::ValueNs(ii.ident.name.as_str()), ImplItemKind::Type(..) => DefPathData::TypeNs(ii.ident.name.as_str()), - ImplItemKind::Macro(..) => DefPathData::MacroDef(ii.ident.name.as_str()), + ImplItemKind::Macro(..) => return self.visit_macro_invoc(ii.id, false), }; let def = self.create_def(ii.id, def_data); @@ -239,9 +254,13 @@ impl<'ast> visit::Visitor for DefCollector<'ast> { fn visit_pat(&mut self, pat: &Pat) { let parent_def = self.parent_def; - if let PatKind::Ident(_, id, _) = pat.node { - let def = self.create_def(pat.id, DefPathData::Binding(id.node.name.as_str())); - self.parent_def = Some(def); + match pat.node { + PatKind::Mac(..) => return self.visit_macro_invoc(pat.id, false), + PatKind::Ident(_, id, _) => { + let def = self.create_def(pat.id, DefPathData::Binding(id.node.name.as_str())); + self.parent_def = Some(def); + } + _ => {} } visit::walk_pat(self, pat); @@ -251,13 +270,14 @@ impl<'ast> visit::Visitor for DefCollector<'ast> { fn visit_expr(&mut self, expr: &Expr) { let parent_def = self.parent_def; - if let ExprKind::Repeat(_, ref count) = expr.node { - self.visit_ast_const_integer(count); - } - - if let ExprKind::Closure(..) 
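// Editorial sketch (not part of the patch): the shape of the optional macro-invocation
// callback that DefCollector grows above. Instead of creating a definition for an
// unexpanded macro, the collector reports the invocation to a caller-supplied closure,
// if one was installed. `InvocData`, `Node`, and `ToyDefCollector` are hypothetical
// stand-ins for MacroInvocationData, the AST nodes, and DefCollector.
struct InvocData {
    placeholder_id: u32,
    parent_def: usize,
}

enum Node {
    Item(&'static str),
    MacroInvocation(u32),
}

struct ToyDefCollector<'a> {
    parent_def: usize,
    defs: Vec<&'static str>,
    visit_macro_invoc: Option<&'a mut dyn FnMut(InvocData)>,
}

impl<'a> ToyDefCollector<'a> {
    fn visit(&mut self, node: &Node) {
        match *node {
            Node::Item(name) => self.defs.push(name),
            // Mirrors `visit_macro_invoc`: report the invocation, don't define anything.
            Node::MacroInvocation(id) => {
                if let Some(ref mut visit) = self.visit_macro_invoc {
                    visit(InvocData { placeholder_id: id, parent_def: self.parent_def });
                }
            }
        }
    }
}

fn main() {
    let mut pending = Vec::new();
    {
        let mut record = |data: InvocData| pending.push((data.placeholder_id, data.parent_def));
        let mut collector = ToyDefCollector {
            parent_def: 0,
            defs: Vec::new(),
            visit_macro_invoc: Some(&mut record),
        };
        collector.visit(&Node::Item("foo"));
        collector.visit(&Node::MacroInvocation(7));
        assert_eq!(collector.defs, vec!["foo"]);
    }
    assert_eq!(pending, vec![(7, 0)]);
}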
= expr.node { - let def = self.create_def(expr.id, DefPathData::ClosureExpr); - self.parent_def = Some(def); + match expr.node { + ExprKind::Mac(..) => return self.visit_macro_invoc(expr.id, false), + ExprKind::Repeat(_, ref count) => self.visit_ast_const_integer(count), + ExprKind::Closure(..) => { + let def = self.create_def(expr.id, DefPathData::ClosureExpr); + self.parent_def = Some(def); + } + _ => {} } visit::walk_expr(self, expr); @@ -265,11 +285,13 @@ impl<'ast> visit::Visitor for DefCollector<'ast> { } fn visit_ty(&mut self, ty: &Ty) { - if let TyKind::FixedLengthVec(_, ref length) = ty.node { - self.visit_ast_const_integer(length); - } - if let TyKind::ImplTrait(..) = ty.node { - self.create_def(ty.id, DefPathData::ImplTrait); + match ty.node { + TyKind::Mac(..) => return self.visit_macro_invoc(ty.id, false), + TyKind::Array(_, ref length) => self.visit_ast_const_integer(length), + TyKind::ImplTrait(..) => { + self.create_def(ty.id, DefPathData::ImplTrait); + } + _ => {} } visit::walk_ty(self, ty); } @@ -281,19 +303,17 @@ impl<'ast> visit::Visitor for DefCollector<'ast> { fn visit_macro_def(&mut self, macro_def: &MacroDef) { self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name.as_str())); } + + fn visit_stmt(&mut self, stmt: &Stmt) { + match stmt.node { + StmtKind::Mac(..) => self.visit_macro_invoc(stmt.id, false), + _ => visit::walk_stmt(self, stmt), + } + } } // We walk the HIR rather than the AST when reading items from metadata. impl<'ast> intravisit::Visitor<'ast> for DefCollector<'ast> { - /// Because we want to track parent items and so forth, enable - /// deep walking so that we walk nested items in the context of - /// their outer items. - fn visit_nested_item(&mut self, item_id: hir::ItemId) { - debug!("visit_nested_item: {:?}", item_id); - let item = self.hir_crate.unwrap().item(item_id.id); - self.visit_item(item) - } - fn visit_item(&mut self, i: &'ast hir::Item) { debug!("visit_item: {:?}", i); @@ -429,7 +449,7 @@ impl<'ast> intravisit::Visitor<'ast> for DefCollector<'ast> { } fn visit_ty(&mut self, ty: &'ast hir::Ty) { - if let hir::TyFixedLengthVec(_, ref length) = ty.node { + if let hir::TyArray(_, ref length) = ty.node { self.visit_hir_const_integer(length); } if let hir::TyImplTrait(..) = ty.node { diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 901a489728eec..e8b3714bbe3b8 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -8,14 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use middle::cstore::LOCAL_CRATE; -use hir::def_id::{DefId, DefIndex}; -use hir::map::def_collector::DefCollector; +use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; use rustc_data_structures::fnv::FnvHashMap; use std::fmt::Write; -use std::hash::{Hash, Hasher, SipHasher}; -use syntax::{ast, visit}; -use syntax::parse::token::InternedString; +use std::hash::{Hash, Hasher}; +use std::collections::hash_map::DefaultHasher; +use syntax::ast; +use syntax::parse::token::{self, InternedString}; use ty::TyCtxt; use util::nodemap::NodeMap; @@ -70,7 +69,7 @@ pub struct DefPath { pub data: Vec, /// what krate root is this path relative to? 
- pub krate: ast::CrateNum, + pub krate: CrateNum, } impl DefPath { @@ -78,7 +77,7 @@ impl DefPath { self.krate == LOCAL_CRATE } - pub fn make(start_krate: ast::CrateNum, + pub fn make(start_krate: CrateNum, start_index: DefIndex, mut get_key: FN) -> DefPath where FN: FnMut(DefIndex) -> DefKey @@ -116,11 +115,7 @@ impl DefPath { pub fn to_string(&self, tcx: TyCtxt) -> String { let mut s = String::with_capacity(self.data.len() * 16); - if self.krate == LOCAL_CRATE { - s.push_str(&tcx.crate_name(self.krate)); - } else { - s.push_str(&tcx.sess.cstore.original_crate_name(self.krate)); - } + s.push_str(&tcx.original_crate_name(self.krate)); s.push_str("/"); s.push_str(&tcx.crate_disambiguator(self.krate)); @@ -136,13 +131,13 @@ impl DefPath { } pub fn deterministic_hash(&self, tcx: TyCtxt) -> u64 { - let mut state = SipHasher::new(); + let mut state = DefaultHasher::new(); self.deterministic_hash_to(tcx, &mut state); state.finish() } pub fn deterministic_hash_to(&self, tcx: TyCtxt, state: &mut H) { - tcx.crate_name(self.krate).hash(state); + tcx.original_crate_name(self.krate).hash(state); tcx.crate_disambiguator(self.krate).hash(state); self.data.hash(state); } @@ -229,11 +224,6 @@ impl Definitions { } } - pub fn collect(&mut self, krate: &ast::Crate) { - let mut def_collector = DefCollector::root(self); - visit::walk_crate(&mut def_collector, krate); - } - /// Get the number of definitions. pub fn len(&self) -> usize { self.data.len() @@ -327,6 +317,30 @@ impl Definitions { } impl DefPathData { + pub fn get_opt_name(&self) -> Option { + use self::DefPathData::*; + match *self { + TypeNs(ref name) | + ValueNs(ref name) | + Module(ref name) | + MacroDef(ref name) | + TypeParam(ref name) | + LifetimeDef(ref name) | + EnumVariant(ref name) | + Binding(ref name) | + Field(ref name) => Some(token::intern(name)), + + Impl | + CrateRoot | + InlinedRoot(_) | + Misc | + ClosureExpr | + StructCtor | + Initializer | + ImplTrait => None + } + } + pub fn as_interned_str(&self) -> InternedString { use self::DefPathData::*; match *self { diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 5c302d927a718..39114ec4238e7 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -11,7 +11,7 @@ pub use self::Node::*; use self::MapEntry::*; use self::collector::NodeCollector; -use self::def_collector::DefCollector; +pub use self::def_collector::{DefCollector, MacroInvocationData}; pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData, DisambiguatedDefPathData, InlinedRootPath}; @@ -22,17 +22,15 @@ use middle::cstore::InlinedItem as II; use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex}; use syntax::abi::Abi; -use syntax::ast::{self, Name, NodeId, DUMMY_NODE_ID, }; +use syntax::ast::{self, Name, NodeId, CRATE_NODE_ID}; use syntax::codemap::Spanned; use syntax_pos::Span; use hir::*; -use hir::fold::Folder; use hir::print as pprust; use arena::TypedArena; use std::cell::RefCell; -use std::cmp; use std::io; use std::mem; @@ -240,7 +238,7 @@ impl<'ast> Map<'ast> { let mut id = id0; if !self.is_inlined_node_id(id) { loop { - match map[id as usize] { + match map[id.as_usize()] { EntryItem(_, item) => { let def_id = self.local_def_id(item.id); // NB ^~~~~~~ @@ -262,7 +260,7 @@ impl<'ast> Map<'ast> { EntryVariant(p, _) | EntryExpr(p, _) | EntryStmt(p, _) | - EntryTy(p, _) | + EntryTy(p, _) | EntryLocal(p, _) | EntryPat(p, _) | EntryBlock(p, _) | @@ -295,7 +293,7 @@ impl<'ast> Map<'ast> { // reading from an inlined def-id is really a read out of // the metadata 
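// Editorial sketch (not part of the patch): the hunk above swaps the deprecated
// `SipHasher` for `std::collections::hash_map::DefaultHasher` in
// `DefPath::deterministic_hash`. Hashers created with `DefaultHasher::new()` start
// from a fixed state, so two independent instances agree on the same input. The
// (crate name, disambiguator, path segments) tuple below is only an illustration.
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn deterministic_hash<T: Hash>(value: &T) -> u64 {
    let mut state = DefaultHasher::new();
    value.hash(&mut state);
    state.finish()
}

fn main() {
    let path = ("my_crate", "disambiguator-0", vec!["module", "Type", "method"]);
    let a = deterministic_hash(&path);
    let b = deterministic_hash(&path);
    // Two separately constructed DefaultHasher instances produce the same value.
    assert_eq!(a, b);
    println!("def-path hash: {:016x}", a);
}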
from which we loaded the item. loop { - match map[id as usize] { + match map[id.as_usize()] { EntryItem(p, _) | EntryForeignItem(p, _) | EntryTraitItem(p, _) | @@ -373,7 +371,7 @@ impl<'ast> Map<'ast> { } fn find_entry(&self, id: NodeId) -> Option> { - self.map.borrow().get(id as usize).cloned() + self.map.borrow().get(id.as_usize()).cloned() } pub fn krate(&self) -> &'ast Crate { @@ -456,8 +454,8 @@ impl<'ast> Map<'ast> { let mut id = start_id; loop { let parent_node = self.get_parent_node(id); - if parent_node == 0 { - return Ok(0); + if parent_node == CRATE_NODE_ID { + return Ok(CRATE_NODE_ID); } if parent_node == id { return Err(id); @@ -582,22 +580,24 @@ impl<'ast> Map<'ast> { } } - pub fn expect_struct(&self, id: NodeId) -> &'ast VariantData { + pub fn expect_variant_data(&self, id: NodeId) -> &'ast VariantData { match self.find(id) { Some(NodeItem(i)) => { match i.node { - ItemStruct(ref struct_def, _) => struct_def, - _ => bug!("struct ID bound to non-struct") + ItemStruct(ref struct_def, _) | + ItemUnion(ref struct_def, _) => struct_def, + _ => { + bug!("struct ID bound to non-struct {}", + self.node_to_string(id)); + } } } - Some(NodeVariant(variant)) => { - if variant.node.data.is_struct() { - &variant.node.data - } else { - bug!("struct ID bound to enum variant that isn't struct-like") - } + Some(NodeStructCtor(data)) => data, + Some(NodeVariant(variant)) => &variant.node.data, + _ => { + bug!("expected struct or variant, found {}", + self.node_to_string(id)); } - _ => bug!("expected struct, found {}", self.node_to_string(id)), } } @@ -680,7 +680,7 @@ impl<'ast> Map<'ast> { map: self, item_name: parts.last().unwrap(), in_which: &parts[..parts.len() - 1], - idx: 0, + idx: CRATE_NODE_ID, } } @@ -801,10 +801,10 @@ impl<'a, 'ast> Iterator for NodesMatchingSuffix<'a, 'ast> { fn next(&mut self) -> Option { loop { let idx = self.idx; - if idx as usize >= self.map.entry_count() { + if idx.as_usize() >= self.map.entry_count() { return None; } - self.idx += 1; + self.idx = NodeId::from_u32(self.idx.as_u32() + 1); let name = match self.map.find_entry(idx) { Some(EntryItem(_, n)) => n.name(), Some(EntryForeignItem(_, n))=> n.name(), @@ -832,57 +832,6 @@ impl Named for Variant_ { fn name(&self) -> Name { self.name } } impl Named for TraitItem { fn name(&self) -> Name { self.name } } impl Named for ImplItem { fn name(&self) -> Name { self.name } } -pub trait FoldOps { - fn new_id(&self, id: NodeId) -> NodeId { - id - } - fn new_def_id(&self, def_id: DefId) -> DefId { - def_id - } - fn new_span(&self, span: Span) -> Span { - span - } -} - -/// A Folder that updates IDs and Span's according to fold_ops. 
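// Editorial sketch (not part of the patch): the parent-walking loop above now
// terminates on the CRATE_NODE_ID sentinel instead of a bare `0`. A stripped-down
// version of that ancestor walk: climb a parent table until the crate root is
// reached, and treat a non-root node that is its own parent as an error. The table
// and IDs are hypothetical; only the control flow is mirrored.
const CRATE_NODE_ID: usize = 0;

fn walk_to_root(parents: &[usize], start: usize) -> Result<usize, usize> {
    let mut id = start;
    loop {
        let parent = parents[id];
        if parent == CRATE_NODE_ID {
            return Ok(CRATE_NODE_ID);
        }
        if parent == id {
            // Dangling entry: this chain never reaches the crate root.
            return Err(id);
        }
        id = parent;
    }
}

fn main() {
    // parents[i] is the parent node of node i; node 0 is the crate root.
    let parents = vec![0, 0, 1, 2, 4];
    assert_eq!(walk_to_root(&parents, 3), Ok(CRATE_NODE_ID));
    assert_eq!(walk_to_root(&parents, 4), Err(4)); // node 4 is its own parent
}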
-pub struct IdAndSpanUpdater { - fold_ops: F, - min_id_assigned: NodeId, - max_id_assigned: NodeId, -} - -impl IdAndSpanUpdater { - pub fn new(fold_ops: F) -> IdAndSpanUpdater { - IdAndSpanUpdater { - fold_ops: fold_ops, - min_id_assigned: ::std::u32::MAX, - max_id_assigned: ::std::u32::MIN, - } - } - - pub fn id_range(&self) -> intravisit::IdRange { - intravisit::IdRange { - min: self.min_id_assigned, - max: self.max_id_assigned + 1, - } - } -} - -impl Folder for IdAndSpanUpdater { - fn new_id(&mut self, id: NodeId) -> NodeId { - let id = self.fold_ops.new_id(id); - - self.min_id_assigned = cmp::min(self.min_id_assigned, id); - self.max_id_assigned = cmp::max(self.max_id_assigned, id); - - id - } - - fn new_span(&mut self, span: Span) -> Span { - self.fold_ops.new_span(span) - } -} - pub fn map_crate<'ast>(forest: &'ast mut Forest, definitions: Definitions) -> Map<'ast> { @@ -906,7 +855,7 @@ pub fn map_crate<'ast>(forest: &'ast mut Forest, entries, vector_length, (entries as f64 / vector_length as f64) * 100.); } - let local_node_id_watermark = map.len() as NodeId; + let local_node_id_watermark = NodeId::new(map.len()); let local_def_id_watermark = definitions.len(); Map { @@ -921,36 +870,15 @@ pub fn map_crate<'ast>(forest: &'ast mut Forest, /// Used for items loaded from external crate that are being inlined into this /// crate. -pub fn map_decoded_item<'ast, F: FoldOps>(map: &Map<'ast>, - parent_def_path: DefPath, - parent_def_id: DefId, - ii: InlinedItem, - fold_ops: F) - -> &'ast InlinedItem { +pub fn map_decoded_item<'ast>(map: &Map<'ast>, + parent_def_path: DefPath, + parent_def_id: DefId, + ii: InlinedItem, + ii_parent_id: NodeId) + -> &'ast InlinedItem { let _ignore = map.forest.dep_graph.in_ignore(); - let mut fld = IdAndSpanUpdater::new(fold_ops); - let ii = match ii { - II::Item(d, i) => II::Item(fld.fold_ops.new_def_id(d), - i.map(|i| fld.fold_item(i))), - II::TraitItem(d, ti) => { - II::TraitItem(fld.fold_ops.new_def_id(d), - ti.map(|ti| fld.fold_trait_item(ti))) - } - II::ImplItem(d, ii) => { - II::ImplItem(fld.fold_ops.new_def_id(d), - ii.map(|ii| fld.fold_impl_item(ii))) - } - }; - let ii = map.forest.inlined_items.alloc(ii); - let ii_parent_id = fld.new_id(DUMMY_NODE_ID); - - // Assert that the ii_parent_id is the last NodeId in our reserved range - assert!(ii_parent_id == fld.max_id_assigned); - // Assert that we did not violate the invariant that all inlined HIR items - // have NodeIds greater than or equal to `local_node_id_watermark` - assert!(fld.min_id_assigned >= map.local_node_id_watermark); let defs = &mut *map.definitions.borrow_mut(); let mut def_collector = DefCollector::extend(ii_parent_id, diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index e22c9869ab176..c451a789193aa 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -67,7 +67,6 @@ macro_rules! 
hir_vec { pub mod check_attr; pub mod def; pub mod def_id; -pub mod fold; pub mod intravisit; pub mod lowering; pub mod map; @@ -96,6 +95,7 @@ impl fmt::Debug for Lifetime { pub struct LifetimeDef { pub lifetime: Lifetime, pub bounds: HirVec, + pub pure_wrt_drop: bool, } /// A "Path" is essentially Rust's notion of a name; for instance: @@ -291,6 +291,7 @@ pub struct TyParam { pub bounds: TyParamBounds, pub default: Option>, pub span: Span, + pub pure_wrt_drop: bool, } /// Represents lifetimes and type parameters attached to a declaration @@ -329,6 +330,36 @@ impl Generics { } } +pub enum UnsafeGeneric { + Region(LifetimeDef, &'static str), + Type(TyParam, &'static str), +} + +impl UnsafeGeneric { + pub fn attr_name(&self) -> &'static str { + match *self { + UnsafeGeneric::Region(_, s) => s, + UnsafeGeneric::Type(_, s) => s, + } + } +} + +impl Generics { + pub fn carries_unsafe_attr(&self) -> Option { + for r in &self.lifetimes { + if r.pure_wrt_drop { + return Some(UnsafeGeneric::Region(r.clone(), "may_dangle")); + } + } + for t in &self.ty_params { + if t.pure_wrt_drop { + return Some(UnsafeGeneric::Type(t.clone(), "may_dangle")); + } + } + return None; + } +} + /// A `where` clause in a definition #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct WhereClause { @@ -382,7 +413,6 @@ pub type CrateConfig = HirVec>; pub struct Crate { pub module: Mod, pub attrs: HirVec, - pub config: CrateConfig, pub span: Span, pub exported_macros: HirVec, @@ -427,8 +457,6 @@ pub struct MacroDef { pub id: NodeId, pub span: Span, pub imported_from: Option, - pub export: bool, - pub use_locally: bool, pub allow_internal_unstable: bool, pub body: HirVec, } @@ -479,7 +507,7 @@ impl Pat { PatKind::Box(ref s) | PatKind::Ref(ref s, _) => { s.walk_(it) } - PatKind::Vec(ref before, ref slice, ref after) => { + PatKind::Slice(ref before, ref slice, ref after) => { before.iter().all(|p| p.walk_(it)) && slice.iter().all(|p| p.walk_(it)) && after.iter().all(|p| p.walk_(it)) @@ -555,8 +583,8 @@ pub enum PatKind { /// A range pattern, e.g. `1...2` Range(P, P), /// `[a, b, ..i, y, z]` is represented as: - /// `PatKind::Vec(box [a, b], Some(i), box [y, z])` - Vec(HirVec>, Option>, HirVec>), + /// `PatKind::Slice(box [a, b], Some(i), box [y, z])` + Slice(HirVec>, Option>, HirVec>), } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] @@ -788,6 +816,7 @@ pub struct Field { pub name: Spanned, pub expr: P, pub span: Span, + pub is_shorthand: bool, } #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] @@ -827,7 +856,7 @@ pub enum Expr_ { /// A `box x` expression. ExprBox(P), /// An array (`[a, b, c, d]`) - ExprVec(HirVec>), + ExprArray(HirVec>), /// A function call /// /// The first field resolves to the function itself (usually an `ExprPath`), @@ -911,7 +940,7 @@ pub enum Expr_ { ExprRet(Option>), /// Inline assembly (from `asm!`), with its outputs and inputs. - ExprInlineAsm(InlineAsm, Vec>, Vec>), + ExprInlineAsm(P, HirVec>, HirVec>), /// A struct or struct-like variant literal expression. 
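// Editorial sketch (not part of the patch): `Generics::carries_unsafe_attr` above
// scans lifetime and type parameters for the new `pure_wrt_drop` flag (set by the
// `#[may_dangle]` attribute) and reports the first match. A simplified version of
// that scan over hypothetical parameter records:
struct Param {
    name: &'static str,
    pure_wrt_drop: bool,
}

struct Generics {
    lifetimes: Vec<Param>,
    ty_params: Vec<Param>,
}

impl Generics {
    // Returns the name of the first parameter marked `#[may_dangle]`, if any.
    fn carries_unsafe_attr(&self) -> Option<&'static str> {
        self.lifetimes
            .iter()
            .chain(self.ty_params.iter())
            .find(|p| p.pure_wrt_drop)
            .map(|p| p.name)
    }
}

fn main() {
    let generics = Generics {
        lifetimes: vec![Param { name: "'a", pure_wrt_drop: false }],
        ty_params: vec![Param { name: "T", pure_wrt_drop: true }],
    };
    assert_eq!(generics.carries_unsafe_attr(), Some("T"));
}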
/// @@ -1081,10 +1110,10 @@ pub struct BareFnTy { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] /// The different kinds of types recognized by the compiler pub enum Ty_ { - /// A variable length array (`[T]`) - TyVec(P), + /// A variable length slice (`[T]`) + TySlice(P), /// A fixed length array (`[T; n]`) - TyFixedLengthVec(P, P), + TyArray(P, P), /// A raw pointer (`*const T` or `*mut T`) TyPtr(MutTy), /// A reference (`&'a T` or `&'a mut T`) diff --git a/src/librustc/hir/pat_util.rs b/src/librustc/hir/pat_util.rs index a63bf14cb0238..0deea94146361 100644 --- a/src/librustc/hir/pat_util.rs +++ b/src/librustc/hir/pat_util.rs @@ -58,11 +58,11 @@ pub fn pat_is_refutable(dm: &DefMap, pat: &hir::Pat) -> bool { PatKind::Path(..) | PatKind::Struct(..) => { match dm.get(&pat.id).map(|d| d.full_def()) { - Some(Def::Variant(..)) => true, + Some(Def::Variant(..)) | Some(Def::VariantCtor(..)) => true, _ => false } } - PatKind::Vec(..) => true, + PatKind::Slice(..) => true, _ => false } } @@ -173,10 +173,9 @@ pub fn necessary_variants(dm: &DefMap, pat: &hir::Pat) -> Vec { PatKind::TupleStruct(..) | PatKind::Path(..) | PatKind::Struct(..) => { - match dm.get(&p.id) { - Some(&PathResolution { base_def: Def::Variant(_, id), .. }) => { - variants.push(id); - } + match dm.get(&p.id).map(|d| d.full_def()) { + Some(Def::Variant(id)) | + Some(Def::VariantCtor(id, ..)) => variants.push(id), _ => () } } diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index eebc8fa9e5d5d..657c10bab12eb 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -486,7 +486,7 @@ impl<'a> State<'a> { self.maybe_print_comment(ty.span.lo)?; self.ibox(0)?; match ty.node { - hir::TyVec(ref ty) => { + hir::TySlice(ref ty) => { word(&mut self.s, "[")?; self.print_type(&ty)?; word(&mut self.s, "]")?; @@ -543,7 +543,7 @@ impl<'a> State<'a> { hir::TyImplTrait(ref bounds) => { self.print_bounds("impl ", &bounds[..])?; } - hir::TyFixedLengthVec(ref ty, ref v) => { + hir::TyArray(ref ty, ref v) => { word(&mut self.s, "[")?; self.print_type(&ty)?; word(&mut self.s, "; ")?; @@ -1229,8 +1229,10 @@ impl<'a> State<'a> { &fields[..], |s, field| { s.ibox(indent_unit)?; - s.print_name(field.name.node)?; - s.word_space(":")?; + if !field.is_shorthand { + s.print_name(field.name.node)?; + s.word_space(":")?; + } s.print_expr(&field.expr)?; s.end() }, @@ -1319,7 +1321,7 @@ impl<'a> State<'a> { self.word_space("box")?; self.print_expr(expr)?; } - hir::ExprVec(ref exprs) => { + hir::ExprArray(ref exprs) => { self.print_expr_vec(&exprs[..])?; } hir::ExprRepeat(ref element, ref count) => { @@ -1829,7 +1831,7 @@ impl<'a> State<'a> { word(&mut self.s, "...")?; self.print_expr(&end)?; } - PatKind::Vec(ref before, ref slice, ref after) => { + PatKind::Slice(ref before, ref slice, ref after) => { word(&mut self.s, "[")?; self.commasep(Inconsistent, &before[..], |s, p| s.print_pat(&p))?; if let Some(ref p) = *slice { diff --git a/src/librustc/infer/combine.rs b/src/librustc/infer/combine.rs index 5ce30484ede00..fda08eba02135 100644 --- a/src/librustc/infer/combine.rs +++ b/src/librustc/infer/combine.rs @@ -49,6 +49,7 @@ use ty::relate::{RelateResult, TypeRelation}; use traits::PredicateObligations; use syntax::ast; +use syntax::util::small_vector::SmallVector; use syntax_pos::Span; #[derive(Clone)] @@ -181,7 +182,9 @@ impl<'infcx, 'gcx, 'tcx> CombineFields<'infcx, 'gcx, 'tcx> { a_is_expected: bool) -> RelateResult<'tcx, ()> { - let mut stack = Vec::new(); + // We use SmallVector here instead of Vec 
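// Editorial sketch (not part of the patch): the pretty-printer hunk above stops
// emitting `field:` when the new `is_shorthand` flag is set, so struct literals
// written with field-init shorthand round-trip as written. The two literals below
// build the same value; only their printed form differs. (This compiles on current
// Rust; the shorthand syntax was still being stabilized at the time of this patch.)
#[derive(Debug, PartialEq)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let x = 1;
    let y = 2;
    let longhand = Point { x: x, y: y }; // printed as `Point { x: x, y: y }`
    let shorthand = Point { x, y };      // printed as `Point { x, y }`
    assert_eq!(longhand, shorthand);
}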
because this code is hot and + // it's rare that the stack length exceeds 1. + let mut stack = SmallVector::zero(); stack.push((a_ty, dir, b_vid)); loop { // For each turn of the loop, we extract a tuple diff --git a/src/librustc/infer/error_reporting.rs b/src/librustc/infer/error_reporting.rs index 2792968d427aa..5e3925b0b3c98 100644 --- a/src/librustc/infer/error_reporting.rs +++ b/src/librustc/infer/error_reporting.rs @@ -457,7 +457,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } same_regions.push(SameRegions { scope_id: scope_id, - regions: vec!(sub_fr.bound_region, sup_fr.bound_region) + regions: vec![sub_fr.bound_region, sup_fr.bound_region] }) } } @@ -577,11 +577,16 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { terr: &TypeError<'tcx>) -> DiagnosticBuilder<'tcx> { - // FIXME: do we want to use a different error code for each origin? - let mut diag = struct_span_err!( - self.tcx.sess, trace.origin.span(), E0308, - "{}", trace.origin.as_failure_str() - ); + let span = trace.origin.span(); + let failure_str = trace.origin.as_failure_str(); + let mut diag = match trace.origin { + TypeOrigin::IfExpressionWithNoElse(_) => { + struct_span_err!(self.tcx.sess, span, E0317, "{}", failure_str) + }, + _ => { + struct_span_err!(self.tcx.sess, span, E0308, "{}", failure_str) + }, + }; self.note_type_err(&mut diag, trace.origin, None, Some(trace.values), terr); diag } @@ -1226,16 +1231,17 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> { lifetime: hir::Lifetime, region_names: &HashSet) -> hir::HirVec { - ty_params.iter().map(|ty_param| { - let bounds = self.rebuild_ty_param_bounds(ty_param.bounds.clone(), + ty_params.into_iter().map(|ty_param| { + let bounds = self.rebuild_ty_param_bounds(ty_param.bounds, lifetime, region_names); hir::TyParam { name: ty_param.name, id: ty_param.id, bounds: bounds, - default: ty_param.default.clone(), + default: ty_param.default, span: ty_param.span, + pure_wrt_drop: ty_param.pure_wrt_drop, } }).collect() } @@ -1294,8 +1300,11 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> { -> hir::Generics { let mut lifetimes = Vec::new(); for lt in add { - lifetimes.push(hir::LifetimeDef { lifetime: *lt, - bounds: hir::HirVec::new() }); + lifetimes.push(hir::LifetimeDef { + lifetime: *lt, + bounds: hir::HirVec::new(), + pure_wrt_drop: false, + }); } for lt in &generics.lifetimes { if keep.contains(<.lifetime.name) || @@ -1350,7 +1359,7 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> { region_names: &HashSet) -> P { let mut new_ty = P(ty.clone()); - let mut ty_queue = vec!(ty); + let mut ty_queue = vec![ty]; while !ty_queue.is_empty() { let cur_ty = ty_queue.remove(0); match cur_ty.node { @@ -1433,8 +1442,8 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> { hir::TyPtr(ref mut_ty) => { ty_queue.push(&mut_ty.ty); } - hir::TyVec(ref ty) | - hir::TyFixedLengthVec(ref ty, _) => { + hir::TySlice(ref ty) | + hir::TyArray(ref ty, _) => { ty_queue.push(&ty); } hir::TyTup(ref tys) => ty_queue.extend(tys.iter().map(|ty| &**ty)), @@ -1469,9 +1478,9 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> { ty: build_to(mut_ty.ty, to), }) } - hir::TyVec(ty) => hir::TyVec(build_to(ty, to)), - hir::TyFixedLengthVec(ty, e) => { - hir::TyFixedLengthVec(build_to(ty, to), e) + hir::TySlice(ty) => hir::TySlice(build_to(ty, to)), + hir::TyArray(ty, e) => { + hir::TyArray(build_to(ty, to), e) } hir::TyTup(tys) => { hir::TyTup(tys.into_iter().map(|ty| build_to(ty, to)).collect()) diff --git a/src/librustc/infer/freshen.rs b/src/librustc/infer/freshen.rs index eea12b7f19712..828f9f32baac8 
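// Editorial sketch (not part of the patch): the comment added above explains why the
// unification stack switches from `Vec` to `SmallVector` -- the code is hot and the
// stack almost never holds more than one element, so the common case should not
// allocate. A minimal inline-first container illustrating that trade-off (the real
// SmallVector lives in syntax::util::small_vector and is more capable):
enum TinyStack<T> {
    Zero,
    One(T),
    Many(Vec<T>),
}

impl<T> TinyStack<T> {
    fn new() -> Self {
        TinyStack::Zero
    }

    fn push(&mut self, value: T) {
        // Temporarily take ownership of the current state so the element can move.
        let current = std::mem::replace(self, TinyStack::Zero);
        *self = match current {
            TinyStack::Zero => TinyStack::One(value), // no heap allocation
            TinyStack::One(first) => TinyStack::Many(vec![first, value]),
            TinyStack::Many(mut items) => {
                items.push(value);
                TinyStack::Many(items)
            }
        };
    }

    fn pop(&mut self) -> Option<T> {
        match std::mem::replace(self, TinyStack::Zero) {
            TinyStack::Zero => None,
            TinyStack::One(value) => Some(value),
            TinyStack::Many(mut items) => {
                let value = items.pop();
                *self = TinyStack::Many(items);
                value
            }
        }
    }
}

fn main() {
    let mut stack = TinyStack::new();
    stack.push((1, 2));
    assert_eq!(stack.pop(), Some((1, 2))); // the common, allocation-free case
    assert_eq!(stack.pop(), None);
}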
100644 --- a/src/librustc/infer/freshen.rs +++ b/src/librustc/infer/freshen.rs @@ -61,9 +61,8 @@ impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> { -> Ty<'tcx> where F: FnOnce(u32) -> ty::InferTy, { - match opt_ty { - Some(ty) => { return ty.fold_with(self); } - None => { } + if let Some(ty) = opt_ty { + return ty.fold_with(self); } match self.freshen_map.entry(key) { diff --git a/src/librustc/infer/higher_ranked/mod.rs b/src/librustc/infer/higher_ranked/mod.rs index 7c02de05d26d5..25b899b3c56cd 100644 --- a/src/librustc/infer/higher_ranked/mod.rs +++ b/src/librustc/infer/higher_ranked/mod.rs @@ -749,13 +749,17 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { pub fn plug_leaks(&self, skol_map: SkolemizationMap<'tcx>, snapshot: &CombinedSnapshot, - value: &T) -> T + value: T) -> T where T : TypeFoldable<'tcx> { debug!("plug_leaks(skol_map={:?}, value={:?})", skol_map, value); + if skol_map.is_empty() { + return value; + } + // Compute a mapping from the "taint set" of each skolemized // region back to the `ty::BoundRegion` that it originally // represented. Because `leak_check` passed, we know that @@ -775,7 +779,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { // Remove any instantiated type variables from `value`; those can hide // references to regions from the `fold_regions` code below. - let value = self.resolve_type_vars_if_possible(value); + let value = self.resolve_type_vars_if_possible(&value); // Map any skolemization byproducts back to a late-bound // region. Put that late-bound region at whatever the outermost @@ -813,9 +817,6 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { } }); - debug!("plug_leaks: result={:?}", - result); - self.pop_skolemized(skol_map, snapshot); debug!("plug_leaks: result={:?}", result); @@ -838,5 +839,9 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { debug!("pop_skolemized({:?})", skol_map); let skol_regions: FnvHashSet<_> = skol_map.values().cloned().collect(); self.region_vars.pop_skolemized(&skol_regions, &snapshot.region_vars_snapshot); + if !skol_map.is_empty() { + self.projection_cache.borrow_mut().rollback_skolemized( + &snapshot.projection_cache_snapshot); + } } } diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index 39fc50666a8ce..af994e884fe47 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -25,7 +25,7 @@ use middle::mem_categorization as mc; use middle::mem_categorization::McResult; use middle::region::CodeExtent; use mir::tcx::LvalueTy; -use ty::subst::{Subst, Substs}; +use ty::subst::{Kind, Subst, Substs}; use ty::adjustment; use ty::{TyVid, IntVid, FloatVid}; use ty::{self, Ty, TyCtxt}; @@ -583,7 +583,8 @@ impl_trans_normalize!('gcx, ty::FnSig<'gcx>, &'gcx ty::BareFnTy<'gcx>, ty::ClosureSubsts<'gcx>, - ty::PolyTraitRef<'gcx> + ty::PolyTraitRef<'gcx>, + ty::ExistentialTraitRef<'gcx> ); impl<'gcx> TransNormalize<'gcx> for LvalueTy<'gcx> { @@ -603,6 +604,18 @@ impl<'gcx> TransNormalize<'gcx> for LvalueTy<'gcx> { // NOTE: Callable from trans only! impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { + /// Currently, higher-ranked type bounds inhibit normalization. Therefore, + /// each time we erase them in translation, we need to normalize + /// the contents. 
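// Editorial sketch (not part of the patch): `plug_leaks` above now takes its value by
// value and returns it untouched when the skolemization map is empty, skipping the
// region-folding work entirely. The same fast-path shape, with a HashMap standing in
// for the skolemization map and a string rewrite standing in for the fold:
use std::collections::HashMap;

fn plug_leaks(skol_map: &HashMap<&str, &str>, value: String) -> String {
    if skol_map.is_empty() {
        // Nothing was skolemized, so there is nothing to map back.
        return value;
    }
    let mut result = value;
    for (skolemized, original) in skol_map {
        result = result.replace(skolemized, original);
    }
    result
}

fn main() {
    let empty = HashMap::new();
    assert_eq!(plug_leaks(&empty, "T: 'skol0".to_string()), "T: 'skol0");

    let mut map = HashMap::new();
    map.insert("'skol0", "'a");
    assert_eq!(plug_leaks(&map, "T: 'skol0".to_string()), "T: 'a");
}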
+ pub fn erase_late_bound_regions_and_normalize(self, value: &ty::Binder) + -> T + where T: TransNormalize<'tcx> + { + assert!(!value.needs_subst()); + let value = self.erase_late_bound_regions(value); + self.normalize_associated_type(&value) + } + pub fn normalize_associated_type(self, value: &T) -> T where T: TransNormalize<'tcx> { @@ -1208,7 +1221,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { pub fn type_var_for_def(&self, span: Span, def: &ty::TypeParameterDef<'tcx>, - substs: &Substs<'tcx>) + substs: &[Kind<'tcx>]) -> Ty<'tcx> { let default = def.default.map(|default| { type_variable::Default { diff --git a/src/librustc/infer/region_inference/graphviz.rs b/src/librustc/infer/region_inference/graphviz.rs index 1c64ebc0537ae..289f7d6c73800 100644 --- a/src/librustc/infer/region_inference/graphviz.rs +++ b/src/librustc/infer/region_inference/graphviz.rs @@ -63,9 +63,8 @@ pub fn maybe_print_constraints_for<'a, 'gcx, 'tcx>( return; } - let requested_node: Option = env::var("RUST_REGION_GRAPH_NODE") - .ok() - .and_then(|s| s.parse().ok()); + let requested_node = env::var("RUST_REGION_GRAPH_NODE") + .ok().and_then(|s| s.parse().map(ast::NodeId::new).ok()); if requested_node.is_some() && requested_node != Some(subject_node) { return; diff --git a/src/librustc/infer/type_variable.rs b/src/librustc/infer/type_variable.rs index da9fd1cff2b46..3856ea420b0f3 100644 --- a/src/librustc/infer/type_variable.rs +++ b/src/librustc/infer/type_variable.rs @@ -12,8 +12,9 @@ pub use self::RelationDir::*; use self::TypeVariableValue::*; use self::UndoEntry::*; use hir::def_id::{DefId}; -use ty::{self, Ty}; +use syntax::util::small_vector::SmallVector; use syntax_pos::Span; +use ty::{self, Ty}; use std::cmp::min; use std::marker::PhantomData; @@ -149,7 +150,7 @@ impl<'tcx> TypeVariableTable<'tcx> { &mut self, vid: ty::TyVid, ty: Ty<'tcx>, - stack: &mut Vec<(Ty<'tcx>, RelationDir, ty::TyVid)>) + stack: &mut SmallVector<(Ty<'tcx>, RelationDir, ty::TyVid)>) { debug_assert!(self.root_var(vid) == vid); let old_value = { diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index d2a2f8a972d96..d17402d21342b 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -40,7 +40,7 @@ #![feature(rustc_private)] #![feature(slice_patterns)] #![feature(staged_api)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #![cfg_attr(test, feature(test))] extern crate arena; @@ -50,7 +50,6 @@ extern crate fmt_macros; extern crate getopts; extern crate graphviz; extern crate libc; -extern crate rbml; extern crate rustc_llvm as llvm; extern crate rustc_back; extern crate rustc_data_structures; @@ -106,23 +105,12 @@ pub mod middle { pub mod weak_lang_items; } -pub mod mir { - mod cache; - pub mod repr; - pub mod tcx; - pub mod visit; - pub mod transform; - pub mod traversal; - pub mod mir_map; -} - +pub mod mir; pub mod session; pub mod traits; pub mod ty; pub mod util { - pub use rustc_back::sha2; - pub mod common; pub mod ppaux; pub mod nodemap; diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index d378772e65570..3472c77cf4227 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -120,35 +120,29 @@ declare_lint! { declare_lint! { pub INACCESSIBLE_EXTERN_CRATE, - Warn, + Deny, "use of inaccessible extern crate erroneously allowed" } declare_lint! { pub INVALID_TYPE_PARAM_DEFAULT, - Warn, + Deny, "type parameter default erroneously allowed in invalid location" } declare_lint! 
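// Editorial sketch (not part of the patch): the region-graphviz hunk above re-parses
// the RUST_REGION_GRAPH_NODE environment variable into a NodeId via `NodeId::new`
// instead of relying on a FromStr impl. The same option-chaining shape, with a plain
// newtype standing in for ast::NodeId:
use std::env;

#[derive(Debug)]
struct NodeId(u32);

fn requested_node() -> Option<NodeId> {
    env::var("RUST_REGION_GRAPH_NODE")
        .ok()
        .and_then(|s| s.parse::<u32>().ok())
        .map(NodeId)
}

fn main() {
    match requested_node() {
        Some(id) => println!("limiting region graph output to {:?}", id),
        None => println!("RUST_REGION_GRAPH_NODE unset or not a number"),
    }
}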
{ pub ILLEGAL_FLOATING_POINT_CONSTANT_PATTERN, - Warn, + Deny, "floating-point constants cannot be used in patterns" } declare_lint! { pub ILLEGAL_STRUCT_OR_ENUM_CONSTANT_PATTERN, - Warn, + Deny, "constants of struct or enum type can only be used in a pattern if \ the struct or enum has `#[derive(PartialEq, Eq)]`" } -declare_lint! { - pub MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT, - Deny, - "unit struct or enum variant erroneously allowed to match via path::ident(..)" -} - declare_lint! { pub RAW_POINTER_DERIVE, Warn, @@ -170,7 +164,7 @@ declare_lint! { declare_lint! { pub OVERLAPPING_INHERENT_IMPLS, - Warn, + Deny, "two overlapping inherent impls define an item with the same name were erroneously allowed" } @@ -182,13 +176,13 @@ declare_lint! { declare_lint! { pub SUPER_OR_SELF_IN_GLOBAL_PATH, - Warn, + Deny, "detects super or self keywords at the beginning of global path" } declare_lint! { pub LIFETIME_UNDERSCORE, - Warn, + Deny, "lifetimes or labels named `'_` were erroneously allowed" } @@ -198,6 +192,12 @@ declare_lint! { "safe access to extern statics was erroneously allowed" } +declare_lint! { + pub PATTERNS_IN_FNS_WITHOUT_BODY, + Warn, + "patterns in functions without body were erroneously allowed" +} + /// Does nothing as a lint pass, but registers some `Lint`s /// which are used by other parts of the compiler. #[derive(Copy, Clone)] @@ -226,7 +226,6 @@ impl LintPass for HardwiredLints { INVALID_TYPE_PARAM_DEFAULT, ILLEGAL_FLOATING_POINT_CONSTANT_PATTERN, ILLEGAL_STRUCT_OR_ENUM_CONSTANT_PATTERN, - MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT, CONST_ERR, RAW_POINTER_DERIVE, TRANSMUTE_FROM_FN_ITEM_TYPES, @@ -235,7 +234,8 @@ impl LintPass for HardwiredLints { SUPER_OR_SELF_IN_GLOBAL_PATH, HR_LIFETIME_IN_ASSOC_TYPE, LIFETIME_UNDERSCORE, - SAFE_EXTERN_STATICS + SAFE_EXTERN_STATICS, + PATTERNS_IN_FNS_WITHOUT_BODY ) } } diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 81d3d440b566f..20463f42d3b1d 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -127,9 +127,9 @@ impl LintStore { pub fn new() -> LintStore { LintStore { - lints: vec!(), - early_passes: Some(vec!()), - late_passes: Some(vec!()), + lints: vec![], + early_passes: Some(vec![]), + late_passes: Some(vec![]), by_name: FnvHashMap(), levels: FnvHashMap(), future_incompatible: FnvHashMap(), @@ -345,7 +345,7 @@ macro_rules! run_lints { ($cx:expr, $f:ident, $ps:ident, $($args:expr),*) => ({ // See also the hir version just below. 
pub fn gather_attrs(attrs: &[ast::Attribute]) -> Vec> { - let mut out = vec!(); + let mut out = vec![]; for attr in attrs { let r = gather_attr(attr); out.extend(r.into_iter()); @@ -355,7 +355,7 @@ pub fn gather_attrs(attrs: &[ast::Attribute]) pub fn gather_attr(attr: &ast::Attribute) -> Vec> { - let mut out = vec!(); + let mut out = vec![]; let level = match Level::from_str(&attr.name()) { None => return out, @@ -452,8 +452,7 @@ pub fn raw_struct_lint<'a>(sess: &'a Session, } if let Some(span) = def { - let explanation = "lint level defined here"; - err.span_note(span, &explanation); + sess.diag_span_note_once(&mut err, lint, span, "lint level defined here"); } err diff --git a/src/librustc/lint/mod.rs b/src/librustc/lint/mod.rs index 0938086b000c0..7eea6a2fcf270 100644 --- a/src/librustc/lint/mod.rs +++ b/src/librustc/lint/mod.rs @@ -314,5 +314,4 @@ pub enum LintSource { pub type LevelSource = (Level, LintSource); pub mod builtin; - mod context; diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index b2c293a290ab3..59a5147ed1c16 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -23,31 +23,28 @@ // probably get a better home if someone can find one. use hir::def::{self, Def}; -use hir::def_id::{DefId, DefIndex}; +use hir::def_id::{CrateNum, DefId, DefIndex}; use hir::map as hir_map; -use hir::map::definitions::DefKey; +use hir::map::definitions::{Definitions, DefKey}; use hir::svh::Svh; use middle::lang_items; -use ty::{self, Ty, TyCtxt, VariantKind}; -use mir::repr::Mir; -use mir::mir_map::MirMap; +use ty::{self, Ty, TyCtxt}; +use mir::Mir; use session::Session; -use session::config::PanicStrategy; use session::search_paths::PathKind; -use util::nodemap::{FnvHashMap, NodeSet, DefIdMap}; -use std::rc::Rc; +use util::nodemap::{NodeSet, DefIdMap}; use std::path::PathBuf; use syntax::ast; use syntax::attr; -use syntax::ext::base::LoadedMacro; +use syntax::ext::base::SyntaxExtension; use syntax::ptr::P; use syntax::parse::token::InternedString; use syntax_pos::Span; use rustc_back::target::Target; use hir; use hir::intravisit::Visitor; +use rustc_back::PanicStrategy; -pub use self::DefLike::{DlDef, DlField, DlImpl}; pub use self::NativeLibraryKind::{NativeStatic, NativeFramework, NativeUnknown}; // lonely orphan structs and enums looking for a better home @@ -64,30 +61,20 @@ pub struct LinkMeta { pub struct CrateSource { pub dylib: Option<(PathBuf, PathKind)>, pub rlib: Option<(PathBuf, PathKind)>, - pub cnum: ast::CrateNum, + pub cnum: CrateNum, } -#[derive(Copy, Debug, PartialEq, Clone)] +#[derive(Copy, Debug, PartialEq, Clone, RustcEncodable, RustcDecodable)] pub enum LinkagePreference { RequireDynamic, RequireStatic, } -enum_from_u32! { - #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)] - pub enum NativeLibraryKind { - NativeStatic, // native static library (.a archive) - NativeFramework, // OSX-specific - NativeUnknown, // default way to specify a dynamic library - } -} - -// Something that a name can resolve to. -#[derive(Copy, Clone, Debug)] -pub enum DefLike { - DlDef(Def), - DlImpl(DefId), - DlField +#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)] +pub enum NativeLibraryKind { + NativeStatic, // native static library (.a archive) + NativeFramework, // OSX-specific + NativeUnknown, // default way to specify a dynamic library } /// The data we save and restore about an inlined item or method. 
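// Editorial sketch (not part of the patch): `gather_attrs` above folds every lint
// attribute on an item into one flat list of (level, lint-name) pairs. A toy version
// over plain strings -- the attribute shape and this three-variant Level enum are
// simplifications of the real ast::Attribute handling (the compiler also has Forbid
// and tracks spans for diagnostics):
#[derive(Debug, PartialEq, Clone, Copy)]
enum Level {
    Allow,
    Warn,
    Deny,
}

fn level_from_str(name: &str) -> Option<Level> {
    match name {
        "allow" => Some(Level::Allow),
        "warn" => Some(Level::Warn),
        "deny" => Some(Level::Deny),
        _ => None,
    }
}

// Each attribute is (name, lint arguments), e.g. ("allow", ["dead_code"]).
fn gather_attrs(attrs: &[(&str, Vec<&str>)]) -> Vec<(Level, String)> {
    let mut out = vec![];
    for &(name, ref lints) in attrs {
        let level = match level_from_str(name) {
            Some(level) => level,
            None => continue, // not a lint attribute (e.g. #[inline])
        };
        out.extend(lints.iter().map(|lint| (level, lint.to_string())));
    }
    out
}

fn main() {
    let attrs = vec![
        ("inline", vec![]),
        ("allow", vec!["dead_code", "unused_imports"]),
        ("deny", vec!["missing_docs"]),
    ];
    let gathered = gather_attrs(&attrs);
    assert_eq!(gathered.len(), 3);
    assert_eq!(gathered[0], (Level::Allow, "dead_code".to_string()));
    assert_eq!(gathered[2], (Level::Deny, "missing_docs".to_string()));
}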
This is not @@ -101,24 +88,13 @@ pub enum InlinedItem { } /// A borrowed version of `hir::InlinedItem`. -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, Hash, Debug)] pub enum InlinedItemRef<'a> { Item(DefId, &'a hir::Item), TraitItem(DefId, &'a hir::TraitItem), ImplItem(DefId, &'a hir::ImplItem) } -/// Item definitions in the currently-compiled crate would have the CrateNum -/// LOCAL_CRATE in their DefId. -pub const LOCAL_CRATE: ast::CrateNum = 0; - -#[derive(Copy, Clone)] -pub struct ChildItem { - pub def: DefLike, - pub name: ast::Name, - pub vis: ty::Visibility, -} - #[derive(Copy, Clone, Debug)] pub struct ExternCrate { /// def_id of an `extern crate` in the current crate that caused @@ -144,6 +120,7 @@ pub struct ExternCrate { /// can be accessed. pub trait CrateStore<'tcx> { // item info + fn describe_def(&self, def: DefId) -> Option; fn stability(&self, def: DefId) -> Option; fn deprecation(&self, def: DefId) -> Option; fn visibility(&self, def: DefId) -> ty::Visibility; @@ -151,40 +128,31 @@ pub trait CrateStore<'tcx> { fn closure_ty<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> ty::ClosureTy<'tcx>; fn item_variances(&self, def: DefId) -> Vec; - fn repr_attrs(&self, def: DefId) -> Vec; fn item_type<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> Ty<'tcx>; fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap>; - fn item_name(&self, def: DefId) -> ast::Name; - fn opt_item_name(&self, def: DefId) -> Option; fn item_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::GenericPredicates<'tcx>; fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::GenericPredicates<'tcx>; fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> &'tcx ty::Generics<'tcx>; + -> ty::Generics<'tcx>; fn item_attrs(&self, def_id: DefId) -> Vec; fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)-> ty::TraitDef<'tcx>; fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx>; - fn method_arg_names(&self, did: DefId) -> Vec; + fn fn_arg_names(&self, did: DefId) -> Vec; fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec; // trait info - fn implementations_of_trait(&self, def_id: DefId) -> Vec; - fn provided_trait_methods<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Vec>>; - fn trait_item_def_ids(&self, def: DefId) - -> Vec; + fn implementations_of_trait(&self, filter: Option) -> Vec; // impl info - fn impl_items(&self, impl_def_id: DefId) -> Vec; + fn impl_or_trait_items(&self, def_id: DefId) -> Vec; fn impl_trait_ref<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> Option>; - fn impl_polarity(&self, def: DefId) -> Option; + fn impl_polarity(&self, def: DefId) -> hir::ImplPolarity; fn custom_coerce_unsized_kind(&self, def: DefId) -> Option; - fn associated_consts<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Vec>>; fn impl_parent(&self, impl_def_id: DefId) -> Option; // trait/impl-item info @@ -195,53 +163,43 @@ pub trait CrateStore<'tcx> { // flags fn is_const_fn(&self, did: DefId) -> bool; fn is_defaulted_trait(&self, did: DefId) -> bool; - fn is_impl(&self, did: DefId) -> bool; fn is_default_impl(&self, impl_did: DefId) -> bool; - fn is_extern_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId) -> bool; fn is_foreign_item(&self, did: DefId) -> bool; fn is_statically_included_foreign_item(&self, id: ast::NodeId) -> bool; - fn is_typedef(&self, did: DefId) -> 
bool; // crate metadata - fn dylib_dependency_formats(&self, cnum: ast::CrateNum) - -> Vec<(ast::CrateNum, LinkagePreference)>; - fn lang_items(&self, cnum: ast::CrateNum) -> Vec<(DefIndex, usize)>; - fn missing_lang_items(&self, cnum: ast::CrateNum) -> Vec; - fn is_staged_api(&self, cnum: ast::CrateNum) -> bool; - fn is_explicitly_linked(&self, cnum: ast::CrateNum) -> bool; - fn is_allocator(&self, cnum: ast::CrateNum) -> bool; - fn is_panic_runtime(&self, cnum: ast::CrateNum) -> bool; - fn is_compiler_builtins(&self, cnum: ast::CrateNum) -> bool; - fn panic_strategy(&self, cnum: ast::CrateNum) -> PanicStrategy; - fn extern_crate(&self, cnum: ast::CrateNum) -> Option; - fn crate_attrs(&self, cnum: ast::CrateNum) -> Vec; + fn dylib_dependency_formats(&self, cnum: CrateNum) + -> Vec<(CrateNum, LinkagePreference)>; + fn lang_items(&self, cnum: CrateNum) -> Vec<(DefIndex, usize)>; + fn missing_lang_items(&self, cnum: CrateNum) -> Vec; + fn is_staged_api(&self, cnum: CrateNum) -> bool; + fn is_explicitly_linked(&self, cnum: CrateNum) -> bool; + fn is_allocator(&self, cnum: CrateNum) -> bool; + fn is_panic_runtime(&self, cnum: CrateNum) -> bool; + fn is_compiler_builtins(&self, cnum: CrateNum) -> bool; + fn panic_strategy(&self, cnum: CrateNum) -> PanicStrategy; + fn extern_crate(&self, cnum: CrateNum) -> Option; /// The name of the crate as it is referred to in source code of the current /// crate. - fn crate_name(&self, cnum: ast::CrateNum) -> InternedString; + fn crate_name(&self, cnum: CrateNum) -> InternedString; /// The name of the crate as it is stored in the crate's metadata. - fn original_crate_name(&self, cnum: ast::CrateNum) -> InternedString; - fn crate_hash(&self, cnum: ast::CrateNum) -> Svh; - fn crate_disambiguator(&self, cnum: ast::CrateNum) -> InternedString; - fn crate_struct_field_attrs(&self, cnum: ast::CrateNum) - -> FnvHashMap>; - fn plugin_registrar_fn(&self, cnum: ast::CrateNum) -> Option; - fn native_libraries(&self, cnum: ast::CrateNum) -> Vec<(NativeLibraryKind, String)>; - fn reachable_ids(&self, cnum: ast::CrateNum) -> Vec; - fn is_no_builtins(&self, cnum: ast::CrateNum) -> bool; + fn original_crate_name(&self, cnum: CrateNum) -> InternedString; + fn crate_hash(&self, cnum: CrateNum) -> Svh; + fn crate_disambiguator(&self, cnum: CrateNum) -> InternedString; + fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option; + fn native_libraries(&self, cnum: CrateNum) -> Vec<(NativeLibraryKind, String)>; + fn reachable_ids(&self, cnum: CrateNum) -> Vec; + fn is_no_builtins(&self, cnum: CrateNum) -> bool; // resolve fn def_index_for_def_key(&self, - cnum: ast::CrateNum, + cnum: CrateNum, def: DefKey) -> Option; fn def_key(&self, def: DefId) -> hir_map::DefKey; fn relative_def_path(&self, def: DefId) -> Option; - fn variant_kind(&self, def_id: DefId) -> Option; - fn struct_ctor_def_id(&self, struct_def_id: DefId) -> Option; - fn tuple_struct_definition_if_ctor(&self, did: DefId) -> Option; fn struct_field_names(&self, def: DefId) -> Vec; - fn item_children(&self, did: DefId) -> Vec; - fn crate_top_level_items(&self, cnum: ast::CrateNum) -> Vec; + fn item_children(&self, did: DefId) -> Vec; // misc. 
metadata fn maybe_get_item_ast<'a>(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) @@ -249,33 +207,25 @@ pub trait CrateStore<'tcx> { fn local_node_for_inlined_defid(&'tcx self, def_id: DefId) -> Option; fn defid_for_inlined_node(&'tcx self, node_id: ast::NodeId) -> Option; - fn maybe_get_item_mir<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Option>; + fn get_item_mir<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> Mir<'tcx>; fn is_item_mir_available(&self, def: DefId) -> bool; // This is basically a 1-based range of ints, which is a little // silly - I may fix that. - fn crates(&self) -> Vec; + fn crates(&self) -> Vec; fn used_libraries(&self) -> Vec<(String, NativeLibraryKind)>; fn used_link_args(&self) -> Vec; // utility functions fn metadata_filename(&self) -> &str; fn metadata_section_name(&self, target: &Target) -> &str; - fn encode_type<'a>(&self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - ty: Ty<'tcx>, - def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String) - -> Vec; - fn used_crates(&self, prefer: LinkagePreference) -> Vec<(ast::CrateNum, Option)>; - fn used_crate_source(&self, cnum: ast::CrateNum) -> CrateSource; - fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option; + fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, Option)>; + fn used_crate_source(&self, cnum: CrateNum) -> CrateSource; + fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option; fn encode_metadata<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, reexports: &def::ExportMap, link_meta: &LinkMeta, - reachable: &NodeSet, - mir_map: &MirMap<'tcx>, - krate: &hir::Crate) -> Vec; + reachable: &NodeSet) -> Vec; fn metadata_encoding_version(&self) -> &[u8]; } @@ -324,59 +274,51 @@ pub struct DummyCrateStore; #[allow(unused_variables)] impl<'tcx> CrateStore<'tcx> for DummyCrateStore { // item info + fn describe_def(&self, def: DefId) -> Option { bug!("describe_def") } fn stability(&self, def: DefId) -> Option { bug!("stability") } fn deprecation(&self, def: DefId) -> Option { bug!("deprecation") } fn visibility(&self, def: DefId) -> ty::Visibility { bug!("visibility") } - fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind { bug!("closure_kind") } + fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind { bug!("closure_kind") } fn closure_ty<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> ty::ClosureTy<'tcx> { bug!("closure_ty") } fn item_variances(&self, def: DefId) -> Vec { bug!("item_variances") } - fn repr_attrs(&self, def: DefId) -> Vec { bug!("repr_attrs") } fn item_type<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> Ty<'tcx> { bug!("item_type") } fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap> { bug!("visible_parent_map") } - fn item_name(&self, def: DefId) -> ast::Name { bug!("item_name") } - fn opt_item_name(&self, def: DefId) -> Option { bug!("opt_item_name") } fn item_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::GenericPredicates<'tcx> { bug!("item_predicates") } fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::GenericPredicates<'tcx> { bug!("item_super_predicates") } fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> &'tcx ty::Generics<'tcx> { bug!("item_generics") } + -> ty::Generics<'tcx> { bug!("item_generics") } fn item_attrs(&self, def_id: DefId) -> Vec { bug!("item_attrs") } fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)-> ty::TraitDef<'tcx> { bug!("trait_def") } fn adt_def<'a>(&self, tcx: TyCtxt<'a, 
'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx> { bug!("adt_def") } - fn method_arg_names(&self, did: DefId) -> Vec { bug!("method_arg_names") } + fn fn_arg_names(&self, did: DefId) -> Vec { bug!("fn_arg_names") } fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec { vec![] } // trait info - fn implementations_of_trait(&self, def_id: DefId) -> Vec { vec![] } - fn provided_trait_methods<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Vec>> { bug!("provided_trait_methods") } - fn trait_item_def_ids(&self, def: DefId) - -> Vec { bug!("trait_item_def_ids") } + fn implementations_of_trait(&self, filter: Option) -> Vec { vec![] } fn def_index_for_def_key(&self, - cnum: ast::CrateNum, + cnum: CrateNum, def: DefKey) -> Option { None } // impl info - fn impl_items(&self, impl_def_id: DefId) -> Vec - { bug!("impl_items") } + fn impl_or_trait_items(&self, def_id: DefId) -> Vec + { bug!("impl_or_trait_items") } fn impl_trait_ref<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> Option> { bug!("impl_trait_ref") } - fn impl_polarity(&self, def: DefId) -> Option { bug!("impl_polarity") } + fn impl_polarity(&self, def: DefId) -> hir::ImplPolarity { bug!("impl_polarity") } fn custom_coerce_unsized_kind(&self, def: DefId) -> Option { bug!("custom_coerce_unsized_kind") } - fn associated_consts<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Vec>> { bug!("associated_consts") } fn impl_parent(&self, def: DefId) -> Option { bug!("impl_parent") } // trait/impl-item info @@ -387,64 +329,48 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore { // flags fn is_const_fn(&self, did: DefId) -> bool { bug!("is_const_fn") } fn is_defaulted_trait(&self, did: DefId) -> bool { bug!("is_defaulted_trait") } - fn is_impl(&self, did: DefId) -> bool { bug!("is_impl") } fn is_default_impl(&self, impl_did: DefId) -> bool { bug!("is_default_impl") } - fn is_extern_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId) -> bool - { bug!("is_extern_item") } fn is_foreign_item(&self, did: DefId) -> bool { bug!("is_foreign_item") } fn is_statically_included_foreign_item(&self, id: ast::NodeId) -> bool { false } - fn is_typedef(&self, did: DefId) -> bool { bug!("is_typedef") } // crate metadata - fn dylib_dependency_formats(&self, cnum: ast::CrateNum) - -> Vec<(ast::CrateNum, LinkagePreference)> + fn dylib_dependency_formats(&self, cnum: CrateNum) + -> Vec<(CrateNum, LinkagePreference)> { bug!("dylib_dependency_formats") } - fn lang_items(&self, cnum: ast::CrateNum) -> Vec<(DefIndex, usize)> + fn lang_items(&self, cnum: CrateNum) -> Vec<(DefIndex, usize)> { bug!("lang_items") } - fn missing_lang_items(&self, cnum: ast::CrateNum) -> Vec + fn missing_lang_items(&self, cnum: CrateNum) -> Vec { bug!("missing_lang_items") } - fn is_staged_api(&self, cnum: ast::CrateNum) -> bool { bug!("is_staged_api") } - fn is_explicitly_linked(&self, cnum: ast::CrateNum) -> bool { bug!("is_explicitly_linked") } - fn is_allocator(&self, cnum: ast::CrateNum) -> bool { bug!("is_allocator") } - fn is_panic_runtime(&self, cnum: ast::CrateNum) -> bool { bug!("is_panic_runtime") } - fn is_compiler_builtins(&self, cnum: ast::CrateNum) -> bool { bug!("is_compiler_builtins") } - fn panic_strategy(&self, cnum: ast::CrateNum) -> PanicStrategy { + fn is_staged_api(&self, cnum: CrateNum) -> bool { bug!("is_staged_api") } + fn is_explicitly_linked(&self, cnum: CrateNum) -> bool { bug!("is_explicitly_linked") } + fn is_allocator(&self, cnum: CrateNum) -> bool { bug!("is_allocator") } + fn is_panic_runtime(&self, cnum: CrateNum) -> 
bool { bug!("is_panic_runtime") } + fn is_compiler_builtins(&self, cnum: CrateNum) -> bool { bug!("is_compiler_builtins") } + fn panic_strategy(&self, cnum: CrateNum) -> PanicStrategy { bug!("panic_strategy") } - fn extern_crate(&self, cnum: ast::CrateNum) -> Option { bug!("extern_crate") } - fn crate_attrs(&self, cnum: ast::CrateNum) -> Vec - { bug!("crate_attrs") } - fn crate_name(&self, cnum: ast::CrateNum) -> InternedString { bug!("crate_name") } - fn original_crate_name(&self, cnum: ast::CrateNum) -> InternedString { + fn extern_crate(&self, cnum: CrateNum) -> Option { bug!("extern_crate") } + fn crate_name(&self, cnum: CrateNum) -> InternedString { bug!("crate_name") } + fn original_crate_name(&self, cnum: CrateNum) -> InternedString { bug!("original_crate_name") } - fn crate_hash(&self, cnum: ast::CrateNum) -> Svh { bug!("crate_hash") } - fn crate_disambiguator(&self, cnum: ast::CrateNum) + fn crate_hash(&self, cnum: CrateNum) -> Svh { bug!("crate_hash") } + fn crate_disambiguator(&self, cnum: CrateNum) -> InternedString { bug!("crate_disambiguator") } - fn crate_struct_field_attrs(&self, cnum: ast::CrateNum) - -> FnvHashMap> - { bug!("crate_struct_field_attrs") } - fn plugin_registrar_fn(&self, cnum: ast::CrateNum) -> Option + fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option { bug!("plugin_registrar_fn") } - fn native_libraries(&self, cnum: ast::CrateNum) -> Vec<(NativeLibraryKind, String)> + fn native_libraries(&self, cnum: CrateNum) -> Vec<(NativeLibraryKind, String)> { bug!("native_libraries") } - fn reachable_ids(&self, cnum: ast::CrateNum) -> Vec { bug!("reachable_ids") } - fn is_no_builtins(&self, cnum: ast::CrateNum) -> bool { bug!("is_no_builtins") } + fn reachable_ids(&self, cnum: CrateNum) -> Vec { bug!("reachable_ids") } + fn is_no_builtins(&self, cnum: CrateNum) -> bool { bug!("is_no_builtins") } // resolve fn def_key(&self, def: DefId) -> hir_map::DefKey { bug!("def_key") } fn relative_def_path(&self, def: DefId) -> Option { bug!("relative_def_path") } - fn variant_kind(&self, def_id: DefId) -> Option { bug!("variant_kind") } - fn struct_ctor_def_id(&self, struct_def_id: DefId) -> Option - { bug!("struct_ctor_def_id") } - fn tuple_struct_definition_if_ctor(&self, did: DefId) -> Option - { bug!("tuple_struct_definition_if_ctor") } fn struct_field_names(&self, def: DefId) -> Vec { bug!("struct_field_names") } - fn item_children(&self, did: DefId) -> Vec { bug!("item_children") } - fn crate_top_level_items(&self, cnum: ast::CrateNum) -> Vec - { bug!("crate_top_level_items") } + fn item_children(&self, did: DefId) -> Vec { bug!("item_children") } // misc. metadata fn maybe_get_item_ast<'a>(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) @@ -458,195 +384,48 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore { bug!("defid_for_inlined_node") } - fn maybe_get_item_mir<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Option> { bug!("maybe_get_item_mir") } + fn get_item_mir<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Mir<'tcx> { bug!("get_item_mir") } fn is_item_mir_available(&self, def: DefId) -> bool { bug!("is_item_mir_available") } // This is basically a 1-based range of ints, which is a little // silly - I may fix that. 
- fn crates(&self) -> Vec { vec![] } + fn crates(&self) -> Vec { vec![] } fn used_libraries(&self) -> Vec<(String, NativeLibraryKind)> { vec![] } fn used_link_args(&self) -> Vec { vec![] } // utility functions fn metadata_filename(&self) -> &str { bug!("metadata_filename") } fn metadata_section_name(&self, target: &Target) -> &str { bug!("metadata_section_name") } - fn encode_type<'a>(&self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - ty: Ty<'tcx>, - def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String) - -> Vec { - bug!("encode_type") - } - fn used_crates(&self, prefer: LinkagePreference) -> Vec<(ast::CrateNum, Option)> + fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, Option)> { vec![] } - fn used_crate_source(&self, cnum: ast::CrateNum) -> CrateSource { bug!("used_crate_source") } - fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option { None } + fn used_crate_source(&self, cnum: CrateNum) -> CrateSource { bug!("used_crate_source") } + fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option { None } fn encode_metadata<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, reexports: &def::ExportMap, link_meta: &LinkMeta, - reachable: &NodeSet, - mir_map: &MirMap<'tcx>, - krate: &hir::Crate) -> Vec { vec![] } + reachable: &NodeSet) -> Vec { vec![] } fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") } } -pub trait MacroLoader { - fn load_crate(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec; +pub enum LoadedMacros { + MacroRules(Vec), + ProcMacros(Vec<(ast::Name, SyntaxExtension)>), } -/// Metadata encoding and decoding can make use of thread-local encoding and -/// decoding contexts. These allow implementers of serialize::Encodable and -/// Decodable to access information and datastructures that would otherwise not -/// be available to them. For example, we can automatically translate def-id and -/// span information during decoding because the decoding context knows which -/// crate the data is decoded from. Or it allows to make ty::Ty decodable -/// because the context has access to the TyCtxt that is needed for creating -/// ty::Ty instances. -/// -/// Note, however, that this only works for RBML-based encoding and decoding at -/// the moment. -pub mod tls { - use rbml::opaque::Encoder as OpaqueEncoder; - use rbml::opaque::Decoder as OpaqueDecoder; - use serialize; - use std::cell::Cell; - use std::mem; - use ty::{self, Ty, TyCtxt}; - use ty::subst::Substs; - use hir::def_id::DefId; - - pub trait EncodingContext<'tcx> { - fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx>; - fn encode_ty(&self, encoder: &mut OpaqueEncoder, t: Ty<'tcx>); - fn encode_substs(&self, encoder: &mut OpaqueEncoder, substs: &Substs<'tcx>); - } - - /// Marker type used for the TLS slot. - /// The type context cannot be used directly because the TLS - /// in libstd doesn't allow types generic over lifetimes. - struct TlsPayload; - - thread_local! { - static TLS_ENCODING: Cell> = Cell::new(None) - } - - /// Execute f after pushing the given EncodingContext onto the TLS stack. 
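// Editorial sketch (not part of the patch): `DummyCrateStore` above implements the
// whole CrateStore trait with `bug!(...)` stubs so the compiler can be wired up before
// a real metadata loader is registered; queries that legitimately mean "nothing" (such
// as `crates()`) return empty collections instead. The same placeholder pattern in
// miniature, with `panic!` standing in for rustc's `bug!` macro:
trait CrateStore {
    fn crate_name(&self, cnum: u32) -> String;
    fn crates(&self) -> Vec<u32>;
}

struct DummyCrateStore;

impl CrateStore for DummyCrateStore {
    fn crate_name(&self, _cnum: u32) -> String {
        panic!("crate_name called on DummyCrateStore");
    }
    fn crates(&self) -> Vec<u32> {
        Vec::new() // harmless default: no external crates loaded
    }
}

fn main() {
    let store: &dyn CrateStore = &DummyCrateStore;
    assert!(store.crates().is_empty());
    // Calling `store.crate_name(0)` here would panic, mirroring `bug!`.
}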
- pub fn enter_encoding_context<'tcx, F, R>(ecx: &EncodingContext<'tcx>, - encoder: &mut OpaqueEncoder, - f: F) -> R - where F: FnOnce(&EncodingContext<'tcx>, &mut OpaqueEncoder) -> R - { - let tls_payload = (ecx as *const _, encoder as *mut _); - let tls_ptr = &tls_payload as *const _ as *const TlsPayload; - TLS_ENCODING.with(|tls| { - let prev = tls.get(); - tls.set(Some(tls_ptr)); - let ret = f(ecx, encoder); - tls.set(prev); - return ret - }) - } - - /// Execute f with access to the thread-local encoding context and - /// rbml encoder. This function will panic if the encoder passed in and the - /// context encoder are not the same. - /// - /// Note that this method is 'practically' safe due to its checking that the - /// encoder passed in is the same as the one in TLS, but it would still be - /// possible to construct cases where the EncodingContext is exchanged - /// while the same encoder is used, thus working with a wrong context. - pub fn with_encoding_context<'tcx, E, F, R>(encoder: &mut E, f: F) -> R - where F: FnOnce(&EncodingContext<'tcx>, &mut OpaqueEncoder) -> R, - E: serialize::Encoder - { - unsafe { - unsafe_with_encoding_context(|ecx, tls_encoder| { - assert!(encoder as *mut _ as usize == tls_encoder as *mut _ as usize); - - let ecx: &EncodingContext<'tcx> = mem::transmute(ecx); - - f(ecx, tls_encoder) - }) - } - } - - /// Execute f with access to the thread-local encoding context and - /// rbml encoder. - pub unsafe fn unsafe_with_encoding_context(f: F) -> R - where F: FnOnce(&EncodingContext, &mut OpaqueEncoder) -> R - { - TLS_ENCODING.with(|tls| { - let tls = tls.get().unwrap(); - let tls_payload = tls as *mut (&EncodingContext, &mut OpaqueEncoder); - f((*tls_payload).0, (*tls_payload).1) - }) - } - - pub trait DecodingContext<'tcx> { - fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx>; - fn decode_ty(&self, decoder: &mut OpaqueDecoder) -> ty::Ty<'tcx>; - fn decode_substs(&self, decoder: &mut OpaqueDecoder) -> &'tcx Substs<'tcx>; - fn translate_def_id(&self, def_id: DefId) -> DefId; - } - - thread_local! { - static TLS_DECODING: Cell> = Cell::new(None) - } - - /// Execute f after pushing the given DecodingContext onto the TLS stack. - pub fn enter_decoding_context<'tcx, F, R>(dcx: &DecodingContext<'tcx>, - decoder: &mut OpaqueDecoder, - f: F) -> R - where F: FnOnce(&DecodingContext<'tcx>, &mut OpaqueDecoder) -> R - { - let tls_payload = (dcx as *const _, decoder as *mut _); - let tls_ptr = &tls_payload as *const _ as *const TlsPayload; - TLS_DECODING.with(|tls| { - let prev = tls.get(); - tls.set(Some(tls_ptr)); - let ret = f(dcx, decoder); - tls.set(prev); - return ret - }) - } - - /// Execute f with access to the thread-local decoding context and - /// rbml decoder. This function will panic if the decoder passed in and the - /// context decoder are not the same. - /// - /// Note that this method is 'practically' safe due to its checking that the - /// decoder passed in is the same as the one in TLS, but it would still be - /// possible to construct cases where the DecodingContext is exchanged - /// while the same decoder is used, thus working with a wrong context. 
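// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the `tls` module being deleted
// above stashed pointers to the current encoding/decoding context in a
// thread-local around a closure, so nested Encodable/Decodable impls could
// reach them. Minimal standalone mock of that scheme, with a plain `usize`
// payload standing in for the context/encoder pointer pair:
use std::cell::Cell;

thread_local! {
    static TLS_CTX: Cell<Option<usize>> = Cell::new(None);
}

fn enter_context<R>(ctx: usize, f: impl FnOnce() -> R) -> R {
    TLS_CTX.with(|tls| {
        let prev = tls.get();   // save whatever context was already active
        tls.set(Some(ctx));
        let ret = f();
        tls.set(prev);          // restore it on the way out
        ret
    })
}

fn with_context<R>(f: impl FnOnce(usize) -> R) -> R {
    TLS_CTX.with(|tls| f(tls.get().expect("no context on the TLS stack")))
}

fn main() {
    assert_eq!(enter_context(42, || with_context(|ctx| ctx + 1)), 43);
}
// ---------------------------------------------------------------------------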
- pub fn with_decoding_context<'decoder, 'tcx, D, F, R>(d: &'decoder mut D, f: F) -> R - where D: serialize::Decoder, - F: FnOnce(&DecodingContext<'tcx>, - &mut OpaqueDecoder) -> R, - 'tcx: 'decoder - { - unsafe { - unsafe_with_decoding_context(|dcx, decoder| { - assert!((d as *mut _ as usize) == (decoder as *mut _ as usize)); - - let dcx: &DecodingContext<'tcx> = mem::transmute(dcx); - - f(dcx, decoder) - }) +impl LoadedMacros { + pub fn is_proc_macros(&self) -> bool { + match *self { + LoadedMacros::ProcMacros(_) => true, + _ => false, } } +} - /// Execute f with access to the thread-local decoding context and - /// rbml decoder. - pub unsafe fn unsafe_with_decoding_context(f: F) -> R - where F: FnOnce(&DecodingContext, &mut OpaqueDecoder) -> R - { - TLS_DECODING.with(|tls| { - let tls = tls.get().unwrap(); - let tls_payload = tls as *mut (&DecodingContext, &mut OpaqueDecoder); - f((*tls_payload).0, (*tls_payload).1) - }) - } +pub trait CrateLoader { + fn process_item(&mut self, item: &ast::Item, defs: &Definitions, load_macros: bool) + -> Option; + fn postprocess(&mut self, krate: &ast::Crate); } diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index fc1294c86c44f..7f3a58808c225 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs @@ -112,10 +112,10 @@ impl<'a, 'tcx, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, 'tcx, O ps: &mut pprust::State, node: pprust::AnnNode) -> io::Result<()> { let id = match node { - pprust::NodeName(_) => 0, + pprust::NodeName(_) => ast::CRATE_NODE_ID, pprust::NodeExpr(expr) => expr.id, pprust::NodeBlock(blk) => blk.id, - pprust::NodeItem(_) | pprust::NodeSubItem(_) => 0, + pprust::NodeItem(_) | pprust::NodeSubItem(_) => ast::CRATE_NODE_ID, pprust::NodePat(pat) => pat.id }; diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index 9db6ac1dcefd0..dc634b08784a4 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -106,10 +106,11 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { self.check_def_id(def.def_id()); } _ if self.ignore_non_const_paths => (), - Def::PrimTy(_) => (), - Def::SelfTy(..) => (), - Def::Variant(enum_id, variant_id) => { - self.check_def_id(enum_id); + Def::PrimTy(..) | Def::SelfTy(..) => (), + Def::Variant(variant_id) | Def::VariantCtor(variant_id, ..) => { + if let Some(enum_id) = self.tcx.parent_def_id(variant_id) { + self.check_def_id(enum_id); + } if !self.ignore_variant_stack.contains(&variant_id) { self.check_def_id(variant_id); } @@ -470,13 +471,12 @@ impl<'a, 'tcx> DeadVisitor<'a, 'tcx> { // This is done to handle the case where, for example, the static // method of a private type is used, but the type itself is never // called directly. - let impl_items = self.tcx.impl_items.borrow(); + let impl_items = self.tcx.impl_or_trait_item_def_ids.borrow(); if let Some(impl_list) = self.tcx.inherent_impls.borrow().get(&self.tcx.map.local_def_id(id)) { for impl_did in impl_list.iter() { - for item_did in impl_items.get(impl_did).unwrap().iter() { - if let Some(item_node_id) = - self.tcx.map.as_local_node_id(item_did.def_id()) { + for &item_did in &impl_items[impl_did][..] 
{ + if let Some(item_node_id) = self.tcx.map.as_local_node_id(item_did) { if self.live_symbols.contains(&item_node_id) { return true; } diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs index 7822fe2536f1f..656d3146fe5d1 100644 --- a/src/librustc/middle/dependency_format.rs +++ b/src/librustc/middle/dependency_format.rs @@ -61,12 +61,13 @@ //! Additionally, the algorithm is geared towards finding *any* solution rather //! than finding a number of solutions (there are normally quite a few). -use syntax::ast; +use hir::def_id::CrateNum; use session; -use session::config::{self, PanicStrategy}; +use session::config; use middle::cstore::LinkagePreference::{self, RequireStatic, RequireDynamic}; use util::nodemap::FnvHashMap; +use rustc_back::PanicStrategy; /// A list of dependencies for a certain crate type. /// @@ -140,12 +141,12 @@ fn calculate_type(sess: &session::Session, } // Everything else falls through below. This will happen either with the - // `-C prefer-dynamic` or because we're a rustc-macro crate. Note that - // rustc-macro crates are required to be dylibs, and they're currently + // `-C prefer-dynamic` or because we're a proc-macro crate. Note that + // proc-macro crates are required to be dylibs, and they're currently // required to link to libsyntax as well. config::CrateTypeExecutable | config::CrateTypeDylib | - config::CrateTypeRustcMacro => {}, + config::CrateTypeProcMacro => {}, } let mut formats = FnvHashMap(); @@ -169,9 +170,9 @@ fn calculate_type(sess: &session::Session, } // Collect what we've got so far in the return vector. - let last_crate = sess.cstore.crates().len() as ast::CrateNum; + let last_crate = sess.cstore.crates().len(); let mut ret = (1..last_crate+1).map(|cnum| { - match formats.get(&cnum) { + match formats.get(&CrateNum::new(cnum)) { Some(&RequireDynamic) => Linkage::Dynamic, Some(&RequireStatic) => Linkage::IncludedFromDylib, None => Linkage::NotLinked, @@ -191,7 +192,7 @@ fn calculate_type(sess: &session::Session, assert!(src.rlib.is_some()); info!("adding staticlib: {}", sess.cstore.crate_name(cnum)); add_library(sess, cnum, RequireStatic, &mut formats); - ret[cnum as usize - 1] = Linkage::Static; + ret[cnum.as_usize() - 1] = Linkage::Static; } } @@ -213,7 +214,7 @@ fn calculate_type(sess: &session::Session, // For situations like this, we perform one last pass over the dependencies, // making sure that everything is available in the requested format. for (cnum, kind) in ret.iter().enumerate() { - let cnum = (cnum + 1) as ast::CrateNum; + let cnum = CrateNum::new(cnum + 1); let src = sess.cstore.used_crate_source(cnum); match *kind { Linkage::NotLinked | @@ -237,9 +238,9 @@ fn calculate_type(sess: &session::Session, } fn add_library(sess: &session::Session, - cnum: ast::CrateNum, + cnum: CrateNum, link: LinkagePreference, - m: &mut FnvHashMap) { + m: &mut FnvHashMap) { match m.get(&cnum) { Some(&link2) => { // If the linkages differ, then we'd have two copies of the library @@ -269,9 +270,9 @@ fn attempt_static(sess: &session::Session) -> Option { // All crates are available in an rlib format, so we're just going to link // everything in explicitly so long as it's actually required. 
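// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: dependency_format.rs above now
// uses the `CrateNum` index type instead of raw `ast::CrateNum` integers. The
// linkage list stays 0-based while crate numbers start at 1, hence the
// `CrateNum::new(i + 1)` / `.as_usize() - 1` conversions. Standalone mock of
// that pattern; `CrateNum` and `Linkage` here are local stand-ins, not the
// rustc types:
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct CrateNum(u32);

impl CrateNum {
    fn new(value: usize) -> CrateNum { CrateNum(value as u32) }
    fn as_usize(&self) -> usize { self.0 as usize }
}

#[derive(Copy, Clone, PartialEq, Debug)]
enum Linkage { NotLinked, Static }

fn main() {
    // A per-crate linkage list indexed by (crate number - 1), as in calculate_type().
    let mut list = vec![Linkage::NotLinked; 3];
    for (i, slot) in list.iter_mut().enumerate() {
        let cnum = CrateNum::new(i + 1);     // 0-based index -> 1-based crate number
        if cnum.as_usize() == 2 {            // pretend crate 2 must be linked statically
            *slot = Linkage::Static;
        }
    }
    assert_eq!(list, [Linkage::NotLinked, Linkage::Static, Linkage::NotLinked]);
}
// ---------------------------------------------------------------------------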
- let last_crate = sess.cstore.crates().len() as ast::CrateNum; + let last_crate = sess.cstore.crates().len(); let mut ret = (1..last_crate+1).map(|cnum| { - if sess.cstore.is_explicitly_linked(cnum) { + if sess.cstore.is_explicitly_linked(CrateNum::new(cnum)) { Linkage::Static } else { Linkage::NotLinked @@ -298,11 +299,11 @@ fn attempt_static(sess: &session::Session) -> Option { // a required dependency) in one of the session's field. If this field is not // set then this compilation doesn't actually need the dependency and we can // also skip this step entirely. -fn activate_injected_dep(injected: Option, +fn activate_injected_dep(injected: Option, list: &mut DependencyList, - replaces_injected: &Fn(ast::CrateNum) -> bool) { + replaces_injected: &Fn(CrateNum) -> bool) { for (i, slot) in list.iter().enumerate() { - let cnum = (i + 1) as ast::CrateNum; + let cnum = CrateNum::new(i + 1); if !replaces_injected(cnum) { continue } @@ -311,7 +312,7 @@ fn activate_injected_dep(injected: Option, } } if let Some(injected) = injected { - let idx = injected as usize - 1; + let idx = injected.as_usize() - 1; assert_eq!(list[idx], Linkage::NotLinked); list[idx] = Linkage::Static; } @@ -329,7 +330,7 @@ fn verify_ok(sess: &session::Session, list: &[Linkage]) { if let Linkage::NotLinked = *linkage { continue } - let cnum = (i + 1) as ast::CrateNum; + let cnum = CrateNum::new(i + 1); if sess.cstore.is_allocator(cnum) { if let Some(prev) = allocator { let prev_name = sess.cstore.crate_name(prev); @@ -357,7 +358,7 @@ fn verify_ok(sess: &session::Session, list: &[Linkage]) { // only one, but we perform validation here that all the panic strategy // compilation modes for the whole DAG are valid. if let Some((cnum, found_strategy)) = panic_runtime { - let desired_strategy = sess.opts.cg.panic.clone(); + let desired_strategy = sess.panic_strategy(); // First up, validate that our selected panic runtime is indeed exactly // our same strategy. @@ -380,7 +381,7 @@ fn verify_ok(sess: &session::Session, list: &[Linkage]) { if desired_strategy == PanicStrategy::Abort { continue } - let cnum = (i + 1) as ast::CrateNum; + let cnum = CrateNum::new(i + 1); let found_strategy = sess.cstore.panic_strategy(cnum); if desired_strategy == found_strategy { continue diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index 9f05dde4e66f4..c37b6df369dfc 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -442,7 +442,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { } } - hir::ExprVec(ref exprs) => { + hir::ExprArray(ref exprs) => { self.consume_exprs(exprs); } @@ -1003,7 +1003,9 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { // the leaves of the pattern tree structure. 
return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| { match tcx.expect_def_or_none(pat.id) { - Some(Def::Variant(enum_did, variant_did)) => { + Some(Def::Variant(variant_did)) | + Some(Def::VariantCtor(variant_did, ..)) => { + let enum_did = tcx.parent_def_id(variant_did).unwrap(); let downcast_cmt = if tcx.lookup_adt_def(enum_did).is_univariant() { cmt_pat } else { @@ -1014,12 +1016,14 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { debug!("variant downcast_cmt={:?} pat={:?}", downcast_cmt, pat); delegate.matched_pat(pat, downcast_cmt, match_mode); } - Some(Def::Struct(..)) | Some(Def::Union(..)) | - Some(Def::TyAlias(..)) | Some(Def::AssociatedTy(..)) => { + Some(Def::Struct(..)) | Some(Def::StructCtor(..)) | Some(Def::Union(..)) | + Some(Def::TyAlias(..)) | Some(Def::AssociatedTy(..)) | Some(Def::SelfTy(..)) => { debug!("struct cmt_pat={:?} pat={:?}", cmt_pat, pat); delegate.matched_pat(pat, cmt_pat, match_mode); } - _ => {} + None | Some(Def::Local(..)) | + Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {} + def => bug!("unexpected definition: {:?}", def) } })); } @@ -1029,7 +1033,8 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { self.tcx().with_freevars(closure_expr.id, |freevars| { for freevar in freevars { - let id_var = freevar.def.var_id(); + let def_id = freevar.def.def_id(); + let id_var = self.tcx().map.as_local_node_id(def_id).unwrap(); let upvar_id = ty::UpvarId { var_id: id_var, closure_expr_id: closure_expr.id }; let upvar_capture = self.mc.infcx.upvar_capture(upvar_id).unwrap(); @@ -1061,7 +1066,7 @@ impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { -> mc::McResult> { // Create the cmt for the variable being borrowed, from the // caller's perspective - let var_id = upvar_def.var_id(); + let var_id = self.tcx().map.as_local_node_id(upvar_def.def_id()).unwrap(); let var_ty = self.mc.infcx.node_ty(var_id)?; self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def) } diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index 61bcc05bbb4f4..1acd0fb0f79c0 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -103,11 +103,16 @@ impl<'a, 'gcx, 'tcx> ExprVisitor<'a, 'gcx, 'tcx> { } }; - span_err!(self.infcx.tcx.sess, span, E0512, + struct_span_err!(self.infcx.tcx.sess, span, E0512, "transmute called with differently sized types: \ {} ({}) to {} ({})", from, skeleton_string(from, sk_from), - to, skeleton_string(to, sk_to)); + to, skeleton_string(to, sk_to)) + .span_label(span, + &format!("transmuting between {} and {}", + skeleton_string(from, sk_from), + skeleton_string(to, sk_to))) + .emit(); } } diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index d1769d5cbc51b..3175230ab6a5e 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -43,7 +43,7 @@ macro_rules! language_item_table { enum_from_u32! 
{ - #[derive(Copy, Clone, PartialEq, Eq, Hash)] + #[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum LangItem { $($variant,)* } @@ -59,7 +59,7 @@ impl LanguageItems { fn foo(_: LangItem) -> Option { None } LanguageItems { - items: vec!($(foo($variant)),*), + items: vec![$(foo($variant)),*], missing: Vec::new(), } } diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index b579c69cd05c0..79396b9ca4dab 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -465,7 +465,8 @@ fn visit_expr(ir: &mut IrMaps, expr: &Expr) { let mut call_caps = Vec::new(); ir.tcx.with_freevars(expr.id, |freevars| { for fv in freevars { - if let Def::Local(_, rv) = fv.def { + if let Def::Local(def_id) = fv.def { + let rv = ir.tcx.map.as_local_node_id(def_id).unwrap(); let fv_ln = ir.add_live_node(FreeVarNode(fv.span)); call_caps.push(CaptureInfo {ln: fv_ln, var_nid: rv}); @@ -489,7 +490,7 @@ fn visit_expr(ir: &mut IrMaps, expr: &Expr) { // otherwise, live nodes are not required: hir::ExprIndex(..) | hir::ExprField(..) | hir::ExprTupField(..) | - hir::ExprVec(..) | hir::ExprCall(..) | hir::ExprMethodCall(..) | + hir::ExprArray(..) | hir::ExprCall(..) | hir::ExprMethodCall(..) | hir::ExprTup(..) | hir::ExprBinary(..) | hir::ExprAddrOf(..) | hir::ExprCast(..) | hir::ExprUnary(..) | hir::ExprBreak(_) | hir::ExprAgain(_) | hir::ExprLit(_) | hir::ExprRet(..) | @@ -1094,7 +1095,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { // Uninteresting cases: just propagate in rev exec order - hir::ExprVec(ref exprs) => { + hir::ExprArray(ref exprs) => { self.propagate_through_exprs(&exprs[..], succ) } @@ -1270,7 +1271,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { fn access_path(&mut self, expr: &Expr, succ: LiveNode, acc: u32) -> LiveNode { match self.ir.tcx.expect_def(expr.id) { - Def::Local(_, nid) => { + Def::Local(def_id) => { + let nid = self.ir.tcx.map.as_local_node_id(def_id).unwrap(); let ln = self.live_node(expr.id, expr.span); if acc != 0 { self.init_from_succ(ln, succ); @@ -1434,7 +1436,7 @@ fn check_expr(this: &mut Liveness, expr: &Expr) { hir::ExprCall(..) | hir::ExprMethodCall(..) | hir::ExprIf(..) | hir::ExprMatch(..) | hir::ExprWhile(..) | hir::ExprLoop(..) | hir::ExprIndex(..) | hir::ExprField(..) | hir::ExprTupField(..) | - hir::ExprVec(..) | hir::ExprTup(..) | hir::ExprBinary(..) | + hir::ExprArray(..) | hir::ExprTup(..) | hir::ExprBinary(..) | hir::ExprCast(..) | hir::ExprUnary(..) | hir::ExprRet(..) | hir::ExprBreak(..) | hir::ExprAgain(..) | hir::ExprLit(_) | hir::ExprBlock(..) | hir::ExprAddrOf(..) | @@ -1529,11 +1531,12 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { fn check_lvalue(&mut self, expr: &Expr) { match expr.node { hir::ExprPath(..) => { - if let Def::Local(_, nid) = self.ir.tcx.expect_def(expr.id) { + if let Def::Local(def_id) = self.ir.tcx.expect_def(expr.id) { // Assignment to an immutable variable or argument: only legal // if there is no later assignment. If this local is actually // mutable, then check for a reassignment to flag the mutability // as being used. 
+ let nid = self.ir.tcx.map.as_local_node_id(def_id).unwrap(); let ln = self.live_node(expr.id, expr.span); let var = self.variable(nid, expr.span); self.warn_about_dead_assign(expr.span, expr.id, ln, var); diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index c419f96e82095..9214138d21077 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -74,7 +74,7 @@ use hir::def_id::DefId; use hir::map as ast_map; use infer::InferCtxt; use middle::const_qualif::ConstQualif; -use hir::def::Def; +use hir::def::{Def, CtorKind}; use ty::adjustment; use ty::{self, Ty, TyCtxt}; @@ -503,7 +503,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { hir::ExprClosure(..) | hir::ExprRet(..) | hir::ExprUnary(..) | hir::ExprMethodCall(..) | hir::ExprCast(..) | - hir::ExprVec(..) | hir::ExprTup(..) | hir::ExprIf(..) | + hir::ExprArray(..) | hir::ExprTup(..) | hir::ExprIf(..) | hir::ExprBinary(..) | hir::ExprWhile(..) | hir::ExprBlock(..) | hir::ExprLoop(..) | hir::ExprMatch(..) | hir::ExprLit(..) | hir::ExprBreak(..) | @@ -524,20 +524,11 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { id, expr_ty, def); match def { - Def::Struct(..) | Def::Union(..) | Def::Variant(..) | Def::Const(..) | + Def::StructCtor(..) | Def::VariantCtor(..) | Def::Const(..) | Def::AssociatedConst(..) | Def::Fn(..) | Def::Method(..) => { Ok(self.cat_rvalue_node(id, span, expr_ty)) } - Def::Mod(_) | Def::ForeignMod(_) | - Def::Trait(_) | Def::Enum(..) | Def::TyAlias(..) | Def::PrimTy(_) | - Def::TyParam(..) | - Def::Label(_) | Def::SelfTy(..) | - Def::AssociatedTy(..) => { - span_bug!(span, "Unexpected definition in \ - memory categorization: {:?}", def); - } - Def::Static(_, mutbl) => { Ok(Rc::new(cmt_ { id:id, @@ -549,7 +540,8 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { })) } - Def::Upvar(_, var_id, _, fn_node_id) => { + Def::Upvar(def_id, _, fn_node_id) => { + let var_id = self.tcx().map.as_local_node_id(def_id).unwrap(); let ty = self.node_ty(fn_node_id)?; match ty.sty { ty::TyClosure(closure_id, _) => { @@ -585,7 +577,8 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { } } - Def::Local(_, vid) => { + Def::Local(def_id) => { + let vid = self.tcx().map.as_local_node_id(def_id).unwrap(); Ok(Rc::new(cmt_ { id: id, span: span, @@ -596,7 +589,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { })) } - Def::Err => bug!("Def::Err in memory categorization") + def => span_bug!(span, "unexpected definition in memory categorization: {:?}", def) } } @@ -1075,21 +1068,27 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { // alone) because PatKind::Struct can also refer to variants. 
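// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: in the liveness and
// mem_categorization hunks around here, `Def::Local` and `Def::Upvar` carry a
// `DefId` instead of a `NodeId`, and call sites recover the node id with
// `tcx.map.as_local_node_id(def_id).unwrap()`. Standalone mock of that lookup;
// the types below are local stand-ins, not rustc's:
use std::collections::HashMap;

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct DefId { krate: u32, index: u32 }
type NodeId = u32;

struct MockHirMap {
    local_ids: HashMap<DefId, NodeId>, // only definitions from the crate being compiled
}

impl MockHirMap {
    fn as_local_node_id(&self, def_id: DefId) -> Option<NodeId> {
        if def_id.krate == 0 { self.local_ids.get(&def_id).cloned() } else { None }
    }
}

fn main() {
    let def_id = DefId { krate: 0, index: 7 };
    let mut local_ids = HashMap::new();
    local_ids.insert(def_id, 42);
    let map = MockHirMap { local_ids };

    // Locals always belong to the crate being compiled, so the lookup is unwrapped.
    assert_eq!(map.as_local_node_id(def_id).unwrap(), 42);
}
// ---------------------------------------------------------------------------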
let cmt = match self.tcx().expect_def_or_none(pat.id) { Some(Def::Err) => return Err(()), - Some(Def::Variant(enum_did, variant_did)) + Some(Def::Variant(variant_did)) | + Some(Def::VariantCtor(variant_did, ..)) => { // univariant enums do not need downcasts - if !self.tcx().lookup_adt_def(enum_did).is_univariant() => { + let enum_did = self.tcx().parent_def_id(variant_did).unwrap(); + if !self.tcx().lookup_adt_def(enum_did).is_univariant() { self.cat_downcast(pat, cmt.clone(), cmt.ty, variant_did) + } else { + cmt } + } _ => cmt }; match pat.node { PatKind::TupleStruct(_, ref subpats, ddpos) => { let expected_len = match self.tcx().expect_def(pat.id) { - Def::Variant(enum_def, def_id) => { + Def::VariantCtor(def_id, CtorKind::Fn) => { + let enum_def = self.tcx().parent_def_id(def_id).unwrap(); self.tcx().lookup_adt_def(enum_def).variant_with_id(def_id).fields.len() } - Def::Struct(..) => { + Def::StructCtor(_, CtorKind::Fn) => { match self.pat_ty(&pat)?.sty { ty::TyAdt(adt_def, _) => { adt_def.struct_variant().fields.len() @@ -1148,7 +1147,7 @@ impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { self.cat_pattern_(subcmt, &subpat, op)?; } - PatKind::Vec(ref before, ref slice, ref after) => { + PatKind::Slice(ref before, ref slice, ref after) => { let context = InteriorOffsetKind::Pattern; let elt_cmt = self.cat_index(pat, cmt, context)?; for before_pat in before { diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index beffaff1e5b8a..a476d0f6f30a6 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -139,7 +139,7 @@ impl<'a, 'tcx> ReachableContext<'a, 'tcx> { fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ReachableContext<'a, 'tcx> { let any_library = tcx.sess.crate_types.borrow().iter().any(|ty| { *ty == config::CrateTypeRlib || *ty == config::CrateTypeDylib || - *ty == config::CrateTypeRustcMacro + *ty == config::CrateTypeProcMacro }); ReachableContext { tcx: tcx, diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index fb99820f7c855..30b735b9c24e3 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -20,7 +20,6 @@ use dep_graph::DepNode; use hir::map as ast_map; use session::Session; use util::nodemap::{FnvHashMap, NodeMap, NodeSet}; -use middle::cstore::InlinedItem; use ty; use std::cell::RefCell; @@ -479,12 +478,9 @@ impl RegionMaps { //! Returns the scope when temp created by expr_id will be cleaned up // check for a designated rvalue scope - match self.rvalue_scopes.borrow().get(&expr_id) { - Some(&s) => { - debug!("temporary_scope({:?}) = {:?} [custom]", expr_id, s); - return Some(s); - } - None => { } + if let Some(&s) = self.rvalue_scopes.borrow().get(&expr_id) { + debug!("temporary_scope({:?}) = {:?} [custom]", expr_id, s); + return Some(s); } let scope_map : &[CodeExtent] = &self.scope_map.borrow(); @@ -929,19 +925,15 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &hir::Local) { // // FIXME(#6308) -- Note that `[]` patterns work more smoothly post-DST. 
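// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the dead-code, expr_use_visitor
// and mem_categorization hunks around here replace
// `Def::Variant(enum_did, variant_did)` with `Def::Variant(variant_did)` (plus
// the new `Def::VariantCtor`), recovering the enclosing enum on demand via
// `tcx.parent_def_id(variant_did)`. Standalone mock of that shape; the types
// below are local stand-ins, not rustc's:
use std::collections::HashMap;

#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
struct DefId(u32);

enum Def {
    Variant(DefId),     // only the variant's own id is stored now
    VariantCtor(DefId), // constructor of a tuple or unit variant
}

struct MockTcx {
    parents: HashMap<DefId, DefId>, // child definition -> parent definition
}

impl MockTcx {
    fn parent_def_id(&self, id: DefId) -> Option<DefId> {
        self.parents.get(&id).cloned()
    }
}

fn enum_of_variant(tcx: &MockTcx, def: &Def) -> DefId {
    match *def {
        Def::Variant(variant_did) | Def::VariantCtor(variant_did) => {
            // A variant always has a parent, mirroring the unwrapped
            // `tcx.parent_def_id(variant_did).unwrap()` calls above.
            tcx.parent_def_id(variant_did).unwrap()
        }
    }
}

fn main() {
    let mut parents = HashMap::new();
    parents.insert(DefId(10), DefId(1)); // variant 10 belongs to enum 1
    let tcx = MockTcx { parents };
    assert_eq!(enum_of_variant(&tcx, &Def::Variant(DefId(10))), DefId(1));
}
// ---------------------------------------------------------------------------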
- match local.init { - Some(ref expr) => { - record_rvalue_scope_if_borrow_expr(visitor, &expr, blk_scope); + if let Some(ref expr) = local.init { + record_rvalue_scope_if_borrow_expr(visitor, &expr, blk_scope); - let is_borrow = - if let Some(ref ty) = local.ty { is_borrowed_ty(&ty) } else { false }; + let is_borrow = + if let Some(ref ty) = local.ty { is_borrowed_ty(&ty) } else { false }; - if is_binding_pat(&local.pat) || is_borrow { - record_rvalue_scope(visitor, &expr, blk_scope); - } + if is_binding_pat(&local.pat) || is_borrow { + record_rvalue_scope(visitor, &expr, blk_scope); } - - None => { } } intravisit::walk_local(visitor, local); @@ -962,7 +954,7 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &hir::Local) { field_pats.iter().any(|fp| is_binding_pat(&fp.node.pat)) } - PatKind::Vec(ref pats1, ref pats2, ref pats3) => { + PatKind::Slice(ref pats1, ref pats2, ref pats3) => { pats1.iter().any(|p| is_binding_pat(&p)) || pats2.iter().any(|p| is_binding_pat(&p)) || pats3.iter().any(|p| is_binding_pat(&p)) @@ -1013,7 +1005,7 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &hir::Local) { visitor, &field.expr, blk_id); } } - hir::ExprVec(ref subexprs) | + hir::ExprArray(ref subexprs) | hir::ExprTup(ref subexprs) => { for subexpr in subexprs { record_rvalue_scope_if_borrow_expr( @@ -1024,16 +1016,12 @@ fn resolve_local(visitor: &mut RegionResolutionVisitor, local: &hir::Local) { record_rvalue_scope_if_borrow_expr(visitor, &subexpr, blk_id) } hir::ExprBlock(ref block) => { - match block.expr { - Some(ref subexpr) => { - record_rvalue_scope_if_borrow_expr( - visitor, &subexpr, blk_id); - } - None => { } + if let Some(ref subexpr) = block.expr { + record_rvalue_scope_if_borrow_expr( + visitor, &subexpr, blk_id); } } - _ => { - } + _ => {} } } @@ -1256,19 +1244,3 @@ pub fn resolve_crate(sess: &Session, map: &ast_map::Map) -> RegionMaps { } return maps; } - -pub fn resolve_inlined_item(sess: &Session, - region_maps: &RegionMaps, - item: &InlinedItem) { - let mut visitor = RegionResolutionVisitor { - sess: sess, - region_maps: region_maps, - cx: Context { - root_id: None, - parent: ROOT_CODE_EXTENT, - var_parent: ROOT_CODE_EXTENT - }, - terminating_scopes: NodeSet() - }; - item.visit(&mut visitor); -} diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 95706b5677a62..2d93c33afb409 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -337,7 +337,6 @@ impl<'a, 'tcx, 'v> Visitor<'v> for LifetimeContext<'a, 'tcx> { if !self.trait_ref_hack || !trait_ref.bound_lifetimes.is_empty() { if self.trait_ref_hack { - println!("{:?}", trait_ref.span); span_err!(self.sess, trait_ref.span, E0316, "nested quantification of lifetimes"); } diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 9a56959de38bc..5192575972b02 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -17,9 +17,8 @@ use dep_graph::DepNode; use hir::map as hir_map; use session::Session; use lint; -use middle::cstore::LOCAL_CRATE; use hir::def::Def; -use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex}; +use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, DefIndex, LOCAL_CRATE}; use ty::{self, TyCtxt, AdtKind}; use middle::privacy::AccessLevels; use syntax::parse::token::InternedString; @@ -103,7 +102,7 @@ pub struct Index<'tcx> { depr_map: DefIdMap>, /// Maps for each crate whether it is part of the staged API. 
- staged_api: FnvHashMap + staged_api: FnvHashMap } // A private tree-walker for producing an Index. @@ -412,8 +411,8 @@ impl<'a, 'tcx> Checker<'a, 'tcx> { &feature, &r), None => format!("use of unstable library feature '{}'", &feature) }; - emit_feature_err(&self.tcx.sess.parse_sess.span_diagnostic, - &feature, span, GateIssue::Library(Some(issue)), &msg); + emit_feature_err(&self.tcx.sess.parse_sess, &feature, span, + GateIssue::Library(Some(issue)), &msg); } } Some(&Stability { ref level, ref feature, .. }) => { @@ -618,12 +617,8 @@ pub fn check_path<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, &Option)) { // Paths in import prefixes may have no resolution. match tcx.expect_def_or_none(id) { - Some(Def::PrimTy(..)) => {} - Some(Def::SelfTy(..)) => {} - Some(def) => { - maybe_do_stability_check(tcx, def.def_id(), path.span, cb); - } - None => {} + None | Some(Def::PrimTy(..)) | Some(Def::SelfTy(..)) => {} + Some(def) => maybe_do_stability_check(tcx, def.def_id(), path.span, cb) } } @@ -632,12 +627,7 @@ pub fn check_path_list_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cb: &mut FnMut(DefId, Span, &Option<&Stability>, &Option)) { - match tcx.expect_def(item.node.id) { - Def::PrimTy(..) => {} - def => { - maybe_do_stability_check(tcx, def.def_id(), item.span, cb); - } - } + maybe_do_stability_check(tcx, tcx.expect_def(item.node.id).def_id(), item.span, cb); } pub fn check_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, pat: &hir::Pat, @@ -696,10 +686,9 @@ fn is_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, span: Span) -> bool { fn is_staged_api<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> bool { match tcx.trait_item_of_item(id) { - Some(ty::MethodTraitItemId(trait_method_id)) - if trait_method_id != id => { - is_staged_api(tcx, trait_method_id) - } + Some(trait_method_id) if trait_method_id != id => { + is_staged_api(tcx, trait_method_id) + } _ => { *tcx.stability.borrow_mut().staged_api.entry(id.krate).or_insert_with( || tcx.sess.cstore.is_staged_api(id.krate)) diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs index c2f275e6deaf8..2c08e17f8f1a5 100644 --- a/src/librustc/middle/weak_lang_items.rs +++ b/src/librustc/middle/weak_lang_items.rs @@ -10,10 +10,11 @@ //! Validity checking for weak lang items -use session::config::{self, PanicStrategy}; +use session::config; use session::Session; use middle::lang_items; +use rustc_back::PanicStrategy; use syntax::ast; use syntax::parse::token::InternedString; use syntax_pos::Span; @@ -70,7 +71,7 @@ fn verify(sess: &Session, items: &lang_items::LanguageItems) { let needs_check = sess.crate_types.borrow().iter().any(|kind| { match *kind { config::CrateTypeDylib | - config::CrateTypeRustcMacro | + config::CrateTypeProcMacro | config::CrateTypeCdylib | config::CrateTypeExecutable | config::CrateTypeStaticlib => true, @@ -92,7 +93,7 @@ fn verify(sess: &Session, items: &lang_items::LanguageItems) { // symbols. Other panic runtimes ensure that the relevant symbols are // available to link things together, but they're never exercised. 
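// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: `is_staged_api` above caches one
// flag per crate in the stability index (now keyed by `CrateNum`) and only
// asks the crate store the first time a crate is seen. Standalone mock of that
// entry()/or_insert_with() memoization, with plain types standing in for the
// rustc ones:
use std::collections::HashMap;

struct StagedApiCache {
    cached: HashMap<u32, bool>, // crate number -> "is part of the staged API"
}

impl StagedApiCache {
    fn is_staged_api(&mut self, krate: u32, expensive_lookup: impl Fn(u32) -> bool) -> bool {
        *self.cached.entry(krate).or_insert_with(|| expensive_lookup(krate))
    }
}

fn main() {
    let mut cache = StagedApiCache { cached: HashMap::new() };
    // The closure stands in for `sess.cstore.is_staged_api(krate)`.
    assert!(cache.is_staged_api(0, |k| k == 0));
    assert!(!cache.is_staged_api(5, |k| k == 0));
    // A repeated query never reaches the crate store again.
    assert!(cache.is_staged_api(0, |_| panic!("cached value should be used")));
}
// ---------------------------------------------------------------------------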
let mut whitelisted = HashSet::new(); - if sess.opts.cg.panic != PanicStrategy::Unwind { + if sess.panic_strategy() != PanicStrategy::Unwind { whitelisted.insert(lang_items::EhPersonalityLangItem); whitelisted.insert(lang_items::EhUnwindResumeLangItem); } diff --git a/src/librustc/mir/cache.rs b/src/librustc/mir/cache.rs index 1be7d00f072cd..bc9bbebb1796a 100644 --- a/src/librustc/mir/cache.rs +++ b/src/librustc/mir/cache.rs @@ -11,7 +11,7 @@ use std::cell::{Ref, RefCell}; use rustc_data_structures::indexed_vec::IndexVec; -use mir::repr::{Mir, BasicBlock}; +use mir::{Mir, BasicBlock}; use rustc_serialize as serialize; diff --git a/src/librustc/mir/mir_map.rs b/src/librustc/mir/mir_map.rs deleted file mode 100644 index 92de65798d3cb..0000000000000 --- a/src/librustc/mir/mir_map.rs +++ /dev/null @@ -1,38 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig}; -use hir::def_id::DefId; -use mir::repr::Mir; -use std::marker::PhantomData; - -pub struct MirMap<'tcx> { - pub map: DepTrackingMap>, -} - -impl<'tcx> MirMap<'tcx> { - pub fn new(graph: DepGraph) -> Self { - MirMap { - map: DepTrackingMap::new(graph) - } - } -} - -pub struct MirMapConfig<'tcx> { - data: PhantomData<&'tcx ()> -} - -impl<'tcx> DepTrackingMapConfig for MirMapConfig<'tcx> { - type Key = DefId; - type Value = Mir<'tcx>; - fn to_dep_node(key: &DefId) -> DepNode { - DepNode::Mir(*key) - } -} diff --git a/src/librustc/mir/repr.rs b/src/librustc/mir/mod.rs similarity index 83% rename from src/librustc/mir/repr.rs rename to src/librustc/mir/mod.rs index 53b6ccdbd530a..9d82006ac9f9d 100644 --- a/src/librustc/mir/repr.rs +++ b/src/librustc/mir/mod.rs @@ -15,6 +15,7 @@ use rustc_data_structures::indexed_vec::{IndexVec, Idx}; use rustc_data_structures::control_flow_graph::dominators::{Dominators, dominators}; use rustc_data_structures::control_flow_graph::{GraphPredecessors, GraphSuccessors}; use rustc_data_structures::control_flow_graph::ControlFlowGraph; +use hir::def::CtorKind; use hir::def_id::DefId; use ty::subst::Substs; use ty::{self, AdtDef, ClosureSubsts, Region, Ty}; @@ -31,7 +32,11 @@ use std::vec::IntoIter; use syntax::ast::{self, Name}; use syntax_pos::Span; -use super::cache::Cache; +mod cache; +pub mod tcx; +pub mod visit; +pub mod transform; +pub mod traversal; macro_rules! newtype_index { ($name:ident, $debug_name:expr) => ( @@ -58,7 +63,8 @@ macro_rules! newtype_index { } /// Lowered representation of a single function. -#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] +// Do not implement Clone for Mir: it's easy to do so accidentally and it's kind of expensive. +#[derive(RustcEncodable, RustcDecodable, Debug)] pub struct Mir<'tcx> { /// List of basic blocks. References to basic block use a newtyped index type `BasicBlock` /// that indexes into this vector. @@ -70,34 +76,42 @@ pub struct Mir<'tcx> { /// Rvalues promoted from this function, such as borrows of constants. /// Each of them is the Mir of a constant with the fn's type parameters - /// in scope, but no vars or args and a separate set of temps. + /// in scope, but a separate set of locals. pub promoted: IndexVec>, /// Return type of the function.
pub return_ty: Ty<'tcx>, - /// Variables: these are stack slots corresponding to user variables. They may be - /// assigned many times. - pub var_decls: IndexVec>, - - /// Args: these are stack slots corresponding to the input arguments. - pub arg_decls: IndexVec>, + /// Declarations of locals. + /// + /// The first local is the return value pointer, followed by `arg_count` + /// locals for the function arguments, followed by any user-declared + /// variables and temporaries. + pub local_decls: IndexVec>, - /// Temp declarations: stack slots that for temporaries created by - /// the compiler. These are assigned once, but they are not SSA - /// values in that it is possible to borrow them and mutate them - /// through the resulting reference. - pub temp_decls: IndexVec>, + /// Number of arguments this function takes. + /// + /// Starting at local 1, `arg_count` locals will be provided by the caller + /// and can be assumed to be initialized. + /// + /// If this MIR was built for a constant, this will be 0. + pub arg_count: usize, /// Names and capture modes of all the closure upvars, assuming /// the first argument is either the closure or a reference to it. pub upvar_decls: Vec, + /// Mark an argument local (which must be a tuple) as getting passed as + /// its individual components at the LLVM level. + /// + /// This is used for the "rust-call" ABI. + pub spread_arg: Option, + /// A span representing this MIR, for error reporting pub span: Span, /// A cache for various calculations - cache: Cache + cache: cache::Cache } /// where execution begins @@ -108,23 +122,27 @@ impl<'tcx> Mir<'tcx> { visibility_scopes: IndexVec, promoted: IndexVec>, return_ty: Ty<'tcx>, - var_decls: IndexVec>, - arg_decls: IndexVec>, - temp_decls: IndexVec>, + local_decls: IndexVec>, + arg_count: usize, upvar_decls: Vec, span: Span) -> Self { + // We need `arg_count` locals, and one for the return pointer + assert!(local_decls.len() >= arg_count + 1, + "expected at least {} locals, got {}", arg_count + 1, local_decls.len()); + assert_eq!(local_decls[RETURN_POINTER].ty, return_ty); + Mir { basic_blocks: basic_blocks, visibility_scopes: visibility_scopes, promoted: promoted, return_ty: return_ty, - var_decls: var_decls, - arg_decls: arg_decls, - temp_decls: temp_decls, + local_decls: local_decls, + arg_count: arg_count, upvar_decls: upvar_decls, + spread_arg: None, span: span, - cache: Cache::new() + cache: cache::Cache::new() } } @@ -154,38 +172,74 @@ impl<'tcx> Mir<'tcx> { dominators(self) } - /// Maps locals (Arg's, Var's, Temp's and ReturnPointer, in that order) - /// to their index in the whole list of locals. This is useful if you - /// want to treat all locals the same instead of repeating yourself. 
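// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: the Mir changes above fold
// `var_decls`, `arg_decls` and `temp_decls` into a single `local_decls` list.
// Local 0 is the return pointer, locals 1 through `arg_count` are the
// arguments, and everything after that is either a user variable or a
// compiler temporary, told apart by whether a name/source info was recorded.
// Standalone mock of `local_kind` over that layout (not the rustc type):
#[derive(PartialEq, Debug)]
enum LocalKind { ReturnPointer, Arg, Var, Temp }

struct MockMir {
    arg_count: usize,
    local_names: Vec<Option<&'static str>>, // None for the return pointer and temps
}

impl MockMir {
    fn local_kind(&self, index: usize) -> LocalKind {
        if index == 0 {
            LocalKind::ReturnPointer
        } else if index < self.arg_count + 1 {
            LocalKind::Arg
        } else if self.local_names[index].is_some() {
            LocalKind::Var
        } else {
            LocalKind::Temp
        }
    }
}

fn main() {
    // Roughly: fn f(a, b) { let x = ...; /* plus one temporary */ }
    let mir = MockMir {
        arg_count: 2,
        local_names: vec![None, Some("a"), Some("b"), Some("x"), None],
    };
    let kinds: Vec<_> = (0..5).map(|i| mir.local_kind(i)).collect();
    assert_eq!(kinds, [LocalKind::ReturnPointer, LocalKind::Arg, LocalKind::Arg,
                       LocalKind::Var, LocalKind::Temp]);
}
// ---------------------------------------------------------------------------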
- pub fn local_index(&self, lvalue: &Lvalue<'tcx>) -> Option { - let idx = match *lvalue { - Lvalue::Arg(arg) => arg.index(), - Lvalue::Var(var) => { - self.arg_decls.len() + - var.index() - } - Lvalue::Temp(temp) => { - self.arg_decls.len() + - self.var_decls.len() + - temp.index() + #[inline] + pub fn local_kind(&self, local: Local) -> LocalKind { + let index = local.0 as usize; + if index == 0 { + debug_assert!(self.local_decls[local].mutability == Mutability::Mut, + "return pointer should be mutable"); + + LocalKind::ReturnPointer + } else if index < self.arg_count + 1 { + LocalKind::Arg + } else if self.local_decls[local].name.is_some() { + LocalKind::Var + } else { + debug_assert!(self.local_decls[local].mutability == Mutability::Mut, + "temp should be mutable"); + + LocalKind::Temp + } + } + + /// Returns an iterator over all temporaries. + #[inline] + pub fn temps_iter<'a>(&'a self) -> impl Iterator + 'a { + (self.arg_count+1..self.local_decls.len()).filter_map(move |index| { + let local = Local::new(index); + if self.local_decls[local].source_info.is_none() { + Some(local) + } else { + None } - Lvalue::ReturnPointer => { - self.arg_decls.len() + - self.var_decls.len() + - self.temp_decls.len() + }) + } + + /// Returns an iterator over all user-declared locals. + #[inline] + pub fn vars_iter<'a>(&'a self) -> impl Iterator + 'a { + (self.arg_count+1..self.local_decls.len()).filter_map(move |index| { + let local = Local::new(index); + if self.local_decls[local].source_info.is_none() { + None + } else { + Some(local) } - Lvalue::Static(_) | - Lvalue::Projection(_) => return None - }; - Some(Local::new(idx)) + }) } - /// Counts the number of locals, such that that local_index - /// will always return an index smaller than this count. - pub fn count_locals(&self) -> usize { - self.arg_decls.len() + - self.var_decls.len() + - self.temp_decls.len() + 1 + /// Returns an iterator over all function arguments. + #[inline] + pub fn args_iter(&self) -> impl Iterator { + let arg_count = self.arg_count; + (1..arg_count+1).map(Local::new) + } + + /// Returns an iterator over all user-defined variables and compiler-generated temporaries (all + /// locals that are neither arguments nor the return pointer). + #[inline] + pub fn vars_and_temps_iter(&self) -> impl Iterator { + let arg_count = self.arg_count; + let local_count = self.local_decls.len(); + (arg_count+1..local_count).map(Local::new) + } + + /// Changes a statement to a nop. This is both faster than deleting instructions and avoids + /// invalidating statement indices in `Location`s. + pub fn make_statement_nop(&mut self, location: Location) { + let block = &mut self[location.block]; + debug_assert!(location.statement_index < block.statements.len()); + block.statements[location.statement_index].make_nop() } } @@ -275,53 +329,76 @@ pub enum BorrowKind { /////////////////////////////////////////////////////////////////////////// // Variables and temps -/// A "variable" is a binding declared by the user as part of the fn -/// decl, a let, etc. -#[derive(Clone, Debug, RustcEncodable, RustcDecodable)] -pub struct VarDecl<'tcx> { - /// `let mut x` vs `let x` - pub mutability: Mutability, - - /// name that user gave the variable; not that, internally, - /// mir references variables by index - pub name: Name, - - /// type inferred for this variable (`let x: ty = ...`) - pub ty: Ty<'tcx>, +newtype_index!(Local, "_"); - /// source information (span, scope, etc.) 
for the declaration - pub source_info: SourceInfo, -} +pub const RETURN_POINTER: Local = Local(0); -/// A "temp" is a temporary that we place on the stack. They are -/// anonymous, always mutable, and have only a type. -#[derive(Clone, Debug, RustcEncodable, RustcDecodable)] -pub struct TempDecl<'tcx> { - pub ty: Ty<'tcx>, +/// Classifies locals into categories. See `Mir::local_kind`. +#[derive(PartialEq, Eq, Debug)] +pub enum LocalKind { + /// User-declared variable binding + Var, + /// Compiler-introduced temporary + Temp, + /// Function argument + Arg, + /// Location of function's return value + ReturnPointer, } -/// A "arg" is one of the function's formal arguments. These are -/// anonymous and distinct from the bindings that the user declares. +/// A MIR local. /// -/// For example, in this function: -/// -/// ``` -/// fn foo((x, y): (i32, u32)) { ... } -/// ``` -/// -/// there is only one argument, of type `(i32, u32)`, but two bindings -/// (`x` and `y`). +/// This can be a binding declared by the user, a temporary inserted by the compiler, a function +/// argument, or the return pointer. #[derive(Clone, Debug, RustcEncodable, RustcDecodable)] -pub struct ArgDecl<'tcx> { +pub struct LocalDecl<'tcx> { + /// `let mut x` vs `let x`. + /// + /// Temporaries and the return pointer are always mutable. + pub mutability: Mutability, + + /// Type of this local. pub ty: Ty<'tcx>, - /// If true, this argument is a tuple after monomorphization, - /// and has to be collected from multiple actual arguments. - pub spread: bool, + /// Name of the local, used in debuginfo and pretty-printing. + /// + /// Note that function arguments can also have this set to `Some(_)` + /// to generate better debuginfo. + pub name: Option, + + /// For user-declared variables, stores their source information. + /// + /// For temporaries, this is `None`. + /// + /// This is the primary way to differentiate between user-declared + /// variables and compiler-generated temporaries. + pub source_info: Option, +} + +impl<'tcx> LocalDecl<'tcx> { + /// Create a new `LocalDecl` for a temporary. + #[inline] + pub fn new_temp(ty: Ty<'tcx>) -> Self { + LocalDecl { + mutability: Mutability::Mut, + ty: ty, + name: None, + source_info: None, + } + } - /// Either keywords::Invalid or the name of a single-binding - /// pattern associated with this argument. Useful for debuginfo. - pub debug_name: Name + /// Builds a `LocalDecl` for the return pointer. + /// + /// This must be inserted into the `local_decls` list as the first local. + #[inline] + pub fn new_return_pointer(return_ty: Ty) -> LocalDecl { + LocalDecl { + mutability: Mutability::Mut, + ty: return_ty, + source_info: None, + name: None, // FIXME maybe we do want some name here? + } + } } /// A closure capture, with its name and mode. @@ -413,7 +490,7 @@ pub enum TerminatorKind<'tcx> { /// continue. Emitted by build::scope::diverge_cleanup. Resume, - /// Indicates a normal return. The ReturnPointer lvalue should + /// Indicates a normal return. The return pointer lvalue should /// have been filled in by now. This should occur at most once. Return, @@ -686,6 +763,14 @@ pub struct Statement<'tcx> { pub kind: StatementKind<'tcx>, } +impl<'tcx> Statement<'tcx> { + /// Changes a statement to a nop. This is both faster than deleting instructions and avoids + /// invalidating statement indices in `Location`s. 
+ pub fn make_nop(&mut self) { + self.kind = StatementKind::Nop + } +} + #[derive(Clone, Debug, RustcEncodable, RustcDecodable)] pub enum StatementKind<'tcx> { /// Write the RHS Rvalue to the LHS Lvalue. @@ -699,6 +784,9 @@ pub enum StatementKind<'tcx> { /// End the current live range for the storage of the local. StorageDead(Lvalue<'tcx>), + + /// No-op. Useful for deleting instructions without affecting statement indices. + Nop, } impl<'tcx> Debug for Statement<'tcx> { @@ -711,6 +799,7 @@ impl<'tcx> Debug for Statement<'tcx> { SetDiscriminant{lvalue: ref lv, variant_index: index} => { write!(fmt, "discriminant({:?}) = {:?}", lv, index) } + Nop => write!(fmt, "nop"), } } } @@ -718,31 +807,16 @@ impl<'tcx> Debug for Statement<'tcx> { /////////////////////////////////////////////////////////////////////////// // Lvalues -newtype_index!(Var, "var"); -newtype_index!(Temp, "tmp"); -newtype_index!(Arg, "arg"); -newtype_index!(Local, "local"); - /// A path to a value; something that can be evaluated without /// changing or disturbing program state. #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable)] pub enum Lvalue<'tcx> { - /// local variable declared by the user - Var(Var), - - /// temporary introduced during lowering into MIR - Temp(Temp), - - /// formal parameter of the function; note that these are NOT the - /// bindings that the user declares, which are vars - Arg(Arg), + /// local variable + Local(Local), /// static or static mut variable Static(DefId), - /// the return pointer of the fn - ReturnPointer, - /// projection out of an lvalue (access a field, deref a pointer, etc) Projection(Box>), } @@ -831,13 +905,9 @@ impl<'tcx> Debug for Lvalue<'tcx> { use self::Lvalue::*; match *self { - Var(id) => write!(fmt, "{:?}", id), - Arg(id) => write!(fmt, "{:?}", id), - Temp(id) => write!(fmt, "{:?}", id), + Local(id) => write!(fmt, "{:?}", id), Static(def_id) => write!(fmt, "{}", ty::tls::with(|tcx| tcx.item_path_str(def_id))), - ReturnPointer => - write!(fmt, "return"), Projection(ref data) => match data.elem { ProjectionElem::Downcast(ref adt_def, index) => @@ -960,7 +1030,7 @@ pub enum CastKind { #[derive(Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)] pub enum AggregateKind<'tcx> { - Vec, + Array, Tuple, /// The second field is variant number (discriminant), it's equal to 0 /// for struct and union expressions. 
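// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: `StatementKind::Nop` together
// with `make_nop`/`make_statement_nop` above lets a pass "delete" a statement
// by overwriting it in place, so the statement indices stored in `Location`s
// stay valid. Standalone mock of that idea:
#[derive(PartialEq, Debug)]
enum Stmt { Assign(&'static str), Nop }

fn make_statement_nop(block: &mut Vec<Stmt>, statement_index: usize) {
    // Overwrite rather than remove: nothing after `statement_index` shifts.
    block[statement_index] = Stmt::Nop;
}

fn main() {
    let mut block = vec![Stmt::Assign("a"), Stmt::Assign("b"), Stmt::Assign("c")];
    make_statement_nop(&mut block, 1);
    assert_eq!(block[1], Stmt::Nop);
    assert_eq!(block[2], Stmt::Assign("c")); // index 2 still means the same statement
}
// ---------------------------------------------------------------------------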
The fourth field is active field @@ -1051,8 +1121,6 @@ impl<'tcx> Debug for Rvalue<'tcx> { } Aggregate(ref kind, ref lvs) => { - use self::AggregateKind::*; - fn fmt_tuple(fmt: &mut Formatter, lvs: &[Operand]) -> fmt::Result { let mut tuple_fmt = fmt.debug_tuple(""); for lv in lvs { @@ -1062,9 +1130,9 @@ impl<'tcx> Debug for Rvalue<'tcx> { } match *kind { - Vec => write!(fmt, "{:?}", lvs), + AggregateKind::Array => write!(fmt, "{:?}", lvs), - Tuple => { + AggregateKind::Tuple => { match lvs.len() { 0 => write!(fmt, "()"), 1 => write!(fmt, "({:?},)", lvs[0]), @@ -1072,16 +1140,15 @@ impl<'tcx> Debug for Rvalue<'tcx> { } } - Adt(adt_def, variant, substs, _) => { + AggregateKind::Adt(adt_def, variant, substs, _) => { let variant_def = &adt_def.variants[variant]; - ppaux::parameterized(fmt, substs, variant_def.did, - ppaux::Ns::Value, &[])?; + ppaux::parameterized(fmt, substs, variant_def.did, &[])?; - match variant_def.kind { - ty::VariantKind::Unit => Ok(()), - ty::VariantKind::Tuple => fmt_tuple(fmt, lvs), - ty::VariantKind::Struct => { + match variant_def.ctor_kind { + CtorKind::Const => Ok(()), + CtorKind::Fn => fmt_tuple(fmt, lvs), + CtorKind::Fictive => { let mut struct_fmt = fmt.debug_struct(""); for (field, lv) in variant_def.fields.iter().zip(lvs) { struct_fmt.field(&field.name.as_str(), lv); @@ -1091,14 +1158,16 @@ impl<'tcx> Debug for Rvalue<'tcx> { } } - Closure(def_id, _) => ty::tls::with(|tcx| { + AggregateKind::Closure(def_id, _) => ty::tls::with(|tcx| { if let Some(node_id) = tcx.map.as_local_node_id(def_id) { let name = format!("[closure@{:?}]", tcx.map.span(node_id)); let mut struct_fmt = fmt.debug_struct(&name); tcx.with_freevars(node_id, |freevars| { for (freevar, lv) in freevars.iter().zip(lvs) { - let var_name = tcx.local_var_name_str(freevar.def.var_id()); + let def_id = freevar.def.def_id(); + let var_id = tcx.map.as_local_node_id(def_id).unwrap(); + let var_name = tcx.local_var_name_str(var_id); struct_fmt.field(&var_name, lv); } }); @@ -1169,7 +1238,7 @@ impl<'tcx> Debug for Literal<'tcx> { use self::Literal::*; match *self { Item { def_id, substs } => { - ppaux::parameterized(fmt, substs, def_id, ppaux::Ns::Value, &[]) + ppaux::parameterized(fmt, substs, def_id, &[]) } Value { ref value } => { write!(fmt, "const ")?; @@ -1258,3 +1327,13 @@ impl fmt::Debug for Location { write!(fmt, "{:?}[{}]", self.block, self.statement_index) } } + +impl Location { + pub fn dominates(&self, other: &Location, dominators: &Dominators) -> bool { + if self.block == other.block { + self.statement_index <= other.statement_index + } else { + dominators.is_dominated_by(other.block, self.block) + } + } +} diff --git a/src/librustc/mir/tcx.rs b/src/librustc/mir/tcx.rs index 74ad6c602f6cd..f9afbaf104a66 100644 --- a/src/librustc/mir/tcx.rs +++ b/src/librustc/mir/tcx.rs @@ -13,7 +13,7 @@ * building is complete. */ -use mir::repr::*; +use mir::*; use ty::subst::{Subst, Substs}; use ty::{self, AdtDef, Ty, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; @@ -49,12 +49,17 @@ impl<'a, 'gcx, 'tcx> LvalueTy<'tcx> { -> LvalueTy<'tcx> { match *elem { - ProjectionElem::Deref => + ProjectionElem::Deref => { + let ty = self.to_ty(tcx) + .builtin_deref(true, ty::LvaluePreference::NoPreference) + .unwrap_or_else(|| { + bug!("deref projection of non-dereferencable ty {:?}", self) + }) + .ty; LvalueTy::Ty { - ty: self.to_ty(tcx).builtin_deref(true, ty::LvaluePreference::NoPreference) - .unwrap() - .ty - }, + ty: ty, + } + } ProjectionElem::Index(_) | ProjectionElem::ConstantIndex { .. 
} => LvalueTy::Ty { ty: self.to_ty(tcx).builtin_index().unwrap() @@ -116,18 +121,12 @@ impl<'tcx> TypeFoldable<'tcx> for LvalueTy<'tcx> { impl<'tcx> Lvalue<'tcx> { pub fn ty<'a, 'gcx>(&self, mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> LvalueTy<'tcx> { - match self { - &Lvalue::Var(index) => - LvalueTy::Ty { ty: mir.var_decls[index].ty }, - &Lvalue::Temp(index) => - LvalueTy::Ty { ty: mir.temp_decls[index].ty }, - &Lvalue::Arg(index) => - LvalueTy::Ty { ty: mir.arg_decls[index].ty }, - &Lvalue::Static(def_id) => + match *self { + Lvalue::Local(index) => + LvalueTy::Ty { ty: mir.local_decls[index].ty }, + Lvalue::Static(def_id) => LvalueTy::Ty { ty: tcx.lookup_item_type(def_id).ty }, - &Lvalue::ReturnPointer => - LvalueTy::Ty { ty: mir.return_ty }, - &Lvalue::Projection(ref proj) => + Lvalue::Projection(ref proj) => proj.base.ty(mir, tcx).projection_ty(tcx, &proj.elem), } } @@ -164,7 +163,7 @@ impl<'tcx> Rvalue<'tcx> { let lhs_ty = lhs.ty(mir, tcx); let rhs_ty = rhs.ty(mir, tcx); let ty = op.ty(tcx, lhs_ty, rhs_ty); - let ty = tcx.mk_tup(vec![ty, tcx.types.bool]); + let ty = tcx.intern_tup(&[ty, tcx.types.bool]); Some(ty) } &Rvalue::UnaryOp(_, ref operand) => { @@ -175,7 +174,7 @@ impl<'tcx> Rvalue<'tcx> { } &Rvalue::Aggregate(ref ak, ref ops) => { match *ak { - AggregateKind::Vec => { + AggregateKind::Array => { if let Some(operand) = ops.get(0) { let ty = operand.ty(mir, tcx); Some(tcx.mk_array(ty, ops.len())) @@ -185,7 +184,7 @@ impl<'tcx> Rvalue<'tcx> { } AggregateKind::Tuple => { Some(tcx.mk_tup( - ops.iter().map(|op| op.ty(mir, tcx)).collect() + ops.iter().map(|op| op.ty(mir, tcx)) )) } AggregateKind::Adt(def, _, substs, _) => { diff --git a/src/librustc/mir/transform.rs b/src/librustc/mir/transform.rs index 8cd5f5844d21c..3576ae662a005 100644 --- a/src/librustc/mir/transform.rs +++ b/src/librustc/mir/transform.rs @@ -11,8 +11,7 @@ use dep_graph::DepNode; use hir; use hir::map::DefPathData; -use mir::mir_map::MirMap; -use mir::repr::{Mir, Promoted}; +use mir::{Mir, Promoted}; use ty::TyCtxt; use syntax::ast::NodeId; use util::common::time; @@ -85,12 +84,11 @@ pub trait Pass { fn disambiguator<'a>(&'a self) -> Option> { None } } -/// A pass which inspects the whole MirMap. +/// A pass which inspects the whole Mir map. 
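// ---------------------------------------------------------------------------
// Illustrative sketch, not part of the patch: `Location::dominates`, added in
// mir/mod.rs above, compares statement indices when both locations sit in the
// same block and otherwise defers to the dominator tree. Standalone mock with
// a trivial dominator relation (block 0 dominates every block):
#[derive(Copy, Clone, PartialEq)]
struct Location { block: usize, statement_index: usize }

fn is_dominated_by(node: usize, dominator: usize) -> bool {
    dominator == 0 || dominator == node
}

fn dominates(this: Location, other: Location) -> bool {
    if this.block == other.block {
        this.statement_index <= other.statement_index
    } else {
        is_dominated_by(other.block, this.block)
    }
}

fn main() {
    let a = Location { block: 0, statement_index: 1 };
    let b = Location { block: 0, statement_index: 3 };
    let c = Location { block: 2, statement_index: 0 };
    assert!(dominates(a, b));  // same block, earlier statement
    assert!(dominates(a, c));  // block 0 dominates block 2 in this mock CFG
    assert!(!dominates(b, a));
}
// ---------------------------------------------------------------------------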
pub trait MirMapPass<'tcx>: Pass { fn run_pass<'a>( &mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, - map: &mut MirMap<'tcx>, hooks: &mut [Box MirPassHook<'s>>]); } @@ -114,13 +112,18 @@ pub trait MirPass<'tcx>: Pass { impl<'tcx, T: MirPass<'tcx>> MirMapPass<'tcx> for T { fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, - map: &mut MirMap<'tcx>, hooks: &mut [Box MirPassHook<'s>>]) { - let def_ids = map.map.keys(); + let def_ids = tcx.mir_map.borrow().keys(); for def_id in def_ids { + if !def_id.is_local() { + continue; + } + let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id)); - let mir = map.map.get_mut(&def_id).unwrap(); + let mir = &mut tcx.mir_map.borrow()[&def_id].borrow_mut(); + tcx.dep_graph.write(DepNode::Mir(def_id)); + let id = tcx.map.as_local_node_id(def_id).unwrap(); let src = MirSource::from_node(tcx, id); @@ -163,11 +166,11 @@ impl<'a, 'tcx> Passes { passes } - pub fn run_passes(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, map: &mut MirMap<'tcx>) { + pub fn run_passes(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>) { let Passes { ref mut passes, ref mut plugin_passes, ref mut pass_hooks } = *self; for pass in plugin_passes.iter_mut().chain(passes.iter_mut()) { time(tcx.sess.time_passes(), &*pass.name(), - || pass.run_pass(tcx, map, pass_hooks)); + || pass.run_pass(tcx, pass_hooks)); } } diff --git a/src/librustc/mir/traversal.rs b/src/librustc/mir/traversal.rs index 1af5123b4df60..6057e7ec7e0f5 100644 --- a/src/librustc/mir/traversal.rs +++ b/src/librustc/mir/traversal.rs @@ -13,7 +13,7 @@ use std::vec; use rustc_data_structures::bitvec::BitVector; use rustc_data_structures::indexed_vec::Idx; -use super::repr::*; +use super::*; /// Preorder traversal of a graph. /// diff --git a/src/librustc/mir/visit.rs b/src/librustc/mir/visit.rs index 16e0b376f4b53..db7267ca0d4b8 100644 --- a/src/librustc/mir/visit.rs +++ b/src/librustc/mir/visit.rs @@ -12,7 +12,7 @@ use middle::const_val::ConstVal; use hir::def_id::DefId; use ty::subst::Substs; use ty::{ClosureSubsts, Region, Ty}; -use mir::repr::*; +use mir::*; use rustc_const_math::ConstUsize; use rustc_data_structures::tuple_slice::TupleSlice; use rustc_data_structures::indexed_vec::Idx; @@ -150,7 +150,7 @@ macro_rules! make_mir_visitor { fn visit_lvalue(&mut self, lvalue: & $($mutability)* Lvalue<'tcx>, - context: LvalueContext, + context: LvalueContext<'tcx>, location: Location) { self.super_lvalue(lvalue, context, location); } @@ -236,19 +236,9 @@ macro_rules! make_mir_visitor { self.super_typed_const_val(val, location); } - fn visit_var_decl(&mut self, - var_decl: & $($mutability)* VarDecl<'tcx>) { - self.super_var_decl(var_decl); - } - - fn visit_temp_decl(&mut self, - temp_decl: & $($mutability)* TempDecl<'tcx>) { - self.super_temp_decl(temp_decl); - } - - fn visit_arg_decl(&mut self, - arg_decl: & $($mutability)* ArgDecl<'tcx>) { - self.super_arg_decl(arg_decl); + fn visit_local_decl(&mut self, + local_decl: & $($mutability)* LocalDecl<'tcx>) { + self.super_local_decl(local_decl); } fn visit_visibility_scope(&mut self, @@ -272,16 +262,8 @@ macro_rules! 
make_mir_visitor { self.visit_ty(&$($mutability)* mir.return_ty); - for var_decl in &$($mutability)* mir.var_decls { - self.visit_var_decl(var_decl); - } - - for arg_decl in &$($mutability)* mir.arg_decls { - self.visit_arg_decl(arg_decl); - } - - for temp_decl in &$($mutability)* mir.temp_decls { - self.visit_temp_decl(temp_decl); + for local_decl in &$($mutability)* mir.local_decls { + self.visit_local_decl(local_decl); } self.visit_span(&$($mutability)* mir.span); @@ -346,6 +328,7 @@ macro_rules! make_mir_visitor { StatementKind::StorageDead(ref $($mutability)* lvalue) => { self.visit_lvalue(lvalue, LvalueContext::StorageDead, location); } + StatementKind::Nop => {} } } @@ -530,7 +513,7 @@ macro_rules! make_mir_visitor { Rvalue::Aggregate(ref $($mutability)* kind, ref $($mutability)* operands) => { match *kind { - AggregateKind::Vec => { + AggregateKind::Array => { } AggregateKind::Tuple => { } @@ -580,13 +563,10 @@ macro_rules! make_mir_visitor { fn super_lvalue(&mut self, lvalue: & $($mutability)* Lvalue<'tcx>, - context: LvalueContext, + context: LvalueContext<'tcx>, location: Location) { match *lvalue { - Lvalue::Var(_) | - Lvalue::Temp(_) | - Lvalue::Arg(_) | - Lvalue::ReturnPointer => { + Lvalue::Local(_) => { } Lvalue::Static(ref $($mutability)* def_id) => { self.visit_def_id(def_id, location); @@ -605,7 +585,12 @@ macro_rules! make_mir_visitor { ref $($mutability)* base, ref $($mutability)* elem, } = *proj; - self.visit_lvalue(base, LvalueContext::Projection, location); + let context = if context.is_mutating_use() { + LvalueContext::Projection(Mutability::Mut) + } else { + LvalueContext::Projection(Mutability::Not) + }; + self.visit_lvalue(base, context, location); self.visit_projection_elem(elem, context, location); } @@ -633,37 +618,19 @@ macro_rules! make_mir_visitor { } } - fn super_var_decl(&mut self, - var_decl: & $($mutability)* VarDecl<'tcx>) { - let VarDecl { + fn super_local_decl(&mut self, + local_decl: & $($mutability)* LocalDecl<'tcx>) { + let LocalDecl { mutability: _, - name: _, ref $($mutability)* ty, + name: _, ref $($mutability)* source_info, - } = *var_decl; - - self.visit_ty(ty); - self.visit_source_info(source_info); - } - - fn super_temp_decl(&mut self, - temp_decl: & $($mutability)* TempDecl<'tcx>) { - let TempDecl { - ref $($mutability)* ty, - } = *temp_decl; - - self.visit_ty(ty); - } - - fn super_arg_decl(&mut self, - arg_decl: & $($mutability)* ArgDecl<'tcx>) { - let ArgDecl { - ref $($mutability)* ty, - spread: _, - debug_name: _ - } = *arg_decl; + } = *local_decl; self.visit_ty(ty); + if let Some(ref $($mutability)* info) = *source_info { + self.visit_source_info(info); + } } fn super_visibility_scope(&mut self, @@ -750,6 +717,21 @@ macro_rules! make_mir_visitor { fn super_const_usize(&mut self, _substs: & $($mutability)* ConstUsize) { } + + // Convenience methods + + fn visit_location(&mut self, mir: & $($mutability)* Mir<'tcx>, location: Location) { + let basic_block = & $($mutability)* mir[location.block]; + if basic_block.statements.len() == location.statement_index { + if let Some(ref $($mutability)* terminator) = basic_block.terminator { + self.visit_terminator(location.block, terminator, location) + } + } else { + let statement = & $($mutability)* + basic_block.statements[location.statement_index]; + self.visit_statement(location.block, statement, location) + } + } } } } @@ -774,8 +756,20 @@ pub enum LvalueContext<'tcx> { // Being borrowed Borrow { region: &'tcx Region, kind: BorrowKind }, - // Used as base for another lvalue, e.g. 
`x` in `x.y` - Projection, + // Used as base for another lvalue, e.g. `x` in `x.y`. + // + // The `Mutability` argument specifies whether the projection is being performed in order to + // (potentially) mutate the lvalue. For example, the projection `x.y` is marked as a mutation + // in these cases: + // + // x.y = ...; + // f(&mut x.y); + // + // But not in these cases: + // + // z = x.y; + // f(&x.y); + Projection(Mutability), // Consumed as part of an operand Consume, @@ -784,3 +778,69 @@ pub enum LvalueContext<'tcx> { StorageLive, StorageDead, } + +impl<'tcx> LvalueContext<'tcx> { + /// Returns true if this lvalue context represents a drop. + pub fn is_drop(&self) -> bool { + match *self { + LvalueContext::Drop => true, + _ => false, + } + } + + /// Returns true if this lvalue context represents a storage live or storage dead marker. + pub fn is_storage_marker(&self) -> bool { + match *self { + LvalueContext::StorageLive | LvalueContext::StorageDead => true, + _ => false, + } + } + + /// Returns true if this lvalue context represents a storage live marker. + pub fn is_storage_live_marker(&self) -> bool { + match *self { + LvalueContext::StorageLive => true, + _ => false, + } + } + + /// Returns true if this lvalue context represents a storage dead marker. + pub fn is_storage_dead_marker(&self) -> bool { + match *self { + LvalueContext::StorageDead => true, + _ => false, + } + } + + /// Returns true if this lvalue context represents a use that potentially changes the value. + pub fn is_mutating_use(&self) -> bool { + match *self { + LvalueContext::Store | LvalueContext::Call | + LvalueContext::Borrow { kind: BorrowKind::Mut, .. } | + LvalueContext::Projection(Mutability::Mut) | + LvalueContext::Drop => true, + LvalueContext::Inspect | + LvalueContext::Borrow { kind: BorrowKind::Shared, .. } | + LvalueContext::Borrow { kind: BorrowKind::Unique, .. } | + LvalueContext::Projection(Mutability::Not) | LvalueContext::Consume | + LvalueContext::StorageLive | LvalueContext::StorageDead => false, + } + } + + /// Returns true if this lvalue context represents a use that does not change the value. + pub fn is_nonmutating_use(&self) -> bool { + match *self { + LvalueContext::Inspect | LvalueContext::Borrow { kind: BorrowKind::Shared, .. } | + LvalueContext::Borrow { kind: BorrowKind::Unique, .. } | + LvalueContext::Projection(Mutability::Not) | LvalueContext::Consume => true, + LvalueContext::Borrow { kind: BorrowKind::Mut, .. 
} | LvalueContext::Store | + LvalueContext::Call | LvalueContext::Projection(Mutability::Mut) | + LvalueContext::Drop | LvalueContext::StorageLive | LvalueContext::StorageDead => false, + } + } + + pub fn is_use(&self) -> bool { + self.is_mutating_use() || self.is_nonmutating_use() + } +} + diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 2009e18f6ee20..87a5c6410a841 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -19,6 +19,7 @@ pub use self::DebugInfoLevel::*; use session::{early_error, early_warn, Session}; use session::search_paths::SearchPaths; +use rustc_back::PanicStrategy; use rustc_back::target::Target; use lint; use middle::cstore; @@ -37,9 +38,9 @@ use std::collections::btree_map::Iter as BTreeMapIter; use std::collections::btree_map::Keys as BTreeMapKeysIter; use std::collections::btree_map::Values as BTreeMapValuesIter; -use std::env; use std::fmt; -use std::hash::{Hasher, SipHasher}; +use std::hash::Hasher; +use std::collections::hash_map::DefaultHasher; use std::iter::FromIterator; use std::path::PathBuf; @@ -212,7 +213,7 @@ macro_rules! top_level_options { $warn_text, self.error_format)*]); })* - let mut hasher = SipHasher::new(); + let mut hasher = DefaultHasher::new(); dep_tracking::stable_hash(sub_hashes, &mut hasher, self.error_format); @@ -288,6 +289,11 @@ top_level_options!( alt_std_name: Option [TRACKED], // Indicates how the compiler should treat unstable features unstable_features: UnstableFeatures [TRACKED], + + // Indicates whether this run of the compiler is actually rustdoc. This + // is currently just a hack and will be removed eventually, so please + // try to not rely on this too much. + actually_rustdoc: bool [TRACKED], } ); @@ -440,6 +446,7 @@ pub fn basic_options() -> Options { libs: Vec::new(), unstable_features: UnstableFeatures::Disallow, debug_assertions: true, + actually_rustdoc: false, } } @@ -475,7 +482,7 @@ pub enum CrateType { CrateTypeRlib, CrateTypeStaticlib, CrateTypeCdylib, - CrateTypeRustcMacro, + CrateTypeProcMacro, } #[derive(Clone, Hash)] @@ -493,21 +500,6 @@ impl Passes { } } -#[derive(Clone, PartialEq, Hash)] -pub enum PanicStrategy { - Unwind, - Abort, -} - -impl PanicStrategy { - pub fn desc(&self) -> &str { - match *self { - PanicStrategy::Unwind => "unwind", - PanicStrategy::Abort => "abort", - } - } -} - /// Declare a macro that will define all CodegenOptions/DebuggingOptions fields and parsers all /// at once. The goal of this macro is to define an interface that can be /// programmatically used by the option parser in order to initialize the struct @@ -581,7 +573,7 @@ macro_rules! options { impl<'a> dep_tracking::DepTrackingHash for $struct_name { - fn hash(&self, hasher: &mut SipHasher, error_format: ErrorOutputType) { + fn hash(&self, hasher: &mut DefaultHasher, error_format: ErrorOutputType) { let mut sub_hashes = BTreeMap::new(); $({ hash_option!($opt, @@ -606,6 +598,7 @@ macro_rules! options { pub const parse_opt_bool: Option<&'static str> = Some("one of: `y`, `yes`, `on`, `n`, `no`, or `off`"); pub const parse_string: Option<&'static str> = Some("a string"); + pub const parse_string_push: Option<&'static str> = Some("a string"); pub const parse_opt_string: Option<&'static str> = Some("a string"); pub const parse_list: Option<&'static str> = Some("a space-separated list of strings"); pub const parse_opt_list: Option<&'static str> = Some("a space-separated list of strings"); @@ -620,7 +613,8 @@ macro_rules! 
options { #[allow(dead_code)] mod $mod_set { - use super::{$struct_name, Passes, SomePasses, AllPasses, PanicStrategy}; + use super::{$struct_name, Passes, SomePasses, AllPasses}; + use rustc_back::PanicStrategy; $( pub fn $opt(cg: &mut $struct_name, v: Option<&str>) -> bool { @@ -668,6 +662,13 @@ macro_rules! options { } } + fn parse_string_push(slot: &mut Vec, v: Option<&str>) -> bool { + match v { + Some(s) => { slot.push(s.to_string()); true }, + None => false, + } + } + fn parse_list(slot: &mut Vec, v: Option<&str>) -> bool { match v { @@ -714,7 +715,7 @@ macro_rules! options { true } v => { - let mut passes = vec!(); + let mut passes = vec![]; if parse_list(&mut passes, v) { *slot = SomePasses(passes); true @@ -725,10 +726,10 @@ macro_rules! options { } } - fn parse_panic_strategy(slot: &mut PanicStrategy, v: Option<&str>) -> bool { + fn parse_panic_strategy(slot: &mut Option, v: Option<&str>) -> bool { match v { - Some("unwind") => *slot = PanicStrategy::Unwind, - Some("abort") => *slot = PanicStrategy::Abort, + Some("unwind") => *slot = Some(PanicStrategy::Unwind), + Some("abort") => *slot = Some(PanicStrategy::Abort), _ => return false } true @@ -743,6 +744,8 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options, "tool to assemble archives with"), linker: Option = (None, parse_opt_string, [UNTRACKED], "system linker to link outputs with"), + link_arg: Vec = (vec![], parse_string_push, [UNTRACKED], + "a single extra argument to pass to the linker (can be used several times)"), link_args: Option> = (None, parse_opt_list, [UNTRACKED], "extra arguments to pass to the linker (space separated)"), link_dead_code: bool = (false, parse_bool, [UNTRACKED], @@ -770,7 +773,7 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options, no_vectorize_slp: bool = (false, parse_bool, [TRACKED], "don't run LLVM's SLP vectorization pass"), soft_float: bool = (false, parse_bool, [TRACKED], - "generate software floating point library calls"), + "use soft float ABI (*eabihf targets only)"), prefer_dynamic: bool = (false, parse_bool, [TRACKED], "prefer dynamic linking to static linking"), no_integrated_as: bool = (false, parse_bool, [TRACKED], @@ -800,7 +803,7 @@ options! 
{CodegenOptions, CodegenSetter, basic_codegen_options, "explicitly enable the cfg(debug_assertions) directive"), inline_threshold: Option = (None, parse_opt_uint, [TRACKED], "set the inlining threshold for"), - panic: PanicStrategy = (PanicStrategy::Unwind, parse_panic_strategy, + panic: Option = (None, parse_panic_strategy, [TRACKED], "panic strategy to compile crate with"), } @@ -930,7 +933,7 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig { let os = &sess.target.target.target_os; let env = &sess.target.target.target_env; let vendor = &sess.target.target.target_vendor; - let max_atomic_width = sess.target.target.options.max_atomic_width; + let max_atomic_width = sess.target.target.max_atomic_width(); let fam = if let Some(ref fam) = sess.target.target.options.target_family { intern(fam) @@ -969,8 +972,8 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig { if sess.opts.debug_assertions { ret.push(attr::mk_word_item(InternedString::new("debug_assertions"))); } - if sess.opts.crate_types.contains(&CrateTypeRustcMacro) { - ret.push(attr::mk_word_item(InternedString::new("rustc_macro"))); + if sess.opts.crate_types.contains(&CrateTypeProcMacro) { + ret.push(attr::mk_word_item(InternedString::new("proc_macro"))); } return ret; } @@ -1234,10 +1237,9 @@ pub fn rustc_optgroups() -> Vec { pub fn parse_cfgspecs(cfgspecs: Vec ) -> ast::CrateConfig { cfgspecs.into_iter().map(|s| { let sess = parse::ParseSess::new(); - let mut parser = parse::new_parser_from_source_str(&sess, - Vec::new(), - "cfgspec".to_string(), - s.to_string()); + let mut parser = + parse::new_parser_from_source_str(&sess, "cfgspec".to_string(), s.to_string()); + let meta_item = panictry!(parser.parse_meta_item()); if !parser.reader.is_eof() { @@ -1291,7 +1293,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches) let crate_types = parse_crate_types_from_list(unparsed_crate_types) .unwrap_or_else(|e| early_error(error_format, &e[..])); - let mut lint_opts = vec!(); + let mut lint_opts = vec![]; let mut describe_lints = false; for &level in &[lint::Allow, lint::Warn, lint::Deny, lint::Forbid] { @@ -1525,27 +1527,13 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches) crate_name: crate_name, alt_std_name: None, libs: libs, - unstable_features: get_unstable_features_setting(), + unstable_features: UnstableFeatures::from_environment(), debug_assertions: debug_assertions, + actually_rustdoc: false, }, cfg) } -pub fn get_unstable_features_setting() -> UnstableFeatures { - // Whether this is a feature-staged build, i.e. on the beta or stable channel - let disable_unstable_features = option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some(); - // The secret key needed to get through the rustc build itself by - // subverting the unstable features lints - let bootstrap_secret_key = option_env!("CFG_BOOTSTRAP_KEY"); - // The matching key to the above, only known by the build system - let bootstrap_provided_key = env::var("RUSTC_BOOTSTRAP_KEY").ok(); - match (disable_unstable_features, bootstrap_secret_key, bootstrap_provided_key) { - (_, Some(ref s), Some(ref p)) if s == p => UnstableFeatures::Cheat, - (true, ..) => UnstableFeatures::Disallow, - (false, ..) 
=> UnstableFeatures::Allow - } -} - pub fn parse_crate_types_from_list(list_list: Vec) -> Result, String> { let mut crate_types: Vec = Vec::new(); for unparsed_crate_type in &list_list { @@ -1557,7 +1545,7 @@ pub fn parse_crate_types_from_list(list_list: Vec) -> Result CrateTypeDylib, "cdylib" => CrateTypeCdylib, "bin" => CrateTypeExecutable, - "rustc-macro" => CrateTypeRustcMacro, + "proc-macro" => CrateTypeProcMacro, _ => { return Err(format!("unknown crate type: `{}`", part)); @@ -1575,7 +1563,7 @@ pub fn parse_crate_types_from_list(list_list: Vec) -> Result bool { @@ -1583,18 +1571,13 @@ pub mod nightly_options { } pub fn is_nightly_build() -> bool { - match get_unstable_features_setting() { - UnstableFeatures::Allow | UnstableFeatures::Cheat => true, - _ => false, - } + UnstableFeatures::from_environment().is_nightly_build() } pub fn check_nightly_options(matches: &getopts::Matches, flags: &[RustcOptGroup]) { let has_z_unstable_option = matches.opt_strs("Z").iter().any(|x| *x == "unstable-options"); - let really_allows_unstable_options = match get_unstable_features_setting() { - UnstableFeatures::Disallow => false, - _ => true, - }; + let really_allows_unstable_options = UnstableFeatures::from_environment() + .is_nightly_build(); for opt in flags.iter() { if opt.stability == OptionStability::Stable { @@ -1646,7 +1629,7 @@ impl fmt::Display for CrateType { CrateTypeRlib => "rlib".fmt(f), CrateTypeStaticlib => "staticlib".fmt(f), CrateTypeCdylib => "cdylib".fmt(f), - CrateTypeRustcMacro => "rustc-macro".fmt(f), + CrateTypeProcMacro => "proc-macro".fmt(f), } } } @@ -1674,20 +1657,22 @@ mod dep_tracking { use middle::cstore; use session::search_paths::{PathKind, SearchPaths}; use std::collections::BTreeMap; - use std::hash::{Hash, SipHasher}; + use std::hash::Hash; use std::path::PathBuf; - use super::{Passes, PanicStrategy, CrateType, OptLevel, DebugInfoLevel, + use std::collections::hash_map::DefaultHasher; + use super::{Passes, CrateType, OptLevel, DebugInfoLevel, OutputTypes, Externs, ErrorOutputType}; use syntax::feature_gate::UnstableFeatures; + use rustc_back::PanicStrategy; pub trait DepTrackingHash { - fn hash(&self, &mut SipHasher, ErrorOutputType); + fn hash(&self, &mut DefaultHasher, ErrorOutputType); } macro_rules! impl_dep_tracking_hash_via_hash { ($t:ty) => ( impl DepTrackingHash for $t { - fn hash(&self, hasher: &mut SipHasher, _: ErrorOutputType) { + fn hash(&self, hasher: &mut DefaultHasher, _: ErrorOutputType) { Hash::hash(self, hasher); } } @@ -1697,7 +1682,7 @@ mod dep_tracking { macro_rules! 
impl_dep_tracking_hash_for_sortable_vec_of { ($t:ty) => ( impl DepTrackingHash for Vec<$t> { - fn hash(&self, hasher: &mut SipHasher, error_format: ErrorOutputType) { + fn hash(&self, hasher: &mut DefaultHasher, error_format: ErrorOutputType) { let mut elems: Vec<&$t> = self.iter().collect(); elems.sort(); Hash::hash(&elems.len(), hasher); @@ -1717,6 +1702,7 @@ mod dep_tracking { impl_dep_tracking_hash_via_hash!(Option); impl_dep_tracking_hash_via_hash!(Option); impl_dep_tracking_hash_via_hash!(Option); + impl_dep_tracking_hash_via_hash!(Option); impl_dep_tracking_hash_via_hash!(Option); impl_dep_tracking_hash_via_hash!(Option); impl_dep_tracking_hash_via_hash!(CrateType); @@ -1735,7 +1721,7 @@ mod dep_tracking { impl_dep_tracking_hash_for_sortable_vec_of!((String, cstore::NativeLibraryKind)); impl DepTrackingHash for SearchPaths { - fn hash(&self, hasher: &mut SipHasher, _: ErrorOutputType) { + fn hash(&self, hasher: &mut DefaultHasher, _: ErrorOutputType) { let mut elems: Vec<_> = self .iter(PathKind::All) .collect(); @@ -1748,7 +1734,7 @@ mod dep_tracking { where T1: DepTrackingHash, T2: DepTrackingHash { - fn hash(&self, hasher: &mut SipHasher, error_format: ErrorOutputType) { + fn hash(&self, hasher: &mut DefaultHasher, error_format: ErrorOutputType) { Hash::hash(&0, hasher); DepTrackingHash::hash(&self.0, hasher, error_format); Hash::hash(&1, hasher); @@ -1758,7 +1744,7 @@ mod dep_tracking { // This is a stable hash because BTreeMap is a sorted container pub fn stable_hash(sub_hashes: BTreeMap<&'static str, &DepTrackingHash>, - hasher: &mut SipHasher, + hasher: &mut DefaultHasher, error_format: ErrorOutputType) { for (key, sub_hash) in sub_hashes { // Using Hash::hash() instead of DepTrackingHash::hash() is fine for @@ -1783,7 +1769,8 @@ mod tests { use std::iter::FromIterator; use std::path::PathBuf; use std::rc::Rc; - use super::{OutputType, OutputTypes, Externs, PanicStrategy}; + use super::{OutputType, OutputTypes, Externs}; + use rustc_back::PanicStrategy; use syntax::{ast, attr}; use syntax::parse::token::InternedString; use syntax::codemap::dummy_spanned; @@ -2329,7 +2316,7 @@ mod tests { assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash()); opts = reference.clone(); - opts.cg.panic = PanicStrategy::Abort; + opts.cg.panic = Some(PanicStrategy::Abort); assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash()); } diff --git a/src/librustc/session/filesearch.rs b/src/librustc/session/filesearch.rs index a3eea324fd820..82c2425aead73 100644 --- a/src/librustc/session/filesearch.rs +++ b/src/librustc/session/filesearch.rs @@ -12,6 +12,7 @@ pub use self::FileMatch::*; +use std::borrow::Cow; use std::collections::HashSet; use std::env; use std::fs; @@ -67,33 +68,32 @@ impl<'a> FileSearch<'a> { { self.for_each_lib_search_path(|lib_search_path, kind| { debug!("searching {}", lib_search_path.display()); - match fs::read_dir(lib_search_path) { - Ok(files) => { - let files = files.filter_map(|p| p.ok().map(|s| s.path())) - .collect::>(); - fn is_rlib(p: &Path) -> bool { - p.extension().and_then(|s| s.to_str()) == Some("rlib") + let files = match fs::read_dir(lib_search_path) { + Ok(files) => files, + Err(..) 
=> return, + }; + let files = files.filter_map(|p| p.ok().map(|s| s.path())) + .collect::>(); + fn is_rlib(p: &Path) -> bool { + p.extension() == Some("rlib".as_ref()) + } + // Reading metadata out of rlibs is faster, and if we find both + // an rlib and a dylib we only read one of the files of + // metadata, so in the name of speed, bring all rlib files to + // the front of the search list. + let files1 = files.iter().filter(|p| is_rlib(p)); + let files2 = files.iter().filter(|p| !is_rlib(p)); + for path in files1.chain(files2) { + debug!("testing {}", path.display()); + let maybe_picked = pick(path, kind); + match maybe_picked { + FileMatches => { + debug!("picked {}", path.display()); } - // Reading metadata out of rlibs is faster, and if we find both - // an rlib and a dylib we only read one of the files of - // metadata, so in the name of speed, bring all rlib files to - // the front of the search list. - let files1 = files.iter().filter(|p| is_rlib(p)); - let files2 = files.iter().filter(|p| !is_rlib(p)); - for path in files1.chain(files2) { - debug!("testing {}", path.display()); - let maybe_picked = pick(path, kind); - match maybe_picked { - FileMatches => { - debug!("picked {}", path.display()); - } - FileDoesntMatch => { - debug!("rejected {}", path.display()); - } - } + FileDoesntMatch => { + debug!("rejected {}", path.display()); } } - Err(..) => (), } }); } @@ -123,8 +123,8 @@ impl<'a> FileSearch<'a> { // Returns a list of directories where target-specific tool binaries are located. pub fn get_tools_search_paths(&self) -> Vec { let mut p = PathBuf::from(self.sysroot); - p.push(&find_libdir(self.sysroot)); - p.push(&rustlibdir()); + p.push(find_libdir(self.sysroot).as_ref()); + p.push(RUST_LIB_DIR); p.push(&self.triple); p.push("bin"); vec![p] @@ -132,9 +132,9 @@ impl<'a> FileSearch<'a> { } pub fn relative_target_lib_path(sysroot: &Path, target_triple: &str) -> PathBuf { - let mut p = PathBuf::from(&find_libdir(sysroot)); + let mut p = PathBuf::from(find_libdir(sysroot).as_ref()); assert!(p.is_relative()); - p.push(&rustlibdir()); + p.push(RUST_LIB_DIR); p.push(target_triple); p.push("lib"); p @@ -166,7 +166,7 @@ pub fn get_or_default_sysroot() -> PathBuf { } // The name of the directory rustc expects libraries to be located. -fn find_libdir(sysroot: &Path) -> String { +fn find_libdir(sysroot: &Path) -> Cow<'static, str> { // FIXME: This is a quick hack to make the rustc binary able to locate // Rust libraries in Linux environments where libraries might be installed // to lib64/lib32. This would be more foolproof by basing the sysroot off @@ -176,31 +176,23 @@ fn find_libdir(sysroot: &Path) -> String { // "lib" (i.e. non-default), this value is used (see issue #16552). 
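// The `find_libdir` rework above returns `Cow<'static, str>` so that the two
// common outcomes simply borrow the new `PRIMARY_LIB_DIR`/`SECONDARY_LIB_DIR`
// constants instead of allocating a fresh `String` on every call. A minimal
// freestanding sketch of the same pattern follows; `pick_libdir` and its
// `override_dir` parameter are hypothetical stand-ins for the real function
// and the `CFG_LIBDIR_RELATIVE` override.
use std::borrow::Cow;
use std::path::Path;

const PRIMARY_LIB_DIR: &'static str = "lib64";
const SECONDARY_LIB_DIR: &'static str = "lib";

fn pick_libdir(sysroot: &Path, override_dir: Option<String>) -> Cow<'static, str> {
    if let Some(dir) = override_dir {
        // Only an explicit override allocates; the defaults below just borrow.
        return Cow::Owned(dir);
    }
    if sysroot.join(PRIMARY_LIB_DIR).exists() {
        Cow::Borrowed(PRIMARY_LIB_DIR)
    } else {
        Cow::Borrowed(SECONDARY_LIB_DIR)
    }
}

fn main() {
    let sysroot = Path::new("/usr");
    let libdir = pick_libdir(sysroot, None);
    // A `Cow<str>` slots in wherever the old `String` did, e.g. path building.
    println!("{}", sysroot.join(libdir.as_ref()).display());
}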
match option_env!("CFG_LIBDIR_RELATIVE") { - Some(libdir) if libdir != "lib" => return libdir.to_string(), - _ => if sysroot.join(&primary_libdir_name()).join(&rustlibdir()).exists() { - return primary_libdir_name(); + Some(libdir) if libdir != "lib" => return libdir.into(), + _ => if sysroot.join(PRIMARY_LIB_DIR).join(RUST_LIB_DIR).exists() { + return PRIMARY_LIB_DIR.into(); } else { - return secondary_libdir_name(); + return SECONDARY_LIB_DIR.into(); } } #[cfg(target_pointer_width = "64")] - fn primary_libdir_name() -> String { - "lib64".to_string() - } + const PRIMARY_LIB_DIR: &'static str = "lib64"; #[cfg(target_pointer_width = "32")] - fn primary_libdir_name() -> String { - "lib32".to_string() - } + const PRIMARY_LIB_DIR: &'static str = "lib32"; - fn secondary_libdir_name() -> String { - "lib".to_string() - } + const SECONDARY_LIB_DIR: &'static str = "lib"; } // The name of rustc's own place to organize libraries. // Used to be "rustc", now the default is "rustlib" -pub fn rustlibdir() -> String { - "rustlib".to_string() -} +const RUST_LIB_DIR: &'static str = "rustlib"; diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 49686d63ee43b..1ce5b223fbefe 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -9,15 +9,15 @@ // except according to those terms. use dep_graph::DepGraph; -use hir::def_id::DefIndex; +use hir::def_id::{CrateNum, DefIndex}; use hir::svh::Svh; use lint; use middle::cstore::CrateStore; use middle::dependency_format; use session::search_paths::PathKind; -use session::config::{DebugInfoLevel, PanicStrategy}; +use session::config::DebugInfoLevel; use ty::tls; -use util::nodemap::{NodeMap, FnvHashMap}; +use util::nodemap::{NodeMap, FnvHashMap, FnvHashSet}; use util::common::duration_to_secs_str; use mir::transform as mir_pass; @@ -33,6 +33,7 @@ use syntax::{ast, codemap}; use syntax::feature_gate::AttributeType; use syntax_pos::{Span, MultiSpan}; +use rustc_back::PanicStrategy; use rustc_back::target::Target; use rustc_data_structures::flock; use llvm; @@ -42,6 +43,7 @@ use std::cell::{self, Cell, RefCell}; use std::collections::HashMap; use std::env; use std::ffi::CString; +use std::io::Write; use std::rc::Rc; use std::fmt; use std::time::Duration; @@ -73,6 +75,10 @@ pub struct Session { pub working_dir: PathBuf, pub lint_store: RefCell, pub lints: RefCell>>, + /// Set of (LintId, span, message) tuples tracking lint (sub)diagnostics + /// that have been set once, but should not be set again, in order to avoid + /// redundantly verbose output (Issue #24690). + pub one_time_diagnostics: RefCell>, pub plugin_llvm_passes: RefCell>, pub mir_passes: RefCell, pub plugin_attributes: RefCell>, @@ -93,8 +99,8 @@ pub struct Session { /// The metadata::creader module may inject an allocator/panic_runtime /// dependency if it didn't already find one, and this tracks what was /// injected. - pub injected_allocator: Cell>, - pub injected_panic_runtime: Cell>, + pub injected_allocator: Cell>, + pub injected_panic_runtime: Cell>, /// Map from imported macro spans (which consist of /// the localized span for the macro body) to the @@ -116,6 +122,8 @@ pub struct PerfStats { pub incr_comp_hashes_time: Cell, // The number of incr. comp. 
hash computations performed pub incr_comp_hashes_count: Cell, + // The number of bytes hashed when computing ICH values + pub incr_comp_bytes_hashed: Cell, // The accumulated time spent on computing symbol hashes pub symbol_hash_time: Cell, } @@ -264,13 +272,15 @@ impl Session { } return; } - lints.insert(id, vec!((lint_id, sp, msg))); + lints.insert(id, vec![(lint_id, sp, msg)]); } - pub fn reserve_node_ids(&self, count: ast::NodeId) -> ast::NodeId { + pub fn reserve_node_ids(&self, count: usize) -> ast::NodeId { let id = self.next_node_id.get(); - match id.checked_add(count) { - Some(next) => self.next_node_id.set(next), + match id.as_usize().checked_add(count) { + Some(next) => { + self.next_node_id.set(ast::NodeId::new(next)); + } None => bug!("Input too large, ran out of node ids!") } @@ -282,6 +292,35 @@ impl Session { pub fn diagnostic<'a>(&'a self) -> &'a errors::Handler { &self.parse_sess.span_diagnostic } + + /// Analogous to calling `.span_note` on the given DiagnosticBuilder, but + /// deduplicates on lint ID, span, and message for this `Session` if we're + /// not outputting in JSON mode. + // + // FIXME: if the need arises for one-time diagnostics other than + // `span_note`, we almost certainly want to generalize this + // "check/insert-into the one-time diagnostics map, then set message if + // it's not already there" code to accomodate all of them + pub fn diag_span_note_once<'a, 'b>(&'a self, + diag_builder: &'b mut DiagnosticBuilder<'a>, + lint: &'static lint::Lint, span: Span, message: &str) { + match self.opts.error_format { + // when outputting JSON for tool consumption, the tool might want + // the duplicates + config::ErrorOutputType::Json => { + diag_builder.span_note(span, &message); + }, + _ => { + let lint_id = lint::LintId::of(lint); + let id_span_message = (lint_id, span, message.to_owned()); + let fresh = self.one_time_diagnostics.borrow_mut().insert(id_span_message); + if fresh { + diag_builder.span_note(span, &message); + } + } + } + } + pub fn codemap<'a>(&'a self) -> &'a codemap::CodeMap { self.parse_sess.codemap() } @@ -304,9 +343,13 @@ impl Session { pub fn lto(&self) -> bool { self.opts.cg.lto } + /// Returns the panic strategy for this compile session. If the user explicitly selected one + /// using '-C panic', use that, otherwise use the panic strategy defined by the target. + pub fn panic_strategy(&self) -> PanicStrategy { + self.opts.cg.panic.unwrap_or(self.target.target.options.panic_strategy) + } pub fn no_landing_pads(&self) -> bool { - self.opts.debugging_opts.no_landing_pads || - self.opts.cg.panic == PanicStrategy::Abort + self.opts.debugging_opts.no_landing_pads || self.panic_strategy() == PanicStrategy::Abort } pub fn unstable_options(&self) -> bool { self.opts.debugging_opts.unstable_options @@ -431,6 +474,11 @@ impl Session { duration_to_secs_str(self.perf_stats.incr_comp_hashes_time.get())); println!("Total number of incr. comp. hashes computed: {}", self.perf_stats.incr_comp_hashes_count.get()); + println!("Total number of bytes hashed for incr. comp.: {}", + self.perf_stats.incr_comp_bytes_hashed.get()); + println!("Average bytes hashed per incr. comp. 
HIR node: {}", + self.perf_stats.incr_comp_bytes_hashed.get() / + self.perf_stats.incr_comp_hashes_count.get()); println!("Total time spent computing symbol hashes: {}", duration_to_secs_str(self.perf_stats.symbol_hash_time.get())); } @@ -447,7 +495,8 @@ pub fn build_session(sopts: config::Options, local_crate_source_file, registry, cstore, - Rc::new(codemap::CodeMap::new())) + Rc::new(codemap::CodeMap::new()), + None) } pub fn build_session_with_codemap(sopts: config::Options, @@ -455,7 +504,8 @@ pub fn build_session_with_codemap(sopts: config::Options, local_crate_source_file: Option, registry: errors::registry::Registry, cstore: Rc CrateStore<'a>>, - codemap: Rc) + codemap: Rc, + emitter_dest: Option>) -> Session { // FIXME: This is not general enough to make the warning lint completely override // normal diagnostic warnings, since the warning lint can also be denied and changed @@ -468,14 +518,21 @@ pub fn build_session_with_codemap(sopts: config::Options, .unwrap_or(true); let treat_err_as_bug = sopts.debugging_opts.treat_err_as_bug; - let emitter: Box = match sopts.error_format { - config::ErrorOutputType::HumanReadable(color_config) => { + let emitter: Box = match (sopts.error_format, emitter_dest) { + (config::ErrorOutputType::HumanReadable(color_config), None) => { Box::new(EmitterWriter::stderr(color_config, Some(codemap.clone()))) } - config::ErrorOutputType::Json => { + (config::ErrorOutputType::HumanReadable(_), Some(dst)) => { + Box::new(EmitterWriter::new(dst, + Some(codemap.clone()))) + } + (config::ErrorOutputType::Json, None) => { Box::new(JsonEmitter::stderr(Some(registry), codemap.clone())) } + (config::ErrorOutputType::Json, Some(dst)) => { + Box::new(JsonEmitter::new(dst, Some(registry), codemap.clone())) + } }; let diagnostic_handler = @@ -537,6 +594,7 @@ pub fn build_session_(sopts: config::Options, working_dir: env::current_dir().unwrap(), lint_store: RefCell::new(lint::LintStore::new()), lints: RefCell::new(NodeMap()), + one_time_diagnostics: RefCell::new(FnvHashSet()), plugin_llvm_passes: RefCell::new(Vec::new()), mir_passes: RefCell::new(mir_pass::Passes::new()), plugin_attributes: RefCell::new(Vec::new()), @@ -545,7 +603,7 @@ pub fn build_session_(sopts: config::Options, crate_disambiguator: RefCell::new(token::intern("").as_str()), features: RefCell::new(feature_gate::Features::new()), recursion_limit: Cell::new(64), - next_node_id: Cell::new(1), + next_node_id: Cell::new(NodeId::new(1)), injected_allocator: Cell::new(None), injected_panic_runtime: Cell::new(None), imported_macro_spans: RefCell::new(HashMap::new()), @@ -554,6 +612,7 @@ pub fn build_session_(sopts: config::Options, svh_time: Cell::new(Duration::from_secs(0)), incr_comp_hashes_time: Cell::new(Duration::from_secs(0)), incr_comp_hashes_count: Cell::new(0), + incr_comp_bytes_hashed: Cell::new(0), symbol_hash_time: Cell::new(Duration::from_secs(0)), } }; diff --git a/src/librustc/traits/coherence.rs b/src/librustc/traits/coherence.rs index 83774f0cf7ead..68c88249ec0c3 100644 --- a/src/librustc/traits/coherence.rs +++ b/src/librustc/traits/coherence.rs @@ -12,8 +12,7 @@ use super::{SelectionContext, Obligation, ObligationCause}; -use middle::cstore::LOCAL_CRATE; -use hir::def_id::DefId; +use hir::def_id::{DefId, LOCAL_CRATE}; use ty::{self, Ty, TyCtxt}; use infer::{InferCtxt, TypeOrigin}; use syntax_pos::DUMMY_SP; diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 52ddd8ab5dac0..9435f96c08a22 100644 --- a/src/librustc/traits/error_reporting.rs +++ 
b/src/librustc/traits/error_reporting.rs @@ -445,8 +445,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { let mut err = struct_span_err!(self.tcx.sess, span, E0277, "the trait bound `{}` is not satisfied", trait_ref.to_predicate()); - err.span_label(span, &format!("trait `{}` not satisfied", - trait_ref.to_predicate())); + err.span_label(span, &format!("the trait `{}` is not implemented \ + for `{}`", + trait_ref, + trait_ref.self_ty())); // Try to report a help message @@ -856,8 +858,7 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { trait_name)); } ObligationCauseCode::FieldSized => { - err.note("only the last field of a struct or enum variant \ - may have a dynamically sized type"); + err.note("only the last field of a struct may have a dynamically sized type"); } ObligationCauseCode::ConstSized => { err.note("constant expressions must have a statically known size"); diff --git a/src/librustc/traits/fulfill.rs b/src/librustc/traits/fulfill.rs index 65860671c4c63..ded2fdc58b42b 100644 --- a/src/librustc/traits/fulfill.rs +++ b/src/librustc/traits/fulfill.rs @@ -11,7 +11,7 @@ use dep_graph::DepGraph; use infer::{InferCtxt, InferOk}; use ty::{self, Ty, TypeFoldable, ToPolyTraitRef, TyCtxt, ToPredicate}; -use ty::subst::{Substs, Subst}; +use ty::subst::Subst; use rustc_data_structures::obligation_forest::{ObligationForest, Error}; use rustc_data_structures::obligation_forest::{ForestObligation, ObligationProcessor}; use std::marker::PhantomData; @@ -159,7 +159,7 @@ impl<'a, 'gcx, 'tcx> DeferredObligation<'tcx> { let concrete_ty = ty_scheme.ty.subst(tcx, substs); let predicate = ty::TraitRef { def_id: self.predicate.def_id(), - substs: Substs::new_trait(tcx, concrete_ty, &[]) + substs: tcx.mk_substs_trait(concrete_ty, &[]) }.to_predicate(); let original_obligation = Obligation::new(self.cause.clone(), diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index b86a54f01cf49..7ba10d9c0a58e 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -40,7 +40,7 @@ pub use self::select::{EvaluationCache, SelectionContext, SelectionCache}; pub use self::select::{MethodMatchResult, MethodMatched, MethodAmbiguous, MethodDidNotMatch}; pub use self::select::{MethodMatchedData}; // intentionally don't export variants pub use self::specialize::{OverlapError, specialization_graph, specializes, translate_substs}; -pub use self::specialize::{SpecializesCache}; +pub use self::specialize::{SpecializesCache, find_method}; pub use self::util::elaborate_predicates; pub use self::util::supertraits; pub use self::util::Supertraits; @@ -527,6 +527,88 @@ pub fn fully_normalize<'a, 'gcx, 'tcx, T>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, Ok(resolved_value) } +/// Normalizes the predicates and checks whether they hold. If this +/// returns false, then either normalize encountered an error or one +/// of the predicates did not hold. Used when creating vtables to +/// check for unsatisfiable methods. 
+pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + predicates: Vec>) + -> bool +{ + debug!("normalize_and_test_predicates(predicates={:?})", + predicates); + + tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| { + let mut selcx = SelectionContext::new(&infcx); + let mut fulfill_cx = FulfillmentContext::new(); + let cause = ObligationCause::dummy(); + let Normalized { value: predicates, obligations } = + normalize(&mut selcx, cause.clone(), &predicates); + for obligation in obligations { + fulfill_cx.register_predicate_obligation(&infcx, obligation); + } + for predicate in predicates { + let obligation = Obligation::new(cause.clone(), predicate); + fulfill_cx.register_predicate_obligation(&infcx, obligation); + } + + fulfill_cx.select_all_or_error(&infcx).is_ok() + }) +} + +/// Given a trait `trait_ref`, iterates the vtable entries +/// that come from `trait_ref`, including its supertraits. +#[inline] // FIXME(#35870) Avoid closures being unexported due to impl Trait. +pub fn get_vtable_methods<'a, 'tcx>( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + trait_ref: ty::PolyTraitRef<'tcx>) + -> impl Iterator)>> + 'a +{ + debug!("get_vtable_methods({:?})", trait_ref); + + supertraits(tcx, trait_ref).flat_map(move |trait_ref| { + tcx.populate_implementations_for_trait_if_necessary(trait_ref.def_id()); + + let trait_item_def_ids = tcx.impl_or_trait_items(trait_ref.def_id()); + let trait_methods = (0..trait_item_def_ids.len()).filter_map(move |i| { + match tcx.impl_or_trait_item(trait_item_def_ids[i]) { + ty::MethodTraitItem(m) => Some(m), + _ => None + } + }); + + // Now list each method's DefId and Substs (for within its trait). + // If the method can never be called from this object, produce None. + trait_methods.map(move |trait_method| { + debug!("get_vtable_methods: trait_method={:?}", trait_method); + + // Some methods cannot be called on an object; skip those. + if !tcx.is_vtable_safe_method(trait_ref.def_id(), &trait_method) { + debug!("get_vtable_methods: not vtable safe"); + return None; + } + + // the method may have some early-bound lifetimes, add + // regions for those + let substs = Substs::for_item(tcx, trait_method.def_id, + |_, _| tcx.mk_region(ty::ReErased), + |def, _| trait_ref.substs().type_for_def(def)); + + // It's possible that the method relies on where clauses that + // do not hold for this particular set of type parameters. + // Note that this method could then never be called, so we + // do not want to try and trans it, in that case (see #23435). 
+ let predicates = trait_method.predicates.instantiate_own(tcx, substs); + if !normalize_and_test_predicates(tcx, predicates.predicates) { + debug!("get_vtable_methods: predicates do not hold"); + return None; + } + + Some((trait_method.def_id, substs)) + }) + }) +} + impl<'tcx,O> Obligation<'tcx,O> { pub fn new(cause: ObligationCause<'tcx>, trait_ref: O) @@ -571,7 +653,7 @@ impl<'tcx> ObligationCause<'tcx> { } pub fn dummy() -> ObligationCause<'tcx> { - ObligationCause { span: DUMMY_SP, body_id: 0, code: MiscObligation } + ObligationCause { span: DUMMY_SP, body_id: ast::CRATE_NODE_ID, code: MiscObligation } } } diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index ea4fc1c25ab42..ce882c48377f7 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -167,18 +167,20 @@ pub fn poly_project_and_unify_type<'cx, 'gcx, 'tcx>( infcx.skolemize_late_bound_regions(&obligation.predicate, snapshot); let skol_obligation = obligation.with(skol_predicate); - match project_and_unify_type(selcx, &skol_obligation) { + let r = match project_and_unify_type(selcx, &skol_obligation) { Ok(result) => { let span = obligation.cause.span; match infcx.leak_check(false, span, &skol_map, snapshot) { - Ok(()) => Ok(infcx.plug_leaks(skol_map, snapshot, &result)), + Ok(()) => Ok(infcx.plug_leaks(skol_map, snapshot, result)), Err(e) => Err(MismatchedProjectionTypes { err: e }), } } Err(e) => { Err(e) } - } + }; + + r }) } @@ -273,7 +275,7 @@ impl<'a, 'b, 'gcx, 'tcx> AssociatedTypeNormalizer<'a, 'b, 'gcx, 'tcx> { AssociatedTypeNormalizer { selcx: selcx, cause: cause, - obligations: vec!(), + obligations: vec![], depth: depth, } } @@ -394,7 +396,7 @@ pub fn normalize_projection_type<'a, 'b, 'gcx, 'tcx>( cause, depth + 1, projection.to_predicate()); Normalized { value: ty_var, - obligations: vec!(obligation) + obligations: vec![obligation] } }) } @@ -543,7 +545,7 @@ fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( projected_ty); let result = Normalized { value: projected_ty, - obligations: vec!() + obligations: vec![] }; infcx.projection_cache.borrow_mut() .complete(projection_ty, &result, true); @@ -602,7 +604,7 @@ fn normalize_to_error<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tc let new_value = selcx.infcx().next_ty_var(); Normalized { value: new_value, - obligations: vec!(trait_obligation) + obligations: vec![trait_obligation] } } @@ -1396,6 +1398,10 @@ impl<'tcx> ProjectionCache<'tcx> { self.map.rollback_to(snapshot.snapshot); } + pub fn rollback_skolemized(&mut self, snapshot: &ProjectionCacheSnapshot) { + self.map.partial_rollback(&snapshot.snapshot, &|k| k.has_re_skol()); + } + pub fn commit(&mut self, snapshot: ProjectionCacheSnapshot) { self.map.commit(snapshot.snapshot); } @@ -1405,9 +1411,8 @@ impl<'tcx> ProjectionCache<'tcx> { /// cache hit, so it's actually a good thing). 
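// The `try_start` method that follows inverts the usual cache convention:
// `Err` carries the entry we already know about (a hit, "actually a good
// thing"), while `Ok(())` means the key was freshly marked in-progress and the
// caller has to do the work. A self-contained sketch of that protocol, with a
// hypothetical `Cache`/`Entry` pair standing in for the projection cache:
use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Entry {
    InProgress,
    Done(String),
}

struct Cache {
    map: HashMap<u32, Entry>,
}

impl Cache {
    fn new() -> Cache {
        Cache { map: HashMap::new() }
    }

    // Err(existing entry) on a hit; Ok(()) after planting an `InProgress`
    // marker so re-entrant lookups see the marker instead of recursing.
    fn try_start(&mut self, key: u32) -> Result<(), Entry> {
        if let Some(entry) = self.map.get(&key) {
            return Err(entry.clone());
        }
        self.map.insert(key, Entry::InProgress);
        Ok(())
    }

    fn complete(&mut self, key: u32, value: String) {
        self.map.insert(key, Entry::Done(value));
    }
}

fn main() {
    let mut cache = Cache::new();
    assert!(cache.try_start(1).is_ok());                      // first caller does the work
    assert_eq!(cache.try_start(1), Err(Entry::InProgress));   // re-entrant hit
    cache.complete(1, "normalized".to_string());
    assert_eq!(cache.try_start(1), Err(Entry::Done("normalized".to_string())));
}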
fn try_start(&mut self, key: ty::ProjectionTy<'tcx>) -> Result<(), ProjectionCacheEntry<'tcx>> { - match self.map.get(&key) { - Some(entry) => return Err(entry.clone()), - None => { } + if let Some(entry) = self.map.get(&key) { + return Err(entry.clone()); } self.map.insert(key, ProjectionCacheEntry::InProgress); diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 94dba7d12a8c7..e75c8bd433404 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -336,7 +336,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { self.infcx.tcx } - pub fn param_env(&self) -> &'cx ty::ParameterEnvironment<'tcx> { + pub fn param_env(&self) -> &'cx ty::ParameterEnvironment<'gcx> { self.infcx.param_env() } @@ -788,14 +788,11 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { stack); assert!(!stack.obligation.predicate.has_escaping_regions()); - match self.check_candidate_cache(&cache_fresh_trait_pred) { - Some(c) => { - debug!("CACHE HIT: SELECT({:?})={:?}", - cache_fresh_trait_pred, - c); - return c; - } - None => { } + if let Some(c) = self.check_candidate_cache(&cache_fresh_trait_pred) { + debug!("CACHE HIT: SELECT({:?})={:?}", + cache_fresh_trait_pred, + c); + return c; } // If no match, compute result and insert into cache. @@ -814,7 +811,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { fn filter_negative_impls(&self, candidate: SelectionCandidate<'tcx>) -> SelectionResult<'tcx, SelectionCandidate<'tcx>> { if let ImplCandidate(def_id) = candidate { - if self.tcx().trait_impl_polarity(def_id) == Some(hir::ImplPolarity::Negative) { + if self.tcx().trait_impl_polarity(def_id) == hir::ImplPolarity::Negative { return Err(Unimplemented) } } @@ -1617,7 +1614,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { // // We always upcast when we can because of reason // #2 (region bounds). - data_a.principal.def_id() == data_a.principal.def_id() && + data_a.principal.def_id() == data_b.principal.def_id() && data_a.builtin_bounds.is_superset(&data_b.builtin_bounds) } @@ -1980,7 +1977,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { normalized_ty, &[]); obligations.push(skol_obligation); - this.infcx().plug_leaks(skol_map, snapshot, &obligations) + this.infcx().plug_leaks(skol_map, snapshot, obligations) }) }).collect() } @@ -2599,7 +2596,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { k } }); - let substs = Substs::new(tcx, params); + let substs = tcx.mk_substs(params); for &ty in fields.split_last().unwrap().1 { if ty.subst(tcx, substs).references_error() { return Err(Unimplemented); @@ -2619,7 +2616,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { k } }); - let new_struct = tcx.mk_adt(def, Substs::new(tcx, params)); + let new_struct = tcx.mk_adt(def, tcx.mk_substs(params)); let origin = TypeOrigin::Misc(obligation.cause.span); let InferOk { obligations, .. 
} = self.infcx.sub_types(false, origin, new_struct, target) @@ -2899,7 +2896,7 @@ impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { predicate: predicate.value })) }).collect(); - self.infcx().plug_leaks(skol_map, snapshot, &predicates) + self.infcx().plug_leaks(skol_map, snapshot, predicates) } } diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs index 2f63526bf6c27..e37425901c8c8 100644 --- a/src/librustc/traits/specialize/mod.rs +++ b/src/librustc/traits/specialize/mod.rs @@ -26,9 +26,11 @@ use infer::{InferCtxt, TypeOrigin}; use middle::region; use ty::subst::{Subst, Substs}; use traits::{self, Reveal, ObligationCause, Normalized}; -use ty::{self, TyCtxt}; +use ty::{self, TyCtxt, TypeFoldable}; use syntax_pos::DUMMY_SP; +use syntax::ast; + pub mod specialization_graph; /// Information pertinent to an overlapping impl error. @@ -103,6 +105,41 @@ pub fn translate_substs<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, source_substs.rebase_onto(infcx.tcx, source_impl, target_substs) } +/// Given a selected impl described by `impl_data`, returns the +/// definition and substitions for the method with the name `name`, +/// and trait method substitutions `substs`, in that impl, a less +/// specialized impl, or the trait default, whichever applies. +pub fn find_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + name: ast::Name, + substs: &'tcx Substs<'tcx>, + impl_data: &super::VtableImplData<'tcx, ()>) + -> (DefId, &'tcx Substs<'tcx>) +{ + assert!(!substs.needs_infer()); + + let trait_def_id = tcx.trait_id_of_impl(impl_data.impl_def_id).unwrap(); + let trait_def = tcx.lookup_trait_def(trait_def_id); + + match trait_def.ancestors(impl_data.impl_def_id).fn_defs(tcx, name).next() { + Some(node_item) => { + let substs = tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| { + let substs = substs.rebase_onto(tcx, trait_def_id, impl_data.substs); + let substs = translate_substs(&infcx, impl_data.impl_def_id, + substs, node_item.node); + tcx.lift(&substs).unwrap_or_else(|| { + bug!("find_method: translate_substs \ + returned {:?} which contains inference types/regions", + substs); + }) + }); + (node_item.item.def_id, substs) + } + None => { + bug!("method {:?} not found in {:?}", name, impl_data.impl_def_id) + } + } +} + /// Is impl1 a specialization of impl2? /// /// Specialization is determined by the sets of types to which the impls apply; diff --git a/src/librustc/traits/specialize/specialization_graph.rs b/src/librustc/traits/specialize/specialization_graph.rs index 13193e1b2d71b..1374719ef49c4 100644 --- a/src/librustc/traits/specialize/specialization_graph.rs +++ b/src/librustc/traits/specialize/specialization_graph.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use std::cell; use std::rc::Rc; use super::{OverlapError, specializes}; @@ -287,21 +286,10 @@ impl<'a, 'gcx, 'tcx> Node { /// Iterate over the items defined directly by the given (impl or trait) node. 
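// The `NodeItems` rewrite just below collapses the old impl/trait variants
// into one index-based iterator that walks a shared list of ids and resolves
// each one through a side table. A freestanding sketch of that shape, with a
// plain `HashMap` and `u32` ids standing in for `impl_or_trait_items` and
// `DefId`:
use std::collections::HashMap;
use std::rc::Rc;

struct NodeItems<'a> {
    table: &'a HashMap<u32, String>, // plays the role of the tcx side table
    items: Rc<Vec<u32>>,             // the ids owned by one impl or trait
    idx: usize,
}

impl<'a> Iterator for NodeItems<'a> {
    type Item = String;

    fn next(&mut self) -> Option<String> {
        if self.idx < self.items.len() {
            let id = self.items[self.idx];
            self.idx += 1;
            Some(self.table[&id].clone())
        } else {
            None
        }
    }
}

fn main() {
    let mut table = HashMap::new();
    table.insert(1, "fn next".to_string());
    table.insert(2, "fn size_hint".to_string());

    let iter = NodeItems { table: &table, items: Rc::new(vec![1, 2]), idx: 0 };
    let names: Vec<String> = iter.collect();
    assert_eq!(names, vec!["fn next".to_string(), "fn size_hint".to_string()]);
}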
pub fn items(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> NodeItems<'a, 'gcx> { - match *self { - Node::Impl(impl_def_id) => { - NodeItems::Impl { - tcx: tcx.global_tcx(), - items: cell::Ref::map(tcx.impl_items.borrow(), - |impl_items| &impl_items[&impl_def_id]), - idx: 0, - } - } - Node::Trait(trait_def_id) => { - NodeItems::Trait { - items: tcx.trait_items(trait_def_id).clone(), - idx: 0, - } - } + NodeItems { + tcx: tcx.global_tcx(), + items: tcx.impl_or_trait_items(self.def_id()), + idx: 0, } } @@ -314,42 +302,23 @@ impl<'a, 'gcx, 'tcx> Node { } /// An iterator over the items defined within a trait or impl. -pub enum NodeItems<'a, 'tcx: 'a> { - Impl { - tcx: TyCtxt<'a, 'tcx, 'tcx>, - items: cell::Ref<'a, Vec>, - idx: usize, - }, - Trait { - items: Rc>>, - idx: usize, - }, +pub struct NodeItems<'a, 'tcx: 'a> { + tcx: TyCtxt<'a, 'tcx, 'tcx>, + items: Rc>, + idx: usize } impl<'a, 'tcx> Iterator for NodeItems<'a, 'tcx> { type Item = ImplOrTraitItem<'tcx>; fn next(&mut self) -> Option> { - match *self { - NodeItems::Impl { tcx, ref items, ref mut idx } => { - let items_table = tcx.impl_or_trait_items.borrow(); - if *idx < items.len() { - let item_def_id = items[*idx].def_id(); - let item = items_table[&item_def_id].clone(); - *idx += 1; - Some(item) - } else { - None - } - } - NodeItems::Trait { ref items, ref mut idx } => { - if *idx < items.len() { - let item = items[*idx].clone(); - *idx += 1; - Some(item) - } else { - None - } - } + if self.idx < self.items.len() { + let item_def_id = self.items[self.idx]; + let items_table = self.tcx.impl_or_trait_items.borrow(); + let item = items_table[&item_def_id].clone(); + self.idx += 1; + Some(item) + } else { + None } } } diff --git a/src/librustc/traits/util.rs b/src/librustc/traits/util.rs index 2cefc2ad79646..a0792dcf4dd59 100644 --- a/src/librustc/traits/util.rs +++ b/src/librustc/traits/util.rs @@ -380,7 +380,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { Ok(def_id) => { Ok(ty::TraitRef { def_id: def_id, - substs: Substs::new_trait(self, param_ty, &[]) + substs: self.mk_substs_trait(param_ty, &[]) }) } Err(e) => { @@ -400,7 +400,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { { let trait_ref = ty::TraitRef { def_id: trait_def_id, - substs: Substs::new_trait(self, param_ty, ty_params) + substs: self.mk_substs_trait(param_ty, ty_params) }; predicate_for_trait_ref(cause, trait_ref, recursion_depth) } @@ -486,11 +486,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { { let arguments_tuple = match tuple_arguments { TupleArgumentsFlag::No => sig.0.inputs[0], - TupleArgumentsFlag::Yes => self.mk_tup(sig.0.inputs.to_vec()), + TupleArgumentsFlag::Yes => self.intern_tup(&sig.0.inputs[..]), }; let trait_ref = ty::TraitRef { def_id: fn_trait_def_id, - substs: Substs::new_trait(self, self_ty, &[arguments_tuple]), + substs: self.mk_substs_trait(self_ty, &[arguments_tuple]), }; ty::Binder((trait_ref, sig.0.output)) } diff --git a/src/librustc/ty/adjustment.rs b/src/librustc/ty/adjustment.rs index 3386d894196fa..cfe370343ae49 100644 --- a/src/librustc/ty/adjustment.rs +++ b/src/librustc/ty/adjustment.rs @@ -19,7 +19,7 @@ use syntax_pos::Span; use hir; -#[derive(Copy, Clone)] +#[derive(Copy, Clone, RustcEncodable, RustcDecodable)] pub enum AutoAdjustment<'tcx> { AdjustNeverToAny(Ty<'tcx>), // go from ! 
to any type AdjustReifyFnPointer, // go from a fn-item type to a fn-pointer type @@ -90,7 +90,7 @@ pub enum AutoAdjustment<'tcx> { /// unsize: Some(Box<[i32]>), /// } /// ``` -#[derive(Copy, Clone)] +#[derive(Copy, Clone, RustcEncodable, RustcDecodable)] pub struct AutoDerefRef<'tcx> { /// Step 1. Apply a number of dereferences, producing an lvalue. pub autoderefs: usize, @@ -122,7 +122,7 @@ impl<'tcx> AutoDerefRef<'tcx> { } -#[derive(Copy, Clone, PartialEq, Debug)] +#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)] pub enum AutoRef<'tcx> { /// Convert from T to &T. AutoPtr(&'tcx ty::Region, hir::Mutability), diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 6d7a2d6cba1c7..62cc78141db46 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -13,20 +13,20 @@ use dep_graph::{DepGraph, DepTrackingMap}; use session::Session; use middle; -use middle::cstore::LOCAL_CRATE; use hir::TraitMap; use hir::def::DefMap; -use hir::def_id::{DefId, DefIndex}; +use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; use hir::map as ast_map; -use hir::map::{DefKey, DefPath, DefPathData, DisambiguatedDefPathData}; +use hir::map::{DefKey, DefPathData, DisambiguatedDefPathData}; use middle::free_region::FreeRegionMap; use middle::region::RegionMaps; use middle::resolve_lifetime; use middle::stability; -use ty::subst::Substs; +use mir::Mir; +use ty::subst::{Kind, Substs}; use traits; use ty::{self, TraitRef, Ty, TypeAndMut}; -use ty::{TyS, TypeVariants}; +use ty::{TyS, TypeVariants, Slice}; use ty::{AdtKind, AdtDef, ClosureSubsts, Region}; use hir::FreevarMap; use ty::{BareFnTy, InferTy, ParamTy, ProjectionTy, TraitObject}; @@ -37,6 +37,7 @@ use ty::maps; use util::common::MemoizationMap; use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet}; use util::nodemap::{FnvHashMap, FnvHashSet}; +use rustc_data_structures::accumulate_vec::AccumulateVec; use arena::TypedArena; use std::borrow::Borrow; @@ -45,6 +46,7 @@ use std::hash::{Hash, Hasher}; use std::mem; use std::ops::Deref; use std::rc::Rc; +use std::iter; use syntax::ast::{self, Name, NodeId}; use syntax::attr; use syntax::parse::token::{self, keywords}; @@ -55,8 +57,8 @@ use hir; pub struct CtxtArenas<'tcx> { // internings type_: TypedArena>, - type_list: TypedArena>>, - substs: TypedArena>, + type_list: TypedArena>, + substs: TypedArena>, bare_fn: TypedArena>, region: TypedArena, stability: TypedArena, @@ -64,8 +66,9 @@ pub struct CtxtArenas<'tcx> { // references generics: TypedArena>, - trait_defs: TypedArena>, - adt_defs: TypedArena>, + trait_def: TypedArena>, + adt_def: TypedArena>, + mir: TypedArena>>, } impl<'tcx> CtxtArenas<'tcx> { @@ -80,8 +83,9 @@ impl<'tcx> CtxtArenas<'tcx> { layout: TypedArena::new(), generics: TypedArena::new(), - trait_defs: TypedArena::new(), - adt_defs: TypedArena::new() + trait_def: TypedArena::new(), + adt_def: TypedArena::new(), + mir: TypedArena::new() } } } @@ -93,7 +97,7 @@ pub struct CtxtInterners<'tcx> { /// Specifically use a speedy hash algorithm for these hash sets, /// they're accessed quite often. 
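// The comment above asks for "a speedy hash algorithm" for the interner sets;
// in rustc that is the `FnvHashSet` alias imported earlier, i.e. a std
// `HashSet` driven by an FNV hasher rather than the default SipHash. A minimal
// sketch of such an alias, using the standard 64-bit FNV-1a parameters; the
// values interned here are placeholders.
use std::collections::HashSet;
use std::hash::{BuildHasherDefault, Hasher};

struct FnvHasher(u64);

impl Default for FnvHasher {
    fn default() -> FnvHasher {
        FnvHasher(0xcbf29ce484222325) // FNV offset basis
    }
}

impl Hasher for FnvHasher {
    fn finish(&self) -> u64 {
        self.0
    }

    fn write(&mut self, bytes: &[u8]) {
        for &byte in bytes {
            self.0 ^= byte as u64;
            self.0 = self.0.wrapping_mul(0x100000001b3); // FNV prime
        }
    }
}

type FnvHashSet<T> = HashSet<T, BuildHasherDefault<FnvHasher>>;

fn main() {
    let mut interned: FnvHashSet<&'static str> = FnvHashSet::default();
    interned.insert("i32");
    interned.insert("&'tcx str");
    assert!(interned.contains("i32"));
}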
type_: RefCell>>>, - type_list: RefCell]>>>, + type_list: RefCell>>>>, substs: RefCell>>>, bare_fn: RefCell>>>, region: RefCell>>, @@ -331,8 +335,8 @@ pub struct GlobalCtxt<'tcx> { /// Maps from a trait item to the trait item "descriptor" pub impl_or_trait_items: RefCell>>, - /// Maps from a trait def-id to a list of the def-ids of its trait items - pub trait_item_def_ids: RefCell>>, + /// Maps from an impl/trait def-id to a list of the def-ids of its items + pub impl_or_trait_item_def_ids: RefCell>>, /// A cache for the trait_items() routine; note that the routine /// itself pushes the `TraitItems` dependency node. @@ -357,6 +361,15 @@ pub struct GlobalCtxt<'tcx> { pub map: ast_map::Map<'tcx>, + /// Maps from the def-id of a function/method or const/static + /// to its MIR. Mutation is done at an item granularity to + /// allow MIR optimization passes to function and still + /// access cross-crate MIR (e.g. inlining or const eval). + /// + /// Note that cross-crate MIR appears to be always borrowed + /// (in the `RefCell` sense) to prevent accidental mutation. + pub mir_map: RefCell>>, + // Records the free variables refrenced by every closure // expression. Do not track deps for this, just recompute it from // scratch every time. @@ -393,12 +406,6 @@ pub struct GlobalCtxt<'tcx> { /// Methods in these implementations don't need to be exported. pub inherent_impls: RefCell>>, - /// Maps a DefId of an impl to a list of its items. - /// Note that this contains all of the impls that we know about, - /// including ones in other crates. It's not clear that this is the best - /// way to do it. - pub impl_items: RefCell>>, - /// Set of used unsafe nodes (functions or blocks). Unsafe nodes not /// present in this set can be warned about. pub used_unsafe: RefCell, @@ -496,8 +503,11 @@ pub struct GlobalCtxt<'tcx> { /// Cache for layouts computed from types. pub layout_cache: RefCell, &'tcx Layout>>, + /// Used to prevent layout from recursing too deeply. + pub layout_depth: Cell, + /// Map from function to the `#[derive]` mode that it's defining. Only used - /// by `rustc-macro` crates. + /// by `proc-macro` crates. pub derive_macros: RefCell>, } @@ -512,7 +522,7 @@ impl<'tcx> GlobalCtxt<'tcx> { } impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { - pub fn crate_name(self, cnum: ast::CrateNum) -> token::InternedString { + pub fn crate_name(self, cnum: CrateNum) -> token::InternedString { if cnum == LOCAL_CRATE { self.crate_name.clone() } else { @@ -520,7 +530,15 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn crate_disambiguator(self, cnum: ast::CrateNum) -> token::InternedString { + pub fn original_crate_name(self, cnum: CrateNum) -> token::InternedString { + if cnum == LOCAL_CRATE { + self.crate_name.clone() + } else { + self.sess.cstore.original_crate_name(cnum) + } + } + + pub fn crate_disambiguator(self, cnum: CrateNum) -> token::InternedString { if cnum == LOCAL_CRATE { self.sess.local_crate_disambiguator() } else { @@ -533,7 +551,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// relative to `krate`. /// /// Returns `None` if there is no `DefIndex` with that key. 
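// The `retrace_path` rework below resolves a def-path by starting from the
// crate-root key and repeatedly combining the index found so far with the next
// path segment, bailing out with `None` the moment any segment is unknown. A
// small sketch of that loop with hypothetical flat types (a tuple key and a
// `u32` index standing in for `DefKey` and `DefIndex`):
use std::collections::HashMap;

type Index = u32;

fn retrace(keys: &HashMap<(Option<Index>, String), Index>, path: &[String]) -> Option<Index> {
    // The crate root has no parent and empty data.
    let mut index = match keys.get(&(None, String::new())) {
        Some(&root) => root,
        None => return None,
    };
    for segment in path {
        match keys.get(&(Some(index), segment.clone())) {
            Some(&next) => index = next,
            None => return None,
        }
    }
    Some(index)
}

fn main() {
    let mut keys = HashMap::new();
    keys.insert((None, String::new()), 0);                    // crate root
    keys.insert((Some(0), "collections".to_string()), 1);
    keys.insert((Some(1), "HashMap".to_string()), 2);

    let path = vec!["collections".to_string(), "HashMap".to_string()];
    assert_eq!(retrace(&keys, &path), Some(2));
    assert_eq!(retrace(&keys, &["missing".to_string()]), None);
}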
- pub fn def_index_for_def_key(self, krate: ast::CrateNum, key: DefKey) + pub fn def_index_for_def_key(self, krate: CrateNum, key: DefKey) -> Option { if krate == LOCAL_CRATE { self.map.def_index_for_def_key(key) @@ -542,8 +560,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - pub fn retrace_path(self, path: &DefPath) -> Option { - debug!("retrace_path(path={:?}, krate={:?})", path, self.crate_name(path.krate)); + pub fn retrace_path(self, + krate: CrateNum, + path_data: &[DisambiguatedDefPathData]) + -> Option { + debug!("retrace_path(path={:?}, krate={:?})", path_data, self.crate_name(krate)); let root_key = DefKey { parent: None, @@ -553,22 +574,22 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { }, }; - let root_index = self.def_index_for_def_key(path.krate, root_key) + let root_index = self.def_index_for_def_key(krate, root_key) .expect("no root key?"); debug!("retrace_path: root_index={:?}", root_index); let mut index = root_index; - for data in &path.data { + for data in path_data { let key = DefKey { parent: Some(index), disambiguated_data: data.clone() }; debug!("retrace_path: key={:?}", key); - match self.def_index_for_def_key(path.krate, key) { + match self.def_index_for_def_key(krate, key) { Some(i) => index = i, None => return None, } } - Some(DefId { krate: path.krate, index: index }) + Some(DefId { krate: krate, index: index }) } pub fn type_parameter_def(self, @@ -595,6 +616,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.global_interners.arenas.generics.alloc(generics) } + pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx RefCell> { + self.global_interners.arenas.mir.alloc(RefCell::new(mir)) + } + pub fn intern_trait_def(self, def: ty::TraitDef<'gcx>) -> &'gcx ty::TraitDef<'gcx> { let did = def.trait_ref.def_id; @@ -608,7 +633,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn alloc_trait_def(self, def: ty::TraitDef<'gcx>) -> &'gcx ty::TraitDef<'gcx> { - self.global_interners.arenas.trait_defs.alloc(def) + self.global_interners.arenas.trait_def.alloc(def) } pub fn insert_adt_def(self, did: DefId, adt_def: ty::AdtDefMaster<'gcx>) { @@ -624,7 +649,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { variants: Vec>) -> ty::AdtDefMaster<'gcx> { let def = ty::AdtDefData::new(self, did, kind, variants); - let interned = self.global_interners.arenas.adt_defs.alloc(def); + let interned = self.global_interners.arenas.adt_def.alloc(def); self.insert_adt_def(did, interned); interned } @@ -729,19 +754,19 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { super_predicates: RefCell::new(DepTrackingMap::new(dep_graph.clone())), fulfilled_predicates: RefCell::new(fulfilled_predicates), map: map, + mir_map: RefCell::new(DepTrackingMap::new(dep_graph.clone())), freevars: RefCell::new(freevars), maybe_unused_trait_imports: maybe_unused_trait_imports, tcache: RefCell::new(DepTrackingMap::new(dep_graph.clone())), rcache: RefCell::new(FnvHashMap()), tc_cache: RefCell::new(FnvHashMap()), impl_or_trait_items: RefCell::new(DepTrackingMap::new(dep_graph.clone())), - trait_item_def_ids: RefCell::new(DepTrackingMap::new(dep_graph.clone())), + impl_or_trait_item_def_ids: RefCell::new(DepTrackingMap::new(dep_graph.clone())), trait_items_cache: RefCell::new(DepTrackingMap::new(dep_graph.clone())), ty_param_defs: RefCell::new(NodeMap()), normalized_cache: RefCell::new(FnvHashMap()), lang_items: lang_items, inherent_impls: RefCell::new(DepTrackingMap::new(dep_graph.clone())), - impl_items: RefCell::new(DepTrackingMap::new(dep_graph.clone())), used_unsafe: RefCell::new(NodeSet()), 
used_mut_nodes: RefCell::new(NodeSet()), used_trait_imports: RefCell::new(NodeSet()), @@ -760,6 +785,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { crate_name: token::intern_and_get_ident(crate_name), data_layout: data_layout, layout_cache: RefCell::new(FnvHashMap()), + layout_depth: Cell::new(0), derive_macros: RefCell::new(NodeMap()), }, f) } @@ -817,7 +843,10 @@ impl<'a, 'tcx> Lift<'tcx> for Ty<'a> { impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> { type Lifted = &'tcx Substs<'tcx>; fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> { - if let Some(&Interned(substs)) = tcx.interners.substs.borrow().get(*self) { + if self.len() == 0 { + return Some(Slice::empty()); + } + if let Some(&Interned(substs)) = tcx.interners.substs.borrow().get(&self[..]) { if *self as *const _ == substs as *const _ { return Some(substs); } @@ -848,10 +877,14 @@ impl<'a, 'tcx> Lift<'tcx> for &'a Region { } } -impl<'a, 'tcx> Lift<'tcx> for &'a [Ty<'a>] { - type Lifted = &'tcx [Ty<'tcx>]; - fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx [Ty<'tcx>]> { - if let Some(&Interned(list)) = tcx.interners.type_list.borrow().get(*self) { +impl<'a, 'tcx> Lift<'tcx> for &'a Slice> { + type Lifted = &'tcx Slice>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) + -> Option<&'tcx Slice>> { + if self.len() == 0 { + return Some(Slice::empty()); + } + if let Some(&Interned(list)) = tcx.interners.type_list.borrow().get(&self[..]) { if *self as *const _ == list as *const _ { return Some(list); } @@ -1068,15 +1101,30 @@ impl<'tcx: 'lcx, 'lcx> Borrow> for Interned<'tcx, TyS<'tcx>> } } -impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, [Ty<'tcx>]> { +// NB: An Interned> compares and hashes as its elements. +impl<'tcx, T: PartialEq> PartialEq for Interned<'tcx, Slice> { + fn eq(&self, other: &Interned<'tcx, Slice>) -> bool { + self.0[..] == other.0[..] + } +} + +impl<'tcx, T: Eq> Eq for Interned<'tcx, Slice> {} + +impl<'tcx, T: Hash> Hash for Interned<'tcx, Slice> { + fn hash(&self, s: &mut H) { + self.0[..].hash(s) + } +} + +impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, Slice>> { fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] { - self.0 + &self.0[..] } } -impl<'tcx: 'lcx, 'lcx> Borrow> for Interned<'tcx, Substs<'tcx>> { - fn borrow<'a>(&'a self) -> &'a Substs<'lcx> { - self.0 +impl<'tcx: 'lcx, 'lcx> Borrow<[Kind<'lcx>]> for Interned<'tcx, Substs<'tcx>> { + fn borrow<'a>(&'a self) -> &'a [Kind<'lcx>] { + &self.0[..] } } @@ -1092,32 +1140,24 @@ impl<'tcx> Borrow for Interned<'tcx, Region> { } } -macro_rules! items { ($($item:item)+) => ($($item)+) } -macro_rules! impl_interners { - ($lt_tcx:tt, $($name:ident: $method:ident($alloc:ty, $needs_infer:expr)-> $ty:ty),+) => { - items!($(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> { - fn eq(&self, other: &Self) -> bool { - self.0 == other.0 - } - } - - impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {} - - impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> { - fn hash(&self, s: &mut H) { - self.0.hash(s) - } - } - +macro_rules! 
intern_method { + ($lt_tcx:tt, $name:ident: $method:ident($alloc:ty, + $alloc_method:ident, + $alloc_to_key:expr, + $alloc_to_ret:expr, + $needs_infer:expr) -> $ty:ty) => { impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> { pub fn $method(self, v: $alloc) -> &$lt_tcx $ty { - if let Some(i) = self.interners.$name.borrow().get::<$ty>(&v) { - return i.0; - } - if !self.is_global() { - if let Some(i) = self.global_interners.$name.borrow().get::<$ty>(&v) { + { + let key = ($alloc_to_key)(&v); + if let Some(i) = self.interners.$name.borrow().get(key) { return i.0; } + if !self.is_global() { + if let Some(i) = self.global_interners.$name.borrow().get(key) { + return i.0; + } + } } // HACK(eddyb) Depend on flags being accurate to @@ -1128,7 +1168,8 @@ macro_rules! impl_interners { let v = unsafe { mem::transmute(v) }; - let i = self.global_interners.arenas.$name.alloc(v); + let i = ($alloc_to_ret)(self.global_interners.arenas.$name + .$alloc_method(v)); self.global_interners.$name.borrow_mut().insert(Interned(i)); return i; } @@ -1142,11 +1183,31 @@ macro_rules! impl_interners { } } - let i = self.interners.arenas.$name.alloc(v); + let i = ($alloc_to_ret)(self.interners.arenas.$name.$alloc_method(v)); self.interners.$name.borrow_mut().insert(Interned(i)); i } - })+); + } + } +} + +macro_rules! direct_interners { + ($lt_tcx:tt, $($name:ident: $method:ident($needs_infer:expr) -> $ty:ty),+) => { + $(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } + } + + impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {} + + impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> { + fn hash(&self, s: &mut H) { + self.0.hash(s) + } + } + + intern_method!($lt_tcx, $name: $method($ty, alloc, |x| x, |x| x, $needs_infer) -> $ty);)+ } } @@ -1154,15 +1215,11 @@ fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool { x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX) } -impl_interners!('tcx, - type_list: mk_type_list(Vec>, keep_local) -> [Ty<'tcx>], - substs: mk_substs(Substs<'tcx>, |substs: &Substs| { - substs.params().iter().any(keep_local) - }) -> Substs<'tcx>, - bare_fn: mk_bare_fn(BareFnTy<'tcx>, |fty: &BareFnTy| { +direct_interners!('tcx, + bare_fn: mk_bare_fn(|fty: &BareFnTy| { keep_local(&fty.sig) }) -> BareFnTy<'tcx>, - region: mk_region(Region, |r| { + region: mk_region(|r| { match r { &ty::ReVar(_) | &ty::ReSkolemized(..) => true, _ => false @@ -1170,6 +1227,20 @@ impl_interners!('tcx, }) -> Region ); +macro_rules! slice_interners { + ($($field:ident: $method:ident($ty:ident)),+) => ( + $(intern_method!('tcx, $field: $method(&[$ty<'tcx>], alloc_slice, Deref::deref, + |xs: &[$ty]| -> &Slice<$ty> { + unsafe { mem::transmute(xs) } + }, |xs: &[$ty]| xs.iter().any(keep_local)) -> Slice<$ty<'tcx>>);)+ + ) +} + +slice_interners!( + type_list: _intern_type_list(Ty), + substs: _intern_substs(Kind) +); + impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Create an unsafe fn ty based on a safe fn ty. 
pub fn safe_to_unsafe_fn_ty(self, bare_fn: &BareFnTy<'tcx>) -> Ty<'tcx> { @@ -1272,12 +1343,16 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.mk_ty(TySlice(ty)) } - pub fn mk_tup(self, ts: Vec>) -> Ty<'tcx> { - self.mk_ty(TyTuple(self.mk_type_list(ts))) + pub fn intern_tup(self, ts: &[Ty<'tcx>]) -> Ty<'tcx> { + self.mk_ty(TyTuple(self.intern_type_list(ts))) + } + + pub fn mk_tup], Ty<'tcx>>>(self, iter: I) -> I::Output { + iter.intern_with(|ts| self.mk_ty(TyTuple(self.intern_type_list(ts)))) } pub fn mk_nil(self) -> Ty<'tcx> { - self.mk_tup(Vec::new()) + self.intern_tup(&[]) } pub fn mk_diverging_default(self) -> Ty<'tcx> { @@ -1319,11 +1394,11 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn mk_closure(self, closure_id: DefId, substs: &'tcx Substs<'tcx>, - tys: Vec>) + tys: &[Ty<'tcx>]) -> Ty<'tcx> { self.mk_closure_from_closure_substs(closure_id, ClosureSubsts { func_substs: substs, - upvar_tys: self.mk_type_list(tys) + upvar_tys: self.intern_type_list(tys) }) } @@ -1368,11 +1443,45 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.mk_ty(TyAnon(def_id, substs)) } + pub fn intern_type_list(self, ts: &[Ty<'tcx>]) -> &'tcx Slice> { + if ts.len() == 0 { + Slice::empty() + } else { + self._intern_type_list(ts) + } + } + + pub fn intern_substs(self, ts: &[Kind<'tcx>]) -> &'tcx Slice> { + if ts.len() == 0 { + Slice::empty() + } else { + self._intern_substs(ts) + } + } + + pub fn mk_type_list], + &'tcx Slice>>>(self, iter: I) -> I::Output { + iter.intern_with(|xs| self.intern_type_list(xs)) + } + + pub fn mk_substs], + &'tcx Slice>>>(self, iter: I) -> I::Output { + iter.intern_with(|xs| self.intern_substs(xs)) + } + + pub fn mk_substs_trait(self, + s: Ty<'tcx>, + t: &[Ty<'tcx>]) + -> &'tcx Substs<'tcx> + { + self.mk_substs(iter::once(s).chain(t.into_iter().cloned()).map(Kind::from)) + } + pub fn trait_items(self, trait_did: DefId) -> Rc>> { self.trait_items_cache.memoize(trait_did, || { - let def_ids = self.trait_item_def_ids(trait_did); + let def_ids = self.impl_or_trait_items(trait_did); Rc::new(def_ids.iter() - .map(|d| self.impl_or_trait_item(d.def_id())) + .map(|&def_id| self.impl_or_trait_item(def_id)) .collect()) }) } @@ -1380,13 +1489,45 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Obtain the representation annotation for a struct definition. 
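`mk_tup` above accepts any iterator whose elements are types or `Result`s of types; the fallible case leans on `collect::<Result<Vec<_>, _>>()`, which stops at the first error. A small illustration of that collection step; the function name is made up, and rustc's version interns the collected list instead of returning it:

```rust
fn mk_tup_fallible<T, E>(
    parts: impl Iterator<Item = Result<T, E>>,
) -> Result<Vec<T>, E> {
    // Collecting into `Result<Vec<_>, _>` short-circuits on the first `Err`,
    // so only a fully successful element list would reach the interner.
    let tys = parts.collect::<Result<Vec<_>, E>>()?;
    Ok(tys)
}

fn main() {
    let ok: Result<Vec<u32>, &str> = mk_tup_fallible(vec![Ok(1), Ok(2)].into_iter());
    assert_eq!(ok, Ok(vec![1, 2]));

    let err: Result<Vec<u32>, &str> =
        mk_tup_fallible(vec![Ok(1), Err("type mismatch")].into_iter());
    assert_eq!(err, Err("type mismatch"));
}
```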
pub fn lookup_repr_hints(self, did: DefId) -> Rc> { self.repr_hint_cache.memoize(did, || { - Rc::new(if did.is_local() { - self.get_attrs(did).iter().flat_map(|meta| { - attr::find_repr_attrs(self.sess.diagnostic(), meta).into_iter() - }).collect() - } else { - self.sess.cstore.repr_attrs(did) - }) + Rc::new(self.get_attrs(did).iter().flat_map(|meta| { + attr::find_repr_attrs(self.sess.diagnostic(), meta).into_iter() + }).collect()) }) } } + +pub trait InternAs { + type Output; + fn intern_with(self, F) -> Self::Output + where F: FnOnce(&T) -> R; +} + +impl InternAs<[T], R> for I + where E: InternIteratorElement, + I: Iterator { + type Output = E::Output; + fn intern_with(self, f: F) -> Self::Output + where F: FnOnce(&[T]) -> R { + E::intern_with(self, f) + } +} + +pub trait InternIteratorElement: Sized { + type Output; + fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output; +} + +impl InternIteratorElement for T { + type Output = R; + fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { + f(&iter.collect::>()) + } +} + +impl InternIteratorElement for Result { + type Output = Result; + fn intern_with, F: FnOnce(&[T]) -> R>(iter: I, f: F) -> Self::Output { + Ok(f(&iter.collect::, _>>()?)) + } +} + diff --git a/src/librustc/ty/error.rs b/src/librustc/ty/error.rs index 001f47af68c3b..9b345c2d02329 100644 --- a/src/librustc/ty/error.rs +++ b/src/librustc/ty/error.rs @@ -33,13 +33,8 @@ pub enum TypeError<'tcx> { UnsafetyMismatch(ExpectedFound), AbiMismatch(ExpectedFound), Mutability, - BoxMutability, - PtrMutability, - RefMutability, - VecMutability, TupleSize(ExpectedFound), FixedArraySize(ExpectedFound), - TyParamSize(ExpectedFound), ArgCount, RegionsDoesNotOutlive(&'tcx Region, &'tcx Region), RegionsNotSame(&'tcx Region, &'tcx Region), @@ -47,14 +42,12 @@ pub enum TypeError<'tcx> { RegionsInsufficientlyPolymorphic(BoundRegion, &'tcx Region), RegionsOverlyPolymorphic(BoundRegion, &'tcx Region), Sorts(ExpectedFound>), - IntegerAsChar, IntMismatch(ExpectedFound), FloatMismatch(ExpectedFound), Traits(ExpectedFound), BuiltinBoundsMismatch(ExpectedFound), VariadicMismatch(ExpectedFound), CyclicTy, - ConvergenceMismatch(ExpectedFound), ProjectionNameMismatched(ExpectedFound), ProjectionBoundsLength(ExpectedFound), TyParamDefaultMismatch(ExpectedFound>) @@ -99,18 +92,6 @@ impl<'tcx> fmt::Display for TypeError<'tcx> { values.found) } Mutability => write!(f, "types differ in mutability"), - BoxMutability => { - write!(f, "boxed types differ in mutability") - } - VecMutability => write!(f, "vectors differ in mutability"), - PtrMutability => write!(f, "pointers differ in mutability"), - RefMutability => write!(f, "references differ in mutability"), - TyParamSize(values) => { - write!(f, "expected a type with {} type params, \ - found one with {} type params", - values.expected, - values.found) - } FixedArraySize(values) => { write!(f, "expected an array with a fixed size of {} elements, \ found one with {} elements", @@ -167,9 +148,6 @@ impl<'tcx> fmt::Display for TypeError<'tcx> { values.found) } } - IntegerAsChar => { - write!(f, "expected an integral type, found `char`") - } IntMismatch(ref values) => { write!(f, "expected `{:?}`, found `{:?}`", values.expected, @@ -185,11 +163,6 @@ impl<'tcx> fmt::Display for TypeError<'tcx> { if values.expected { "variadic" } else { "non-variadic" }, if values.found { "variadic" } else { "non-variadic" }) } - ConvergenceMismatch(ref values) => { - write!(f, "expected {} fn, found {} function", - if values.expected { "converging" } 
else { "diverging" }, - if values.found { "converging" } else { "diverging" }) - } ProjectionNameMismatched(ref values) => { write!(f, "expected {}, found {}", values.expected, diff --git a/src/librustc/ty/fast_reject.rs b/src/librustc/ty/fast_reject.rs index ee1544d2d996d..befc9533c387b 100644 --- a/src/librustc/ty/fast_reject.rs +++ b/src/librustc/ty/fast_reject.rs @@ -24,7 +24,7 @@ pub enum SimplifiedType { FloatSimplifiedType(ast::FloatTy), AdtSimplifiedType(DefId), StrSimplifiedType, - VecSimplifiedType, + ArraySimplifiedType, PtrSimplifiedType, NeverSimplifiedType, TupleSimplifiedType(usize), @@ -57,7 +57,7 @@ pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, ty::TyFloat(float_type) => Some(FloatSimplifiedType(float_type)), ty::TyAdt(def, _) => Some(AdtSimplifiedType(def.did)), ty::TyStr => Some(StrSimplifiedType), - ty::TyArray(..) | ty::TySlice(_) => Some(VecSimplifiedType), + ty::TyArray(..) | ty::TySlice(_) => Some(ArraySimplifiedType), ty::TyRawPtr(_) => Some(PtrSimplifiedType), ty::TyTrait(ref trait_info) => { Some(TraitSimplifiedType(trait_info.principal.def_id())) diff --git a/src/librustc/ty/flags.rs b/src/librustc/ty/flags.rs index cddd59fa83c1b..649d78f9d9e2d 100644 --- a/src/librustc/ty/flags.rs +++ b/src/librustc/ty/flags.rs @@ -11,6 +11,7 @@ use ty::subst::Substs; use ty::{self, Ty, TypeFlags, TypeFoldable}; +#[derive(Debug)] pub struct FlagComputation { pub flags: TypeFlags, @@ -107,6 +108,11 @@ impl FlagComputation { } &ty::TyProjection(ref data) => { + // currently we can't normalize projections that + // include bound regions, so track those separately. + if !data.has_escaping_regions() { + self.add_flags(TypeFlags::HAS_NORMALIZABLE_PROJECTION); + } self.add_flags(TypeFlags::HAS_PROJECTION); self.add_projection_ty(data); } @@ -177,24 +183,9 @@ impl FlagComputation { } fn add_region(&mut self, r: &ty::Region) { - match *r { - ty::ReVar(..) => { - self.add_flags(TypeFlags::HAS_RE_INFER); - self.add_flags(TypeFlags::KEEP_IN_LOCAL_TCX); - } - ty::ReSkolemized(..) => { - self.add_flags(TypeFlags::HAS_RE_INFER); - self.add_flags(TypeFlags::HAS_RE_SKOL); - self.add_flags(TypeFlags::KEEP_IN_LOCAL_TCX); - } - ty::ReLateBound(debruijn, _) => { self.add_depth(debruijn.depth); } - ty::ReEarlyBound(..) 
=> { self.add_flags(TypeFlags::HAS_RE_EARLY_BOUND); } - ty::ReStatic | ty::ReErased => {} - _ => { self.add_flags(TypeFlags::HAS_FREE_REGIONS); } - } - - if !r.is_global() { - self.add_flags(TypeFlags::HAS_LOCAL_NAMES); + self.add_flags(r.type_flags()); + if let ty::ReLateBound(debruijn, _) = *r { + self.add_depth(debruijn.depth); } } diff --git a/src/librustc/ty/fold.rs b/src/librustc/ty/fold.rs index 2c18d1d52547f..ae0a4a0e6bd11 100644 --- a/src/librustc/ty/fold.rs +++ b/src/librustc/ty/fold.rs @@ -91,6 +91,9 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { fn needs_subst(&self) -> bool { self.has_type_flags(TypeFlags::NEEDS_SUBST) } + fn has_re_skol(&self) -> bool { + self.has_type_flags(TypeFlags::HAS_RE_SKOL) + } fn has_closure_types(&self) -> bool { self.has_type_flags(TypeFlags::HAS_TY_CLOSURE) } @@ -105,7 +108,7 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { TypeFlags::HAS_FREE_REGIONS | TypeFlags::HAS_TY_INFER | TypeFlags::HAS_PARAMS | - TypeFlags::HAS_PROJECTION | + TypeFlags::HAS_NORMALIZABLE_PROJECTION | TypeFlags::HAS_TY_ERR | TypeFlags::HAS_SELF) } @@ -632,26 +635,15 @@ struct HasTypeFlagsVisitor { impl<'tcx> TypeVisitor<'tcx> for HasTypeFlagsVisitor { fn visit_ty(&mut self, t: Ty) -> bool { - t.flags.get().intersects(self.flags) + let flags = t.flags.get(); + debug!("HasTypeFlagsVisitor: t={:?} t.flags={:?} self.flags={:?}", t, flags, self.flags); + flags.intersects(self.flags) } fn visit_region(&mut self, r: &'tcx ty::Region) -> bool { - if self.flags.intersects(ty::TypeFlags::HAS_LOCAL_NAMES) { - // does this represent a region that cannot be named - // in a global way? used in fulfillment caching. - match *r { - ty::ReStatic | ty::ReEmpty | ty::ReErased => {} - _ => return true, - } - } - if self.flags.intersects(ty::TypeFlags::HAS_RE_INFER | - ty::TypeFlags::KEEP_IN_LOCAL_TCX) { - match *r { - ty::ReVar(_) | ty::ReSkolemized(..) => { return true } - _ => {} - } - } - false + let flags = r.type_flags(); + debug!("HasTypeFlagsVisitor: r={:?} r.flags={:?} self.flags={:?}", r, flags, self.flags); + flags.intersects(self.flags) } } diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs index b6b55fc0e33dd..fdf5185eb69e2 100644 --- a/src/librustc/ty/item_path.rs +++ b/src/librustc/ty/item_path.rs @@ -9,8 +9,7 @@ // except according to those terms. use hir::map::DefPathData; -use middle::cstore::LOCAL_CRATE; -use hir::def_id::{DefId, CRATE_DEF_INDEX}; +use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use ty::{self, Ty, TyCtxt}; use syntax::ast; use syntax::parse::token; @@ -67,7 +66,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Returns the "path" to a particular crate. This can proceed in /// various ways, depending on the `root_mode` of the `buffer`. /// (See `RootMode` enum for more details.) - pub fn push_krate_path(self, buffer: &mut T, cnum: ast::CrateNum) + pub fn push_krate_path(self, buffer: &mut T, cnum: CrateNum) where T: ItemPathBuffer { match *buffer.root_mode() { @@ -102,11 +101,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { RootMode::Absolute => { // In absolute mode, just write the crate name // unconditionally. 
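The `FlagComputation` and `HasTypeFlagsVisitor` changes above rely on each type caching a bitset of facts (has inference variables, has skolemized regions, and so on) so that queries reduce to a single `intersects` check. A minimal sketch of that caching scheme with hand-rolled flags instead of the `bitflags!`-generated ones; all names are illustrative:

```rust
#[derive(Clone, Copy)]
struct TypeFlags(u32);

impl TypeFlags {
    const HAS_TY_INFER: TypeFlags = TypeFlags(1 << 0);
    const HAS_RE_SKOL: TypeFlags = TypeFlags(1 << 1);

    fn union(self, other: TypeFlags) -> TypeFlags {
        TypeFlags(self.0 | other.0)
    }
    fn intersects(self, other: TypeFlags) -> bool {
        self.0 & other.0 != 0
    }
}

enum Ty {
    Bool,
    Infer,
    Ref(bool /* skolemized region? */, Box<Ty>),
}

/// Computed once when a type is built (rustc does this at interning time),
/// then stored alongside it so visitors never re-walk the type.
fn flags(ty: &Ty) -> TypeFlags {
    match ty {
        Ty::Bool => TypeFlags(0),
        Ty::Infer => TypeFlags::HAS_TY_INFER,
        Ty::Ref(skolemized, inner) => {
            let region = if *skolemized { TypeFlags::HAS_RE_SKOL } else { TypeFlags(0) };
            region.union(flags(inner))
        }
    }
}

fn main() {
    let ty = Ty::Ref(true, Box::new(Ty::Bool));
    assert!(flags(&ty).intersects(TypeFlags::HAS_RE_SKOL));
    assert!(!flags(&ty).intersects(TypeFlags::HAS_TY_INFER));
    assert!(flags(&Ty::Infer).intersects(TypeFlags::HAS_TY_INFER));
}
```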
- if cnum == LOCAL_CRATE { - buffer.push(&self.crate_name(cnum)); - } else { - buffer.push(&self.sess.cstore.original_crate_name(cnum)); - } + buffer.push(&self.original_crate_name(cnum)); } } } @@ -139,7 +134,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } - cur_path.push(self.sess.cstore.opt_item_name(cur_def).unwrap_or_else(|| + cur_path.push(self.sess.cstore.def_key(cur_def) + .disambiguated_data.data.get_opt_name().unwrap_or_else(|| token::intern(""))); match visible_parent_map.get(&cur_def) { Some(&def) => cur_def = def, @@ -304,7 +300,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Returns the def-id of `def_id`'s parent in the def tree. If /// this returns `None`, then `def_id` represents a crate root or /// inlined root. - fn parent_def_id(&self, def_id: DefId) -> Option { + pub fn parent_def_id(self, def_id: DefId) -> Option { let key = self.def_key(def_id); key.parent.map(|index| DefId { krate: def_id.krate, index: index }) } diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index 6fec698cfac9b..5ce43d905ec71 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -328,6 +328,42 @@ pub enum Integer { } impl Integer { + pub fn size(&self) -> Size { + match *self { + I1 => Size::from_bits(1), + I8 => Size::from_bytes(1), + I16 => Size::from_bytes(2), + I32 => Size::from_bytes(4), + I64 => Size::from_bytes(8), + } + } + + pub fn align(&self, dl: &TargetDataLayout)-> Align { + match *self { + I1 => dl.i1_align, + I8 => dl.i8_align, + I16 => dl.i16_align, + I32 => dl.i32_align, + I64 => dl.i64_align, + } + } + + pub fn to_ty<'a, 'tcx>(&self, tcx: &ty::TyCtxt<'a, 'tcx, 'tcx>, + signed: bool) -> Ty<'tcx> { + match (*self, signed) { + (I1, false) => tcx.types.u8, + (I8, false) => tcx.types.u8, + (I16, false) => tcx.types.u16, + (I32, false) => tcx.types.u32, + (I64, false) => tcx.types.u64, + (I1, true) => tcx.types.i8, + (I8, true) => tcx.types.i8, + (I16, true) => tcx.types.i16, + (I32, true) => tcx.types.i32, + (I64, true) => tcx.types.i64, + } + } + /// Find the smallest Integer type which can represent the signed value. pub fn fit_signed(x: i64) -> Integer { match x { @@ -350,6 +386,18 @@ impl Integer { } } + /// Find the smallest integer with the given alignment. + pub fn for_abi_align(dl: &TargetDataLayout, align: Align) -> Option { + let wanted = align.abi(); + for &candidate in &[I8, I16, I32, I64] { + let ty = Int(candidate); + if wanted == ty.align(dl).abi() && wanted == ty.size(dl).bytes() { + return Some(candidate); + } + } + None + } + /// Get the Integer type from an attr::IntType. pub fn from_attr(dl: &TargetDataLayout, ity: attr::IntType) -> Integer { match ity { @@ -367,7 +415,7 @@ impl Integer { /// signed discriminant range and #[repr] attribute. /// N.B.: u64 values above i64::MAX will be treated as signed, but /// that shouldn't affect anything, other than maybe debuginfo. - pub fn repr_discr(tcx: TyCtxt, hint: attr::ReprAttr, min: i64, max: i64) + pub fn repr_discr(tcx: TyCtxt, ty: Ty, hint: attr::ReprAttr, min: i64, max: i64) -> (Integer, bool) { // Theoretically, negative values could be larger in unsigned representation // than the unsigned representation of the signed minimum. 
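`Integer::repr_discr` above, which now also takes the enum type so its errors can name it, picks the narrowest integer that fits the discriminant range. A simplified version of the width selection, ignoring `#[repr]` hints, unsigned fitting, and alignment:

```rust
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Int { I8, I16, I32, I64 }

/// Smallest signed integer that can represent `x`.
fn fit_signed(x: i64) -> Int {
    match x {
        -0x80..=0x7f => Int::I8,
        -0x8000..=0x7fff => Int::I16,
        -0x8000_0000..=0x7fff_ffff => Int::I32,
        _ => Int::I64,
    }
}

fn repr_discr(min: i64, max: i64) -> Int {
    // The chosen width must fit both extremes of the range.
    std::cmp::max(fit_signed(min), fit_signed(max))
}

fn main() {
    assert_eq!(repr_discr(0, 3), Int::I8);
    assert_eq!(repr_discr(-1, 1000), Int::I16);
}
```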
However, if there @@ -377,11 +425,12 @@ impl Integer { let signed_fit = cmp::max(Integer::fit_signed(min), Integer::fit_signed(max)); let at_least = match hint { - attr::ReprInt(span, ity) => { + attr::ReprInt(ity) => { let discr = Integer::from_attr(&tcx.data_layout, ity); let fit = if ity.is_signed() { signed_fit } else { unsigned_fit }; if discr < fit { - span_bug!(span, "representation hint insufficient for discriminant range") + bug!("Integer::repr_discr: `#[repr]` hint too small for \ + discriminant range of enum `{}", ty) } return (discr, ity.is_signed()); } @@ -397,10 +446,10 @@ impl Integer { } attr::ReprAny => I8, attr::ReprPacked => { - bug!("Integer::repr_discr: found #[repr(packed)] on an enum"); + bug!("Integer::repr_discr: found #[repr(packed)] on enum `{}", ty); } attr::ReprSimd => { - bug!("Integer::repr_discr: found #[repr(simd)] on an enum"); + bug!("Integer::repr_discr: found #[repr(simd)] on enum `{}", ty); } }; @@ -462,11 +511,11 @@ pub struct Struct { /// If true, the size is exact, otherwise it's only a lower bound. pub sized: bool, - /// Offsets for the first byte after each field. - /// That is, field_offset(i) = offset_after_field[i - 1] and the - /// whole structure's size is the last offset, excluding padding. - // FIXME(eddyb) use small vector optimization for the common case. - pub offset_after_field: Vec + /// Offsets for the first byte of each field. + /// FIXME(eddyb) use small vector optimization for the common case. + pub offsets: Vec, + + pub min_size: Size, } impl<'a, 'gcx, 'tcx> Struct { @@ -475,7 +524,8 @@ impl<'a, 'gcx, 'tcx> Struct { align: if packed { dl.i8_align } else { dl.aggregate_align }, packed: packed, sized: true, - offset_after_field: vec![] + offsets: vec![], + min_size: Size::from_bytes(0), } } @@ -485,12 +535,14 @@ impl<'a, 'gcx, 'tcx> Struct { scapegoat: Ty<'gcx>) -> Result<(), LayoutError<'gcx>> where I: Iterator>> { - self.offset_after_field.reserve(fields.size_hint().0); + self.offsets.reserve(fields.size_hint().0); + + let mut offset = self.min_size; for field in fields { if !self.sized { bug!("Struct::extend: field #{} of `{}` comes after unsized field", - self.offset_after_field.len(), scapegoat); + self.offsets.len(), scapegoat); } let field = field?; @@ -499,34 +551,29 @@ impl<'a, 'gcx, 'tcx> Struct { } // Invariant: offset < dl.obj_size_bound() <= 1<<61 - let mut offset = if !self.packed { + if !self.packed { let align = field.align(dl); self.align = self.align.max(align); - self.offset_after_field.last_mut().map_or(Size::from_bytes(0), |last| { - *last = last.abi_align(align); - *last - }) - } else { - self.offset_after_field.last().map_or(Size::from_bytes(0), |&last| last) - }; + offset = offset.abi_align(align); + } + + self.offsets.push(offset); + offset = offset.checked_add(field.size(dl), dl) .map_or(Err(LayoutError::SizeOverflow(scapegoat)), Ok)?; - - self.offset_after_field.push(offset); } + self.min_size = offset; + Ok(()) } /// Get the size without trailing alignment padding. - pub fn min_size(&self) -> Size { - self.offset_after_field.last().map_or(Size::from_bytes(0), |&last| last) - } /// Get the size with trailing aligment padding. pub fn stride(&self) -> Size { - self.min_size().abi_align(self.align) + self.min_size.abi_align(self.align) } /// Determine whether a structure would be zero-sized, given its fields. @@ -550,7 +597,8 @@ impl<'a, 'gcx, 'tcx> Struct { -> Result, LayoutError<'gcx>> { let tcx = infcx.tcx.global_tcx(); match (ty.layout(infcx)?, &ty.sty) { - (&Scalar { non_zero: true, .. 
}, _) => Ok(Some(vec![])), + (&Scalar { non_zero: true, .. }, _) | + (&CEnum { non_zero: true, .. }, _) => Ok(Some(vec![])), (&FatPointer { non_zero: true, .. }, _) => { Ok(Some(vec![FAT_PTR_ADDR as u32])) } @@ -722,6 +770,7 @@ pub enum Layout { CEnum { discr: Integer, signed: bool, + non_zero: bool, // Inclusive discriminant range. // If min > max, it represents min...u64::MAX followed by 0...max. // FIXME(eddyb) always use the shortest range, e.g. by finding @@ -911,7 +960,7 @@ impl<'a, 'gcx, 'tcx> Layout { Univariant { variant: unit, non_zero: false } } - // Tuples. + // Tuples and closures. ty::TyClosure(_, ty::ClosureSubsts { upvar_tys: tys, .. }) | ty::TyTuple(tys) => { let mut st = Struct::new(dl, false); @@ -955,26 +1004,28 @@ impl<'a, 'gcx, 'tcx> Layout { if def.is_enum() && def.variants.iter().all(|v| v.fields.is_empty()) { // All bodies empty -> intlike - let (mut min, mut max) = (i64::MAX, i64::MIN); + let (mut min, mut max, mut non_zero) = (i64::MAX, i64::MIN, true); for v in &def.variants { let x = v.disr_val.to_u64_unchecked() as i64; + if x == 0 { non_zero = false; } if x < min { min = x; } if x > max { max = x; } } - let (discr, signed) = Integer::repr_discr(tcx, hint, min, max); + let (discr, signed) = Integer::repr_discr(tcx, ty, hint, min, max); return success(CEnum { discr: discr, signed: signed, + non_zero: non_zero, min: min as u64, max: max as u64 }); } - if def.variants.len() == 1 { + if !def.is_enum() || def.variants.len() == 1 && hint == attr::ReprAny { // Struct, or union, or univariant enum equivalent to a struct. // (Typechecking will reject discriminant-sizing attrs.) - assert!(!def.is_enum() || hint == attr::ReprAny); + let fields = def.variants[0].fields.iter().map(|field| { field.ty(tcx, substs).layout(infcx) }); @@ -1022,19 +1073,17 @@ impl<'a, 'gcx, 'tcx> Layout { // FIXME(eddyb) should take advantage of a newtype. if path == &[0] && variants[discr].len() == 1 { - match *variants[discr][0].layout(infcx)? { - Scalar { value, .. } => { - return success(RawNullablePointer { - nndiscr: discr as u64, - value: value - }); - } - _ => { - bug!("Layout::compute: `{}`'s non-zero \ - `{}` field not scalar?!", - ty, variants[discr][0]) - } - } + let value = match *variants[discr][0].layout(infcx)? { + Scalar { value, .. } => value, + CEnum { discr, .. } => Int(discr), + _ => bug!("Layout::compute: `{}`'s non-zero \ + `{}` field not scalar?!", + ty, variants[discr][0]) + }; + return success(RawNullablePointer { + nndiscr: discr as u64, + value: value, + }); } path.push(0); // For GEP through a pointer. @@ -1052,7 +1101,7 @@ impl<'a, 'gcx, 'tcx> Layout { // The general case. let discr_max = (variants.len() - 1) as i64; assert!(discr_max >= 0); - let (min_ity, _) = Integer::repr_discr(tcx, hint, 0, discr_max); + let (min_ity, _) = Integer::repr_discr(tcx, ty, hint, 0, discr_max); let mut align = dl.aggregate_align; let mut size = Size::from_bytes(0); @@ -1080,7 +1129,7 @@ impl<'a, 'gcx, 'tcx> Layout { }); let mut st = Struct::new(dl, false); st.extend(dl, discr.iter().map(Ok).chain(fields), ty)?; - size = cmp::max(size, st.min_size()); + size = cmp::max(size, st.min_size); align = align.max(st.align); Ok(st) }).collect::, _>>()?; @@ -1102,20 +1151,7 @@ impl<'a, 'gcx, 'tcx> Layout { // won't be so conservative. 
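The `Struct` layout changes above replace `offset_after_field` with per-field `offsets` plus an explicit `min_size`, computed by aligning a running offset up before each field is placed. A sketch of that bookkeeping with sizes and alignments passed in directly; the field list and the power-of-two alignment assumption are illustrative:

```rust
/// Returns (field offsets, size before tail padding, size with tail padding).
fn layout(fields: &[(u64 /* size */, u64 /* align */)]) -> (Vec<u64>, u64, u64) {
    let mut offsets = Vec::with_capacity(fields.len());
    let mut offset = 0u64;
    let mut align = 1u64;

    for &(size, field_align) in fields {
        align = align.max(field_align);
        // Round the offset up to the field's alignment (a power of two).
        offset = (offset + field_align - 1) & !(field_align - 1);
        offsets.push(offset);
        offset += size;
    }

    let min_size = offset;                            // like `Struct::min_size`
    let stride = (offset + align - 1) & !(align - 1); // like `Struct::stride()`
    (offsets, min_size, stride)
}

fn main() {
    // struct { a: u8, b: u32, c: u16 }
    let (offsets, min_size, stride) = layout(&[(1, 1), (4, 4), (2, 2)]);
    assert_eq!(offsets, vec![0, 4, 8]);
    assert_eq!(min_size, 10);
    assert_eq!(stride, 12);
}
```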
// Use the initial field alignment - let wanted = start_align.abi(); - let mut ity = min_ity; - for &candidate in &[I16, I32, I64] { - let ty = Int(candidate); - if wanted == ty.align(dl).abi() && wanted == ty.size(dl).bytes() { - ity = candidate; - break; - } - } - - // FIXME(eddyb) conservative only to avoid diverging from trans::adt. - if align.abi() != start_align.abi() { - ity = min_ity; - } + let mut ity = Integer::for_abi_align(dl, start_align).unwrap_or(min_ity); // If the alignment is not larger than the chosen discriminant size, // don't use the alignment as the final size. @@ -1126,12 +1162,16 @@ impl<'a, 'gcx, 'tcx> Layout { let old_ity_size = Int(min_ity).size(dl); let new_ity_size = Int(ity).size(dl); for variant in &mut variants { - for offset in &mut variant.offset_after_field { + for offset in &mut variant.offsets[1..] { if *offset > old_ity_size { break; } *offset = new_ity_size; } + // We might be making the struct larger. + if variant.min_size <= old_ity_size { + variant.min_size = new_ity_size; + } } } diff --git a/src/librustc/ty/maps.rs b/src/librustc/ty/maps.rs index 5772d16c6d43d..cad87081a93b4 100644 --- a/src/librustc/ty/maps.rs +++ b/src/librustc/ty/maps.rs @@ -10,7 +10,10 @@ use dep_graph::{DepNode, DepTrackingMapConfig}; use hir::def_id::DefId; +use mir; use ty::{self, Ty}; + +use std::cell::RefCell; use std::marker::PhantomData; use std::rc::Rc; use syntax::{attr, ast}; @@ -34,13 +37,13 @@ dep_map_ty! { Tcache: ItemSignature(DefId) -> Ty<'tcx> } dep_map_ty! { Generics: ItemSignature(DefId) -> &'tcx ty::Generics<'tcx> } dep_map_ty! { Predicates: ItemSignature(DefId) -> ty::GenericPredicates<'tcx> } dep_map_ty! { SuperPredicates: ItemSignature(DefId) -> ty::GenericPredicates<'tcx> } -dep_map_ty! { TraitItemDefIds: TraitItemDefIds(DefId) -> Rc> } +dep_map_ty! { ImplOrTraitItemDefIds: ImplOrTraitItemDefIds(DefId) -> Rc> } dep_map_ty! { ImplTraitRefs: ItemSignature(DefId) -> Option> } dep_map_ty! { TraitDefs: ItemSignature(DefId) -> &'tcx ty::TraitDef<'tcx> } dep_map_ty! { AdtDefs: ItemSignature(DefId) -> ty::AdtDefMaster<'tcx> } dep_map_ty! { ItemVariances: ItemSignature(DefId) -> Rc> } dep_map_ty! { InherentImpls: InherentImpls(DefId) -> Vec } -dep_map_ty! { ImplItems: ImplItems(DefId) -> Vec } dep_map_ty! { TraitItems: TraitItems(DefId) -> Rc>> } dep_map_ty! { ReprHints: ReprHints(DefId) -> Rc> } dep_map_ty! { InlinedClosures: Hir(DefId) -> ast::NodeId } +dep_map_ty! { Mir: Mir(DefId) -> &'tcx RefCell> } diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 14eb2fb7914c3..588857e557c22 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
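The `non_zero` tracking added to `CEnum` and consumed by `RawNullablePointer` above is the same niche idea that stable Rust exposes through references and the later `NonZero*` types: a payload that can never be all-zero bits lets `Option` reuse 0 as its `None` encoding and needs no separate discriminant. The sizes asserted below are documented guarantees for references and `NonZeroU32`:

```rust
use std::mem::size_of;
use std::num::NonZeroU32;

fn main() {
    // A reference is never null, so `None` can be encoded as the null pattern.
    assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
    // Same for a known-nonzero integer.
    assert_eq!(size_of::<Option<NonZeroU32>>(), size_of::<u32>());
    // Without a niche, an extra discriminant (plus padding) is required.
    assert!(size_of::<Option<u32>>() > size_of::<u32>());
}
```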
-pub use self::ImplOrTraitItemId::*; pub use self::Variance::*; pub use self::DtorKind::*; pub use self::ImplOrTraitItemContainer::*; @@ -21,11 +20,11 @@ pub use self::fold::TypeFoldable; use dep_graph::{self, DepNode}; use hir::map as ast_map; use middle; -use middle::cstore::{self, LOCAL_CRATE}; -use hir::def::{Def, PathResolution, ExportMap}; -use hir::def_id::DefId; +use hir::def::{Def, CtorKind, PathResolution, ExportMap}; +use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem}; use middle::region::{CodeExtent, ROOT_CODE_EXTENT}; +use mir::Mir; use traits; use ty; use ty::subst::{Subst, Substs}; @@ -34,23 +33,24 @@ use util::common::MemoizationMap; use util::nodemap::NodeSet; use util::nodemap::FnvHashMap; -use serialize::{Encodable, Encoder, Decodable, Decoder}; +use serialize::{self, Encodable, Encoder}; use std::borrow::Cow; -use std::cell::Cell; +use std::cell::{Cell, RefCell, Ref}; use std::hash::{Hash, Hasher}; use std::iter; +use std::ops::Deref; use std::rc::Rc; use std::slice; use std::vec::IntoIter; -use syntax::ast::{self, CrateNum, Name, NodeId}; +use std::mem; +use syntax::ast::{self, Name, NodeId}; use syntax::attr; -use syntax::parse::token::InternedString; +use syntax::parse::token::{self, InternedString}; use syntax_pos::{DUMMY_SP, Span}; use rustc_const_math::ConstInt; use hir; -use hir::{ItemImpl, ItemTrait, PatKind}; use hir::intravisit::Visitor; pub use self::sty::{Binder, DebruijnIndex}; @@ -191,23 +191,11 @@ pub enum ImplOrTraitItem<'tcx> { } impl<'tcx> ImplOrTraitItem<'tcx> { - fn id(&self) -> ImplOrTraitItemId { - match *self { - ConstTraitItem(ref associated_const) => { - ConstTraitItemId(associated_const.def_id) - } - MethodTraitItem(ref method) => MethodTraitItemId(method.def_id), - TypeTraitItem(ref associated_type) => { - TypeTraitItemId(associated_type.def_id) - } - } - } - pub fn def(&self) -> Def { match *self { ConstTraitItem(ref associated_const) => Def::AssociatedConst(associated_const.def_id), MethodTraitItem(ref method) => Def::Method(method.def_id), - TypeTraitItem(ref ty) => Def::AssociatedTy(ty.container.id(), ty.def_id), + TypeTraitItem(ref ty) => Def::AssociatedTy(ty.def_id), } } @@ -251,24 +239,7 @@ impl<'tcx> ImplOrTraitItem<'tcx> { } } -#[derive(Clone, Copy, Debug)] -pub enum ImplOrTraitItemId { - ConstTraitItemId(DefId), - MethodTraitItemId(DefId), - TypeTraitItemId(DefId), -} - -impl ImplOrTraitItemId { - pub fn def_id(&self) -> DefId { - match *self { - ConstTraitItemId(def_id) => def_id, - MethodTraitItemId(def_id) => def_id, - TypeTraitItemId(def_id) => def_id, - } - } -} - -#[derive(Clone, Debug, PartialEq, Eq, Copy)] +#[derive(Clone, Debug, PartialEq, Eq, Copy, RustcEncodable, RustcDecodable)] pub enum Visibility { /// Visible everywhere (including in other crates). 
Public, @@ -346,34 +317,12 @@ pub struct Method<'tcx> { pub explicit_self: ExplicitSelfCategory<'tcx>, pub vis: Visibility, pub defaultness: hir::Defaultness, + pub has_body: bool, pub def_id: DefId, pub container: ImplOrTraitItemContainer, } impl<'tcx> Method<'tcx> { - pub fn new(name: Name, - generics: &'tcx ty::Generics<'tcx>, - predicates: GenericPredicates<'tcx>, - fty: &'tcx BareFnTy<'tcx>, - explicit_self: ExplicitSelfCategory<'tcx>, - vis: Visibility, - defaultness: hir::Defaultness, - def_id: DefId, - container: ImplOrTraitItemContainer) - -> Method<'tcx> { - Method { - name: name, - generics: generics, - predicates: predicates, - fty: fty, - explicit_self: explicit_self, - vis: vis, - defaultness: defaultness, - def_id: def_id, - container: container, - } - } - pub fn container_id(&self) -> DefId { match self.container { TraitContainer(id) => id, @@ -425,7 +374,7 @@ pub enum Variance { Bivariant, // T <: T -- e.g., unused type parameter } -#[derive(Clone, Copy, Debug)] +#[derive(Clone, Copy, Debug, RustcDecodable, RustcEncodable)] pub struct MethodCallee<'tcx> { /// Impl method ID, for inherent methods, or trait method ID, otherwise. pub def_id: DefId, @@ -515,6 +464,10 @@ bitflags! { // Only set for TyInfer other than Fresh. const KEEP_IN_LOCAL_TCX = 1 << 11, + // Is there a projection that does not involve a bound region? + // Currently we can't normalize projections w/ bound regions. + const HAS_NORMALIZABLE_PROJECTION = 1 << 12, + const NEEDS_SUBST = TypeFlags::HAS_PARAMS.bits | TypeFlags::HAS_SELF.bits | TypeFlags::HAS_RE_EARLY_BOUND.bits, @@ -526,6 +479,7 @@ bitflags! { TypeFlags::HAS_SELF.bits | TypeFlags::HAS_TY_INFER.bits | TypeFlags::HAS_RE_INFER.bits | + TypeFlags::HAS_RE_SKOL.bits | TypeFlags::HAS_RE_EARLY_BOUND.bits | TypeFlags::HAS_FREE_REGIONS.bits | TypeFlags::HAS_TY_ERR.bits | @@ -567,23 +521,55 @@ impl<'tcx> Hash for TyS<'tcx> { pub type Ty<'tcx> = &'tcx TyS<'tcx>; -impl<'tcx> Encodable for Ty<'tcx> { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - cstore::tls::with_encoding_context(s, |ecx, rbml_w| { - ecx.encode_ty(rbml_w, *self); - Ok(()) - }) +impl<'tcx> serialize::UseSpecializedEncodable for Ty<'tcx> {} +impl<'tcx> serialize::UseSpecializedDecodable for Ty<'tcx> {} + +/// A wrapper for slices with the additional invariant +/// that the slice is interned and no other slice with +/// the same contents can exist in the same context. +/// This means we can use pointer + length for both +/// equality comparisons and hashing. +#[derive(Debug, RustcEncodable)] +pub struct Slice([T]); + +impl PartialEq for Slice { + #[inline] + fn eq(&self, other: &Slice) -> bool { + (&self.0 as *const [T]) == (&other.0 as *const [T]) } } +impl Eq for Slice {} -impl<'tcx> Decodable for Ty<'tcx> { - fn decode(d: &mut D) -> Result, D::Error> { - cstore::tls::with_decoding_context(d, |dcx, rbml_r| { - Ok(dcx.decode_ty(rbml_r)) - }) +impl Hash for Slice { + fn hash(&self, s: &mut H) { + (self.as_ptr(), self.len()).hash(s) + } +} + +impl Deref for Slice { + type Target = [T]; + fn deref(&self) -> &[T] { + &self.0 + } +} + +impl<'a, T> IntoIterator for &'a Slice { + type Item = &'a T; + type IntoIter = <&'a [T] as IntoIterator>::IntoIter; + fn into_iter(self) -> Self::IntoIter { + self[..].iter() } } +impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Slice> {} + +impl Slice { + pub fn empty<'a>() -> &'a Slice { + unsafe { + mem::transmute(slice::from_raw_parts(0x1 as *const T, 0)) + } + } +} /// Upvars do not get their own node-id. 
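The new `Slice<T>` wrapper above makes equality and hashing of interned lists O(1) by comparing only pointer and length, and `Slice::empty()` hands out one shared empty slice built from a dangling pointer. A stand-alone sketch of both tricks, using `NonNull::dangling()` rather than the raw `0x1` constant as the safer modern spelling; all names are illustrative:

```rust
use std::collections::HashSet;
use std::hash::{Hash, Hasher};
use std::ptr::NonNull;

/// Stand-in for `Slice<T>`: the wrapped slice is assumed to be interned,
/// so equality and hashing may use only the pointer and length.
#[derive(Clone, Copy, Debug)]
struct InternedSlice<'a, T>(&'a [T]);

impl<'a, T> InternedSlice<'a, T> {
    /// One canonical empty slice per element type: an empty slice never reads
    /// its data pointer, so a well-aligned dangling pointer is sufficient.
    fn empty() -> InternedSlice<'static, T> {
        InternedSlice(unsafe {
            std::slice::from_raw_parts(NonNull::<T>::dangling().as_ptr(), 0)
        })
    }
}

impl<'a, T> PartialEq for InternedSlice<'a, T> {
    fn eq(&self, other: &Self) -> bool {
        // Compares the fat pointer (address + length); sound only because the
        // interner stores a single copy per distinct contents.
        std::ptr::eq(self.0, other.0)
    }
}
impl<'a, T> Eq for InternedSlice<'a, T> {}

impl<'a, T> Hash for InternedSlice<'a, T> {
    fn hash<H: Hasher>(&self, state: &mut H) {
        (self.0.as_ptr(), self.0.len()).hash(state);
    }
}

fn main() {
    let arena = vec![1u32, 2, 3]; // stand-in for the interner's arena
    let a = InternedSlice(&arena[..]);
    let b = a;
    assert_eq!(a, b); // O(1): no element-by-element comparison

    // Empty lists need no interner entry at all.
    assert_eq!(InternedSlice::<u32>::empty(), InternedSlice::<u32>::empty());

    let mut set = HashSet::new();
    set.insert(a);
    assert!(set.contains(&b));
}
```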
Instead, we use the pair of /// the original var id (that is, the root variable that is referenced @@ -642,7 +628,7 @@ pub enum BorrowKind { /// Information describing the capture of an upvar. This is computed /// during `typeck`, specifically by `regionck`. -#[derive(PartialEq, Clone, Debug, Copy)] +#[derive(PartialEq, Clone, Debug, Copy, RustcEncodable, RustcDecodable)] pub enum UpvarCapture<'tcx> { /// Upvar is captured by value. This is always true when the /// closure is labeled `move`, but can also be true in other cases @@ -653,7 +639,7 @@ pub enum UpvarCapture<'tcx> { ByRef(UpvarBorrow<'tcx>), } -#[derive(PartialEq, Clone, Copy)] +#[derive(PartialEq, Clone, Copy, RustcEncodable, RustcDecodable)] pub struct UpvarBorrow<'tcx> { /// The kind of borrow: by-ref upvars have access to shared /// immutable borrows, which are not part of the normal language @@ -684,7 +670,7 @@ pub enum IntVarValue { /// from `T:'a` annotations appearing in the type definition. If /// this is `None`, then the default is inherited from the /// surrounding context. See RFC #599 for details. -#[derive(Copy, Clone)] +#[derive(Copy, Clone, RustcEncodable, RustcDecodable)] pub enum ObjectLifetimeDefault<'tcx> { /// Require an explicit annotation. Occurs when multiple /// `T:'a` constraints are found. @@ -697,7 +683,7 @@ pub enum ObjectLifetimeDefault<'tcx> { Specific(&'tcx Region), } -#[derive(Clone)] +#[derive(Clone, RustcEncodable, RustcDecodable)] pub struct TypeParameterDef<'tcx> { pub name: Name, pub def_id: DefId, @@ -705,14 +691,24 @@ pub struct TypeParameterDef<'tcx> { pub default_def_id: DefId, // for use in error reporing about defaults pub default: Option>, pub object_lifetime_default: ObjectLifetimeDefault<'tcx>, + + /// `pure_wrt_drop`, set by the (unsafe) `#[may_dangle]` attribute + /// on generic parameter `T`, asserts data behind the parameter + /// `T` won't be accessed during the parent type's `Drop` impl. + pub pure_wrt_drop: bool, } -#[derive(Clone)] +#[derive(Clone, RustcEncodable, RustcDecodable)] pub struct RegionParameterDef<'tcx> { pub name: Name, pub def_id: DefId, pub index: u32, pub bounds: Vec<&'tcx ty::Region>, + + /// `pure_wrt_drop`, set by the (unsafe) `#[may_dangle]` attribute + /// on generic parameter `'a`, asserts data of lifetime `'a` + /// won't be accessed during the parent type's `Drop` impl. + pub pure_wrt_drop: bool, } impl<'tcx> RegionParameterDef<'tcx> { @@ -735,7 +731,7 @@ impl<'tcx> RegionParameterDef<'tcx> { /// Information about the formal type/lifetime parameters associated /// with an item or method. Analogous to hir::Generics. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, RustcEncodable, RustcDecodable)] pub struct Generics<'tcx> { pub parent: Option, pub parent_regions: u32, @@ -757,6 +753,14 @@ impl<'tcx> Generics<'tcx> { pub fn count(&self) -> usize { self.parent_count() + self.own_count() } + + pub fn region_param(&self, param: &EarlyBoundRegion) -> &RegionParameterDef<'tcx> { + &self.regions[param.index as usize - self.has_self as usize] + } + + pub fn type_param(&self, param: &ParamTy) -> &TypeParameterDef<'tcx> { + &self.types[param.idx as usize - self.has_self as usize - self.regions.len()] + } } /// Bounds on generics. 
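The `pure_wrt_drop` fields above record the unstable `#[may_dangle]` attribute, which asserts that a destructor never touches data behind the marked parameter, so drop-check may let that data dangle when the value is dropped. A nightly-only sketch adapted from the Rustonomicon's drop-check example; it requires `#![feature(dropck_eyepatch)]`:

```rust
#![feature(dropck_eyepatch)] // nightly-only

struct Inspector<'a>(&'a u8, &'static str);

unsafe impl<#[may_dangle] 'a> Drop for Inspector<'a> {
    fn drop(&mut self) {
        // Only the 'static field is read; the possibly-dangling `&'a u8` is
        // never touched, which is the promise the attribute records.
        println!("Inspector(_, {}) knows when *not* to inspect.", self.1);
    }
}

fn main() {
    let (inspector, days);
    days = Box::new(1u8);
    inspector = Inspector(&days, "gadget");
    // Without `#[may_dangle]`, drop-check rejects this program because `days`
    // may be freed before `inspector`'s destructor runs.
}
```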
@@ -766,6 +770,9 @@ pub struct GenericPredicates<'tcx> { pub predicates: Vec>, } +impl<'tcx> serialize::UseSpecializedEncodable for GenericPredicates<'tcx> {} +impl<'tcx> serialize::UseSpecializedDecodable for GenericPredicates<'tcx> {} + impl<'a, 'gcx, 'tcx> GenericPredicates<'tcx> { pub fn instantiate(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &Substs<'tcx>) -> InstantiatedPredicates<'tcx> { @@ -802,7 +809,7 @@ impl<'a, 'gcx, 'tcx> GenericPredicates<'tcx> { } } -#[derive(Clone, PartialEq, Eq, Hash)] +#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum Predicate<'tcx> { /// Corresponds to `where Foo : Bar`. `Foo` here would be /// the `Self` type of the trait reference and `A`, `B`, and `C` @@ -926,7 +933,7 @@ impl<'a, 'gcx, 'tcx> Predicate<'tcx> { } } -#[derive(Clone, PartialEq, Eq, Hash)] +#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct TraitPredicate<'tcx> { pub trait_ref: TraitRef<'tcx> } @@ -983,11 +990,11 @@ impl<'tcx> PolyTraitPredicate<'tcx> { } } -#[derive(Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct EquatePredicate<'tcx>(pub Ty<'tcx>, pub Ty<'tcx>); // `0 == 1` pub type PolyEquatePredicate<'tcx> = ty::Binder>; -#[derive(Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct OutlivesPredicate(pub A, pub B); // `A : B` pub type PolyOutlivesPredicate = ty::Binder>; pub type PolyRegionOutlivesPredicate<'tcx> = PolyOutlivesPredicate<&'tcx ty::Region, @@ -1006,7 +1013,7 @@ pub type PolyTypeOutlivesPredicate<'tcx> = PolyOutlivesPredicate, &'tcx /// equality between arbitrary types. Processing an instance of Form /// #2 eventually yields one of these `ProjectionPredicate` /// instances to normalize the LHS. -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct ProjectionPredicate<'tcx> { pub projection_ty: ProjectionTy<'tcx>, pub ty: Ty<'tcx>, @@ -1242,6 +1249,12 @@ pub struct ParameterEnvironment<'tcx> { /// regions don't have this implicit scope and instead introduce /// relationships in the environment. pub free_id_outlive: CodeExtent, + + /// A cache for `moves_by_default`. 
+ pub is_copy_cache: RefCell, bool>>, + + /// A cache for `type_is_sized` + pub is_sized_cache: RefCell, bool>>, } impl<'a, 'tcx> ParameterEnvironment<'tcx> { @@ -1254,6 +1267,8 @@ impl<'a, 'tcx> ParameterEnvironment<'tcx> { implicit_region_bound: self.implicit_region_bound, caller_bounds: caller_bounds, free_id_outlive: self.free_id_outlive, + is_copy_cache: RefCell::new(FnvHashMap()), + is_sized_cache: RefCell::new(FnvHashMap()), } } @@ -1442,7 +1457,7 @@ pub struct VariantDefData<'tcx, 'container: 'tcx> { pub name: Name, // struct's name if this is a struct pub disr_val: Disr, pub fields: Vec>, - pub kind: VariantKind, + pub ctor_kind: CtorKind, } pub struct FieldDefData<'tcx, 'container: 'tcx> { @@ -1496,40 +1511,17 @@ impl<'tcx, 'container> Hash for AdtDefData<'tcx, 'container> { } } -impl<'tcx> Encodable for AdtDef<'tcx> { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { +impl<'tcx> serialize::UseSpecializedEncodable for AdtDef<'tcx> { + fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { self.did.encode(s) } } -impl<'tcx> Decodable for AdtDef<'tcx> { - fn decode(d: &mut D) -> Result, D::Error> { - let def_id: DefId = Decodable::decode(d)?; - - cstore::tls::with_decoding_context(d, |dcx, _| { - let def_id = dcx.translate_def_id(def_id); - Ok(dcx.tcx().lookup_adt_def(def_id)) - }) - } -} - +impl<'tcx> serialize::UseSpecializedDecodable for AdtDef<'tcx> {} #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum AdtKind { Struct, Union, Enum } -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] -pub enum VariantKind { Struct, Tuple, Unit } - -impl VariantKind { - pub fn from_variant_data(vdata: &hir::VariantData) -> Self { - match *vdata { - hir::VariantData::Struct(..) => VariantKind::Struct, - hir::VariantData::Tuple(..) => VariantKind::Tuple, - hir::VariantData::Unit(..) => VariantKind::Unit, - } - } -} - impl<'a, 'gcx, 'tcx, 'container> AdtDefData<'gcx, 'container> { fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, did: DefId, @@ -1705,9 +1697,9 @@ impl<'a, 'gcx, 'tcx, 'container> AdtDefData<'gcx, 'container> { pub fn variant_of_def(&self, def: Def) -> &VariantDefData<'gcx, 'container> { match def { - Def::Variant(_, vid) => self.variant_with_id(vid), - Def::Struct(..) | Def::Union(..) | - Def::TyAlias(..) | Def::AssociatedTy(..) => self.struct_variant(), + Def::Variant(vid) | Def::VariantCtor(vid, ..) => self.variant_with_id(vid), + Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) | + Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => self.struct_variant(), _ => bug!("unexpected def {:?} in variant_of_def", def) } } @@ -1816,7 +1808,7 @@ impl<'a, 'tcx> AdtDefData<'tcx, 'tcx> { _ if tys.references_error() => tcx.types.err, 0 => tcx.types.bool, 1 => tys[0], - _ => tcx.mk_tup(tys) + _ => tcx.intern_tup(&tys[..]) }; match self.sized_constraint.get(dep_node()) { @@ -1892,7 +1884,7 @@ impl<'a, 'tcx> AdtDefData<'tcx, 'tcx> { }; let sized_predicate = Binder(TraitRef { def_id: sized_trait, - substs: Substs::new_trait(tcx, ty, &[]) + substs: tcx.mk_substs_trait(ty, &[]) }).to_predicate(); let predicates = tcx.lookup_predicates(self.did).predicates; if predicates.into_iter().any(|p| p == sized_predicate) { @@ -1965,7 +1957,7 @@ impl<'a, 'gcx, 'tcx, 'container> FieldDefData<'tcx, 'container> { /// Records the substitutions used to translate the polytype for an /// item into the monotype of an item reference. 
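The new `is_copy_cache`/`is_sized_cache` fields above are per-environment memoization tables. A generic sketch of that `RefCell`-plus-map memoization pattern, without the dependency-graph bookkeeping of the real `DepTrackingMap`s; the `Cache` type here is made up:

```rust
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

/// Compute once per key, then hand out cheap `Rc` clones of the cached value.
struct Cache<K, V> {
    map: RefCell<HashMap<K, Rc<V>>>,
}

impl<K: std::hash::Hash + Eq, V> Cache<K, V> {
    fn new() -> Self {
        Cache { map: RefCell::new(HashMap::new()) }
    }

    fn memoize(&self, key: K, compute: impl FnOnce() -> V) -> Rc<V> {
        if let Some(v) = self.map.borrow().get(&key) {
            return v.clone();
        }
        let v = Rc::new(compute());
        self.map.borrow_mut().insert(key, v.clone());
        v
    }
}

fn main() {
    let cache: Cache<u32, String> = Cache::new();
    let a = cache.memoize(1, || "expensive".to_string());
    let b = cache.memoize(1, || unreachable!("already cached"));
    assert!(Rc::ptr_eq(&a, &b));
}
```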
-#[derive(Clone)] +#[derive(Clone, RustcEncodable, RustcDecodable)] pub struct ItemSubsts<'tcx> { pub substs: &'tcx Substs<'tcx>, } @@ -2143,7 +2135,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn node_id_item_substs(self, id: NodeId) -> ItemSubsts<'gcx> { match self.tables.borrow().item_substs.get(&id) { None => ItemSubsts { - substs: Substs::empty(self.global_tcx()) + substs: self.global_tcx().intern_substs(&[]) }, Some(ts) => ts.clone(), } @@ -2222,7 +2214,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { match self.map.find(id) { Some(ast_map::NodeLocal(pat)) => { match pat.node { - PatKind::Binding(_, ref path1, _) => path1.node.as_str(), + hir::PatKind::Binding(_, ref path1, _) => path1.node.as_str(), _ => { bug!("Variable id {} maps to {:?}, not local", id, pat); }, @@ -2264,7 +2256,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { hir::ExprClosure(..) | hir::ExprBlock(..) | hir::ExprRepeat(..) | - hir::ExprVec(..) | + hir::ExprArray(..) | hir::ExprBreak(..) | hir::ExprAgain(..) | hir::ExprRet(..) | @@ -2285,84 +2277,19 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } pub fn provided_trait_methods(self, id: DefId) -> Vec>> { - if let Some(id) = self.map.as_local_node_id(id) { - if let ItemTrait(.., ref ms) = self.map.expect_item(id).node { - ms.iter().filter_map(|ti| { - if let hir::MethodTraitItem(_, Some(_)) = ti.node { - match self.impl_or_trait_item(self.map.local_def_id(ti.id)) { - MethodTraitItem(m) => Some(m), - _ => { - bug!("provided_trait_methods(): \ - non-method item found from \ - looking up provided method?!") - } - } - } else { - None - } - }).collect() - } else { - bug!("provided_trait_methods: `{:?}` is not a trait", id) + self.impl_or_trait_items(id).iter().filter_map(|&def_id| { + match self.impl_or_trait_item(def_id) { + MethodTraitItem(ref m) if m.has_body => Some(m.clone()), + _ => None } - } else { - self.sess.cstore.provided_trait_methods(self.global_tcx(), id) - } + }).collect() } - pub fn associated_consts(self, id: DefId) -> Vec>> { + pub fn trait_impl_polarity(self, id: DefId) -> hir::ImplPolarity { if let Some(id) = self.map.as_local_node_id(id) { match self.map.expect_item(id).node { - ItemTrait(.., ref tis) => { - tis.iter().filter_map(|ti| { - if let hir::ConstTraitItem(..) = ti.node { - match self.impl_or_trait_item(self.map.local_def_id(ti.id)) { - ConstTraitItem(ac) => Some(ac), - _ => { - bug!("associated_consts(): \ - non-const item found from \ - looking up a constant?!") - } - } - } else { - None - } - }).collect() - } - ItemImpl(.., ref iis) => { - iis.iter().filter_map(|ii| { - if let hir::ImplItemKind::Const(..) = ii.node { - match self.impl_or_trait_item(self.map.local_def_id(ii.id)) { - ConstTraitItem(ac) => Some(ac), - _ => { - bug!("associated_consts(): \ - non-const item found from \ - looking up a constant?!") - } - } - } else { - None - } - }).collect() - } - _ => { - bug!("associated_consts: `{:?}` is not a trait or impl", id) - } - } - } else { - self.sess.cstore.associated_consts(self.global_tcx(), id) - } - } - - pub fn trait_impl_polarity(self, id: DefId) -> Option { - if let Some(id) = self.map.as_local_node_id(id) { - match self.map.find(id) { - Some(ast_map::NodeItem(item)) => { - match item.node { - hir::ItemImpl(_, polarity, ..) => Some(polarity), - _ => None - } - } - _ => None + hir::ItemImpl(_, polarity, ..) 
=> polarity, + ref item => bug!("trait_impl_polarity: {:?} not an impl", item) } } else { self.sess.cstore.impl_polarity(id) @@ -2395,10 +2322,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { .expect("missing ImplOrTraitItem in metadata")) } - pub fn trait_item_def_ids(self, id: DefId) -> Rc> { + pub fn impl_or_trait_items(self, id: DefId) -> Rc> { lookup_locally_or_in_crate_store( - "trait_item_def_ids", id, &self.trait_item_def_ids, - || Rc::new(self.sess.cstore.trait_item_def_ids(id))) + "impl_or_trait_items", id, &self.impl_or_trait_item_def_ids, + || Rc::new(self.sess.cstore.impl_or_trait_items(id))) } /// Returns the trait-ref corresponding to a given impl, or None if it is @@ -2409,20 +2336,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { || self.sess.cstore.impl_trait_ref(self.global_tcx(), id)) } - /// Returns whether this DefId refers to an impl - pub fn is_impl(self, id: DefId) -> bool { - if let Some(id) = self.map.as_local_node_id(id) { - if let Some(ast_map::NodeItem( - &hir::Item { node: hir::ItemImpl(..), .. })) = self.map.find(id) { - true - } else { - false - } - } else { - self.sess.cstore.is_impl(id) - } - } - /// Returns a path resolution for node id if it exists, panics otherwise. pub fn expect_resolution(self, id: NodeId) -> PathResolution { *self.def_map.borrow().get(&id).expect("no def-map entry for node id") @@ -2443,12 +2356,17 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // or variant or their constructors, panics otherwise. pub fn expect_variant_def(self, def: Def) -> VariantDef<'tcx> { match def { - Def::Variant(enum_did, did) => { + Def::Variant(did) | Def::VariantCtor(did, ..) => { + let enum_did = self.parent_def_id(did).unwrap(); self.lookup_adt_def(enum_did).variant_with_id(did) } Def::Struct(did) | Def::Union(did) => { self.lookup_adt_def(did).struct_variant() } + Def::StructCtor(ctor_did, ..) => { + let did = self.parent_def_id(ctor_did).expect("struct ctor has no parent"); + self.lookup_adt_def(did).struct_variant() + } _ => bug!("expect_variant_def used with unexpected def {:?}", def) } } @@ -2504,8 +2422,21 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn item_name(self, id: DefId) -> ast::Name { if let Some(id) = self.map.as_local_node_id(id) { self.map.name(id) + } else if id.index == CRATE_DEF_INDEX { + token::intern(&self.sess.cstore.original_crate_name(id.krate)) } else { - self.sess.cstore.item_name(id) + let def_key = self.sess.cstore.def_key(id); + // The name of a StructCtor is that of its struct parent. + if let ast_map::DefPathData::StructCtor = def_key.disambiguated_data.data { + self.item_name(DefId { + krate: id.krate, + index: def_key.parent.unwrap() + }) + } else { + def_key.disambiguated_data.data.get_opt_name().unwrap_or_else(|| { + bug!("item_name: no name for {:?}", self.def_path(id)); + }) + } } } @@ -2572,7 +2503,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn lookup_generics(self, did: DefId) -> &'gcx Generics<'gcx> { lookup_locally_or_in_crate_store( "generics", did, &self.generics, - || self.sess.cstore.item_generics(self.global_tcx(), did)) + || self.alloc_generics(self.sess.cstore.item_generics(self.global_tcx(), did))) } /// Given the did of an item, returns its full set of predicates. @@ -2589,6 +2520,19 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { || self.sess.cstore.item_super_predicates(self.global_tcx(), did)) } + /// Given the did of an item, returns its MIR, borrowed immutably. 
+ pub fn item_mir(self, did: DefId) -> Ref<'gcx, Mir<'gcx>> { + lookup_locally_or_in_crate_store("mir_map", did, &self.mir_map, || { + let mir = self.sess.cstore.get_item_mir(self.global_tcx(), did); + let mir = self.alloc_mir(mir); + + // Perma-borrow MIR from extern crates to prevent mutation. + mem::forget(mir.borrow()); + + mir + }).borrow() + } + /// If `type_needs_drop` returns true, then `ty` is definitely /// non-copy and *might* have a destructor attached; if it returns /// false, then `ty` definitely has no destructor (i.e. no drop glue). @@ -2685,10 +2629,10 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { debug!("populate_implementations_for_primitive_if_necessary: searching for {:?}", primitive_def_id); - let impl_items = self.sess.cstore.impl_items(primitive_def_id); + let impl_items = self.sess.cstore.impl_or_trait_items(primitive_def_id); // Store the implementation info. - self.impl_items.borrow_mut().insert(primitive_def_id, impl_items); + self.impl_or_trait_item_def_ids.borrow_mut().insert(primitive_def_id, Rc::new(impl_items)); self.populated_external_primitive_impls.borrow_mut().insert(primitive_def_id); } @@ -2714,8 +2658,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { let inherent_impls = self.sess.cstore.inherent_implementations_for_type(type_id); for &impl_def_id in &inherent_impls { // Store the implementation info. - let impl_items = self.sess.cstore.impl_items(impl_def_id); - self.impl_items.borrow_mut().insert(impl_def_id, impl_items); + let impl_items = self.sess.cstore.impl_or_trait_items(impl_def_id); + self.impl_or_trait_item_def_ids.borrow_mut().insert(impl_def_id, Rc::new(impl_items)); } self.inherent_impls.borrow_mut().insert(type_id, inherent_impls); @@ -2744,28 +2688,24 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { self.record_trait_has_default_impl(trait_id); } - for impl_def_id in self.sess.cstore.implementations_of_trait(trait_id) { - let impl_items = self.sess.cstore.impl_items(impl_def_id); + for impl_def_id in self.sess.cstore.implementations_of_trait(Some(trait_id)) { + let impl_items = self.sess.cstore.impl_or_trait_items(impl_def_id); let trait_ref = self.impl_trait_ref(impl_def_id).unwrap(); // Record the trait->implementation mapping. - if let Some(parent) = self.sess.cstore.impl_parent(impl_def_id) { - def.record_remote_impl(self, impl_def_id, trait_ref, parent); - } else { - def.record_remote_impl(self, impl_def_id, trait_ref, trait_id); - } + let parent = self.sess.cstore.impl_parent(impl_def_id).unwrap_or(trait_id); + def.record_remote_impl(self, impl_def_id, trait_ref, parent); // For any methods that use a default implementation, add them to // the map. This is a bit unfortunate. - for impl_item_def_id in &impl_items { - let method_def_id = impl_item_def_id.def_id(); + for &impl_item_def_id in &impl_items { // load impl items eagerly for convenience // FIXME: we may want to load these lazily - self.impl_or_trait_item(method_def_id); + self.impl_or_trait_item(impl_item_def_id); } // Store the implementation info. - self.impl_items.borrow_mut().insert(impl_def_id, impl_items); + self.impl_or_trait_item_def_ids.borrow_mut().insert(impl_def_id, Rc::new(impl_items)); } def.flags.set(def.flags.get() | TraitFlags::IMPLS_VALID); @@ -2854,19 +2794,19 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// is already that of the original trait method, then the return value is /// the same). /// Otherwise, return `None`. 
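`item_mir` above "perma-borrows" cross-crate MIR by calling `mem::forget` on a `Ref` guard: the `RefCell`'s borrow count never returns to zero, so any later attempt to borrow mutably panics while shared reads keep working. The trick in isolation, with a plain vector standing in for a decoded MIR body:

```rust
use std::cell::RefCell;
use std::mem;

/// Leak a shared borrow guard: the cell's borrow counter never returns to
/// zero, so every later `borrow_mut()` fails while `borrow()` still works.
fn freeze<T>(cell: &RefCell<T>) {
    mem::forget(cell.borrow());
}

fn main() {
    let mir = RefCell::new(vec![1, 2, 3]); // stand-in for cross-crate MIR
    freeze(&mir);
    assert_eq!(mir.borrow().len(), 3);      // reads still work
    assert!(mir.try_borrow_mut().is_err()); // mutation is now impossible
}
```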
- pub fn trait_item_of_item(self, def_id: DefId) -> Option { + pub fn trait_item_of_item(self, def_id: DefId) -> Option { let impl_or_trait_item = match self.impl_or_trait_items.borrow().get(&def_id) { Some(m) => m.clone(), None => return None, }; match impl_or_trait_item.container() { - TraitContainer(_) => Some(impl_or_trait_item.id()), + TraitContainer(_) => Some(impl_or_trait_item.def_id()), ImplContainer(def_id) => { self.trait_id_of_impl(def_id).and_then(|trait_did| { let name = impl_or_trait_item.name(); self.trait_items(trait_did).iter() .find(|item| item.name() == name) - .map(|item| item.id()) + .map(|item| item.def_id()) }) } } @@ -2880,10 +2820,12 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { // regions, so it shouldn't matter what we use for the free id let free_id_outlive = self.region_maps.node_extent(ast::DUMMY_NODE_ID); ty::ParameterEnvironment { - free_substs: Substs::empty(self), + free_substs: self.intern_substs(&[]), caller_bounds: Vec::new(), implicit_region_bound: self.mk_region(ty::ReEmpty), - free_id_outlive: free_id_outlive + free_id_outlive: free_id_outlive, + is_copy_cache: RefCell::new(FnvHashMap()), + is_sized_cache: RefCell::new(FnvHashMap()), } } @@ -2913,7 +2855,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// See `ParameterEnvironment` struct def'n for details. /// If you were using `free_id: NodeId`, you might try `self.region_maps.item_extent(free_id)` - /// for the `free_id_outlive` parameter. (But note that that is not always quite right.) + /// for the `free_id_outlive` parameter. (But note that this is not always quite right.) pub fn construct_parameter_environment(self, span: Span, def_id: DefId, @@ -2954,6 +2896,8 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { implicit_region_bound: tcx.mk_region(ty::ReScope(free_id_outlive)), caller_bounds: predicates, free_id_outlive: free_id_outlive, + is_copy_cache: RefCell::new(FnvHashMap()), + is_sized_cache: RefCell::new(FnvHashMap()), }; let cause = traits::ObligationCause::misc(span, free_id_outlive.node_id(&self.region_maps)); @@ -2998,7 +2942,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } /// The category of explicit self. -#[derive(Clone, Copy, Eq, PartialEq, Debug)] +#[derive(Clone, Copy, Eq, PartialEq, Debug, RustcEncodable, RustcDecodable)] pub enum ExplicitSelfCategory<'tcx> { Static, ByValue, diff --git a/src/librustc/ty/relate.rs b/src/librustc/ty/relate.rs index b10c731fe27d0..cb90e6392cf03 100644 --- a/src/librustc/ty/relate.rs +++ b/src/librustc/ty/relate.rs @@ -49,12 +49,6 @@ pub trait TypeRelation<'a, 'gcx: 'a+'tcx, 'tcx: 'a> : Sized { Relate::relate(self, a, b) } - /// Relete elements of two slices pairwise. - fn relate_zip>(&mut self, a: &[T], b: &[T]) -> RelateResult<'tcx, Vec> { - assert_eq!(a.len(), b.len()); - a.iter().zip(b).map(|(a, b)| self.relate(a, b)).collect() - } - /// Switch variance for the purpose of relating `a` and `b`. fn relate_with_variance>(&mut self, variance: ty::Variance, @@ -158,7 +152,7 @@ pub fn relate_substs<'a, 'gcx, 'tcx, R>(relation: &mut R, } }); - Substs::maybe_new(tcx, params) + Ok(tcx.mk_substs(params)?) } impl<'tcx> Relate<'tcx> for &'tcx ty::BareFnTy<'tcx> { @@ -489,10 +483,7 @@ pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, (&ty::TyTuple(as_), &ty::TyTuple(bs)) => { if as_.len() == bs.len() { - let ts = as_.iter().zip(bs) - .map(|(a, b)| relation.relate(a, b)) - .collect::>()?; - Ok(tcx.mk_tup(ts)) + Ok(tcx.mk_tup(as_.iter().zip(bs).map(|(a, b)| relation.relate(a, b)))?) 
} else if !(as_.is_empty() || bs.is_empty()) { Err(TypeError::TupleSize( expected_found(relation, &as_.len(), &bs.len()))) @@ -544,10 +535,11 @@ impl<'tcx> Relate<'tcx> for ty::ClosureSubsts<'tcx> { where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { let substs = relate_substs(relation, None, a.func_substs, b.func_substs)?; - let upvar_tys = relation.relate_zip(&a.upvar_tys, &b.upvar_tys)?; + assert_eq!(a.upvar_tys.len(), b.upvar_tys.len()); Ok(ty::ClosureSubsts { func_substs: substs, - upvar_tys: relation.tcx().mk_type_list(upvar_tys) + upvar_tys: relation.tcx().mk_type_list( + a.upvar_tys.iter().zip(b.upvar_tys).map(|(a, b)| relation.relate(a, b)))? }) } } diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs index 6c3dabfe113fe..3165edebf1a46 100644 --- a/src/librustc/ty/structural_impls.rs +++ b/src/librustc/ty/structural_impls.rs @@ -11,10 +11,10 @@ use infer::type_variable; use ty::{self, Lift, Ty, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; +use rustc_data_structures::accumulate_vec::AccumulateVec; use std::rc::Rc; use syntax::abi; -use syntax::ptr::P; use hir; @@ -297,13 +297,8 @@ impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> { UnsafetyMismatch(x) => UnsafetyMismatch(x), AbiMismatch(x) => AbiMismatch(x), Mutability => Mutability, - BoxMutability => BoxMutability, - PtrMutability => PtrMutability, - RefMutability => RefMutability, - VecMutability => VecMutability, TupleSize(x) => TupleSize(x), FixedArraySize(x) => FixedArraySize(x), - TyParamSize(x) => TyParamSize(x), ArgCount => ArgCount, RegionsDoesNotOutlive(a, b) => { return tcx.lift(&(a, b)).map(|(a, b)| RegionsDoesNotOutlive(a, b)) @@ -320,14 +315,12 @@ impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> { RegionsOverlyPolymorphic(a, b) => { return tcx.lift(&b).map(|b| RegionsOverlyPolymorphic(a, b)) } - IntegerAsChar => IntegerAsChar, IntMismatch(x) => IntMismatch(x), FloatMismatch(x) => FloatMismatch(x), Traits(x) => Traits(x), BuiltinBoundsMismatch(x) => BuiltinBoundsMismatch(x), VariadicMismatch(x) => VariadicMismatch(x), CyclicTy => CyclicTy, - ConvergenceMismatch(x) => ConvergenceMismatch(x), ProjectionNameMismatched(x) => ProjectionNameMismatched(x), ProjectionBoundsLength(x) => ProjectionBoundsLength(x), @@ -437,16 +430,6 @@ impl<'tcx, T:TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder { } } -impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for P<[T]> { - fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - self.iter().map(|t| t.fold_with(folder)).collect() - } - - fn super_visit_with>(&self, visitor: &mut V) -> bool { - self.iter().any(|t| t.visit_with(visitor)) - } -} - impl<'tcx> TypeFoldable<'tcx> for ty::TraitObject<'tcx> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::TraitObject { @@ -464,10 +447,10 @@ impl<'tcx> TypeFoldable<'tcx> for ty::TraitObject<'tcx> { } } -impl<'tcx> TypeFoldable<'tcx> for &'tcx [Ty<'tcx>] { +impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Slice> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - let tys = self.iter().map(|t| t.fold_with(folder)).collect(); - folder.tcx().mk_type_list(tys) + let v = self.iter().map(|t| t.fold_with(folder)).collect::>(); + folder.tcx().intern_type_list(&v) } fn super_visit_with>(&self, visitor: &mut V) -> bool { @@ -734,6 +717,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::TypeParameterDef<'tcx> { default: self.default.fold_with(folder), 
default_def_id: self.default_def_id, object_lifetime_default: self.object_lifetime_default.fold_with(folder), + pure_wrt_drop: self.pure_wrt_drop, } } @@ -772,6 +756,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::RegionParameterDef<'tcx> { def_id: self.def_id, index: self.index, bounds: self.bounds.fold_with(folder), + pure_wrt_drop: self.pure_wrt_drop, } } @@ -919,23 +904,6 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ClosureUpvar<'tcx> { } } -impl<'tcx> TypeFoldable<'tcx> for ty::ParameterEnvironment<'tcx> { - fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - ty::ParameterEnvironment { - free_substs: self.free_substs.fold_with(folder), - implicit_region_bound: self.implicit_region_bound.fold_with(folder), - caller_bounds: self.caller_bounds.fold_with(folder), - free_id_outlive: self.free_id_outlive, - } - } - - fn super_visit_with>(&self, visitor: &mut V) -> bool { - self.free_substs.visit_with(visitor) || - self.implicit_region_bound.visit_with(visitor) || - self.caller_bounds.visit_with(visitor) - } -} - impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::error::ExpectedFound { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::error::ExpectedFound { diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index 5fdc7abc0af5b..92dfb883ef301 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -10,29 +10,28 @@ //! This module contains TypeVariants and its major components -use middle::cstore; use hir::def_id::DefId; use middle::region; use ty::subst::Substs; -use ty::{self, AdtDef, ToPredicate, TypeFlags, Ty, TyCtxt, TyS, TypeFoldable}; +use ty::{self, AdtDef, ToPredicate, TypeFlags, Ty, TyCtxt, TypeFoldable}; +use ty::{Slice, TyS}; use util::common::ErrorReported; use collections::enum_set::{self, EnumSet, CLike}; use std::fmt; -use std::mem; use std::ops; use syntax::abi; use syntax::ast::{self, Name}; use syntax::parse::token::{keywords, InternedString}; -use serialize::{Decodable, Decoder, Encodable, Encoder}; +use serialize; use hir; use self::InferTy::*; use self::TypeVariants::*; -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct TypeAndMut<'tcx> { pub ty: Ty<'tcx>, pub mutbl: hir::Mutability, @@ -88,7 +87,7 @@ pub enum Issue32330 { // NB: If you change this, you'll probably want to change the corresponding // AST structure in libsyntax/ast.rs as well. -#[derive(Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub enum TypeVariants<'tcx> { /// The primitive boolean type. Written as `bool`. TyBool, @@ -156,7 +155,7 @@ pub enum TypeVariants<'tcx> { TyNever, /// A tuple type. For example, `(i32, bool)`. - TyTuple(&'tcx [Ty<'tcx>]), + TyTuple(&'tcx Slice>), /// The projection of an associated type. For example, /// `>::N`. @@ -253,7 +252,7 @@ pub enum TypeVariants<'tcx> { /// closure C wind up influencing the decisions we ought to make for /// closure C (which would then require fixed point iteration to /// handle). Plus it fixes an ICE. :P -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct ClosureSubsts<'tcx> { /// Lifetime and type parameters from the enclosing function. /// These are separated out because trans wants to pass them around @@ -263,28 +262,10 @@ pub struct ClosureSubsts<'tcx> { /// The types of the upvars. 
The list parallels the freevars and /// `upvar_borrows` lists. These are kept distinct so that we can /// easily index into them. - pub upvar_tys: &'tcx [Ty<'tcx>] + pub upvar_tys: &'tcx Slice> } -impl<'tcx> Encodable for ClosureSubsts<'tcx> { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - (self.func_substs, self.upvar_tys).encode(s) - } -} - -impl<'tcx> Decodable for ClosureSubsts<'tcx> { - fn decode(d: &mut D) -> Result, D::Error> { - let (func_substs, upvar_tys) = Decodable::decode(d)?; - cstore::tls::with_decoding_context(d, |dcx, _| { - Ok(ClosureSubsts { - func_substs: func_substs, - upvar_tys: dcx.tcx().mk_type_list(upvar_tys) - }) - }) - } -} - -#[derive(Clone, PartialEq, Eq, Hash)] +#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct TraitObject<'tcx> { pub principal: PolyExistentialTraitRef<'tcx>, pub region_bound: &'tcx ty::Region, @@ -307,7 +288,7 @@ pub struct TraitObject<'tcx> { /// Note that a `TraitRef` introduces a level of region binding, to /// account for higher-ranked trait bounds like `T : for<'a> Foo<&'a /// U>` or higher-ranked object types. -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct TraitRef<'tcx> { pub def_id: DefId, pub substs: &'tcx Substs<'tcx>, @@ -347,7 +328,7 @@ impl<'tcx> PolyTraitRef<'tcx> { /// /// The substitutions don't include the erased `Self`, only trait /// type and lifetime parameters (`[X, Y]` and `['a, 'b]` above). -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct ExistentialTraitRef<'tcx> { pub def_id: DefId, pub substs: &'tcx Substs<'tcx>, @@ -383,7 +364,7 @@ impl<'tcx> PolyExistentialTraitRef<'tcx> { /// erase, or otherwise "discharge" these bound regions, we change the /// type from `Binder` to just `T` (see /// e.g. `liberate_late_bound_regions`). -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct Binder(pub T); impl Binder { @@ -425,13 +406,13 @@ impl Binder { impl fmt::Debug for TypeFlags { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}", self.bits) + write!(f, "{:x}", self.bits) } } /// Represents the projection of an associated type. In explicit UFCS /// form this would be written `>::N`. -#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct ProjectionTy<'tcx> { /// The trait reference `T as Trait<..>`. pub trait_ref: ty::TraitRef<'tcx>, @@ -440,14 +421,16 @@ pub struct ProjectionTy<'tcx> { pub item_name: Name, } -#[derive(Clone, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct BareFnTy<'tcx> { pub unsafety: hir::Unsafety, pub abi: abi::Abi, pub sig: PolyFnSig<'tcx>, } -#[derive(Clone, PartialEq, Eq, Hash)] +impl<'tcx> serialize::UseSpecializedDecodable for &'tcx BareFnTy<'tcx> {} + +#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct ClosureTy<'tcx> { pub unsafety: hir::Unsafety, pub abi: abi::Abi, @@ -460,7 +443,7 @@ pub struct ClosureTy<'tcx> { /// - `inputs` is the list of arguments and their modes. /// - `output` is the return type. /// - `variadic` indicates whether this is a variadic function. 
(only true for foreign fns) -#[derive(Clone, PartialEq, Eq, Hash)] +#[derive(Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct FnSig<'tcx> { pub inputs: Vec>, pub output: Ty<'tcx>, @@ -484,7 +467,7 @@ impl<'tcx> PolyFnSig<'tcx> { } } -#[derive(Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct ParamTy { pub idx: u32, pub name: Name, @@ -663,14 +646,7 @@ pub enum Region { ReErased, } -impl<'tcx> Decodable for &'tcx Region { - fn decode(d: &mut D) -> Result<&'tcx Region, D::Error> { - let r = Decodable::decode(d)?; - cstore::tls::with_decoding_context(d, |dcx, _| { - Ok(dcx.tcx().mk_region(r)) - }) - } -} +impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Region {} #[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, Debug)] pub struct EarlyBoundRegion { @@ -678,17 +654,17 @@ pub struct EarlyBoundRegion { pub name: Name, } -#[derive(Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct TyVid { pub index: u32, } -#[derive(Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct IntVid { pub index: u32 } -#[derive(Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub struct FloatVid { pub index: u32 } @@ -703,7 +679,7 @@ pub struct SkolemizedRegionVid { pub index: u32 } -#[derive(Clone, Copy, PartialEq, Eq, Hash)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] pub enum InferTy { TyVar(TyVid), IntVar(IntVid), @@ -718,7 +694,7 @@ pub enum InferTy { } /// A `ProjectionPredicate` for an `ExistentialTraitRef`. -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct ExistentialProjection<'tcx> { pub trait_ref: ExistentialTraitRef<'tcx>, pub item_name: Name, @@ -763,7 +739,7 @@ impl<'a, 'tcx, 'gcx> PolyExistentialProjection<'tcx> { } } -#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] +#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub struct BuiltinBounds(EnumSet); impl<'a, 'gcx, 'tcx> BuiltinBounds { @@ -806,12 +782,11 @@ impl<'a> IntoIterator for &'a BuiltinBounds { #[derive(Clone, RustcEncodable, PartialEq, Eq, RustcDecodable, Hash, Debug, Copy)] -#[repr(usize)] pub enum BuiltinBound { - Send, - Sized, - Copy, - Sync, + Send = 0, + Sized = 1, + Copy = 2, + Sync = 3, } impl CLike for BuiltinBound { @@ -819,7 +794,13 @@ impl CLike for BuiltinBound { *self as usize } fn from_usize(v: usize) -> BuiltinBound { - unsafe { mem::transmute(v) } + match v { + 0 => BuiltinBound::Send, + 1 => BuiltinBound::Sized, + 2 => BuiltinBound::Copy, + 3 => BuiltinBound::Sync, + _ => bug!("{} is not a valid BuiltinBound", v) + } } } @@ -885,6 +866,35 @@ impl Region { r => r } } + + pub fn type_flags(&self) -> TypeFlags { + let mut flags = TypeFlags::empty(); + + match *self { + ty::ReVar(..) => { + flags = flags | TypeFlags::HAS_RE_INFER; + flags = flags | TypeFlags::KEEP_IN_LOCAL_TCX; + } + ty::ReSkolemized(..) => { + flags = flags | TypeFlags::HAS_RE_INFER; + flags = flags | TypeFlags::HAS_RE_SKOL; + flags = flags | TypeFlags::KEEP_IN_LOCAL_TCX; + } + ty::ReLateBound(..) => { } + ty::ReEarlyBound(..) 
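The `CLike::from_usize` rewrite above trades an `unsafe` transmute for an exhaustive `match`, so an out-of-range discriminant becomes a reported bug instead of undefined behaviour. The same shape as a standalone sketch, with `panic!` standing in for rustc's internal `bug!` macro and a hypothetical `Bound` enum:

```rust
#[derive(Clone, Copy, Debug, PartialEq)]
enum Bound {
    Send = 0,
    Sized = 1,
    Copy = 2,
    Sync = 3,
}

fn bound_from_usize(v: usize) -> Bound {
    match v {
        0 => Bound::Send,
        1 => Bound::Sized,
        2 => Bound::Copy,
        3 => Bound::Sync,
        // A transmute here would be undefined behaviour for bad input;
        // the match makes it a deterministic, diagnosable failure.
        _ => panic!("{} is not a valid Bound", v),
    }
}

fn main() {
    assert_eq!(bound_from_usize(Bound::Copy as usize), Bound::Copy);
}
```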
=> { flags = flags | TypeFlags::HAS_RE_EARLY_BOUND; } + ty::ReStatic | ty::ReErased => { } + _ => { flags = flags | TypeFlags::HAS_FREE_REGIONS; } + } + + match *self { + ty::ReStatic | ty::ReEmpty | ty::ReErased => (), + _ => flags = flags | TypeFlags::HAS_LOCAL_NAMES, + } + + debug!("type_flags({:?}) = {:?}", self, flags); + + flags + } } // Type utilities diff --git a/src/librustc/ty/subst.rs b/src/librustc/ty/subst.rs index 0ccfea2330999..a4ceecd8c9d7b 100644 --- a/src/librustc/ty/subst.rs +++ b/src/librustc/ty/subst.rs @@ -10,13 +10,13 @@ // Type substitutions. -use middle::cstore; use hir::def_id::DefId; -use ty::{self, Ty, TyCtxt}; +use ty::{self, Slice, Ty, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; -use serialize::{Encodable, Encoder, Decodable, Decoder}; +use serialize::{self, Encodable, Encoder, Decodable, Decoder}; use syntax_pos::{Span, DUMMY_SP}; +use rustc_data_structures::accumulate_vec::AccumulateVec; use core::nonzero::NonZero; use std::fmt; @@ -129,42 +129,42 @@ impl<'tcx> TypeFoldable<'tcx> for Kind<'tcx> { } } -/// A substitution mapping type/region parameters to new values. -#[derive(Clone, PartialEq, Eq, Debug, Hash)] -pub struct Substs<'tcx> { - params: Vec> -} - -impl<'a, 'gcx, 'tcx> Substs<'tcx> { - pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, params: I) - -> &'tcx Substs<'tcx> - where I: IntoIterator> { - tcx.mk_substs(Substs { - params: params.into_iter().collect() +impl<'tcx> Encodable for Kind<'tcx> { + fn encode(&self, e: &mut E) -> Result<(), E::Error> { + e.emit_enum("Kind", |e| { + if let Some(ty) = self.as_type() { + e.emit_enum_variant("Ty", TYPE_TAG, 1, |e| { + e.emit_enum_variant_arg(0, |e| ty.encode(e)) + }) + } else if let Some(r) = self.as_region() { + e.emit_enum_variant("Region", REGION_TAG, 1, |e| { + e.emit_enum_variant_arg(0, |e| r.encode(e)) + }) + } else { + bug!() + } }) } +} - pub fn maybe_new(tcx: TyCtxt<'a, 'gcx, 'tcx>, params: I) - -> Result<&'tcx Substs<'tcx>, E> - where I: IntoIterator, E>> { - Ok(tcx.mk_substs(Substs { - params: params.into_iter().collect::>()? - })) - } - - pub fn new_trait(tcx: TyCtxt<'a, 'gcx, 'tcx>, - s: Ty<'tcx>, - t: &[Ty<'tcx>]) - -> &'tcx Substs<'tcx> - { - let t = iter::once(s).chain(t.iter().cloned()); - Substs::new(tcx, t.map(Kind::from)) +impl<'tcx> Decodable for Kind<'tcx> { + fn decode(d: &mut D) -> Result, D::Error> { + d.read_enum("Kind", |d| { + d.read_enum_variant(&["Ty", "Region"], |d, tag| { + match tag { + TYPE_TAG => Ty::decode(d).map(Kind::from), + REGION_TAG => <&ty::Region>::decode(d).map(Kind::from), + _ => Err(d.error("invalid Kind tag")) + } + }) + }) } +} - pub fn empty(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> &'tcx Substs<'tcx> { - Substs::new(tcx, vec![]) - } +/// A substitution mapping type/region parameters to new values. +pub type Substs<'tcx> = Slice>; +impl<'a, 'gcx, 'tcx> Substs<'tcx> { /// Creates a Substs for generic parameter definitions, /// by calling closures to obtain each region and type. 
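`Region::type_flags`, completed just above, folds per-variant facts into a flag set with bitwise OR. A much-simplified sketch of that accumulation using bare `u32` constants (the real `TypeFlags` is a richer bitflags type, and the variant set here is invented for illustration):

```rust
// Simplified stand-ins for a few of the real flag bits.
const HAS_RE_INFER: u32 = 1 << 0;
const HAS_FREE_REGIONS: u32 = 1 << 1;
const HAS_LOCAL_NAMES: u32 = 1 << 2;

enum Region {
    Static,
    Erased,
    Var(u32),
    Free(&'static str),
}

fn region_flags(r: &Region) -> u32 {
    let mut flags = 0;
    match *r {
        Region::Var(_) => flags |= HAS_RE_INFER,
        Region::Static | Region::Erased => {}
        Region::Free(_) => flags |= HAS_FREE_REGIONS,
    }
    // Everything except 'static and erased regions carries a local name.
    match *r {
        Region::Static | Region::Erased => {}
        _ => flags |= HAS_LOCAL_NAMES,
    }
    flags
}

fn main() {
    assert_eq!(region_flags(&Region::Static), 0);
    assert_eq!(region_flags(&Region::Erased), 0);
    assert_eq!(region_flags(&Region::Var(0)), HAS_RE_INFER | HAS_LOCAL_NAMES);
    assert_eq!(
        region_flags(&Region::Free("'a")),
        HAS_FREE_REGIONS | HAS_LOCAL_NAMES
    );
}
```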
/// The closures get to observe the Substs as they're @@ -175,82 +175,80 @@ impl<'a, 'gcx, 'tcx> Substs<'tcx> { mut mk_region: FR, mut mk_type: FT) -> &'tcx Substs<'tcx> - where FR: FnMut(&ty::RegionParameterDef, &Substs<'tcx>) -> &'tcx ty::Region, - FT: FnMut(&ty::TypeParameterDef<'tcx>, &Substs<'tcx>) -> Ty<'tcx> { + where FR: FnMut(&ty::RegionParameterDef, &[Kind<'tcx>]) -> &'tcx ty::Region, + FT: FnMut(&ty::TypeParameterDef<'tcx>, &[Kind<'tcx>]) -> Ty<'tcx> { let defs = tcx.lookup_generics(def_id); - let mut substs = Substs { - params: Vec::with_capacity(defs.count()) - }; - - substs.fill_item(tcx, defs, &mut mk_region, &mut mk_type); - - tcx.mk_substs(substs) + let mut substs = Vec::with_capacity(defs.count()); + Substs::fill_item(&mut substs, tcx, defs, &mut mk_region, &mut mk_type); + tcx.intern_substs(&substs) } - fn fill_item(&mut self, + fn fill_item(substs: &mut Vec>, tcx: TyCtxt<'a, 'gcx, 'tcx>, defs: &ty::Generics<'tcx>, mk_region: &mut FR, mk_type: &mut FT) - where FR: FnMut(&ty::RegionParameterDef, &Substs<'tcx>) -> &'tcx ty::Region, - FT: FnMut(&ty::TypeParameterDef<'tcx>, &Substs<'tcx>) -> Ty<'tcx> { + where FR: FnMut(&ty::RegionParameterDef, &[Kind<'tcx>]) -> &'tcx ty::Region, + FT: FnMut(&ty::TypeParameterDef<'tcx>, &[Kind<'tcx>]) -> Ty<'tcx> { + if let Some(def_id) = defs.parent { let parent_defs = tcx.lookup_generics(def_id); - self.fill_item(tcx, parent_defs, mk_region, mk_type); + Substs::fill_item(substs, tcx, parent_defs, mk_region, mk_type); } // Handle Self first, before all regions. let mut types = defs.types.iter(); if defs.parent.is_none() && defs.has_self { let def = types.next().unwrap(); - let ty = mk_type(def, self); - assert_eq!(def.index as usize, self.params.len()); - self.params.push(Kind::from(ty)); + let ty = mk_type(def, substs); + assert_eq!(def.index as usize, substs.len()); + substs.push(Kind::from(ty)); } for def in &defs.regions { - let region = mk_region(def, self); - assert_eq!(def.index as usize, self.params.len()); - self.params.push(Kind::from(region)); + let region = mk_region(def, substs); + assert_eq!(def.index as usize, substs.len()); + substs.push(Kind::from(region)); } for def in types { - let ty = mk_type(def, self); - assert_eq!(def.index as usize, self.params.len()); - self.params.push(Kind::from(ty)); + let ty = mk_type(def, substs); + assert_eq!(def.index as usize, substs.len()); + substs.push(Kind::from(ty)); } } pub fn is_noop(&self) -> bool { - self.params.is_empty() + self.is_empty() } #[inline] pub fn params(&self) -> &[Kind<'tcx>] { - &self.params + // FIXME (dikaiosune) this should be removed, and corresponding compilation errors fixed + self } #[inline] pub fn types(&'a self) -> impl DoubleEndedIterator> + 'a { - self.params.iter().filter_map(|k| k.as_type()) + self.iter().filter_map(|k| k.as_type()) } #[inline] pub fn regions(&'a self) -> impl DoubleEndedIterator + 'a { - self.params.iter().filter_map(|k| k.as_region()) + self.iter().filter_map(|k| k.as_region()) } #[inline] pub fn type_at(&self, i: usize) -> Ty<'tcx> { - self.params[i].as_type().unwrap_or_else(|| { - bug!("expected type for param #{} in {:?}", i, self.params); + self[i].as_type().unwrap_or_else(|| { + bug!("expected type for param #{} in {:?}", i, self); }) } #[inline] pub fn region_at(&self, i: usize) -> &'tcx ty::Region { - self.params[i].as_region().unwrap_or_else(|| { - bug!("expected region for param #{} in {:?}", i, self.params); + self[i].as_region().unwrap_or_else(|| { + bug!("expected region for param #{} in {:?}", i, self); }) } @@ -274,19 
+272,21 @@ impl<'a, 'gcx, 'tcx> Substs<'tcx> { target_substs: &Substs<'tcx>) -> &'tcx Substs<'tcx> { let defs = tcx.lookup_generics(source_ancestor); - tcx.mk_substs(Substs { - params: target_substs.params.iter() - .chain(&self.params[defs.own_count()..]).cloned().collect() - }) + tcx.mk_substs(target_substs.iter().chain(&self[defs.own_count()..]).cloned()) } } impl<'tcx> TypeFoldable<'tcx> for &'tcx Substs<'tcx> { fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { - let params = self.params.iter().map(|k| k.fold_with(folder)).collect(); - folder.tcx().mk_substs(Substs { - params: params - }) + let params: AccumulateVec<[_; 8]> = self.iter().map(|k| k.fold_with(folder)).collect(); + + // If folding doesn't change the substs, it's faster to avoid + // calling `mk_substs` and instead reuse the existing substs. + if params[..] == self[..] { + self + } else { + folder.tcx().intern_substs(¶ms) + } } fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { @@ -294,29 +294,11 @@ impl<'tcx> TypeFoldable<'tcx> for &'tcx Substs<'tcx> { } fn super_visit_with>(&self, visitor: &mut V) -> bool { - self.params.visit_with(visitor) - } -} - -impl<'tcx> Encodable for &'tcx Substs<'tcx> { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - cstore::tls::with_encoding_context(s, |ecx, rbml_w| { - ecx.encode_substs(rbml_w, self); - Ok(()) - }) - } -} - -impl<'tcx> Decodable for &'tcx Substs<'tcx> { - fn decode(d: &mut D) -> Result<&'tcx Substs<'tcx>, D::Error> { - let substs = cstore::tls::with_decoding_context(d, |dcx, rbml_r| { - dcx.decode_substs(rbml_r) - }); - - Ok(substs) + self.iter().any(|t| t.visit_with(visitor)) } } +impl<'tcx> serialize::UseSpecializedDecodable for &'tcx Substs<'tcx> {} /////////////////////////////////////////////////////////////////////////// // Public trait `Subst` @@ -327,19 +309,19 @@ impl<'tcx> Decodable for &'tcx Substs<'tcx> { pub trait Subst<'tcx> : Sized { fn subst<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - substs: &Substs<'tcx>) -> Self { + substs: &[Kind<'tcx>]) -> Self { self.subst_spanned(tcx, substs, None) } fn subst_spanned<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - substs: &Substs<'tcx>, + substs: &[Kind<'tcx>], span: Option) -> Self; } impl<'tcx, T:TypeFoldable<'tcx>> Subst<'tcx> for T { fn subst_spanned<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, - substs: &Substs<'tcx>, + substs: &[Kind<'tcx>], span: Option) -> T { @@ -358,7 +340,7 @@ impl<'tcx, T:TypeFoldable<'tcx>> Subst<'tcx> for T { struct SubstFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'tcx>, - substs: &'a Substs<'tcx>, + substs: &'a [Kind<'tcx>], // The location for which the substitution is performed, if available. span: Option, @@ -391,7 +373,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for SubstFolder<'a, 'gcx, 'tcx> { // the specialized routine `ty::replace_late_regions()`. match *r { ty::ReEarlyBound(data) => { - let r = self.substs.params.get(data.index as usize) + let r = self.substs.get(data.index as usize) .and_then(|k| k.as_region()); match r { Some(r) => { @@ -448,7 +430,7 @@ impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for SubstFolder<'a, 'gcx, 'tcx> { impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { fn ty_for_param(&self, p: ty::ParamTy, source_ty: Ty<'tcx>) -> Ty<'tcx> { // Look up the type in the substitutions. It really should be in there. 
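The `TypeFoldable` impl for `&Substs` above collects the folded elements and only re-interns when something actually changed, otherwise handing back the original interned slice. The same allocation-reuse idea, sketched with `Rc` standing in for the arena-interned `Slice` (the `fold_list` name is mine):

```rust
use std::rc::Rc;

// Fold every element; if nothing changed, reuse the original allocation
// instead of building (and "interning") a fresh one.
fn fold_list<T, F>(list: &Rc<Vec<T>>, mut f: F) -> Rc<Vec<T>>
where
    T: PartialEq,
    F: FnMut(&T) -> T,
{
    let folded: Vec<T> = list.iter().map(|t| f(t)).collect();
    if folded[..] == list[..] {
        list.clone() // cheap refcount bump, no new allocation
    } else {
        Rc::new(folded)
    }
}

fn main() {
    let original = Rc::new(vec![1, 2, 3]);

    let unchanged = fold_list(&original, |&x| x);
    assert!(Rc::ptr_eq(&original, &unchanged));

    let changed = fold_list(&original, |&x| x + 1);
    assert_eq!(*changed, vec![2, 3, 4]);
}
```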
- let opt_ty = self.substs.params.get(p.idx as usize) + let opt_ty = self.substs.get(p.idx as usize) .and_then(|k| k.as_type()); let ty = match opt_ty { Some(t) => t, @@ -462,7 +444,7 @@ impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { source_ty, p.idx, self.root_ty, - self.substs.params); + self.substs); } }; @@ -539,10 +521,9 @@ impl<'a, 'gcx, 'tcx> ty::TraitRef<'tcx> { -> ty::TraitRef<'tcx> { let defs = tcx.lookup_generics(trait_id); - let params = substs.params[..defs.own_count()].iter().cloned(); ty::TraitRef { def_id: trait_id, - substs: Substs::new(tcx, params) + substs: tcx.intern_substs(&substs[..defs.own_count()]) } } } @@ -554,10 +535,9 @@ impl<'a, 'gcx, 'tcx> ty::ExistentialTraitRef<'tcx> { // Assert there is a Self. trait_ref.substs.type_at(0); - let params = trait_ref.substs.params[1..].iter().cloned(); ty::ExistentialTraitRef { def_id: trait_ref.def_id, - substs: Substs::new(tcx, params) + substs: tcx.intern_substs(&trait_ref.substs[1..]) } } } @@ -574,11 +554,10 @@ impl<'a, 'gcx, 'tcx> ty::PolyExistentialTraitRef<'tcx> { assert!(!self_ty.has_escaping_regions()); self.map_bound(|trait_ref| { - let params = trait_ref.substs.params.iter().cloned(); - let params = iter::once(Kind::from(self_ty)).chain(params); ty::TraitRef { def_id: trait_ref.def_id, - substs: Substs::new(tcx, params) + substs: tcx.mk_substs( + iter::once(Kind::from(self_ty)).chain(trait_ref.substs.iter().cloned())) } }) } diff --git a/src/librustc/ty/trait_def.rs b/src/librustc/ty/trait_def.rs index 268b2fcaa4adb..3ff2ed76e571e 100644 --- a/src/librustc/ty/trait_def.rs +++ b/src/librustc/ty/trait_def.rs @@ -15,7 +15,6 @@ use ty; use ty::fast_reject; use ty::{Ty, TyCtxt, TraitRef}; use std::cell::{Cell, RefCell}; -use syntax::ast::Name; use hir; use util::nodemap::FnvHashMap; @@ -38,10 +37,6 @@ pub struct TraitDef<'tcx> { pub trait_ref: ty::TraitRef<'tcx>, - /// A list of the associated types defined in this trait. Useful - /// for resolving `X::Foo` type markers. - pub associated_type_names: Vec, - // Impls of a trait. 
To allow for quicker lookup, the impls are indexed by a // simplified version of their `Self` type: impls with a simplifiable `Self` // are stored in `nonblanket_impls` keyed by it, while all other impls are @@ -82,7 +77,6 @@ impl<'a, 'gcx, 'tcx> TraitDef<'tcx> { paren_sugar: bool, generics: &'tcx ty::Generics<'tcx>, trait_ref: ty::TraitRef<'tcx>, - associated_type_names: Vec, def_path_hash: u64) -> TraitDef<'tcx> { TraitDef { @@ -90,7 +84,6 @@ impl<'a, 'gcx, 'tcx> TraitDef<'tcx> { unsafety: unsafety, generics: generics, trait_ref: trait_ref, - associated_type_names: associated_type_names, nonblanket_impls: RefCell::new(FnvHashMap()), blanket_impls: RefCell::new(vec![]), flags: Cell::new(ty::TraitFlags::NO_TRAIT_FLAGS), diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs index d34fdaa7d71cd..cca4069ba5a17 100644 --- a/src/librustc/ty/util.rs +++ b/src/librustc/ty/util.rs @@ -12,6 +12,7 @@ use hir::def_id::DefId; use infer::InferCtxt; +use hir::map as ast_map; use hir::pat_util; use traits::{self, Reveal}; use ty::{self, Ty, AdtKind, TyCtxt, TypeAndMut, TypeFlags, TypeFoldable}; @@ -19,11 +20,14 @@ use ty::{Disr, ParameterEnvironment}; use ty::fold::TypeVisitor; use ty::layout::{Layout, LayoutError}; use ty::TypeVariants::*; +use util::nodemap::FnvHashMap; use rustc_const_math::{ConstInt, ConstIsize, ConstUsize}; +use std::cell::RefCell; use std::cmp; -use std::hash::{Hash, SipHasher, Hasher}; +use std::hash::{Hash, Hasher}; +use std::collections::hash_map::DefaultHasher; use std::intrinsics; use syntax::ast::{self, Name}; use syntax::attr::{self, SignedInt, UnsignedInt}; @@ -240,7 +244,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { pub fn enum_repr_type(self, opt_hint: Option<&attr::ReprAttr>) -> attr::IntType { match opt_hint { // Feed in the given type - Some(&attr::ReprInt(_, int_t)) => int_t, + Some(&attr::ReprInt(int_t)) => int_t, // ... but provide sensible default if none provided // // NB. Historically `fn enum_variants` generate i64 here, while @@ -352,12 +356,9 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Creates a hash of the type `Ty` which will be the same no matter what crate /// context it's calculated within. This is used by the `type_id` intrinsic. pub fn type_id_hash(self, ty: Ty<'tcx>) -> u64 { - let mut hasher = TypeIdHasher { - tcx: self, - state: SipHasher::new() - }; + let mut hasher = TypeIdHasher::new(self, DefaultHasher::default()); hasher.visit_ty(ty); - hasher.state.finish() + hasher.finish() } /// Returns true if this ADT is a dtorck type. @@ -391,16 +392,96 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { } } -struct TypeIdHasher<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +/// When hashing a type this ends up affecting properties like symbol names. We +/// want these symbol names to be calculated independent of other factors like +/// what architecture you're compiling *from*. +/// +/// The hashing just uses the standard `Hash` trait, but the implementations of +/// `Hash` for the `usize` and `isize` types are *not* architecture independent +/// (e.g. they hash 4 or 8 bytes). As a result we want to avoid `usize` and +/// `isize` completely when hashing. To ensure that these don't leak in we use a +/// custom hasher implementation here which inflates the size of these to a `u64` +/// and `i64`. +/// +/// The same goes for endianness: We always convert multi-byte integers to little +/// endian before hashing.
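The comment above is the rationale for `ArchIndependentHasher`: widen `usize`/`isize` to 64 bits and force little-endian so the result does not depend on the host the compiler runs on. A standalone sketch of the same idea over std's `Hasher` trait (`PortableHasher` is my name, and `DefaultHasher` merely stands in for a hasher with a stable algorithm):

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;

// Wraps any Hasher and feeds it a fixed-width, little-endian byte stream,
// so the output does not depend on pointer width or endianness.
struct PortableHasher<H> {
    inner: H,
}

impl<H: Hasher> Hasher for PortableHasher<H> {
    fn write(&mut self, bytes: &[u8]) {
        self.inner.write(bytes)
    }

    fn finish(&self) -> u64 {
        self.inner.finish()
    }

    // Widen usize/isize to 64 bits and serialize as little-endian bytes.
    fn write_usize(&mut self, i: usize) {
        self.inner.write(&(i as u64).to_le_bytes())
    }

    fn write_isize(&mut self, i: isize) {
        self.inner.write(&(i as i64).to_le_bytes())
    }

    fn write_u32(&mut self, i: u32) {
        self.inner.write(&i.to_le_bytes())
    }
}

fn main() {
    let mut h = PortableHasher { inner: DefaultHasher::new() };
    h.write_usize(0xdead_beef);
    h.write_u32(42);
    println!("{:016x}", h.finish());
}
```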
+#[derive(Debug)] +pub struct ArchIndependentHasher { + inner: H, +} + +impl ArchIndependentHasher { + pub fn new(inner: H) -> ArchIndependentHasher { + ArchIndependentHasher { inner: inner } + } + + pub fn into_inner(self) -> H { + self.inner + } +} + +impl Hasher for ArchIndependentHasher { + fn write(&mut self, bytes: &[u8]) { + self.inner.write(bytes) + } + + fn finish(&self) -> u64 { + self.inner.finish() + } + + fn write_u8(&mut self, i: u8) { + self.inner.write_u8(i) + } + fn write_u16(&mut self, i: u16) { + self.inner.write_u16(i.to_le()) + } + fn write_u32(&mut self, i: u32) { + self.inner.write_u32(i.to_le()) + } + fn write_u64(&mut self, i: u64) { + self.inner.write_u64(i.to_le()) + } + fn write_usize(&mut self, i: usize) { + self.inner.write_u64((i as u64).to_le()) + } + fn write_i8(&mut self, i: i8) { + self.inner.write_i8(i) + } + fn write_i16(&mut self, i: i16) { + self.inner.write_i16(i.to_le()) + } + fn write_i32(&mut self, i: i32) { + self.inner.write_i32(i.to_le()) + } + fn write_i64(&mut self, i: i64) { + self.inner.write_i64(i.to_le()) + } + fn write_isize(&mut self, i: isize) { + self.inner.write_i64((i as i64).to_le()) + } +} + +pub struct TypeIdHasher<'a, 'gcx: 'a+'tcx, 'tcx: 'a, H> { tcx: TyCtxt<'a, 'gcx, 'tcx>, - state: SipHasher + state: ArchIndependentHasher, } -impl<'a, 'gcx, 'tcx> TypeIdHasher<'a, 'gcx, 'tcx> { - fn hash(&mut self, x: T) { +impl<'a, 'gcx, 'tcx, H: Hasher> TypeIdHasher<'a, 'gcx, 'tcx, H> { + pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, state: H) -> Self { + TypeIdHasher { + tcx: tcx, + state: ArchIndependentHasher::new(state), + } + } + + pub fn hash(&mut self, x: T) { x.hash(&mut self.state); } + pub fn finish(self) -> u64 { + self.state.finish() + } + fn hash_discriminant_u8(&mut self, x: &T) { let v = unsafe { intrinsics::discriminant_value(x) @@ -413,13 +494,20 @@ impl<'a, 'gcx, 'tcx> TypeIdHasher<'a, 'gcx, 'tcx> { fn def_id(&mut self, did: DefId) { // Hash the DefPath corresponding to the DefId, which is independent // of compiler internal state. - let tcx = self.tcx; - let def_path = tcx.def_path(did); - def_path.deterministic_hash_to(tcx, &mut self.state); + let path = self.tcx.def_path(did); + self.def_path(&path) + } + + pub fn def_path(&mut self, def_path: &ast_map::DefPath) { + def_path.deterministic_hash_to(self.tcx, &mut self.state); + } + + pub fn into_inner(self) -> H { + self.state.inner } } -impl<'a, 'gcx, 'tcx> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx> { +impl<'a, 'gcx, 'tcx, H: Hasher> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx, H> { fn visit_ty(&mut self, ty: Ty<'tcx>) -> bool { // Distinguish between the Ty variants uniformly. self.hash_discriminant_u8(&ty.sty); @@ -428,17 +516,18 @@ impl<'a, 'gcx, 'tcx> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx> { TyInt(i) => self.hash(i), TyUint(u) => self.hash(u), TyFloat(f) => self.hash(f), - TyAdt(d, _) => self.def_id(d.did), TyArray(_, n) => self.hash(n), TyRawPtr(m) | TyRef(_, m) => self.hash(m.mutbl), TyClosure(def_id, _) | TyAnon(def_id, _) | TyFnDef(def_id, ..) 
=> self.def_id(def_id), + TyAdt(d, _) => self.def_id(d.did), TyFnPtr(f) => { self.hash(f.unsafety); self.hash(f.abi); self.hash(f.sig.variadic()); + self.hash(f.sig.inputs().skip_binder().len()); } TyTrait(ref data) => { self.def_id(data.principal.def_id()); @@ -460,9 +549,10 @@ impl<'a, 'gcx, 'tcx> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx> { TyChar | TyStr | TyBox(_) | - TySlice(_) | - TyError => {} - TyInfer(_) => bug!() + TySlice(_) => {} + + TyError | + TyInfer(_) => bug!("TypeIdHasher: unexpected type {}", ty) } ty.super_visit_with(self) @@ -470,7 +560,7 @@ impl<'a, 'gcx, 'tcx> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx> { fn visit_region(&mut self, r: &'tcx ty::Region) -> bool { match *r { - ty::ReStatic | ty::ReErased => { + ty::ReErased => { self.hash::(0); } ty::ReLateBound(db, ty::BrAnon(i)) => { @@ -478,6 +568,7 @@ impl<'a, 'gcx, 'tcx> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx> { self.hash::(db.depth); self.hash(i); } + ty::ReStatic | ty::ReEmpty | ty::ReEarlyBound(..) | ty::ReLateBound(..) | @@ -485,7 +576,7 @@ impl<'a, 'gcx, 'tcx> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx> { ty::ReScope(..) | ty::ReVar(..) | ty::ReSkolemized(..) => { - bug!("unexpected region found when hashing a type") + bug!("TypeIdHasher: unexpected region {:?}", r) } } false @@ -502,11 +593,24 @@ impl<'a, 'gcx, 'tcx> TypeVisitor<'tcx> for TypeIdHasher<'a, 'gcx, 'tcx> { impl<'a, 'tcx> ty::TyS<'tcx> { fn impls_bound(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: &ParameterEnvironment<'tcx>, - bound: ty::BuiltinBound, span: Span) -> bool + bound: ty::BuiltinBound, + cache: &RefCell, bool>>, + span: Span) -> bool { - tcx.infer_ctxt(None, Some(param_env.clone()), Reveal::ExactMatch).enter(|infcx| { - traits::type_known_to_meet_builtin_bound(&infcx, self, bound, span) - }) + if self.has_param_types() || self.has_self_ty() { + if let Some(result) = cache.borrow().get(self) { + return *result; + } + } + let result = + tcx.infer_ctxt(None, Some(param_env.clone()), Reveal::ExactMatch) + .enter(|infcx| { + traits::type_known_to_meet_builtin_bound(&infcx, self, bound, span) + }); + if self.has_param_types() || self.has_self_ty() { + cache.borrow_mut().insert(self, result); + } + return result; } // FIXME (@jroesch): I made this public to use it, not sure if should be private @@ -533,7 +637,9 @@ impl<'a, 'tcx> ty::TyS<'tcx> { TyArray(..) | TySlice(..) | TyTrait(..) | TyTuple(..) | TyClosure(..) | TyAdt(..) | TyAnon(..) | TyProjection(..) | TyParam(..) | TyInfer(..) | TyError => None - }.unwrap_or_else(|| !self.impls_bound(tcx, param_env, ty::BoundCopy, span)); + }.unwrap_or_else(|| { + !self.impls_bound(tcx, param_env, ty::BoundCopy, ¶m_env.is_copy_cache, span) + }); if !self.has_param_types() && !self.has_self_ty() { self.flags.set(self.flags.get() | if result { @@ -573,7 +679,9 @@ impl<'a, 'tcx> ty::TyS<'tcx> { TyAdt(..) | TyProjection(..) | TyParam(..) | TyInfer(..) | TyAnon(..) 
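`impls_bound` above now checks a `RefCell`-wrapped map before spinning up an inference context, and records the answer afterwards (the real code only caches types that mention type parameters or `Self`, since other types cache the result in their flags). The basic interior-mutability memoization, with std's `HashMap`, invented names, and a toy predicate in place of the rustc machinery:

```rust
use std::cell::RefCell;
use std::collections::HashMap;

// Stand-in for the expensive trait-selection query.
fn expensive_is_copy(ty: &str) -> bool {
    println!("computing is_copy for {}", ty);
    !ty.contains('&') // toy rule, not real semantics
}

struct Env {
    is_copy_cache: RefCell<HashMap<String, bool>>,
}

impl Env {
    fn impls_copy(&self, ty: &str) -> bool {
        if let Some(&cached) = self.is_copy_cache.borrow().get(ty) {
            return cached;
        }
        let result = expensive_is_copy(ty);
        self.is_copy_cache.borrow_mut().insert(ty.to_string(), result);
        result
    }
}

fn main() {
    let env = Env { is_copy_cache: RefCell::new(HashMap::new()) };
    assert!(env.impls_copy("u32"));
    assert!(env.impls_copy("u32")); // second call is served from the cache
}
```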
| TyError => None - }.unwrap_or_else(|| self.impls_bound(tcx, param_env, ty::BoundSized, span)); + }.unwrap_or_else(|| { + self.impls_bound(tcx, param_env, ty::BoundSized, ¶m_env.is_sized_cache, span) + }); if !self.has_param_types() && !self.has_self_ty() { self.flags.set(self.flags.get() | if result { @@ -597,10 +705,19 @@ impl<'a, 'tcx> ty::TyS<'tcx> { } } + let rec_limit = tcx.sess.recursion_limit.get(); + let depth = tcx.layout_depth.get(); + if depth > rec_limit { + tcx.sess.fatal( + &format!("overflow representing the type `{}`", self)); + } + + tcx.layout_depth.set(depth+1); let layout = Layout::compute_uncached(self, infcx)?; if can_cache { tcx.layout_cache.borrow_mut().insert(self, layout); } + tcx.layout_depth.set(depth); Ok(layout) } diff --git a/src/librustc/ty/walk.rs b/src/librustc/ty/walk.rs index dd3a62f7cd2dd..bebdebf127a54 100644 --- a/src/librustc/ty/walk.rs +++ b/src/librustc/ty/walk.rs @@ -22,7 +22,7 @@ pub struct TypeWalker<'tcx> { impl<'tcx> TypeWalker<'tcx> { pub fn new(ty: Ty<'tcx>) -> TypeWalker<'tcx> { - TypeWalker { stack: vec!(ty), last_subtree: 1, } + TypeWalker { stack: vec![ty], last_subtree: 1, } } /// Skips the subtree of types corresponding to the last type diff --git a/src/librustc/ty/wf.rs b/src/librustc/ty/wf.rs index 0557660e98c2f..1135199d2254a 100644 --- a/src/librustc/ty/wf.rs +++ b/src/librustc/ty/wf.rs @@ -201,11 +201,11 @@ fn implied_bounds_from_components<'tcx>(sub_region: &'tcx ty::Region, .flat_map(|component| { match component { Component::Region(r) => - vec!(ImpliedBound::RegionSubRegion(sub_region, r)), + vec![ImpliedBound::RegionSubRegion(sub_region, r)], Component::Param(p) => - vec!(ImpliedBound::RegionSubParam(sub_region, p)), + vec![ImpliedBound::RegionSubParam(sub_region, p)], Component::Projection(p) => - vec!(ImpliedBound::RegionSubProjection(sub_region, p)), + vec![ImpliedBound::RegionSubProjection(sub_region, p)], Component::EscapingProjection(_) => // If the projection has escaping regions, don't // try to infer any implied bounds even for its @@ -215,9 +215,9 @@ fn implied_bounds_from_components<'tcx>(sub_region: &'tcx ty::Region, // idea is that the WAY that the caller proves // that may change in the future and we want to // give ourselves room to get smarter here. - vec!(), + vec![], Component::UnresolvedInferenceVariable(..) => - vec!(), + vec![], } }) .collect() diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 3b84ff86ab9fb..5ca567410291a 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -9,12 +9,13 @@ // except according to those terms. use hir::def_id::DefId; -use ty::subst::{self, Subst, Substs}; +use hir::map::definitions::DefPathData; +use ty::subst::{self, Subst}; use ty::{BrAnon, BrEnv, BrFresh, BrNamed}; use ty::{TyBool, TyChar, TyAdt}; use ty::{TyError, TyStr, TyArray, TySlice, TyFloat, TyFnDef, TyFnPtr}; use ty::{TyParam, TyRawPtr, TyRef, TyNever, TyTuple}; -use ty::TyClosure; +use ty::{TyClosure, TyProjection, TyAnon}; use ty::{TyBox, TyTrait, TyInt, TyUint, TyInfer}; use ty::{self, Ty, TyCtxt, TypeFoldable}; use ty::fold::{TypeFolder, TypeVisitor}; @@ -56,17 +57,9 @@ fn fn_sig(f: &mut fmt::Formatter, Ok(()) } -/// Namespace of the path given to parameterized to print. 
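The layout computation above now tracks `layout_depth` against the session's recursion limit, so a type whose layout depends on itself produces the fatal "overflow representing the type" error instead of recursing forever. A standalone sketch of that kind of depth guard (the `Ctxt`, `with_depth`, and `size_of` names and the toy type table are illustrative only):

```rust
use std::cell::Cell;

struct Ctxt {
    depth: Cell<usize>,
    recursion_limit: usize,
}

impl Ctxt {
    // Run `f` with the depth counter bumped, bailing out with an error
    // instead of overflowing the stack when nesting runs away.
    fn with_depth<T>(&self, f: impl FnOnce() -> T) -> Result<T, String> {
        let depth = self.depth.get();
        if depth > self.recursion_limit {
            return Err("overflow representing the type".to_string());
        }
        self.depth.set(depth + 1);
        let result = f();
        self.depth.set(depth);
        Ok(result)
    }
}

fn size_of(cx: &Ctxt, ty: &str) -> Result<usize, String> {
    cx.with_depth(|| match ty {
        "u8" => Ok(1),
        "u32" => Ok(4),
        // A deliberately self-referential "type" to trigger the guard.
        "loop" => size_of(cx, "loop"),
        _ => Ok(0),
    })?
}

fn main() {
    let cx = Ctxt { depth: Cell::new(0), recursion_limit: 64 };
    assert_eq!(size_of(&cx, "u32"), Ok(4));
    assert!(size_of(&cx, "loop").is_err());
}
```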
-#[derive(Copy, Clone, PartialEq, Debug)] -pub enum Ns { - Type, - Value -} - pub fn parameterized(f: &mut fmt::Formatter, substs: &subst::Substs, did: DefId, - ns: Ns, projections: &[ty::ProjectionPredicate]) -> fmt::Result { let mut verbose = false; @@ -75,8 +68,34 @@ pub fn parameterized(f: &mut fmt::Formatter, let mut num_regions = 0; let mut num_types = 0; let mut item_name = None; + let mut is_value_path = false; let fn_trait_kind = ty::tls::with(|tcx| { - let mut generics = tcx.lookup_generics(did); + // Unfortunately, some kinds of items (e.g., closures) don't have + // generics. So walk back up the find the closest parent that DOES + // have them. + let mut item_def_id = did; + loop { + let key = tcx.def_key(item_def_id); + match key.disambiguated_data.data { + DefPathData::TypeNs(_) => { + break; + } + DefPathData::ValueNs(_) | DefPathData::EnumVariant(_) => { + is_value_path = true; + break; + } + _ => { + // if we're making a symbol for something, there ought + // to be a value or type-def or something in there + // *somewhere* + item_def_id.index = key.parent.unwrap_or_else(|| { + bug!("finding type for {:?}, encountered def-id {:?} with no \ + parent", did, item_def_id); + }); + } + } + } + let mut generics = tcx.lookup_generics(item_def_id); let mut path_def_id = did; verbose = tcx.sess.verbose(); has_self = generics.has_self; @@ -84,7 +103,7 @@ pub fn parameterized(f: &mut fmt::Formatter, let mut child_types = 0; if let Some(def_id) = generics.parent { // Methods. - assert_eq!(ns, Ns::Value); + assert!(is_value_path); child_types = generics.types.len(); generics = tcx.lookup_generics(def_id); num_regions = generics.regions.len(); @@ -97,7 +116,7 @@ pub fn parameterized(f: &mut fmt::Formatter, item_name = Some(tcx.item_name(did)); path_def_id = def_id; } else { - if ns == Ns::Value { + if is_value_path { // Functions. assert_eq!(has_self, false); } else { @@ -192,7 +211,7 @@ pub fn parameterized(f: &mut fmt::Formatter, start_or_continue(f, "", ">")?; // For values, also print their name and type parameters. 
- if ns == Ns::Value { + if is_value_path { empty.set(true); if has_self { @@ -298,7 +317,6 @@ impl<'tcx> fmt::Display for TraitAndProjections<'tcx> { let TraitAndProjections(ref trait_ref, ref projection_bounds) = *self; parameterized(f, trait_ref.substs, trait_ref.def_id, - Ns::Type, projection_bounds) } } @@ -398,7 +416,7 @@ impl<'tcx> fmt::Debug for ty::ExistentialTraitRef<'tcx> { let trait_ref = tcx.lift(&ty::Binder(*self)) .expect("could not lift TraitRef for printing") .with_self_ty(tcx, dummy_self).0; - parameterized(f, trait_ref.substs, trait_ref.def_id, Ns::Type, &[]) + parameterized(f, trait_ref.substs, trait_ref.def_id, &[]) }) } } @@ -798,7 +816,7 @@ impl<'tcx> fmt::Display for ty::Binder fmt::Display for ty::TraitRef<'tcx> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - parameterized(f, self.substs, self.def_id, Ns::Type, &[]) + parameterized(f, self.substs, self.def_id, &[]) } } @@ -851,7 +869,7 @@ impl<'tcx> fmt::Display for ty::TypeVariants<'tcx> { } write!(f, "{} {{", bare_fn.sig.0)?; - parameterized(f, substs, def_id, Ns::Value, &[])?; + parameterized(f, substs, def_id, &[])?; write!(f, "}}") } TyFnPtr(ref bare_fn) => { @@ -874,19 +892,19 @@ impl<'tcx> fmt::Display for ty::TypeVariants<'tcx> { !tcx.tcache.borrow().contains_key(&def.did) { write!(f, "{}<..>", tcx.item_path_str(def.did)) } else { - parameterized(f, substs, def.did, Ns::Type, &[]) + parameterized(f, substs, def.did, &[]) } }) } TyTrait(ref data) => write!(f, "{}", data), - ty::TyProjection(ref data) => write!(f, "{}", data), - ty::TyAnon(def_id, substs) => { + TyProjection(ref data) => write!(f, "{}", data), + TyAnon(def_id, substs) => { ty::tls::with(|tcx| { // Grab the "TraitA + TraitB" from `impl TraitA + TraitB`, // by looking up the projections associated with the def_id. 
let item_predicates = tcx.lookup_predicates(def_id); let substs = tcx.lift(&substs).unwrap_or_else(|| { - Substs::empty(tcx) + tcx.intern_substs(&[]) }); let bounds = item_predicates.instantiate(tcx, substs); @@ -920,7 +938,8 @@ impl<'tcx> fmt::Display for ty::TypeVariants<'tcx> { let mut sep = " "; tcx.with_freevars(node_id, |freevars| { for (freevar, upvar_ty) in freevars.iter().zip(substs.upvar_tys) { - let node_id = freevar.def.var_id(); + let def_id = freevar.def.def_id(); + let node_id = tcx.map.as_local_node_id(def_id).unwrap(); write!(f, "{}{}:{}", sep, diff --git a/src/librustc_back/lib.rs b/src/librustc_back/lib.rs index 6a7bc51d15a42..da5f787bdf31e 100644 --- a/src/librustc_back/lib.rs +++ b/src/librustc_back/lib.rs @@ -36,18 +36,43 @@ #![feature(rand)] #![feature(rustc_private)] #![feature(staged_api)] -#![feature(step_by)] -#![feature(question_mark)] -#![cfg_attr(test, feature(test, rand))] +#![cfg_attr(stage0, feature(question_mark))] +#![cfg_attr(test, feature(rand))] extern crate syntax; extern crate libc; extern crate serialize; #[macro_use] extern crate log; +extern crate serialize as rustc_serialize; // used by deriving + pub mod tempdir; -pub mod rpath; -pub mod sha2; pub mod target; pub mod slice; pub mod dynamic_lib; + +use serialize::json::{Json, ToJson}; + +#[derive(Clone, Copy, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] +pub enum PanicStrategy { + Unwind, + Abort, +} + +impl PanicStrategy { + pub fn desc(&self) -> &str { + match *self { + PanicStrategy::Unwind => "unwind", + PanicStrategy::Abort => "abort", + } + } +} + +impl ToJson for PanicStrategy { + fn to_json(&self) -> Json { + match *self { + PanicStrategy::Abort => "abort".to_json(), + PanicStrategy::Unwind => "unwind".to_json(), + } + } +} diff --git a/src/librustc_back/sha2.rs b/src/librustc_back/sha2.rs deleted file mode 100644 index 97fb39c17ea0e..0000000000000 --- a/src/librustc_back/sha2.rs +++ /dev/null @@ -1,679 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! This module implements only the Sha256 function since that is all that is needed for internal -//! use. This implementation is not intended for external use or for any use where security is -//! important. - -use serialize::hex::ToHex; - -/// Write a u32 into a vector, which must be 4 bytes long. The value is written in big-endian -/// format. -fn write_u32_be(dst: &mut[u8], input: u32) { - dst[0] = (input >> 24) as u8; - dst[1] = (input >> 16) as u8; - dst[2] = (input >> 8) as u8; - dst[3] = input as u8; -} - -/// Read the value of a vector of bytes as a u32 value in big-endian format. -fn read_u32_be(input: &[u8]) -> u32 { - (input[0] as u32) << 24 | - (input[1] as u32) << 16 | - (input[2] as u32) << 8 | - (input[3] as u32) -} - -/// Read a vector of bytes into a vector of u32s. The values are read in big-endian format. -fn read_u32v_be(dst: &mut[u32], input: &[u8]) { - assert!(dst.len() * 4 == input.len()); - let mut pos = 0; - for chunk in input.chunks(4) { - dst[pos] = read_u32_be(chunk); - pos += 1; - } -} - -trait ToBits: Sized { - /// Convert the value in bytes to the number of bits, a tuple where the 1st item is the - /// high-order value and the 2nd item is the low order value. 
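The removed `sha2.rs` opens with hand-rolled big-endian helpers. They are easy to sanity-check in isolation against std's `to_be_bytes`; a quick round-trip (the two function bodies are copied from the removed file):

```rust
fn write_u32_be(dst: &mut [u8], input: u32) {
    dst[0] = (input >> 24) as u8;
    dst[1] = (input >> 16) as u8;
    dst[2] = (input >> 8) as u8;
    dst[3] = input as u8;
}

fn read_u32_be(input: &[u8]) -> u32 {
    (input[0] as u32) << 24 |
    (input[1] as u32) << 16 |
    (input[2] as u32) << 8 |
    (input[3] as u32)
}

fn main() {
    let v: u32 = 0xdead_beef;
    let mut buf = [0u8; 4];
    write_u32_be(&mut buf, v);
    assert_eq!(buf, v.to_be_bytes()); // agrees with std's big-endian encoding
    assert_eq!(read_u32_be(&buf), v);
}
```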
- fn to_bits(self) -> (Self, Self); -} - -impl ToBits for u64 { - fn to_bits(self) -> (u64, u64) { - (self >> 61, self << 3) - } -} - -/// Adds the specified number of bytes to the bit count. panic!() if this would cause numeric -/// overflow. -fn add_bytes_to_bits(bits: u64, bytes: u64) -> u64 { - let (new_high_bits, new_low_bits) = bytes.to_bits(); - - if new_high_bits > 0 { - panic!("numeric overflow occurred.") - } - - match bits.checked_add(new_low_bits) { - Some(x) => x, - None => panic!("numeric overflow occurred.") - } -} - -/// A FixedBuffer, likes its name implies, is a fixed size buffer. When the buffer becomes full, it -/// must be processed. The input() method takes care of processing and then clearing the buffer -/// automatically. However, other methods do not and require the caller to process the buffer. Any -/// method that modifies the buffer directory or provides the caller with bytes that can be modified -/// results in those bytes being marked as used by the buffer. -trait FixedBuffer { - /// Input a vector of bytes. If the buffer becomes full, process it with the provided - /// function and then clear the buffer. - fn input(&mut self, input: &[u8], func: F) where - F: FnMut(&[u8]); - - /// Reset the buffer. - fn reset(&mut self); - - /// Zero the buffer up until the specified index. The buffer position currently must not be - /// greater than that index. - fn zero_until(&mut self, idx: usize); - - /// Get a slice of the buffer of the specified size. There must be at least that many bytes - /// remaining in the buffer. - fn next<'s>(&'s mut self, len: usize) -> &'s mut [u8]; - - /// Get the current buffer. The buffer must already be full. This clears the buffer as well. - fn full_buffer<'s>(&'s mut self) -> &'s [u8]; - - /// Get the current position of the buffer. - fn position(&self) -> usize; - - /// Get the number of bytes remaining in the buffer until it is full. - fn remaining(&self) -> usize; - - /// Get the size of the buffer - fn size(&self) -> usize; -} - -/// A FixedBuffer of 64 bytes useful for implementing Sha256 which has a 64 byte blocksize. -struct FixedBuffer64 { - buffer: [u8; 64], - buffer_idx: usize, -} - -impl FixedBuffer64 { - /// Create a new FixedBuffer64 - fn new() -> FixedBuffer64 { - FixedBuffer64 { - buffer: [0; 64], - buffer_idx: 0 - } - } -} - -impl FixedBuffer for FixedBuffer64 { - fn input(&mut self, input: &[u8], mut func: F) where - F: FnMut(&[u8]), - { - let mut i = 0; - - let size = self.size(); - - // If there is already data in the buffer, copy as much as we can into it and process - // the data if the buffer becomes full. - if self.buffer_idx != 0 { - let buffer_remaining = size - self.buffer_idx; - if input.len() >= buffer_remaining { - self.buffer[self.buffer_idx..size] - .copy_from_slice(&input[..buffer_remaining]); - self.buffer_idx = 0; - func(&self.buffer); - i += buffer_remaining; - } else { - self.buffer[self.buffer_idx..self.buffer_idx + input.len()] - .copy_from_slice(input); - self.buffer_idx += input.len(); - return; - } - } - - // While we have at least a full buffer size chunk's worth of data, process that data - // without copying it into the buffer - while input.len() - i >= size { - func(&input[i..i + size]); - i += size; - } - - // Copy any input data into the buffer. At this point in the method, the amount of - // data left in the input vector will be less than the buffer size and the buffer will - // be empty. 
- let input_remaining = input.len() - i; - self.buffer[..input_remaining].copy_from_slice(&input[i..]); - self.buffer_idx += input_remaining; - } - - fn reset(&mut self) { - self.buffer_idx = 0; - } - - fn zero_until(&mut self, idx: usize) { - assert!(idx >= self.buffer_idx); - for slot in self.buffer[self.buffer_idx..idx].iter_mut() { - *slot = 0; - } - self.buffer_idx = idx; - } - - fn next<'s>(&'s mut self, len: usize) -> &'s mut [u8] { - self.buffer_idx += len; - &mut self.buffer[self.buffer_idx - len..self.buffer_idx] - } - - fn full_buffer<'s>(&'s mut self) -> &'s [u8] { - assert!(self.buffer_idx == 64); - self.buffer_idx = 0; - &self.buffer[..64] - } - - fn position(&self) -> usize { self.buffer_idx } - - fn remaining(&self) -> usize { 64 - self.buffer_idx } - - fn size(&self) -> usize { 64 } -} - -/// The StandardPadding trait adds a method useful for Sha256 to a FixedBuffer struct. -trait StandardPadding { - /// Add padding to the buffer. The buffer must not be full when this method is called and is - /// guaranteed to have exactly rem remaining bytes when it returns. If there are not at least - /// rem bytes available, the buffer will be zero padded, processed, cleared, and then filled - /// with zeros again until only rem bytes are remaining. - fn standard_padding(&mut self, rem: usize, func: F) where F: FnMut(&[u8]); -} - -impl StandardPadding for T { - fn standard_padding(&mut self, rem: usize, mut func: F) where F: FnMut(&[u8]) { - let size = self.size(); - - self.next(1)[0] = 128; - - if self.remaining() < rem { - self.zero_until(size); - func(self.full_buffer()); - } - - self.zero_until(size - rem); - } -} - -/// The Digest trait specifies an interface common to digest functions, such as SHA-1 and the SHA-2 -/// family of digest functions. -pub trait Digest { - /// Provide message data. - /// - /// # Arguments - /// - /// * input - A vector of message data - fn input(&mut self, input: &[u8]); - - /// Retrieve the digest result. This method may be called multiple times. - /// - /// # Arguments - /// - /// * out - the vector to hold the result. Must be large enough to contain output_bits(). - fn result(&mut self, out: &mut [u8]); - - /// Reset the digest. This method must be called after result() and before supplying more - /// data. - fn reset(&mut self); - - /// Get the output size in bits. - fn output_bits(&self) -> usize; - - /// Convenience function that feeds a string into a digest. - /// - /// # Arguments - /// - /// * `input` The string to feed into the digest - fn input_str(&mut self, input: &str) { - self.input(input.as_bytes()); - } - - /// Convenience function that retrieves the result of a digest as a - /// newly allocated vec of bytes. - fn result_bytes(&mut self) -> Vec { - let mut buf = vec![0; (self.output_bits()+7)/8]; - self.result(&mut buf); - buf - } - - /// Convenience function that retrieves the result of a digest as a - /// String in hexadecimal format. 
- fn result_str(&mut self) -> String { - self.result_bytes().to_hex().to_string() - } -} - -// A structure that represents that state of a digest computation for the SHA-2 512 family of digest -// functions -struct Engine256State { - h0: u32, - h1: u32, - h2: u32, - h3: u32, - h4: u32, - h5: u32, - h6: u32, - h7: u32, -} - -impl Engine256State { - fn new(h: &[u32; 8]) -> Engine256State { - Engine256State { - h0: h[0], - h1: h[1], - h2: h[2], - h3: h[3], - h4: h[4], - h5: h[5], - h6: h[6], - h7: h[7] - } - } - - fn reset(&mut self, h: &[u32; 8]) { - self.h0 = h[0]; - self.h1 = h[1]; - self.h2 = h[2]; - self.h3 = h[3]; - self.h4 = h[4]; - self.h5 = h[5]; - self.h6 = h[6]; - self.h7 = h[7]; - } - - fn process_block(&mut self, data: &[u8]) { - fn ch(x: u32, y: u32, z: u32) -> u32 { - ((x & y) ^ ((!x) & z)) - } - - fn maj(x: u32, y: u32, z: u32) -> u32 { - ((x & y) ^ (x & z) ^ (y & z)) - } - - fn sum0(x: u32) -> u32 { - ((x >> 2) | (x << 30)) ^ ((x >> 13) | (x << 19)) ^ ((x >> 22) | (x << 10)) - } - - fn sum1(x: u32) -> u32 { - ((x >> 6) | (x << 26)) ^ ((x >> 11) | (x << 21)) ^ ((x >> 25) | (x << 7)) - } - - fn sigma0(x: u32) -> u32 { - ((x >> 7) | (x << 25)) ^ ((x >> 18) | (x << 14)) ^ (x >> 3) - } - - fn sigma1(x: u32) -> u32 { - ((x >> 17) | (x << 15)) ^ ((x >> 19) | (x << 13)) ^ (x >> 10) - } - - let mut a = self.h0; - let mut b = self.h1; - let mut c = self.h2; - let mut d = self.h3; - let mut e = self.h4; - let mut f = self.h5; - let mut g = self.h6; - let mut h = self.h7; - - let mut w = [0; 64]; - - // Sha-512 and Sha-256 use basically the same calculations which are implemented - // by these macros. Inlining the calculations seems to result in better generated code. - macro_rules! schedule_round { ($t:expr) => ( - w[$t] = sigma1(w[$t - 2]).wrapping_add(w[$t - 7]) - .wrapping_add(sigma0(w[$t - 15])).wrapping_add(w[$t - 16]); - ) - } - - macro_rules! sha2_round { - ($A:ident, $B:ident, $C:ident, $D:ident, - $E:ident, $F:ident, $G:ident, $H:ident, $K:ident, $t:expr) => ( - { - $H = $H.wrapping_add(sum1($E)).wrapping_add(ch($E, $F, $G)) - .wrapping_add($K[$t]).wrapping_add(w[$t]); - $D = $D.wrapping_add($H); - $H = $H.wrapping_add(sum0($A)).wrapping_add(maj($A, $B, $C)); - } - ) - } - - read_u32v_be(&mut w[0..16], data); - - // Putting the message schedule inside the same loop as the round calculations allows for - // the compiler to generate better code. 
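The shift/or pairs in the removed round functions are plain 32-bit rotations, which is easy to confirm against `u32::rotate_right` (the `sum0` body is copied from the removed file):

```rust
fn sum0(x: u32) -> u32 {
    ((x >> 2) | (x << 30)) ^ ((x >> 13) | (x << 19)) ^ ((x >> 22) | (x << 10))
}

fn main() {
    let x: u32 = 0x1234_5678;
    // Each shift/or pair is a fixed rotation of the 32-bit word.
    assert_eq!((x >> 2) | (x << 30), x.rotate_right(2));
    assert_eq!(sum0(x), x.rotate_right(2) ^ x.rotate_right(13) ^ x.rotate_right(22));
}
```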
- for t in (0..48).step_by(8) { - schedule_round!(t + 16); - schedule_round!(t + 17); - schedule_round!(t + 18); - schedule_round!(t + 19); - schedule_round!(t + 20); - schedule_round!(t + 21); - schedule_round!(t + 22); - schedule_round!(t + 23); - - sha2_round!(a, b, c, d, e, f, g, h, K32, t); - sha2_round!(h, a, b, c, d, e, f, g, K32, t + 1); - sha2_round!(g, h, a, b, c, d, e, f, K32, t + 2); - sha2_round!(f, g, h, a, b, c, d, e, K32, t + 3); - sha2_round!(e, f, g, h, a, b, c, d, K32, t + 4); - sha2_round!(d, e, f, g, h, a, b, c, K32, t + 5); - sha2_round!(c, d, e, f, g, h, a, b, K32, t + 6); - sha2_round!(b, c, d, e, f, g, h, a, K32, t + 7); - } - - for t in (48..64).step_by(8) { - sha2_round!(a, b, c, d, e, f, g, h, K32, t); - sha2_round!(h, a, b, c, d, e, f, g, K32, t + 1); - sha2_round!(g, h, a, b, c, d, e, f, K32, t + 2); - sha2_round!(f, g, h, a, b, c, d, e, K32, t + 3); - sha2_round!(e, f, g, h, a, b, c, d, K32, t + 4); - sha2_round!(d, e, f, g, h, a, b, c, K32, t + 5); - sha2_round!(c, d, e, f, g, h, a, b, K32, t + 6); - sha2_round!(b, c, d, e, f, g, h, a, K32, t + 7); - } - - self.h0 = self.h0.wrapping_add(a); - self.h1 = self.h1.wrapping_add(b); - self.h2 = self.h2.wrapping_add(c); - self.h3 = self.h3.wrapping_add(d); - self.h4 = self.h4.wrapping_add(e); - self.h5 = self.h5.wrapping_add(f); - self.h6 = self.h6.wrapping_add(g); - self.h7 = self.h7.wrapping_add(h); - } -} - -static K32: [u32; 64] = [ - 0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, - 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5, - 0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, - 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174, - 0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, - 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da, - 0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, - 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967, - 0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, - 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85, - 0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, - 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070, - 0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, - 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3, - 0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, - 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2 -]; - -// A structure that keeps track of the state of the Sha-256 operation and contains the logic -// necessary to perform the final calculations. 
-struct Engine256 { - length_bits: u64, - buffer: FixedBuffer64, - state: Engine256State, - finished: bool, -} - -impl Engine256 { - fn new(h: &[u32; 8]) -> Engine256 { - Engine256 { - length_bits: 0, - buffer: FixedBuffer64::new(), - state: Engine256State::new(h), - finished: false - } - } - - fn reset(&mut self, h: &[u32; 8]) { - self.length_bits = 0; - self.buffer.reset(); - self.state.reset(h); - self.finished = false; - } - - fn input(&mut self, input: &[u8]) { - assert!(!self.finished); - // Assumes that input.len() can be converted to u64 without overflow - self.length_bits = add_bytes_to_bits(self.length_bits, input.len() as u64); - let self_state = &mut self.state; - self.buffer.input(input, |input: &[u8]| { self_state.process_block(input) }); - } - - fn finish(&mut self) { - if !self.finished { - let self_state = &mut self.state; - self.buffer.standard_padding(8, |input: &[u8]| { self_state.process_block(input) }); - write_u32_be(self.buffer.next(4), (self.length_bits >> 32) as u32 ); - write_u32_be(self.buffer.next(4), self.length_bits as u32); - self_state.process_block(self.buffer.full_buffer()); - - self.finished = true; - } - } -} - -/// The SHA-256 hash algorithm -pub struct Sha256 { - engine: Engine256 -} - -impl Sha256 { - /// Construct a new instance of a SHA-256 digest. - /// Do not – under any circumstances – use this where timing attacks might be possible! - pub fn new() -> Sha256 { - Sha256 { - engine: Engine256::new(&H256) - } - } -} - -impl Digest for Sha256 { - fn input(&mut self, d: &[u8]) { - self.engine.input(d); - } - - fn result(&mut self, out: &mut [u8]) { - self.engine.finish(); - - write_u32_be(&mut out[0..4], self.engine.state.h0); - write_u32_be(&mut out[4..8], self.engine.state.h1); - write_u32_be(&mut out[8..12], self.engine.state.h2); - write_u32_be(&mut out[12..16], self.engine.state.h3); - write_u32_be(&mut out[16..20], self.engine.state.h4); - write_u32_be(&mut out[20..24], self.engine.state.h5); - write_u32_be(&mut out[24..28], self.engine.state.h6); - write_u32_be(&mut out[28..32], self.engine.state.h7); - } - - fn reset(&mut self) { - self.engine.reset(&H256); - } - - fn output_bits(&self) -> usize { 256 } -} - -static H256: [u32; 8] = [ - 0x6a09e667, - 0xbb67ae85, - 0x3c6ef372, - 0xa54ff53a, - 0x510e527f, - 0x9b05688c, - 0x1f83d9ab, - 0x5be0cd19 -]; - -#[cfg(test)] -mod tests { - #![allow(deprecated)] - extern crate rand; - - use self::rand::Rng; - use self::rand::isaac::IsaacRng; - use serialize::hex::FromHex; - use std::u64; - use super::{Digest, Sha256}; - - // A normal addition - no overflow occurs - #[test] - fn test_add_bytes_to_bits_ok() { - assert!(super::add_bytes_to_bits(100, 10) == 180); - } - - // A simple failure case - adding 1 to the max value - #[test] - #[should_panic] - fn test_add_bytes_to_bits_overflow() { - super::add_bytes_to_bits(u64::MAX, 1); - } - - struct Test { - input: String, - output_str: String, - } - - fn test_hash(sh: &mut D, tests: &[Test]) { - // Test that it works when accepting the message all at once - for t in tests { - sh.reset(); - sh.input_str(&t.input); - let out_str = sh.result_str(); - assert!(out_str == t.output_str); - } - - // Test that it works when accepting the message in pieces - for t in tests { - sh.reset(); - let len = t.input.len(); - let mut left = len; - while left > 0 { - let take = (left + 1) / 2; - sh.input_str(&t.input[len - left..take + len - left]); - left = left - take; - } - let out_str = sh.result_str(); - assert!(out_str == t.output_str); - } - } - - #[test] - fn test_sha256() 
{ - // Examples from wikipedia - let wikipedia_tests = vec!( - Test { - input: "".to_string(), - output_str: "e3b0c44298fc1c149afb\ - f4c8996fb92427ae41e4649b934ca495991b7852b855".to_string() - }, - Test { - input: "The quick brown fox jumps over the lazy \ - dog".to_string(), - output_str: "d7a8fbb307d7809469ca\ - 9abcb0082e4f8d5651e46d3cdb762d02d0bf37c9e592".to_string() - }, - Test { - input: "The quick brown fox jumps over the lazy \ - dog.".to_string(), - output_str: "ef537f25c895bfa78252\ - 6529a9b63d97aa631564d5d789c2b765448c8635fb6c".to_string() - }); - - let tests = wikipedia_tests; - - let mut sh: Box<_> = box Sha256::new(); - - test_hash(&mut *sh, &tests); - } - - /// Feed 1,000,000 'a's into the digest with varying input sizes and check that the result is - /// correct. - fn test_digest_1million_random(digest: &mut D, blocksize: usize, expected: &str) { - let total_size = 1000000; - let buffer = vec![b'a'; blocksize * 2]; - let mut rng = IsaacRng::new_unseeded(); - let mut count = 0; - - digest.reset(); - - while count < total_size { - let next: usize = rng.gen_range(0, 2 * blocksize + 1); - let remaining = total_size - count; - let size = if next > remaining { remaining } else { next }; - digest.input(&buffer[..size]); - count += size; - } - - let result_str = digest.result_str(); - let result_bytes = digest.result_bytes(); - - assert_eq!(expected, result_str); - - let expected_vec: Vec = expected.from_hex() - .unwrap() - .into_iter() - .collect(); - assert_eq!(expected_vec, result_bytes); - } - - #[test] - fn test_1million_random_sha256() { - let mut sh = Sha256::new(); - test_digest_1million_random( - &mut sh, - 64, - "cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0"); - } -} - -#[cfg(test)] -mod bench { - extern crate test; - use self::test::Bencher; - use super::{Sha256, Digest}; - - #[bench] - pub fn sha256_10(b: &mut Bencher) { - let mut sh = Sha256::new(); - let bytes = [1; 10]; - b.iter(|| { - sh.input(&bytes); - }); - b.bytes = bytes.len() as u64; - } - - #[bench] - pub fn sha256_1k(b: &mut Bencher) { - let mut sh = Sha256::new(); - let bytes = [1; 1024]; - b.iter(|| { - sh.input(&bytes); - }); - b.bytes = bytes.len() as u64; - } - - #[bench] - pub fn sha256_64k(b: &mut Bencher) { - let mut sh = Sha256::new(); - let bytes = [1; 65536]; - b.iter(|| { - sh.input(&bytes); - }); - b.bytes = bytes.len() as u64; - } -} diff --git a/src/librustc_back/target/aarch64_apple_ios.rs b/src/librustc_back/target/aarch64_apple_ios.rs index 660ed0ac7b843..5ef79359140f7 100644 --- a/src/librustc_back/target/aarch64_apple_ios.rs +++ b/src/librustc_back/target/aarch64_apple_ios.rs @@ -25,7 +25,8 @@ pub fn target() -> TargetResult { options: TargetOptions { features: "+neon,+fp-armv8,+cyclone".to_string(), eliminate_frame_pointer: false, - max_atomic_width: 128, + max_atomic_width: Some(128), + abi_blacklist: super::arm_base::abi_blacklist(), .. base }, }) diff --git a/src/librustc_back/target/aarch64_linux_android.rs b/src/librustc_back/target/aarch64_linux_android.rs index 7f54dab5b5385..140195c780b9c 100644 --- a/src/librustc_back/target/aarch64_linux_android.rs +++ b/src/librustc_back/target/aarch64_linux_android.rs @@ -8,11 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
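The removed tests above pin down the behaviour of an `add_bytes_to_bits` helper whose body is not shown in this hunk: `add_bytes_to_bits(100, 10) == 180`, and overflowing the `u64` bit count must panic. The sketch below is purely an illustration consistent with those tests, paired with the big-endian length trailer that `Engine256::finish` writes:

```rust
// Illustrative only. The behaviour is inferred from the removed tests:
// add_bytes_to_bits(100, 10) == 180, and overflowing the bit count panics.
fn add_bytes_to_bits(bits: u64, bytes: u64) -> u64 {
    let new_bits = bytes.checked_mul(8).expect("message length overflows u64 bit count");
    bits.checked_add(new_bits).expect("message length overflows u64 bit count")
}

// `Engine256::finish` above appends the total length as two big-endian u32
// words (high word first), i.e. an 8-byte trailer after the padding.
fn length_trailer(length_bits: u64) -> [u8; 8] {
    let mut out = [0u8; 8];
    out[..4].copy_from_slice(&((length_bits >> 32) as u32).to_be_bytes());
    out[4..].copy_from_slice(&(length_bits as u32).to_be_bytes());
    out
}

fn main() {
    assert_eq!(add_bytes_to_bits(100, 10), 180);
    // "abc" is 3 bytes = 24 bits, so the trailer ends in 0x18.
    assert_eq!(length_trailer(add_bytes_to_bits(0, 3)), [0, 0, 0, 0, 0, 0, 0, 0x18]);
}
```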
-use target::{Target, TargetResult}; +use target::{Target, TargetOptions, TargetResult}; pub fn target() -> TargetResult { let mut base = super::android_base::opts(); - base.max_atomic_width = 128; + base.max_atomic_width = Some(128); // As documented in http://developer.android.com/ndk/guides/cpu-features.html // the neon (ASIMD) and FP must exist on all android aarch64 targets. base.features = "+neon,+fp-armv8".to_string(); @@ -25,6 +25,9 @@ pub fn target() -> TargetResult { target_os: "android".to_string(), target_env: "".to_string(), target_vendor: "unknown".to_string(), - options: base, + options: TargetOptions { + abi_blacklist: super::arm_base::abi_blacklist(), + .. base + }, }) } diff --git a/src/librustc_back/target/aarch64_unknown_fuchsia.rs b/src/librustc_back/target/aarch64_unknown_fuchsia.rs new file mode 100644 index 0000000000000..a93a46d140238 --- /dev/null +++ b/src/librustc_back/target/aarch64_unknown_fuchsia.rs @@ -0,0 +1,28 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use target::{Target, TargetResult}; + +pub fn target() -> TargetResult { + let mut base = super::fuchsia_base::opts(); + base.max_atomic_width = Some(128); + + Ok(Target { + llvm_target: "aarch64-unknown-fuchsia".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "64".to_string(), + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-i128:128-n32:64-S128".to_string(), + arch: "aarch64".to_string(), + target_os: "fuchsia".to_string(), + target_env: "".to_string(), + target_vendor: "unknown".to_string(), + options: base, + }) +} diff --git a/src/librustc_back/target/aarch64_unknown_linux_gnu.rs b/src/librustc_back/target/aarch64_unknown_linux_gnu.rs index cca965f9d4ff1..5f6335d405f5e 100644 --- a/src/librustc_back/target/aarch64_unknown_linux_gnu.rs +++ b/src/librustc_back/target/aarch64_unknown_linux_gnu.rs @@ -8,11 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use target::{Target, TargetResult}; +use target::{Target, TargetOptions, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_base::opts(); - base.max_atomic_width = 128; + base.max_atomic_width = Some(128); + + // see #36994 + base.exe_allocation_crate = "alloc_system".to_string(); + Ok(Target { llvm_target: "aarch64-unknown-linux-gnu".to_string(), target_endian: "little".to_string(), @@ -22,6 +26,9 @@ pub fn target() -> TargetResult { arch: "aarch64".to_string(), target_os: "linux".to_string(), target_vendor: "unknown".to_string(), - options: base, + options: TargetOptions { + abi_blacklist: super::arm_base::abi_blacklist(), + .. base + }, }) } diff --git a/src/librustc_back/target/arm_base.rs b/src/librustc_back/target/arm_base.rs new file mode 100644 index 0000000000000..ad132c27cb841 --- /dev/null +++ b/src/librustc_back/target/arm_base.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
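Several target files in this patch replace `options: base` with a struct-update expression so that individual fields (here `abi_blacklist`) can be overridden while the remaining platform defaults are inherited from the base options. A self-contained sketch of that `.. base` pattern, using a hypothetical `Opts` type rather than the real `TargetOptions`:

```rust
// Hypothetical stand-in for TargetOptions; the real type in target/mod.rs has
// many more fields and a hand-written Default impl.
#[derive(Clone, Default)]
struct Opts {
    linker: String,
    max_atomic_width: Option<u64>,
    abi_blacklist: Vec<String>,
}

fn android_base() -> Opts {
    Opts { linker: "cc".to_string(), ..Default::default() }
}

fn main() {
    let base = android_base();
    // Functional struct update: override two fields, inherit everything else.
    let target = Opts {
        max_atomic_width: Some(128),
        abi_blacklist: vec!["stdcall".to_string()],
        ..base.clone()
    };
    assert_eq!(target.linker, base.linker);
    assert_eq!(target.max_atomic_width, Some(128));
}
```

The same idiom appears in the armv7, iOS, and musl targets further down in this patch.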
+ +use syntax::abi::Abi; + +// All the calling conventions trigger an assertion(Unsupported calling convention) in llvm on arm +pub fn abi_blacklist() -> Vec { + vec![Abi::Stdcall, Abi::Fastcall, Abi::Vectorcall, Abi::Win64, Abi::SysV64] +} diff --git a/src/librustc_back/target/arm_linux_androideabi.rs b/src/librustc_back/target/arm_linux_androideabi.rs index f3a18b13c6783..c7d2df4344cb1 100644 --- a/src/librustc_back/target/arm_linux_androideabi.rs +++ b/src/librustc_back/target/arm_linux_androideabi.rs @@ -8,12 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use target::{Target, TargetResult}; +use target::{Target, TargetOptions, TargetResult}; pub fn target() -> TargetResult { let mut base = super::android_base::opts(); base.features = "+v7,+vfp3,+d16".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { llvm_target: "arm-linux-androideabi".to_string(), @@ -24,6 +24,9 @@ pub fn target() -> TargetResult { target_os: "android".to_string(), target_env: "".to_string(), target_vendor: "unknown".to_string(), - options: base, + options: TargetOptions { + abi_blacklist: super::arm_base::abi_blacklist(), + .. base + }, }) } diff --git a/src/librustc_back/target/arm_unknown_linux_gnueabi.rs b/src/librustc_back/target/arm_unknown_linux_gnueabi.rs index e666a8460e5ea..77d35edfbd09c 100644 --- a/src/librustc_back/target/arm_unknown_linux_gnueabi.rs +++ b/src/librustc_back/target/arm_unknown_linux_gnueabi.rs @@ -12,7 +12,7 @@ use target::{Target, TargetOptions, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_base::opts(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { llvm_target: "arm-unknown-linux-gnueabi".to_string(), target_endian: "little".to_string(), @@ -25,6 +25,7 @@ pub fn target() -> TargetResult { options: TargetOptions { features: "+v6".to_string(), + abi_blacklist: super::arm_base::abi_blacklist(), .. base }, }) diff --git a/src/librustc_back/target/arm_unknown_linux_gnueabihf.rs b/src/librustc_back/target/arm_unknown_linux_gnueabihf.rs index d65c89abc2064..b183412be1934 100644 --- a/src/librustc_back/target/arm_unknown_linux_gnueabihf.rs +++ b/src/librustc_back/target/arm_unknown_linux_gnueabihf.rs @@ -12,7 +12,7 @@ use target::{Target, TargetOptions, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_base::opts(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { llvm_target: "arm-unknown-linux-gnueabihf".to_string(), target_endian: "little".to_string(), @@ -25,6 +25,7 @@ pub fn target() -> TargetResult { options: TargetOptions { features: "+v6,+vfp2".to_string(), + abi_blacklist: super::arm_base::abi_blacklist(), .. base } }) diff --git a/src/librustc_back/target/arm_unknown_linux_musleabi.rs b/src/librustc_back/target/arm_unknown_linux_musleabi.rs index 028c91eadaeb3..261d4353c7a09 100644 --- a/src/librustc_back/target/arm_unknown_linux_musleabi.rs +++ b/src/librustc_back/target/arm_unknown_linux_musleabi.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use target::{Target, TargetResult}; +use target::{Target, TargetOptions, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_musl_base::opts(); @@ -16,7 +16,7 @@ pub fn target() -> TargetResult { // Most of these settings are copied from the arm_unknown_linux_gnueabi // target. 
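Later in this patch, `Target::is_abi_supported` consults this blacklist and always accepts generic ABIs, which cannot be blacklisted. A simplified, self-contained model of that check, with a small stand-in enum instead of the real `syntax::abi::Abi`:

```rust
// Small stand-in for syntax::abi::Abi, just enough to model the new check.
#[derive(Clone, Copy, PartialEq)]
enum Abi {
    // "Generic" ABIs exist on every platform and can never be blacklisted.
    Rust,
    C,
    // Platform-specific ABIs may be blacklisted by a target.
    Stdcall,
    Fastcall,
}

impl Abi {
    fn generic(self) -> bool {
        match self {
            Abi::Rust | Abi::C => true,
            _ => false,
        }
    }
}

struct Target {
    abi_blacklist: Vec<Abi>,
}

impl Target {
    // Mirrors the logic added to target/mod.rs later in this patch: generic
    // ABIs are always fine, anything else must not appear in the blacklist.
    fn is_abi_supported(&self, abi: Abi) -> bool {
        abi.generic() || !self.abi_blacklist.contains(&abi)
    }
}

fn main() {
    let arm_like = Target { abi_blacklist: vec![Abi::Stdcall, Abi::Fastcall] };
    assert!(arm_like.is_abi_supported(Abi::Rust));
    assert!(arm_like.is_abi_supported(Abi::C));
    assert!(!arm_like.is_abi_supported(Abi::Stdcall));
}
```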
base.features = "+v6".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { // It's important we use "gnueabi" and not "musleabi" here. LLVM uses it // to determine the calling convention and float ABI, and it doesn't @@ -29,6 +29,9 @@ pub fn target() -> TargetResult { target_os: "linux".to_string(), target_env: "musl".to_string(), target_vendor: "unknown".to_string(), - options: base, + options: TargetOptions { + abi_blacklist: super::arm_base::abi_blacklist(), + .. base + }, }) } diff --git a/src/librustc_back/target/arm_unknown_linux_musleabihf.rs b/src/librustc_back/target/arm_unknown_linux_musleabihf.rs index c7dda186f4258..1443dcf5bad41 100644 --- a/src/librustc_back/target/arm_unknown_linux_musleabihf.rs +++ b/src/librustc_back/target/arm_unknown_linux_musleabihf.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use target::{Target, TargetResult}; +use target::{Target, TargetOptions, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_musl_base::opts(); @@ -16,7 +16,7 @@ pub fn target() -> TargetResult { // Most of these settings are copied from the arm_unknown_linux_gnueabihf // target. base.features = "+v6,+vfp2".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { // It's important we use "gnueabihf" and not "musleabihf" here. LLVM // uses it to determine the calling convention and float ABI, and it @@ -29,6 +29,9 @@ pub fn target() -> TargetResult { target_os: "linux".to_string(), target_env: "musl".to_string(), target_vendor: "unknown".to_string(), - options: base, + options: TargetOptions { + abi_blacklist: super::arm_base::abi_blacklist(), + .. base + }, }) } diff --git a/src/librustc_back/target/armv7_apple_ios.rs b/src/librustc_back/target/armv7_apple_ios.rs index 71533a09b1672..9e9c443930624 100644 --- a/src/librustc_back/target/armv7_apple_ios.rs +++ b/src/librustc_back/target/armv7_apple_ios.rs @@ -24,7 +24,8 @@ pub fn target() -> TargetResult { target_vendor: "apple".to_string(), options: TargetOptions { features: "+v7,+vfp3,+neon".to_string(), - max_atomic_width: 64, + max_atomic_width: Some(64), + abi_blacklist: super::arm_base::abi_blacklist(), .. base } }) diff --git a/src/librustc_back/target/armv7_linux_androideabi.rs b/src/librustc_back/target/armv7_linux_androideabi.rs index 1c59262e04198..42f0deaa3fbff 100644 --- a/src/librustc_back/target/armv7_linux_androideabi.rs +++ b/src/librustc_back/target/armv7_linux_androideabi.rs @@ -8,12 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use target::{Target, TargetResult}; +use target::{Target, TargetOptions, TargetResult}; pub fn target() -> TargetResult { let mut base = super::android_base::opts(); base.features = "+v7,+thumb2,+vfp3,+d16".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { llvm_target: "armv7-none-linux-android".to_string(), @@ -24,6 +24,9 @@ pub fn target() -> TargetResult { target_os: "android".to_string(), target_env: "".to_string(), target_vendor: "unknown".to_string(), - options: base, + options: TargetOptions { + abi_blacklist: super::arm_base::abi_blacklist(), + .. 
base + }, }) } diff --git a/src/librustc_back/target/armv7_unknown_linux_gnueabihf.rs b/src/librustc_back/target/armv7_unknown_linux_gnueabihf.rs index 7e0306a03e2f6..96ccedd5bea5c 100644 --- a/src/librustc_back/target/armv7_unknown_linux_gnueabihf.rs +++ b/src/librustc_back/target/armv7_unknown_linux_gnueabihf.rs @@ -24,9 +24,10 @@ pub fn target() -> TargetResult { options: TargetOptions { // Info about features at https://wiki.debian.org/ArmHardFloatPort - features: "+v7,+vfp3,+d16,+thumb2".to_string(), + features: "+v7,+vfp3,+d16,+thumb2,-neon".to_string(), cpu: "generic".to_string(), - max_atomic_width: 64, + max_atomic_width: Some(64), + abi_blacklist: super::arm_base::abi_blacklist(), .. base } }) diff --git a/src/librustc_back/target/armv7_unknown_linux_musleabihf.rs b/src/librustc_back/target/armv7_unknown_linux_musleabihf.rs index e40704e5d4997..8f66e6a4f58d4 100644 --- a/src/librustc_back/target/armv7_unknown_linux_musleabihf.rs +++ b/src/librustc_back/target/armv7_unknown_linux_musleabihf.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use target::{Target, TargetResult}; +use target::{Target, TargetOptions, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_musl_base::opts(); @@ -17,7 +17,7 @@ pub fn target() -> TargetResult { // target. base.features = "+v7,+vfp3,+neon".to_string(); base.cpu = "cortex-a8".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { // It's important we use "gnueabihf" and not "musleabihf" here. LLVM // uses it to determine the calling convention and float ABI, and LLVM @@ -30,6 +30,9 @@ pub fn target() -> TargetResult { target_os: "linux".to_string(), target_env: "musl".to_string(), target_vendor: "unknown".to_string(), - options: base, + options: TargetOptions { + abi_blacklist: super::arm_base::abi_blacklist(), + .. base + }, }) } diff --git a/src/librustc_back/target/armv7s_apple_ios.rs b/src/librustc_back/target/armv7s_apple_ios.rs index f24b9969910ef..6edde6e73efd3 100644 --- a/src/librustc_back/target/armv7s_apple_ios.rs +++ b/src/librustc_back/target/armv7s_apple_ios.rs @@ -24,7 +24,8 @@ pub fn target() -> TargetResult { target_vendor: "apple".to_string(), options: TargetOptions { features: "+v7,+vfp4,+neon".to_string(), - max_atomic_width: 64, + max_atomic_width: Some(64), + abi_blacklist: super::arm_base::abi_blacklist(), .. base } }) diff --git a/src/librustc_back/target/asmjs_unknown_emscripten.rs b/src/librustc_back/target/asmjs_unknown_emscripten.rs index 9ccfdbb129c73..d86a9b093272e 100644 --- a/src/librustc_back/target/asmjs_unknown_emscripten.rs +++ b/src/librustc_back/target/asmjs_unknown_emscripten.rs @@ -21,7 +21,8 @@ pub fn target() -> Result { linker_is_gnu: true, allow_asm: false, obj_is_bitcode: true, - max_atomic_width: 32, + max_atomic_width: Some(32), + post_link_args: vec!["-s".to_string(), "ERROR_ON_UNDEFINED_SYMBOLS=1".to_string()], .. 
Default::default() }; Ok(Target { diff --git a/src/librustc_back/target/dragonfly_base.rs b/src/librustc_back/target/dragonfly_base.rs index e2c4003a8b6e9..7555181a15cf2 100644 --- a/src/librustc_back/target/dragonfly_base.rs +++ b/src/librustc_back/target/dragonfly_base.rs @@ -17,7 +17,7 @@ pub fn opts() -> TargetOptions { executables: true, linker_is_gnu: true, has_rpath: true, - pre_link_args: vec!( + pre_link_args: vec![ // GNU-style linkers will use this to omit linking to libraries // which don't actually fulfill any relocations, but only for // libraries which follow this flag. Thus, use it before @@ -26,7 +26,7 @@ pub fn opts() -> TargetOptions { // Always enable NX protection when it is available "-Wl,-z,noexecstack".to_string(), - ), + ], position_independent_executables: true, exe_allocation_crate: super::maybe_jemalloc(), .. Default::default() diff --git a/src/librustc_back/target/freebsd_base.rs b/src/librustc_back/target/freebsd_base.rs index e2c4003a8b6e9..7555181a15cf2 100644 --- a/src/librustc_back/target/freebsd_base.rs +++ b/src/librustc_back/target/freebsd_base.rs @@ -17,7 +17,7 @@ pub fn opts() -> TargetOptions { executables: true, linker_is_gnu: true, has_rpath: true, - pre_link_args: vec!( + pre_link_args: vec![ // GNU-style linkers will use this to omit linking to libraries // which don't actually fulfill any relocations, but only for // libraries which follow this flag. Thus, use it before @@ -26,7 +26,7 @@ pub fn opts() -> TargetOptions { // Always enable NX protection when it is available "-Wl,-z,noexecstack".to_string(), - ), + ], position_independent_executables: true, exe_allocation_crate: super::maybe_jemalloc(), .. Default::default() diff --git a/src/librustc_back/target/fuchsia_base.rs b/src/librustc_back/target/fuchsia_base.rs new file mode 100644 index 0000000000000..69546684cb70b --- /dev/null +++ b/src/librustc_back/target/fuchsia_base.rs @@ -0,0 +1,39 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use target::TargetOptions; +use std::default::Default; + +pub fn opts() -> TargetOptions { + TargetOptions { + dynamic_linking: true, + executables: true, + linker_is_gnu: true, + has_rpath: true, + pre_link_args: vec![ + // We want to be able to strip as much executable code as possible + // from the linker command line, and this flag indicates to the + // linker that it can avoid linking in dynamic libraries that don't + // actually satisfy any symbols up to that point (as with many other + // resolutions the linker does). This option only applies to all + // following libraries so we're sure to pass it as one of the first + // arguments. + // FIXME: figure out whether these linker args are desirable + //"-Wl,--as-needed".to_string(), + + // Always enable NX protection when it is available + //"-Wl,-z,noexecstack".to_string(), + ], + position_independent_executables: true, + exe_allocation_crate: "alloc_system".to_string(), + has_elf_tls: true, + .. Default::default() + } +} diff --git a/src/librustc_back/target/haiku_base.rs b/src/librustc_back/target/haiku_base.rs new file mode 100644 index 0000000000000..5e319ba1838a0 --- /dev/null +++ b/src/librustc_back/target/haiku_base.rs @@ -0,0 +1,23 @@ +// Copyright 2014-2015 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use target::TargetOptions; +use std::default::Default; + +pub fn opts() -> TargetOptions { + TargetOptions { + linker: "cc".to_string(), + dynamic_linking: true, + executables: true, + has_rpath: true, + linker_is_gnu: true, + .. Default::default() + } +} diff --git a/src/librustc_back/target/i386_apple_ios.rs b/src/librustc_back/target/i386_apple_ios.rs index 94146fe9d9885..319ada4f8e17c 100644 --- a/src/librustc_back/target/i386_apple_ios.rs +++ b/src/librustc_back/target/i386_apple_ios.rs @@ -23,7 +23,7 @@ pub fn target() -> TargetResult { target_env: "".to_string(), target_vendor: "apple".to_string(), options: TargetOptions { - max_atomic_width: 64, + max_atomic_width: Some(64), .. base } }) diff --git a/src/librustc_back/target/i686_apple_darwin.rs b/src/librustc_back/target/i686_apple_darwin.rs index 4876a3489d470..d3b09d9a0f112 100644 --- a/src/librustc_back/target/i686_apple_darwin.rs +++ b/src/librustc_back/target/i686_apple_darwin.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::apple_base::opts(); base.cpu = "yonah".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m32".to_string()); Ok(Target { diff --git a/src/librustc_back/target/i686_linux_android.rs b/src/librustc_back/target/i686_linux_android.rs index 1de629238a13c..a2c007d496960 100644 --- a/src/librustc_back/target/i686_linux_android.rs +++ b/src/librustc_back/target/i686_linux_android.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::android_base::opts(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); // http://developer.android.com/ndk/guides/abis.html#x86 base.cpu = "pentiumpro".to_string(); diff --git a/src/librustc_back/target/i686_pc_windows_gnu.rs b/src/librustc_back/target/i686_pc_windows_gnu.rs index 2c19b8109c362..0c2c5433e6c41 100644 --- a/src/librustc_back/target/i686_pc_windows_gnu.rs +++ b/src/librustc_back/target/i686_pc_windows_gnu.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::windows_base::opts(); base.cpu = "pentium4".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); // Mark all dynamic libraries and executables as compatible with the larger 4GiB address // space available to x86 Windows binaries on x86_64. diff --git a/src/librustc_back/target/i686_pc_windows_msvc.rs b/src/librustc_back/target/i686_pc_windows_msvc.rs index cb02fcc308c2d..2290d2057f130 100644 --- a/src/librustc_back/target/i686_pc_windows_msvc.rs +++ b/src/librustc_back/target/i686_pc_windows_msvc.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::windows_msvc_base::opts(); base.cpu = "pentium4".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); // Mark all dynamic libraries and executables as compatible with the larger 4GiB address // space available to x86 Windows binaries on x86_64. 
diff --git a/src/librustc_back/target/i686_unknown_dragonfly.rs b/src/librustc_back/target/i686_unknown_dragonfly.rs index f96ec004b481e..d8f8431e66e7f 100644 --- a/src/librustc_back/target/i686_unknown_dragonfly.rs +++ b/src/librustc_back/target/i686_unknown_dragonfly.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::dragonfly_base::opts(); base.cpu = "pentium4".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m32".to_string()); Ok(Target { diff --git a/src/librustc_back/target/i686_unknown_freebsd.rs b/src/librustc_back/target/i686_unknown_freebsd.rs index 3489ecfe614d9..ddbc74f25c9cd 100644 --- a/src/librustc_back/target/i686_unknown_freebsd.rs +++ b/src/librustc_back/target/i686_unknown_freebsd.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::freebsd_base::opts(); base.cpu = "pentium4".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m32".to_string()); Ok(Target { diff --git a/src/librustc_back/target/i686_unknown_haiku.rs b/src/librustc_back/target/i686_unknown_haiku.rs new file mode 100644 index 0000000000000..9078206c9e069 --- /dev/null +++ b/src/librustc_back/target/i686_unknown_haiku.rs @@ -0,0 +1,30 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use target::{Target, TargetResult}; + +pub fn target() -> TargetResult { + let mut base = super::haiku_base::opts(); + base.cpu = "pentium4".to_string(); + base.max_atomic_width = Some(64); + base.pre_link_args.push("-m32".to_string()); + + Ok(Target { + llvm_target: "i686-unknown-haiku".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "32".to_string(), + data_layout: "e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128".to_string(), + arch: "x86".to_string(), + target_os: "haiku".to_string(), + target_env: "".to_string(), + target_vendor: "unknown".to_string(), + options: base, + }) +} diff --git a/src/librustc_back/target/i686_unknown_linux_gnu.rs b/src/librustc_back/target/i686_unknown_linux_gnu.rs index f2e865c015e35..bf9c28b0c10e5 100644 --- a/src/librustc_back/target/i686_unknown_linux_gnu.rs +++ b/src/librustc_back/target/i686_unknown_linux_gnu.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_base::opts(); base.cpu = "pentium4".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m32".to_string()); Ok(Target { diff --git a/src/librustc_back/target/i686_unknown_linux_musl.rs b/src/librustc_back/target/i686_unknown_linux_musl.rs index a0a8de46e2f59..3d563fa6e5d97 100644 --- a/src/librustc_back/target/i686_unknown_linux_musl.rs +++ b/src/librustc_back/target/i686_unknown_linux_musl.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_musl_base::opts(); base.cpu = "pentium4".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m32".to_string()); base.pre_link_args.push("-Wl,-melf_i386".to_string()); diff --git 
a/src/librustc_back/target/le32_unknown_nacl.rs b/src/librustc_back/target/le32_unknown_nacl.rs index 9ba6591f587c4..891e7dda14a2a 100644 --- a/src/librustc_back/target/le32_unknown_nacl.rs +++ b/src/librustc_back/target/le32_unknown_nacl.rs @@ -15,16 +15,16 @@ pub fn target() -> TargetResult { linker: "pnacl-clang".to_string(), ar: "pnacl-ar".to_string(), - pre_link_args: vec!("--pnacl-exceptions=sjlj".to_string(), + pre_link_args: vec!["--pnacl-exceptions=sjlj".to_string(), "--target=le32-unknown-nacl".to_string(), - "-Wl,--start-group".to_string()), - post_link_args: vec!("-Wl,--end-group".to_string()), + "-Wl,--start-group".to_string()], + post_link_args: vec!["-Wl,--end-group".to_string()], dynamic_linking: false, executables: true, exe_suffix: ".pexe".to_string(), linker_is_gnu: true, allow_asm: false, - max_atomic_width: 32, + max_atomic_width: Some(32), .. Default::default() }; Ok(Target { diff --git a/src/librustc_back/target/mips64_unknown_linux_gnuabi64.rs b/src/librustc_back/target/mips64_unknown_linux_gnuabi64.rs index 837856344280f..c284840ecb4bd 100644 --- a/src/librustc_back/target/mips64_unknown_linux_gnuabi64.rs +++ b/src/librustc_back/target/mips64_unknown_linux_gnuabi64.rs @@ -15,7 +15,7 @@ pub fn target() -> TargetResult { llvm_target: "mips64-unknown-linux-gnuabi64".to_string(), target_endian: "big".to_string(), target_pointer_width: "64".to_string(), - data_layout: "E-m:m-i8:8:32-i16:16:32-i64:64-n32:64-S128".to_string(), + data_layout: "E-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".to_string(), arch: "mips64".to_string(), target_os: "linux".to_string(), target_env: "gnu".to_string(), @@ -24,7 +24,11 @@ pub fn target() -> TargetResult { // NOTE(mips64r2) matches C toolchain cpu: "mips64r2".to_string(), features: "+mips64r2".to_string(), - max_atomic_width: 64, + max_atomic_width: Some(64), + + // see #36994 + exe_allocation_crate: "alloc_system".to_string(), + ..super::linux_base::opts() }, }) diff --git a/src/librustc_back/target/mips64el_unknown_linux_gnuabi64.rs b/src/librustc_back/target/mips64el_unknown_linux_gnuabi64.rs index e1340e8e127b2..17895836fe87b 100644 --- a/src/librustc_back/target/mips64el_unknown_linux_gnuabi64.rs +++ b/src/librustc_back/target/mips64el_unknown_linux_gnuabi64.rs @@ -15,7 +15,7 @@ pub fn target() -> TargetResult { llvm_target: "mips64el-unknown-linux-gnuabi64".to_string(), target_endian: "little".to_string(), target_pointer_width: "64".to_string(), - data_layout: "e-m:m-i8:8:32-i16:16:32-i64:64-n32:64-S128".to_string(), + data_layout: "e-m:e-i8:8:32-i16:16:32-i64:64-n32:64-S128".to_string(), arch: "mips64".to_string(), target_os: "linux".to_string(), target_env: "gnu".to_string(), @@ -24,7 +24,11 @@ pub fn target() -> TargetResult { // NOTE(mips64r2) matches C toolchain cpu: "mips64r2".to_string(), features: "+mips64r2".to_string(), - max_atomic_width: 64, + max_atomic_width: Some(64), + + // see #36994 + exe_allocation_crate: "alloc_system".to_string(), + ..super::linux_base::opts() }, }) diff --git a/src/librustc_back/target/mips_unknown_linux_gnu.rs b/src/librustc_back/target/mips_unknown_linux_gnu.rs index ab967f6b40fbe..a6d8fae2536ca 100644 --- a/src/librustc_back/target/mips_unknown_linux_gnu.rs +++ b/src/librustc_back/target/mips_unknown_linux_gnu.rs @@ -23,7 +23,11 @@ pub fn target() -> TargetResult { options: TargetOptions { cpu: "mips32r2".to_string(), features: "+mips32r2".to_string(), - max_atomic_width: 32, + max_atomic_width: Some(32), + + // see #36994 + exe_allocation_crate: "alloc_system".to_string(), + 
..super::linux_base::opts() }, }) diff --git a/src/librustc_back/target/mips_unknown_linux_musl.rs b/src/librustc_back/target/mips_unknown_linux_musl.rs index 4a69bce53bc97..e4a6d2a55d981 100644 --- a/src/librustc_back/target/mips_unknown_linux_musl.rs +++ b/src/librustc_back/target/mips_unknown_linux_musl.rs @@ -23,7 +23,11 @@ pub fn target() -> TargetResult { options: TargetOptions { cpu: "mips32r2".to_string(), features: "+mips32r2,+soft-float".to_string(), - max_atomic_width: 32, + max_atomic_width: Some(32), + + // see #36994 + exe_allocation_crate: "alloc_system".to_string(), + ..super::linux_base::opts() } }) diff --git a/src/librustc_back/target/mips_unknown_linux_uclibc.rs b/src/librustc_back/target/mips_unknown_linux_uclibc.rs index 529bd310391cd..ccc64ea393b78 100644 --- a/src/librustc_back/target/mips_unknown_linux_uclibc.rs +++ b/src/librustc_back/target/mips_unknown_linux_uclibc.rs @@ -23,7 +23,11 @@ pub fn target() -> TargetResult { options: TargetOptions { cpu: "mips32r2".to_string(), features: "+mips32r2,+soft-float".to_string(), - max_atomic_width: 32, + max_atomic_width: Some(32), + + // see #36994 + exe_allocation_crate: "alloc_system".to_string(), + ..super::linux_base::opts() }, }) diff --git a/src/librustc_back/target/mipsel_unknown_linux_gnu.rs b/src/librustc_back/target/mipsel_unknown_linux_gnu.rs index b66fb62cd591e..9b8b1d5713f1d 100644 --- a/src/librustc_back/target/mipsel_unknown_linux_gnu.rs +++ b/src/librustc_back/target/mipsel_unknown_linux_gnu.rs @@ -24,7 +24,11 @@ pub fn target() -> TargetResult { options: TargetOptions { cpu: "mips32".to_string(), features: "+mips32".to_string(), - max_atomic_width: 32, + max_atomic_width: Some(32), + + // see #36994 + exe_allocation_crate: "alloc_system".to_string(), + ..super::linux_base::opts() }, }) diff --git a/src/librustc_back/target/mipsel_unknown_linux_musl.rs b/src/librustc_back/target/mipsel_unknown_linux_musl.rs index a0524e5e76389..5693bddd0488a 100644 --- a/src/librustc_back/target/mipsel_unknown_linux_musl.rs +++ b/src/librustc_back/target/mipsel_unknown_linux_musl.rs @@ -23,7 +23,11 @@ pub fn target() -> TargetResult { options: TargetOptions { cpu: "mips32".to_string(), features: "+mips32,+soft-float".to_string(), - max_atomic_width: 32, + max_atomic_width: Some(32), + + // see #36994 + exe_allocation_crate: "alloc_system".to_string(), + ..super::linux_base::opts() } }) diff --git a/src/librustc_back/target/mipsel_unknown_linux_uclibc.rs b/src/librustc_back/target/mipsel_unknown_linux_uclibc.rs index 1040a0fbe1724..3acade5a47444 100644 --- a/src/librustc_back/target/mipsel_unknown_linux_uclibc.rs +++ b/src/librustc_back/target/mipsel_unknown_linux_uclibc.rs @@ -24,7 +24,11 @@ pub fn target() -> TargetResult { options: TargetOptions { cpu: "mips32".to_string(), features: "+mips32,+soft-float".to_string(), - max_atomic_width: 32, + max_atomic_width: Some(32), + + // see #36994 + exe_allocation_crate: "alloc_system".to_string(), + ..super::linux_base::opts() }, }) diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index 756586602b45a..4d9315a1a3bdb 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -48,14 +48,18 @@ use serialize::json::{Json, ToJson}; use std::collections::BTreeMap; use std::default::Default; use std::io::prelude::*; -use syntax::abi::Abi; +use syntax::abi::{Abi, lookup as lookup_abi}; + +use PanicStrategy; mod android_base; mod apple_base; mod apple_ios_base; +mod arm_base; mod bitrig_base; mod dragonfly_base; mod freebsd_base; 
+mod haiku_base; mod linux_base; mod linux_musl_base; mod openbsd_base; @@ -63,11 +67,13 @@ mod netbsd_base; mod solaris_base; mod windows_base; mod windows_msvc_base; +mod thumb_base; +mod fuchsia_base; pub type TargetResult = Result; macro_rules! supported_targets { - ( $(($triple:expr, $module:ident)),+ ) => ( + ( $(($triple:expr, $module:ident),)+ ) => ( $(mod $module;)* /// List of supported targets @@ -165,9 +171,15 @@ supported_targets! { ("x86_64-unknown-netbsd", x86_64_unknown_netbsd), ("x86_64-rumprun-netbsd", x86_64_rumprun_netbsd), + ("i686-unknown-haiku", i686_unknown_haiku), + ("x86_64-unknown-haiku", x86_64_unknown_haiku), + ("x86_64-apple-darwin", x86_64_apple_darwin), ("i686-apple-darwin", i686_apple_darwin), + ("aarch64-unknown-fuchsia", aarch64_unknown_fuchsia), + ("x86_64-unknown-fuchsia", x86_64_unknown_fuchsia), + ("i386-apple-ios", i386_apple_ios), ("x86_64-apple-ios", x86_64_apple_ios), ("aarch64-apple-ios", aarch64_apple_ios), @@ -184,7 +196,13 @@ supported_targets! { ("i586-pc-windows-msvc", i586_pc_windows_msvc), ("le32-unknown-nacl", le32_unknown_nacl), - ("asmjs-unknown-emscripten", asmjs_unknown_emscripten) + ("asmjs-unknown-emscripten", asmjs_unknown_emscripten), + ("wasm32-unknown-emscripten", wasm32_unknown_emscripten), + + ("thumbv6m-none-eabi", thumbv6m_none_eabi), + ("thumbv7m-none-eabi", thumbv7m_none_eabi), + ("thumbv7em-none-eabi", thumbv7em_none_eabi), + ("thumbv7em-none-eabihf", thumbv7em_none_eabihf), } /// Everything `rustc` knows about how to compile for a specific target. @@ -340,9 +358,15 @@ pub struct TargetOptions { // will 'just work'. pub obj_is_bitcode: bool, - /// Maximum integer size in bits that this target can perform atomic - /// operations on. - pub max_atomic_width: u64, + /// Don't use this field; instead use the `.max_atomic_width()` method. + pub max_atomic_width: Option, + + /// Panic strategy: "unwind" or "abort" + pub panic_strategy: PanicStrategy, + + /// A blacklist of ABIs unsupported by the current target. Note that generic + /// ABIs are considered to be supported on all platforms and cannot be blacklisted. + pub abi_blacklist: Vec, } impl Default for TargetOptions { @@ -391,7 +415,9 @@ impl Default for TargetOptions { allow_asm: true, has_elf_tls: false, obj_is_bitcode: false, - max_atomic_width: 0, + max_atomic_width: None, + panic_strategy: PanicStrategy::Unwind, + abi_blacklist: vec![], } } } @@ -411,6 +437,16 @@ impl Target { } } + /// Maximum integer size in bits that this target can perform atomic + /// operations on. + pub fn max_atomic_width(&self) -> u64 { + self.options.max_atomic_width.unwrap_or(self.target_pointer_width.parse().unwrap()) + } + + pub fn is_abi_supported(&self, abi: Abi) -> bool { + abi.generic() || !self.options.abi_blacklist.contains(&abi) + } + /// Load a target descriptor from a JSON object. pub fn from_json(obj: Json) -> TargetResult { // While ugly, this code must remain this way to retain @@ -449,9 +485,6 @@ impl Target { options: Default::default(), }; - // Default max-atomic-width to target-pointer-width - base.options.max_atomic_width = base.target_pointer_width.parse().unwrap(); - macro_rules! 
key { ($key_name:ident) => ( { let name = (stringify!($key_name)).replace("_", "-"); @@ -464,11 +497,24 @@ impl Target { .map(|o| o.as_boolean() .map(|s| base.options.$key_name = s)); } ); - ($key_name:ident, u64) => ( { + ($key_name:ident, Option) => ( { let name = (stringify!($key_name)).replace("_", "-"); obj.find(&name[..]) .map(|o| o.as_u64() - .map(|s| base.options.$key_name = s)); + .map(|s| base.options.$key_name = Some(s))); + } ); + ($key_name:ident, PanicStrategy) => ( { + let name = (stringify!($key_name)).replace("_", "-"); + obj.find(&name[..]).and_then(|o| o.as_string().and_then(|s| { + match s { + "unwind" => base.options.$key_name = PanicStrategy::Unwind, + "abort" => base.options.$key_name = PanicStrategy::Abort, + _ => return Some(Err(format!("'{}' is not a valid value for \ + panic-strategy. Use 'unwind' or 'abort'.", + s))), + } + Some(Ok(())) + })).unwrap_or(Ok(())) } ); ($key_name:ident, list) => ( { let name = (stringify!($key_name)).replace("_", "-"); @@ -529,7 +575,24 @@ impl Target { key!(exe_allocation_crate); key!(has_elf_tls, bool); key!(obj_is_bitcode, bool); - key!(max_atomic_width, u64); + key!(max_atomic_width, Option); + try!(key!(panic_strategy, PanicStrategy)); + + if let Some(array) = obj.find("abi-blacklist").and_then(Json::as_array) { + for name in array.iter().filter_map(|abi| abi.as_string()) { + match lookup_abi(name) { + Some(abi) => { + if abi.generic() { + return Err(format!("The ABI \"{}\" is considered to be supported on \ + all targets and cannot be blacklisted", abi)) + } + + base.options.abi_blacklist.push(abi) + } + None => return Err(format!("Unknown ABI \"{}\" in target specification", name)) + } + } + } Ok(base) } @@ -672,6 +735,13 @@ impl ToJson for Target { target_option_val!(has_elf_tls); target_option_val!(obj_is_bitcode); target_option_val!(max_atomic_width); + target_option_val!(panic_strategy); + + if default.abi_blacklist != self.options.abi_blacklist { + d.insert("abi-blacklist".to_string(), self.options.abi_blacklist.iter() + .map(Abi::name).map(|name| name.to_json()) + .collect::>().to_json()); + } Json::Object(d) } diff --git a/src/librustc_back/target/netbsd_base.rs b/src/librustc_back/target/netbsd_base.rs index cc03ed56aa4d7..6e038a7ed56ee 100644 --- a/src/librustc_back/target/netbsd_base.rs +++ b/src/librustc_back/target/netbsd_base.rs @@ -17,7 +17,7 @@ pub fn opts() -> TargetOptions { executables: true, linker_is_gnu: true, has_rpath: true, - pre_link_args: vec!( + pre_link_args: vec![ // GNU-style linkers will use this to omit linking to libraries // which don't actually fulfill any relocations, but only for // libraries which follow this flag. Thus, use it before @@ -26,7 +26,7 @@ pub fn opts() -> TargetOptions { // Always enable NX protection when it is available "-Wl,-z,noexecstack".to_string(), - ), + ], position_independent_executables: true, .. Default::default() } diff --git a/src/librustc_back/target/openbsd_base.rs b/src/librustc_back/target/openbsd_base.rs index 7afdfcd691134..90e6631841bef 100644 --- a/src/librustc_back/target/openbsd_base.rs +++ b/src/librustc_back/target/openbsd_base.rs @@ -17,7 +17,7 @@ pub fn opts() -> TargetOptions { executables: true, linker_is_gnu: true, has_rpath: true, - pre_link_args: vec!( + pre_link_args: vec![ // GNU-style linkers will use this to omit linking to libraries // which don't actually fulfill any relocations, but only for // libraries which follow this flag. 
Thus, use it before @@ -26,7 +26,7 @@ pub fn opts() -> TargetOptions { // Always enable NX protection when it is available "-Wl,-z,noexecstack".to_string(), - ), + ], position_independent_executables: true, exe_allocation_crate: "alloc_system".to_string(), .. Default::default() diff --git a/src/librustc_back/target/powerpc64_unknown_linux_gnu.rs b/src/librustc_back/target/powerpc64_unknown_linux_gnu.rs index 1c04e763417c4..909c5488dcb70 100644 --- a/src/librustc_back/target/powerpc64_unknown_linux_gnu.rs +++ b/src/librustc_back/target/powerpc64_unknown_linux_gnu.rs @@ -14,7 +14,10 @@ pub fn target() -> TargetResult { let mut base = super::linux_base::opts(); base.cpu = "ppc64".to_string(); base.pre_link_args.push("-m64".to_string()); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); + + // see #36994 + base.exe_allocation_crate = "alloc_system".to_string(); Ok(Target { llvm_target: "powerpc64-unknown-linux-gnu".to_string(), diff --git a/src/librustc_back/target/powerpc64le_unknown_linux_gnu.rs b/src/librustc_back/target/powerpc64le_unknown_linux_gnu.rs index 906e28d2f20cb..a692346ca0ffe 100644 --- a/src/librustc_back/target/powerpc64le_unknown_linux_gnu.rs +++ b/src/librustc_back/target/powerpc64le_unknown_linux_gnu.rs @@ -14,7 +14,10 @@ pub fn target() -> TargetResult { let mut base = super::linux_base::opts(); base.cpu = "ppc64le".to_string(); base.pre_link_args.push("-m64".to_string()); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); + + // see #36994 + base.exe_allocation_crate = "alloc_system".to_string(); Ok(Target { llvm_target: "powerpc64le-unknown-linux-gnu".to_string(), diff --git a/src/librustc_back/target/powerpc_unknown_linux_gnu.rs b/src/librustc_back/target/powerpc_unknown_linux_gnu.rs index aebf9cd68717d..284772c43319a 100644 --- a/src/librustc_back/target/powerpc_unknown_linux_gnu.rs +++ b/src/librustc_back/target/powerpc_unknown_linux_gnu.rs @@ -13,7 +13,10 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_base::opts(); base.pre_link_args.push("-m32".to_string()); - base.max_atomic_width = 32; + base.max_atomic_width = Some(32); + + // see #36994 + base.exe_allocation_crate = "alloc_system".to_string(); Ok(Target { llvm_target: "powerpc-unknown-linux-gnu".to_string(), diff --git a/src/librustc_back/target/s390x_unknown_linux_gnu.rs b/src/librustc_back/target/s390x_unknown_linux_gnu.rs index 79f2d290e3760..6e2dd6cd67c93 100644 --- a/src/librustc_back/target/s390x_unknown_linux_gnu.rs +++ b/src/librustc_back/target/s390x_unknown_linux_gnu.rs @@ -18,7 +18,7 @@ pub fn target() -> TargetResult { // cabi_s390x.rs are for now hard-coded to assume the no-vector ABI. // Pass the -vector feature string to LLVM to respect this assumption. base.features = "-vector".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { llvm_target: "s390x-unknown-linux-gnu".to_string(), diff --git a/src/librustc_back/target/thumb_base.rs b/src/librustc_back/target/thumb_base.rs new file mode 100644 index 0000000000000..6bb496649a858 --- /dev/null +++ b/src/librustc_back/target/thumb_base.rs @@ -0,0 +1,58 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
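Earlier in target/mod.rs, the `supported_targets!` matcher changed from `$( ... ),+` to `$( ... ,)+`, so every entry in the target list, including the last one, now ends with a comma. A minimal sketch of the difference between the two repetition forms, with hypothetical macros:

```rust
// Hypothetical macros illustrating the matcher change: `$( ... ),+` rejects a
// trailing comma, while `$( ... ,)+` requires one after every entry.
macro_rules! count_no_trailing {
    ( $($entry:expr),+ ) => { [$($entry),+].len() };
}

macro_rules! count_trailing {
    ( $($entry:expr,)+ ) => { [$($entry),+].len() };
}

fn main() {
    // The old form: no trailing comma allowed after the last entry.
    assert_eq!(count_no_trailing!("a", "b"), 2);
    // The new form: a trailing comma is required, as target/mod.rs now writes.
    assert_eq!(count_trailing!("a", "b",), 2);
}
```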
+ +// These 4 `thumbv*` targets cover the ARM Cortex-M family of processors which are widely used in +// microcontrollers. Namely, all these processors: +// +// - Cortex-M0 +// - Cortex-M0+ +// - Cortex-M1 +// - Cortex-M3 +// - Cortex-M4(F) +// - Cortex-M7(F) +// +// We have opted for 4 targets instead of one target per processor (e.g. `cortex-m0`, `cortex-m3`, +// etc) because the differences between some processors like the cortex-m0 and cortex-m1 are almost +// non-existent from the POV of codegen so it doesn't make sense to have separate targets for them. +// And if differences exist between two processors under the same target, rustc flags can be used to +// optimize for one processor or the other. +// +// Also, we have not chosen a single target (`arm-none-eabi`) like GCC does because this makes +// difficult to integrate Rust code and C code. Targeting the Cortex-M4 requires different gcc flags +// than the ones you would use for the Cortex-M0 and with a single target it'd be impossible to +// differentiate one processor from the other. +// +// About arm vs thumb in the name. The Cortex-M devices only support the Thumb instruction set, +// which is more compact (higher code density), and not the ARM instruction set. That's why LLVM +// triples use thumb instead of arm. We follow suit because having thumb in the name let us +// differentiate these targets from our other `arm(v7)-*-*-gnueabi(hf)` targets in the context of +// build scripts / gcc flags. + +use PanicStrategy; +use std::default::Default; +use target::TargetOptions; + +pub fn opts() -> TargetOptions { + // See rust-lang/rfcs#1645 for a discussion about these defaults + TargetOptions { + executables: true, + // In 99%+ of cases, we want to use the `arm-none-eabi-gcc` compiler (there aren't many + // options around) + linker: "arm-none-eabi-gcc".to_string(), + // Because these devices have very little resources having an unwinder is too onerous so we + // default to "abort" because the "unwind" strategy is very rare. + panic_strategy: PanicStrategy::Abort, + // Similarly, one almost always never wants to use relocatable code because of the extra + // costs it involves. + relocation_model: "static".to_string(), + abi_blacklist: super::arm_base::abi_blacklist(), + .. Default::default() + } +} diff --git a/src/librustc_back/target/thumbv6m_none_eabi.rs b/src/librustc_back/target/thumbv6m_none_eabi.rs new file mode 100644 index 0000000000000..6c22f98538459 --- /dev/null +++ b/src/librustc_back/target/thumbv6m_none_eabi.rs @@ -0,0 +1,36 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
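The Cortex-M targets that follow set `max_atomic_width` explicitly (ARMv6-M, which has no atomic instructions, gets `Some(0)`), while most targets in this patch can now rely on the accessor added in target/mod.rs: `Target::max_atomic_width()` falls back to the pointer width when the option is `None`. A minimal sketch of that fallback, using a hypothetical stand-in type:

```rust
// Hypothetical stand-in; the real accessor is Target::max_atomic_width() in
// target/mod.rs, added earlier in this patch.
struct Target {
    target_pointer_width: String,
    max_atomic_width: Option<u64>,
}

impl Target {
    // Use the explicit value if the target set one, otherwise fall back to
    // the pointer width, mirroring the new accessor.
    fn max_atomic_width(&self) -> u64 {
        self.max_atomic_width
            .unwrap_or_else(|| self.target_pointer_width.parse().unwrap())
    }
}

fn main() {
    let ordinary = Target {
        target_pointer_width: "32".to_string(),
        max_atomic_width: None,
    };
    let cortex_m0 = Target {
        target_pointer_width: "32".to_string(),
        max_atomic_width: Some(0),
    };
    assert_eq!(ordinary.max_atomic_width(), 32); // default: pointer width
    assert_eq!(cortex_m0.max_atomic_width(), 0); // ARMv6-M: no atomics at all
}
```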
+ +// Targets the Cortex-M0, Cortex-M0+ and Cortex-M1 processors (ARMv6-M architecture) + +use target::{Target, TargetOptions, TargetResult}; + +pub fn target() -> TargetResult { + Ok(Target { + llvm_target: "thumbv6m-none-eabi".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "32".to_string(), + data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(), + arch: "arm".to_string(), + target_os: "none".to_string(), + target_env: "".to_string(), + target_vendor: "".to_string(), + + options: TargetOptions { + // The ARMv6-M architecture doesn't support unaligned loads/stores so we disable them + // with +strict-align. + features: "+strict-align".to_string(), + // There are no atomic instructions available in the instruction set of the ARMv6-M + // architecture + max_atomic_width: Some(0), + .. super::thumb_base::opts() + } + }) +} diff --git a/src/librustc_back/target/thumbv7em_none_eabi.rs b/src/librustc_back/target/thumbv7em_none_eabi.rs new file mode 100644 index 0000000000000..ddad4e3624f3c --- /dev/null +++ b/src/librustc_back/target/thumbv7em_none_eabi.rs @@ -0,0 +1,40 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Targets the Cortex-M4 and Cortex-M7 processors (ARMv7E-M) +// +// This target assumes that the device doesn't have a FPU (Floating Point Unit) and lowers all the +// floating point operations to software routines (intrinsics). +// +// As such, this target uses the "soft" calling convention (ABI) where floating point values are +// passed to/from subroutines via general purpose registers (R0, R1, etc.). +// +// To opt-in to hardware accelerated floating point operations, you can use, for example, +// `-C target-feature=+vfp4` or `-C target-cpu=cortex-m4`. + +use target::{Target, TargetOptions, TargetResult}; + +pub fn target() -> TargetResult { + Ok(Target { + llvm_target: "thumbv7em-none-eabi".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "32".to_string(), + data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(), + arch: "arm".to_string(), + target_os: "none".to_string(), + target_env: "".to_string(), + target_vendor: "".to_string(), + + options: TargetOptions { + max_atomic_width: Some(32), + .. super::thumb_base::opts() + }, + }) +} diff --git a/src/librustc_back/target/thumbv7em_none_eabihf.rs b/src/librustc_back/target/thumbv7em_none_eabihf.rs new file mode 100644 index 0000000000000..a9fac48e8e5ac --- /dev/null +++ b/src/librustc_back/target/thumbv7em_none_eabihf.rs @@ -0,0 +1,49 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Targets the Cortex-M4F and Cortex-M7F processors (ARMv7E-M) +// +// This target assumes that the device does have a FPU (Floating Point Unit) and lowers all (single +// precision) floating point operations to hardware instructions. 
+// +// Additionally, this target uses the "hard" floating convention (ABI) where floating point values +// are passed to/from subroutines via FPU registers (S0, S1, D0, D1, etc.). +// +// To opt into double precision hardware support, use the `-C target-feature=-fp-only-sp` flag. + +use target::{Target, TargetOptions, TargetResult}; + +pub fn target() -> TargetResult { + Ok(Target { + llvm_target: "thumbv7em-none-eabihf".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "32".to_string(), + data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(), + arch: "arm".to_string(), + target_os: "none".to_string(), + target_env: "".to_string(), + target_vendor: "".to_string(), + + options: TargetOptions { + // `+vfp4` is the lowest common denominator between the Cortex-M4 (vfp4-16) and the + // Cortex-M7 (vfp5) + // `+d16` both the Cortex-M4 and the Cortex-M7 only have 16 double-precision registers + // available + // `+fp-only-sp` The Cortex-M4 only supports single precision floating point operations + // whereas in the Cortex-M7 double precision is optional + // + // Reference: + // ARMv7-M Architecture Reference Manual - A2.5 The optional floating-point extension + features: "+vfp4,+d16,+fp-only-sp".to_string(), + max_atomic_width: Some(32), + .. super::thumb_base::opts() + } + }) +} diff --git a/src/librustc_back/target/thumbv7m_none_eabi.rs b/src/librustc_back/target/thumbv7m_none_eabi.rs new file mode 100644 index 0000000000000..ed61dd0459b4d --- /dev/null +++ b/src/librustc_back/target/thumbv7m_none_eabi.rs @@ -0,0 +1,31 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Targets the Cortex-M3 processor (ARMv7-M) + +use target::{Target, TargetOptions, TargetResult}; + +pub fn target() -> TargetResult { + Ok(Target { + llvm_target: "thumbv7m-none-eabi".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "32".to_string(), + data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(), + arch: "arm".to_string(), + target_os: "none".to_string(), + target_env: "".to_string(), + target_vendor: "".to_string(), + + options: TargetOptions { + max_atomic_width: Some(32), + .. super::thumb_base::opts() + }, + }) +} diff --git a/src/librustc_back/target/wasm32_unknown_emscripten.rs b/src/librustc_back/target/wasm32_unknown_emscripten.rs new file mode 100644 index 0000000000000..77ab4fcae7008 --- /dev/null +++ b/src/librustc_back/target/wasm32_unknown_emscripten.rs @@ -0,0 +1,42 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use super::{Target, TargetOptions}; + +pub fn target() -> Result { + let opts = TargetOptions { + linker: "emcc".to_string(), + ar: "emar".to_string(), + + dynamic_linking: false, + executables: true, + // Today emcc emits two files - a .js file to bootstrap and + // possibly interpret the wasm, and a .wasm file + exe_suffix: ".js".to_string(), + linker_is_gnu: true, + allow_asm: false, + obj_is_bitcode: true, + max_atomic_width: Some(32), + post_link_args: vec!["-s".to_string(), "BINARYEN=1".to_string(), + "-s".to_string(), "ERROR_ON_UNDEFINED_SYMBOLS=1".to_string()], + .. Default::default() + }; + Ok(Target { + llvm_target: "asmjs-unknown-emscripten".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "32".to_string(), + target_os: "emscripten".to_string(), + target_env: "".to_string(), + target_vendor: "unknown".to_string(), + data_layout: "e-p:32:32-i64:64-v128:32:128-n32-S128".to_string(), + arch: "wasm32".to_string(), + options: opts, + }) +} diff --git a/src/librustc_back/target/windows_base.rs b/src/librustc_back/target/windows_base.rs index c398ee40f2f97..19ca0df51b9dc 100644 --- a/src/librustc_back/target/windows_base.rs +++ b/src/librustc_back/target/windows_base.rs @@ -26,7 +26,7 @@ pub fn opts() -> TargetOptions { no_default_libraries: true, is_like_windows: true, allows_weak_linkage: false, - pre_link_args: vec!( + pre_link_args: vec![ // And here, we see obscure linker flags #45. On windows, it has been // found to be necessary to have this flag to compile liblibc. // @@ -63,26 +63,26 @@ pub fn opts() -> TargetOptions { // Do not use the standard system startup files or libraries when linking "-nostdlib".to_string(), - ), - pre_link_objects_exe: vec!( + ], + pre_link_objects_exe: vec![ "crt2.o".to_string(), // mingw C runtime initialization for executables "rsbegin.o".to_string(), // Rust compiler runtime initialization, see rsbegin.rs - ), - pre_link_objects_dll: vec!( + ], + pre_link_objects_dll: vec![ "dllcrt2.o".to_string(), // mingw C runtime initialization for dlls "rsbegin.o".to_string(), - ), - late_link_args: vec!( + ], + late_link_args: vec![ "-lmingwex".to_string(), "-lmingw32".to_string(), "-lgcc".to_string(), // alas, mingw* libraries above depend on libgcc "-lmsvcrt".to_string(), "-luser32".to_string(), "-lkernel32".to_string(), - ), - post_link_objects: vec!( + ], + post_link_objects: vec![ "rsend.o".to_string() - ), + ], custom_unwind_resume: true, .. Default::default() diff --git a/src/librustc_back/target/x86_64_apple_darwin.rs b/src/librustc_back/target/x86_64_apple_darwin.rs index 65e4b1400fcf8..b3c1561dbcc0b 100644 --- a/src/librustc_back/target/x86_64_apple_darwin.rs +++ b/src/librustc_back/target/x86_64_apple_darwin.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::apple_base::opts(); base.cpu = "core2".to_string(); - base.max_atomic_width = 128; // core2 support cmpxchg16b + base.max_atomic_width = Some(128); // core2 support cmpxchg16b base.eliminate_frame_pointer = false; base.pre_link_args.push("-m64".to_string()); diff --git a/src/librustc_back/target/x86_64_apple_ios.rs b/src/librustc_back/target/x86_64_apple_ios.rs index 3b8b636b6dc66..7a58bb34ce7f6 100644 --- a/src/librustc_back/target/x86_64_apple_ios.rs +++ b/src/librustc_back/target/x86_64_apple_ios.rs @@ -23,7 +23,7 @@ pub fn target() -> TargetResult { target_env: "".to_string(), target_vendor: "apple".to_string(), options: TargetOptions { - max_atomic_width: 64, + max_atomic_width: Some(64), .. 
base } }) diff --git a/src/librustc_back/target/x86_64_pc_windows_gnu.rs b/src/librustc_back/target/x86_64_pc_windows_gnu.rs index 086e0e6bf4fe7..321585cd65eb3 100644 --- a/src/librustc_back/target/x86_64_pc_windows_gnu.rs +++ b/src/librustc_back/target/x86_64_pc_windows_gnu.rs @@ -14,7 +14,7 @@ pub fn target() -> TargetResult { let mut base = super::windows_base::opts(); base.cpu = "x86-64".to_string(); base.pre_link_args.push("-m64".to_string()); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { llvm_target: "x86_64-pc-windows-gnu".to_string(), diff --git a/src/librustc_back/target/x86_64_pc_windows_msvc.rs b/src/librustc_back/target/x86_64_pc_windows_msvc.rs index 064f06e9b31d6..ea8909d213e80 100644 --- a/src/librustc_back/target/x86_64_pc_windows_msvc.rs +++ b/src/librustc_back/target/x86_64_pc_windows_msvc.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::windows_msvc_base::opts(); base.cpu = "x86-64".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { llvm_target: "x86_64-pc-windows-msvc".to_string(), diff --git a/src/librustc_back/target/x86_64_rumprun_netbsd.rs b/src/librustc_back/target/x86_64_rumprun_netbsd.rs index fd6578c2a2a0a..3313721439696 100644 --- a/src/librustc_back/target/x86_64_rumprun_netbsd.rs +++ b/src/librustc_back/target/x86_64_rumprun_netbsd.rs @@ -16,7 +16,7 @@ pub fn target() -> TargetResult { base.pre_link_args.push("-m64".to_string()); base.linker = "x86_64-rumprun-netbsd-gcc".to_string(); base.ar = "x86_64-rumprun-netbsd-ar".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.dynamic_linking = false; base.has_rpath = false; diff --git a/src/librustc_back/target/x86_64_sun_solaris.rs b/src/librustc_back/target/x86_64_sun_solaris.rs index 2a1feb937f742..8e4fd94e7bce4 100644 --- a/src/librustc_back/target/x86_64_sun_solaris.rs +++ b/src/librustc_back/target/x86_64_sun_solaris.rs @@ -14,7 +14,7 @@ pub fn target() -> TargetResult { let mut base = super::solaris_base::opts(); base.pre_link_args.push("-m64".to_string()); base.cpu = "x86-64".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); Ok(Target { llvm_target: "x86_64-pc-solaris".to_string(), diff --git a/src/librustc_back/target/x86_64_unknown_bitrig.rs b/src/librustc_back/target/x86_64_unknown_bitrig.rs index 3820965589815..eda16c29466b5 100644 --- a/src/librustc_back/target/x86_64_unknown_bitrig.rs +++ b/src/librustc_back/target/x86_64_unknown_bitrig.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::bitrig_base::opts(); base.cpu = "x86-64".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m64".to_string()); Ok(Target { diff --git a/src/librustc_back/target/x86_64_unknown_dragonfly.rs b/src/librustc_back/target/x86_64_unknown_dragonfly.rs index 7e40d49b870ad..194efb8fc2322 100644 --- a/src/librustc_back/target/x86_64_unknown_dragonfly.rs +++ b/src/librustc_back/target/x86_64_unknown_dragonfly.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::dragonfly_base::opts(); base.cpu = "x86-64".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m64".to_string()); Ok(Target { diff --git a/src/librustc_back/target/x86_64_unknown_freebsd.rs b/src/librustc_back/target/x86_64_unknown_freebsd.rs index 
f38cdd4bec55f..b127bee163b86 100644 --- a/src/librustc_back/target/x86_64_unknown_freebsd.rs +++ b/src/librustc_back/target/x86_64_unknown_freebsd.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::freebsd_base::opts(); base.cpu = "x86-64".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m64".to_string()); Ok(Target { diff --git a/src/librustc_back/target/x86_64_unknown_fuchsia.rs b/src/librustc_back/target/x86_64_unknown_fuchsia.rs new file mode 100644 index 0000000000000..08fe17a556ecc --- /dev/null +++ b/src/librustc_back/target/x86_64_unknown_fuchsia.rs @@ -0,0 +1,30 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use target::{Target, TargetResult}; + +pub fn target() -> TargetResult { + let mut base = super::fuchsia_base::opts(); + base.cpu = "x86-64".to_string(); + base.max_atomic_width = Some(64); + base.pre_link_args.push("-m64".to_string()); + + Ok(Target { + llvm_target: "x86_64-unknown-fuchsia".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "64".to_string(), + data_layout: "e-m:e-i64:64-f80:128-n8:16:32:64-S128".to_string(), + arch: "x86_64".to_string(), + target_os: "fuchsia".to_string(), + target_env: "".to_string(), + target_vendor: "unknown".to_string(), + options: base, + }) +} diff --git a/src/librustc_back/target/x86_64_unknown_haiku.rs b/src/librustc_back/target/x86_64_unknown_haiku.rs new file mode 100644 index 0000000000000..7cf0599037c1e --- /dev/null +++ b/src/librustc_back/target/x86_64_unknown_haiku.rs @@ -0,0 +1,30 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
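Across these target files the `max_atomic_width` assignments change from bare integers to `Some(..)`. The sketch below shows why an `Option` is a convenient representation here; the fallback-to-pointer-width accessor is an assumption made for illustration, not code quoted from rustc:

```rust
// Sketch only: `None` lets a target fall back to a default (assumed here to
// be the pointer width), while explicit values can still say "wider than a
// pointer" (x86_64 with cmpxchg16b uses Some(128)) or "no atomics at all"
// (the ARMv6-M target above uses Some(0)).
struct TargetOptions {
    max_atomic_width: Option<u64>,
}

struct Target {
    target_pointer_width: u64,
    options: TargetOptions,
}

impl Target {
    fn max_atomic_width(&self) -> u64 {
        self.options.max_atomic_width.unwrap_or(self.target_pointer_width)
    }
}

fn main() {
    let cortex_m0 = Target {
        target_pointer_width: 32,
        options: TargetOptions { max_atomic_width: Some(0) },
    };
    let x86_64 = Target {
        target_pointer_width: 64,
        options: TargetOptions { max_atomic_width: Some(128) },
    };
    let default = Target {
        target_pointer_width: 64,
        options: TargetOptions { max_atomic_width: None },
    };
    assert_eq!(cortex_m0.max_atomic_width(), 0);
    assert_eq!(x86_64.max_atomic_width(), 128);
    assert_eq!(default.max_atomic_width(), 64);
}
```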
+ +use target::{Target, TargetResult}; + +pub fn target() -> TargetResult { + let mut base = super::haiku_base::opts(); + base.cpu = "x86-64".to_string(); + base.max_atomic_width = Some(64); + base.pre_link_args.push("-m64".to_string()); + + Ok(Target { + llvm_target: "x86_64-unknown-haiku".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "64".to_string(), + data_layout: "e-m:e-i64:64-f80:128-n8:16:32:64-S128".to_string(), + arch: "x86_64".to_string(), + target_os: "haiku".to_string(), + target_env: "".to_string(), + target_vendor: "unknown".to_string(), + options: base, + }) +} diff --git a/src/librustc_back/target/x86_64_unknown_linux_gnu.rs b/src/librustc_back/target/x86_64_unknown_linux_gnu.rs index ef81d397a8f1e..f95bcb556e57f 100644 --- a/src/librustc_back/target/x86_64_unknown_linux_gnu.rs +++ b/src/librustc_back/target/x86_64_unknown_linux_gnu.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_base::opts(); base.cpu = "x86-64".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m64".to_string()); Ok(Target { diff --git a/src/librustc_back/target/x86_64_unknown_linux_musl.rs b/src/librustc_back/target/x86_64_unknown_linux_musl.rs index 4bad7754b3907..c3bf9dcca6ee4 100644 --- a/src/librustc_back/target/x86_64_unknown_linux_musl.rs +++ b/src/librustc_back/target/x86_64_unknown_linux_musl.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::linux_musl_base::opts(); base.cpu = "x86-64".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m64".to_string()); Ok(Target { diff --git a/src/librustc_back/target/x86_64_unknown_netbsd.rs b/src/librustc_back/target/x86_64_unknown_netbsd.rs index 2d0b1e2a9330b..87a7c184644d5 100644 --- a/src/librustc_back/target/x86_64_unknown_netbsd.rs +++ b/src/librustc_back/target/x86_64_unknown_netbsd.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::netbsd_base::opts(); base.cpu = "x86-64".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m64".to_string()); Ok(Target { diff --git a/src/librustc_back/target/x86_64_unknown_openbsd.rs b/src/librustc_back/target/x86_64_unknown_openbsd.rs index 339dbd591a02e..e9d645b0d38f2 100644 --- a/src/librustc_back/target/x86_64_unknown_openbsd.rs +++ b/src/librustc_back/target/x86_64_unknown_openbsd.rs @@ -13,7 +13,7 @@ use target::{Target, TargetResult}; pub fn target() -> TargetResult { let mut base = super::openbsd_base::opts(); base.cpu = "x86-64".to_string(); - base.max_atomic_width = 64; + base.max_atomic_width = Some(64); base.pre_link_args.push("-m64".to_string()); Ok(Target { diff --git a/src/librustc_bitflags/lib.rs b/src/librustc_bitflags/lib.rs index afc2e04d446a1..e65d112430a16 100644 --- a/src/librustc_bitflags/lib.rs +++ b/src/librustc_bitflags/lib.rs @@ -201,7 +201,7 @@ macro_rules! bitflags { !(*self & other).is_empty() } - /// Returns `true` all of the flags in `other` are contained within `self`. + /// Returns `true` if all of the flags in `other` are contained within `self`. #[inline] pub fn contains(&self, other: $BitFlags) -> bool { (*self & other) == other @@ -291,7 +291,8 @@ macro_rules! 
bitflags { #[cfg(test)] #[allow(non_upper_case_globals)] mod tests { - use std::hash::{Hash, Hasher, SipHasher}; + use std::hash::{Hash, Hasher}; + use std::collections::hash_map::DefaultHasher; use std::option::Option::{None, Some}; bitflags! { @@ -492,7 +493,7 @@ mod tests { } fn hash(t: &T) -> u64 { - let mut s = SipHasher::new(); + let mut s = DefaultHasher::new(); t.hash(&mut s); s.finish() } diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index 089733da536d8..b2032e6a1bf9f 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -135,15 +135,12 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { borrow_id, cmt, loan_region, bk, loan_cause); - match opt_loan_path(&cmt) { - Some(lp) => { - let moved_value_use_kind = match loan_cause { - euv::ClosureCapture(_) => MovedInCapture, - _ => MovedInUse, - }; - self.check_if_path_is_moved(borrow_id, borrow_span, moved_value_use_kind, &lp); - } - None => { } + if let Some(lp) = opt_loan_path(&cmt) { + let moved_value_use_kind = match loan_cause { + euv::ClosureCapture(_) => MovedInCapture, + _ => MovedInUse, + }; + self.check_if_path_is_moved(borrow_id, borrow_span, moved_value_use_kind, &lp); } self.check_for_conflicting_loans(borrow_id); @@ -158,33 +155,29 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { debug!("mutate(assignment_id={}, assignee_cmt={:?})", assignment_id, assignee_cmt); - match opt_loan_path(&assignee_cmt) { - Some(lp) => { - match mode { - MutateMode::Init | MutateMode::JustWrite => { - // In a case like `path = 1`, then path does not - // have to be *FULLY* initialized, but we still - // must be careful lest it contains derefs of - // pointers. - self.check_if_assigned_path_is_moved(assignee_cmt.id, - assignment_span, - MovedInUse, - &lp); - } - MutateMode::WriteAndRead => { - // In a case like `path += 1`, then path must be - // fully initialized, since we will read it before - // we write it. - self.check_if_path_is_moved(assignee_cmt.id, - assignment_span, - MovedInUse, - &lp); - } + if let Some(lp) = opt_loan_path(&assignee_cmt) { + match mode { + MutateMode::Init | MutateMode::JustWrite => { + // In a case like `path = 1`, then path does not + // have to be *FULLY* initialized, but we still + // must be careful lest it contains derefs of + // pointers. + self.check_if_assigned_path_is_moved(assignee_cmt.id, + assignment_span, + MovedInUse, + &lp); + } + MutateMode::WriteAndRead => { + // In a case like `path += 1`, then path must be + // fully initialized, since we will read it before + // we write it. + self.check_if_path_is_moved(assignee_cmt.id, + assignment_span, + MovedInUse, + &lp); } } - None => { } } - self.check_assignment(assignment_id, assignment_span, assignee_cmt); } @@ -601,39 +594,36 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { span: Span, cmt: mc::cmt<'tcx>, mode: euv::ConsumeMode) { - match opt_loan_path(&cmt) { - Some(lp) => { - let moved_value_use_kind = match mode { - euv::Copy => { - self.check_for_copy_of_frozen_path(id, span, &lp); - MovedInUse - } - euv::Move(_) => { - match self.move_data.kind_of_move_of_path(id, &lp) { - None => { - // Sometimes moves don't have a move kind; - // this either means that the original move - // was from something illegal to move, - // or was moved from referent of an unsafe - // pointer or something like that. 
+ if let Some(lp) = opt_loan_path(&cmt) { + let moved_value_use_kind = match mode { + euv::Copy => { + self.check_for_copy_of_frozen_path(id, span, &lp); + MovedInUse + } + euv::Move(_) => { + match self.move_data.kind_of_move_of_path(id, &lp) { + None => { + // Sometimes moves don't have a move kind; + // this either means that the original move + // was from something illegal to move, + // or was moved from referent of an unsafe + // pointer or something like that. + MovedInUse + } + Some(move_kind) => { + self.check_for_move_of_borrowed_path(id, span, + &lp, move_kind); + if move_kind == move_data::Captured { + MovedInCapture + } else { MovedInUse } - Some(move_kind) => { - self.check_for_move_of_borrowed_path(id, span, - &lp, move_kind); - if move_kind == move_data::Captured { - MovedInCapture - } else { - MovedInUse - } - } } } - }; + } + }; - self.check_if_path_is_moved(id, span, moved_value_use_kind, &lp); - } - None => { } + self.check_if_path_is_moved(id, span, moved_value_use_kind, &lp); } } diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs index 9fbf1492f5d28..47f8d978704f4 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs @@ -92,7 +92,7 @@ fn group_errors_with_same_origin<'tcx>(errors: &Vec>) let move_from_id = error.move_from.id; debug!("append_to_grouped_errors(move_from_id={})", move_from_id); let move_to = if error.move_to.is_some() { - vec!(error.move_to.clone().unwrap()) + vec![error.move_to.clone().unwrap()] } else { Vec::new() }; diff --git a/src/librustc_borrowck/borrowck/mir/abs_domain.rs b/src/librustc_borrowck/borrowck/mir/abs_domain.rs index 155b615d83c7b..dc450433ad9fa 100644 --- a/src/librustc_borrowck/borrowck/mir/abs_domain.rs +++ b/src/librustc_borrowck/borrowck/mir/abs_domain.rs @@ -21,8 +21,8 @@ //! `a[x]` would still overlap them both. But that is not this //! representation does today.) -use rustc::mir::repr::{Lvalue, LvalueElem}; -use rustc::mir::repr::{Operand, Projection, ProjectionElem}; +use rustc::mir::{Lvalue, LvalueElem}; +use rustc::mir::{Operand, Projection, ProjectionElem}; #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] pub struct AbstractOperand; diff --git a/src/librustc_borrowck/borrowck/mir/dataflow/graphviz.rs b/src/librustc_borrowck/borrowck/mir/dataflow/graphviz.rs index 91be50d11f952..28f58723862cb 100644 --- a/src/librustc_borrowck/borrowck/mir/dataflow/graphviz.rs +++ b/src/librustc_borrowck/borrowck/mir/dataflow/graphviz.rs @@ -11,7 +11,9 @@ //! Hook into libgraphviz for rendering dataflow graphs for MIR. use syntax::ast::NodeId; -use rustc::mir::repr::{BasicBlock, Mir}; +use rustc::mir::{BasicBlock, Mir}; +use rustc_data_structures::bitslice::bits_to_string; +use rustc_data_structures::indexed_set::{IdxSet}; use rustc_data_structures::indexed_vec::Idx; use dot; @@ -27,8 +29,6 @@ use std::path::Path; use super::super::MoveDataParamEnv; use super::super::MirBorrowckCtxtPreDataflow; -use bitslice::bits_to_string; -use indexed_set::{IdxSet}; use super::{BitDenotation, DataflowState}; impl DataflowState { diff --git a/src/librustc_borrowck/borrowck/mir/dataflow/impls.rs b/src/librustc_borrowck/borrowck/mir/dataflow/impls.rs index 8ac59c60396f6..fcb453d81aa77 100644 --- a/src/librustc_borrowck/borrowck/mir/dataflow/impls.rs +++ b/src/librustc_borrowck/borrowck/mir/dataflow/impls.rs @@ -9,7 +9,10 @@ // except according to those terms. 
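The `check_loans.rs` hunks above are a mechanical rewrite of `match foo { Some(x) => { ... } None => {} }` into `if let Some(x) = foo { ... }`, dropping the empty arm and one level of indentation without changing behavior. A stand-alone illustration of the same rewrite (all names are made up):

```rust
fn lookup(key: u32) -> Option<&'static str> {
    if key == 1 { Some("one") } else { None }
}

fn old_style(key: u32) {
    // Before: an explicit match with a do-nothing None arm.
    match lookup(key) {
        Some(name) => {
            println!("found {}", name);
        }
        None => {}
    }
}

fn new_style(key: u32) {
    // After: `if let` expresses "only act in the Some case".
    if let Some(name) = lookup(key) {
        println!("found {}", name);
    }
}

fn main() {
    old_style(1);
    new_style(2);
}
```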
use rustc::ty::TyCtxt; -use rustc::mir::repr::{self, Mir, Location}; +use rustc::mir::{self, Mir, Location}; +use rustc_data_structures::bitslice::BitSlice; // adds set_bit/get_bit to &[usize] bitvector rep. +use rustc_data_structures::bitslice::{BitwiseOperator}; +use rustc_data_structures::indexed_set::{IdxSet}; use rustc_data_structures::indexed_vec::Idx; use super::super::gather_moves::{MoveOutIndex, MovePathIndex}; @@ -21,10 +24,6 @@ use super::super::on_lookup_result_bits; use super::{BitDenotation, BlockSets, DataflowOperator}; -use bitslice::BitSlice; // adds set_bit/get_bit to &[usize] bitvector rep. -use bitslice::{BitwiseOperator}; -use indexed_set::{IdxSet}; - // Dataflow analyses are built upon some interpretation of the // bitvectors attached to each basic block, represented via a // zero-sized structure. @@ -246,7 +245,7 @@ impl<'a, 'tcx> BitDenotation for MaybeInitializedLvals<'a, 'tcx> { fn statement_effect(&self, ctxt: &Self::Ctxt, sets: &mut BlockSets, - bb: repr::BasicBlock, + bb: mir::BasicBlock, idx: usize) { drop_flag_effects_for_location( @@ -259,7 +258,7 @@ impl<'a, 'tcx> BitDenotation for MaybeInitializedLvals<'a, 'tcx> { fn terminator_effect(&self, ctxt: &Self::Ctxt, sets: &mut BlockSets, - bb: repr::BasicBlock, + bb: mir::BasicBlock, statements_len: usize) { drop_flag_effects_for_location( @@ -272,9 +271,9 @@ impl<'a, 'tcx> BitDenotation for MaybeInitializedLvals<'a, 'tcx> { fn propagate_call_return(&self, ctxt: &Self::Ctxt, in_out: &mut IdxSet, - _call_bb: repr::BasicBlock, - _dest_bb: repr::BasicBlock, - dest_lval: &repr::Lvalue) { + _call_bb: mir::BasicBlock, + _dest_bb: mir::BasicBlock, + dest_lval: &mir::Lvalue) { // when a call returns successfully, that means we need to set // the bits for that dest_lval to 1 (initialized). on_lookup_result_bits(self.tcx, self.mir, &ctxt.move_data, @@ -307,7 +306,7 @@ impl<'a, 'tcx> BitDenotation for MaybeUninitializedLvals<'a, 'tcx> { fn statement_effect(&self, ctxt: &Self::Ctxt, sets: &mut BlockSets, - bb: repr::BasicBlock, + bb: mir::BasicBlock, idx: usize) { drop_flag_effects_for_location( @@ -320,7 +319,7 @@ impl<'a, 'tcx> BitDenotation for MaybeUninitializedLvals<'a, 'tcx> { fn terminator_effect(&self, ctxt: &Self::Ctxt, sets: &mut BlockSets, - bb: repr::BasicBlock, + bb: mir::BasicBlock, statements_len: usize) { drop_flag_effects_for_location( @@ -333,9 +332,9 @@ impl<'a, 'tcx> BitDenotation for MaybeUninitializedLvals<'a, 'tcx> { fn propagate_call_return(&self, ctxt: &Self::Ctxt, in_out: &mut IdxSet, - _call_bb: repr::BasicBlock, - _dest_bb: repr::BasicBlock, - dest_lval: &repr::Lvalue) { + _call_bb: mir::BasicBlock, + _dest_bb: mir::BasicBlock, + dest_lval: &mir::Lvalue) { // when a call returns successfully, that means we need to set // the bits for that dest_lval to 0 (initialized). 
on_lookup_result_bits(self.tcx, self.mir, &ctxt.move_data, @@ -367,7 +366,7 @@ impl<'a, 'tcx> BitDenotation for DefinitelyInitializedLvals<'a, 'tcx> { fn statement_effect(&self, ctxt: &Self::Ctxt, sets: &mut BlockSets, - bb: repr::BasicBlock, + bb: mir::BasicBlock, idx: usize) { drop_flag_effects_for_location( @@ -380,7 +379,7 @@ impl<'a, 'tcx> BitDenotation for DefinitelyInitializedLvals<'a, 'tcx> { fn terminator_effect(&self, ctxt: &Self::Ctxt, sets: &mut BlockSets, - bb: repr::BasicBlock, + bb: mir::BasicBlock, statements_len: usize) { drop_flag_effects_for_location( @@ -393,9 +392,9 @@ impl<'a, 'tcx> BitDenotation for DefinitelyInitializedLvals<'a, 'tcx> { fn propagate_call_return(&self, ctxt: &Self::Ctxt, in_out: &mut IdxSet, - _call_bb: repr::BasicBlock, - _dest_bb: repr::BasicBlock, - dest_lval: &repr::Lvalue) { + _call_bb: mir::BasicBlock, + _dest_bb: mir::BasicBlock, + dest_lval: &mir::Lvalue) { // when a call returns successfully, that means we need to set // the bits for that dest_lval to 1 (initialized). on_lookup_result_bits(self.tcx, self.mir, &ctxt.move_data, @@ -419,7 +418,7 @@ impl<'a, 'tcx> BitDenotation for MovingOutStatements<'a, 'tcx> { fn statement_effect(&self, ctxt: &Self::Ctxt, sets: &mut BlockSets, - bb: repr::BasicBlock, + bb: mir::BasicBlock, idx: usize) { let (tcx, mir, move_data) = (self.tcx, self.mir, &ctxt.move_data); let stmt = &mir[bb].statements[idx]; @@ -438,10 +437,10 @@ impl<'a, 'tcx> BitDenotation for MovingOutStatements<'a, 'tcx> { } let bits_per_block = self.bits_per_block(ctxt); match stmt.kind { - repr::StatementKind::SetDiscriminant { .. } => { + mir::StatementKind::SetDiscriminant { .. } => { span_bug!(stmt.source_info.span, "SetDiscriminant should not exist in borrowck"); } - repr::StatementKind::Assign(ref lvalue, _) => { + mir::StatementKind::Assign(ref lvalue, _) => { // assigning into this `lvalue` kills all // MoveOuts from it, and *also* all MoveOuts // for children and associated fragment sets. @@ -454,15 +453,16 @@ impl<'a, 'tcx> BitDenotation for MovingOutStatements<'a, 'tcx> { sets.kill_set.add(&moi); }); } - repr::StatementKind::StorageLive(_) | - repr::StatementKind::StorageDead(_) => {} + mir::StatementKind::StorageLive(_) | + mir::StatementKind::StorageDead(_) | + mir::StatementKind::Nop => {} } } fn terminator_effect(&self, ctxt: &Self::Ctxt, sets: &mut BlockSets, - bb: repr::BasicBlock, + bb: mir::BasicBlock, statements_len: usize) { let (mir, move_data) = (self.mir, &ctxt.move_data); @@ -481,9 +481,9 @@ impl<'a, 'tcx> BitDenotation for MovingOutStatements<'a, 'tcx> { fn propagate_call_return(&self, ctxt: &Self::Ctxt, in_out: &mut IdxSet, - _call_bb: repr::BasicBlock, - _dest_bb: repr::BasicBlock, - dest_lval: &repr::Lvalue) { + _call_bb: mir::BasicBlock, + _dest_bb: mir::BasicBlock, + dest_lval: &mir::Lvalue) { let move_data = &ctxt.move_data; let bits_per_block = self.bits_per_block(ctxt); diff --git a/src/librustc_borrowck/borrowck/mir/dataflow/mod.rs b/src/librustc_borrowck/borrowck/mir/dataflow/mod.rs index a9b4de450967c..51817afbfeafd 100644 --- a/src/librustc_borrowck/borrowck/mir/dataflow/mod.rs +++ b/src/librustc_borrowck/borrowck/mir/dataflow/mod.rs @@ -8,10 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
+use rustc_data_structures::indexed_set::{IdxSet, IdxSetBuf}; use rustc_data_structures::indexed_vec::Idx; +use rustc_data_structures::bitslice::{bitwise, BitwiseOperator}; use rustc::ty::TyCtxt; -use rustc::mir::repr::{self, Mir}; +use rustc::mir::{self, Mir}; use std::fmt::Debug; use std::io; @@ -22,9 +24,6 @@ use std::usize; use super::MirBorrowckCtxtPreDataflow; use super::MoveDataParamEnv; -use bitslice::{bitwise, BitwiseOperator}; -use indexed_set::{IdxSet, IdxSetBuf}; - pub use self::sanity_check::sanity_check_via_rustc_peek; pub use self::impls::{MaybeInitializedLvals, MaybeUninitializedLvals}; pub use self::impls::{DefinitelyInitializedLvals, MovingOutStatements}; @@ -79,14 +78,12 @@ impl<'a, 'tcx: 'a, BD> DataflowAnalysis<'a, 'tcx, BD> // the kill-sets. { - let sets = &mut self.flow_state.sets.for_block(repr::START_BLOCK.index()); + let sets = &mut self.flow_state.sets.for_block(mir::START_BLOCK.index()); self.flow_state.operator.start_block_effect(&self.ctxt, sets); } for (bb, data) in self.mir.basic_blocks().iter_enumerated() { - let &repr::BasicBlockData { ref statements, - ref terminator, - is_cleanup: _ } = data; + let &mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = data; let sets = &mut self.flow_state.sets.for_block(bb.index()); for j_stmt in 0..statements.len() { @@ -123,7 +120,7 @@ impl<'b, 'a: 'b, 'tcx: 'a, BD> PropagationContext<'b, 'a, 'tcx, BD> in_out.subtract(sets.kill_set); } builder.propagate_bits_into_graph_successors_of( - in_out, &mut self.changed, (repr::BasicBlock::new(bb_idx), bb_data)); + in_out, &mut self.changed, (mir::BasicBlock::new(bb_idx), bb_data)); } } } @@ -337,7 +334,7 @@ pub trait BitDenotation { fn statement_effect(&self, ctxt: &Self::Ctxt, sets: &mut BlockSets, - bb: repr::BasicBlock, + bb: mir::BasicBlock, idx_stmt: usize); /// Mutates the block-sets (the flow sets for the given @@ -353,7 +350,7 @@ pub trait BitDenotation { fn terminator_effect(&self, ctxt: &Self::Ctxt, sets: &mut BlockSets, - bb: repr::BasicBlock, + bb: mir::BasicBlock, idx_term: usize); /// Mutates the block-sets according to the (flow-dependent) @@ -378,9 +375,9 @@ pub trait BitDenotation { fn propagate_call_return(&self, ctxt: &Self::Ctxt, in_out: &mut IdxSet, - call_bb: repr::BasicBlock, - dest_bb: repr::BasicBlock, - dest_lval: &repr::Lvalue); + call_bb: mir::BasicBlock, + dest_bb: mir::BasicBlock, + dest_lval: &mir::Lvalue); } impl<'a, 'tcx: 'a, D> DataflowAnalysis<'a, 'tcx, D> @@ -445,39 +442,39 @@ impl<'a, 'tcx: 'a, D> DataflowAnalysis<'a, 'tcx, D> &mut self, in_out: &mut IdxSet, changed: &mut bool, - (bb, bb_data): (repr::BasicBlock, &repr::BasicBlockData)) + (bb, bb_data): (mir::BasicBlock, &mir::BasicBlockData)) { match bb_data.terminator().kind { - repr::TerminatorKind::Return | - repr::TerminatorKind::Resume | - repr::TerminatorKind::Unreachable => {} - repr::TerminatorKind::Goto { ref target } | - repr::TerminatorKind::Assert { ref target, cleanup: None, .. } | - repr::TerminatorKind::Drop { ref target, location: _, unwind: None } | - repr::TerminatorKind::DropAndReplace { + mir::TerminatorKind::Return | + mir::TerminatorKind::Resume | + mir::TerminatorKind::Unreachable => {} + mir::TerminatorKind::Goto { ref target } | + mir::TerminatorKind::Assert { ref target, cleanup: None, .. 
} | + mir::TerminatorKind::Drop { ref target, location: _, unwind: None } | + mir::TerminatorKind::DropAndReplace { ref target, value: _, location: _, unwind: None } => { self.propagate_bits_into_entry_set_for(in_out, changed, target); } - repr::TerminatorKind::Assert { ref target, cleanup: Some(ref unwind), .. } | - repr::TerminatorKind::Drop { ref target, location: _, unwind: Some(ref unwind) } | - repr::TerminatorKind::DropAndReplace { + mir::TerminatorKind::Assert { ref target, cleanup: Some(ref unwind), .. } | + mir::TerminatorKind::Drop { ref target, location: _, unwind: Some(ref unwind) } | + mir::TerminatorKind::DropAndReplace { ref target, value: _, location: _, unwind: Some(ref unwind) } => { self.propagate_bits_into_entry_set_for(in_out, changed, target); self.propagate_bits_into_entry_set_for(in_out, changed, unwind); } - repr::TerminatorKind::If { ref targets, .. } => { + mir::TerminatorKind::If { ref targets, .. } => { self.propagate_bits_into_entry_set_for(in_out, changed, &targets.0); self.propagate_bits_into_entry_set_for(in_out, changed, &targets.1); } - repr::TerminatorKind::Switch { ref targets, .. } | - repr::TerminatorKind::SwitchInt { ref targets, .. } => { + mir::TerminatorKind::Switch { ref targets, .. } | + mir::TerminatorKind::SwitchInt { ref targets, .. } => { for target in targets { self.propagate_bits_into_entry_set_for(in_out, changed, target); } } - repr::TerminatorKind::Call { ref cleanup, ref destination, func: _, args: _ } => { + mir::TerminatorKind::Call { ref cleanup, ref destination, func: _, args: _ } => { if let Some(ref unwind) = *cleanup { self.propagate_bits_into_entry_set_for(in_out, changed, unwind); } @@ -495,7 +492,7 @@ impl<'a, 'tcx: 'a, D> DataflowAnalysis<'a, 'tcx, D> fn propagate_bits_into_entry_set_for(&mut self, in_out: &IdxSet, changed: &mut bool, - bb: &repr::BasicBlock) { + bb: &mir::BasicBlock) { let entry_set = self.flow_state.sets.for_block(bb.index()).on_entry; let set_changed = bitwise(entry_set.words_mut(), in_out.words(), diff --git a/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs b/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs index 88f6d5fef562d..b8c26a0512ff8 100644 --- a/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs +++ b/src/librustc_borrowck/borrowck/mir/dataflow/sanity_check.rs @@ -13,7 +13,7 @@ use syntax::ast; use syntax_pos::Span; use rustc::ty::{self, TyCtxt}; -use rustc::mir::repr::{self, Mir}; +use rustc::mir::{self, Mir}; use rustc_data_structures::indexed_vec::Idx; use super::super::gather_moves::{MovePathIndex, LookupResult}; @@ -59,13 +59,11 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &Mir<'tcx>, ctxt: &O::Ctxt, results: &DataflowResults, - bb: repr::BasicBlock) where + bb: mir::BasicBlock) where O: BitDenotation, Idx=MovePathIndex> { let move_data = &ctxt.move_data; - let repr::BasicBlockData { ref statements, - ref terminator, - is_cleanup: _ } = mir[bb]; + let mir::BasicBlockData { ref statements, ref terminator, is_cleanup: _ } = mir[bb]; let (args, span) = match is_rustc_peek(tcx, terminator) { Some(args_and_span) => args_and_span, @@ -73,11 +71,13 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>, }; assert!(args.len() == 1); let peek_arg_lval = match args[0] { - repr::Operand::Consume(ref lval @ repr::Lvalue::Temp(_)) => { - lval - } - repr::Operand::Consume(_) | - repr::Operand::Constant(_) => { + mir::Operand::Consume(ref lval @ mir::Lvalue::Local(_)) => Some(lval), + _ => None, + }; + + let peek_arg_lval = match peek_arg_lval { + 
Some(arg) => arg, + None => { tcx.sess.diagnostic().span_err( span, "dataflow::sanity_check cannot feed a non-temp to rustc_peek."); return; @@ -101,20 +101,19 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>, for (j, stmt) in statements.iter().enumerate() { debug!("rustc_peek: ({:?},{}) {:?}", bb, j, stmt); let (lvalue, rvalue) = match stmt.kind { - repr::StatementKind::Assign(ref lvalue, ref rvalue) => { + mir::StatementKind::Assign(ref lvalue, ref rvalue) => { (lvalue, rvalue) } - repr::StatementKind::StorageLive(_) | - repr::StatementKind::StorageDead(_) => continue, - repr::StatementKind::SetDiscriminant{ .. } => + mir::StatementKind::StorageLive(_) | + mir::StatementKind::StorageDead(_) | + mir::StatementKind::Nop => continue, + mir::StatementKind::SetDiscriminant{ .. } => span_bug!(stmt.source_info.span, "sanity_check should run before Deaggregator inserts SetDiscriminant"), }; if lvalue == peek_arg_lval { - if let repr::Rvalue::Ref(_, - repr::BorrowKind::Shared, - ref peeking_at_lval) = *rvalue { + if let mir::Rvalue::Ref(_, mir::BorrowKind::Shared, ref peeking_at_lval) = *rvalue { // Okay, our search is over. match move_data.rev_lookup.find(peeking_at_lval) { LookupResult::Exact(peek_mpi) => { @@ -159,12 +158,12 @@ fn each_block<'a, 'tcx, O>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } fn is_rustc_peek<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - terminator: &'a Option>) - -> Option<(&'a [repr::Operand<'tcx>], Span)> { - if let Some(repr::Terminator { ref kind, source_info, .. }) = *terminator { - if let repr::TerminatorKind::Call { func: ref oper, ref args, .. } = *kind + terminator: &'a Option>) + -> Option<(&'a [mir::Operand<'tcx>], Span)> { + if let Some(mir::Terminator { ref kind, source_info, .. }) = *terminator { + if let mir::TerminatorKind::Call { func: ref oper, ref args, .. } = *kind { - if let repr::Operand::Constant(ref func) = *oper + if let mir::Operand::Constant(ref func) = *oper { if let ty::TyFnDef(def_id, _, &ty::BareFnTy { abi, .. }) = func.ty.sty { diff --git a/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs b/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs index 96702b209a1f5..191cd981b61eb 100644 --- a/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs +++ b/src/librustc_borrowck/borrowck/mir/elaborate_drops.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
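The dataflow walker above follows the classic gen/kill scheme: each block's transfer function is `exit = (entry ∪ gen) \ kill`, and exit states are unioned into successor entry states until a fixed point is reached. A tiny generic sketch of that loop, using `u64` bitsets and a hard-coded CFG in place of rustc's `IdxSet` and MIR basic blocks:

```rust
// Forward gen/kill dataflow over a small hard-coded CFG, as a sketch of the
// propagation loop above. Each block's state is a u64 bitset.
fn main() {
    // succs[b] = successor blocks of block b.
    let succs: Vec<Vec<usize>> = vec![vec![1, 2], vec![3], vec![3], vec![]];
    let gen_sets: Vec<u64> = vec![0b0001, 0b0010, 0b0100, 0b0000];
    let kill_sets: Vec<u64> = vec![0b0000, 0b0001, 0b0000, 0b0010];

    let mut on_entry = vec![0u64; succs.len()];
    let mut changed = true;
    while changed {
        changed = false;
        for b in 0..succs.len() {
            // Transfer function: exit = (entry | gen) & !kill.
            let exit = (on_entry[b] | gen_sets[b]) & !kill_sets[b];
            for &s in &succs[b] {
                // Union the exit state into each successor's entry state and
                // record whether anything actually changed.
                let merged = on_entry[s] | exit;
                if merged != on_entry[s] {
                    on_entry[s] = merged;
                    changed = true;
                }
            }
        }
    }
    println!("entry sets: {:?}", on_entry); // prints [0, 1, 1, 7]
}
```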
-use indexed_set::IdxSetBuf; use super::gather_moves::{MoveData, MovePathIndex, LookupResult}; use super::dataflow::{MaybeInitializedLvals, MaybeUninitializedLvals}; use super::dataflow::{DataflowResults}; @@ -18,11 +17,12 @@ use super::{DropFlagState, MoveDataParamEnv}; use super::patch::MirPatch; use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::subst::{Kind, Subst, Substs}; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::mir::transform::{Pass, MirPass, MirSource}; use rustc::middle::const_val::ConstVal; use rustc::middle::lang_items; use rustc::util::nodemap::FnvHashMap; +use rustc_data_structures::indexed_set::IdxSetBuf; use rustc_data_structures::indexed_vec::Idx; use syntax_pos::Span; @@ -118,7 +118,7 @@ struct ElaborateDropsCtxt<'a, 'tcx: 'a> { env: &'a MoveDataParamEnv<'tcx>, flow_inits: DataflowResults>, flow_uninits: DataflowResults>, - drop_flags: FnvHashMap, + drop_flags: FnvHashMap, patch: MirPatch<'tcx>, } @@ -164,7 +164,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { } fn drop_flag(&mut self, index: MovePathIndex) -> Option> { - self.drop_flags.get(&index).map(|t| Lvalue::Temp(*t)) + self.drop_flags.get(&index).map(|t| Lvalue::Local(*t)) } /// create a patch that elaborates all drops in the input @@ -847,17 +847,17 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { statements.push(Statement { source_info: c.source_info, kind: StatementKind::Assign( - Lvalue::Temp(flag), + Lvalue::Local(flag), self.constant_bool(c.source_info.span, false) ) }); } let tcx = self.tcx; - let unit_temp = Lvalue::Temp(self.patch.new_temp(tcx.mk_nil())); + let unit_temp = Lvalue::Local(self.patch.new_temp(tcx.mk_nil())); let free_func = tcx.lang_items.require(lang_items::BoxFreeFnLangItem) .unwrap_or_else(|e| tcx.sess.fatal(&e)); - let substs = Substs::new(tcx, iter::once(Kind::from(ty))); + let substs = tcx.mk_substs(iter::once(Kind::from(ty))); let fty = tcx.lookup_item_type(free_func).ty.subst(tcx, substs); self.patch.new_block(BasicBlockData { @@ -917,7 +917,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { if let Some(&flag) = self.drop_flags.get(&path) { let span = self.patch.source_info_for_location(self.mir, loc).span; let val = self.constant_bool(span, val.value()); - self.patch.add_assign(loc, Lvalue::Temp(flag), val); + self.patch.add_assign(loc, Lvalue::Local(flag), val); } } @@ -926,7 +926,7 @@ impl<'b, 'tcx> ElaborateDropsCtxt<'b, 'tcx> { let span = self.patch.source_info_for_location(self.mir, loc).span; let false_ = self.constant_bool(span, false); for flag in self.drop_flags.values() { - self.patch.add_assign(loc, Lvalue::Temp(*flag), false_.clone()); + self.patch.add_assign(loc, Lvalue::Local(*flag), false_.clone()); } } diff --git a/src/librustc_borrowck/borrowck/mir/gather_moves.rs b/src/librustc_borrowck/borrowck/mir/gather_moves.rs index bd38f554dc9b0..1dc5769e63cf8 100644 --- a/src/librustc_borrowck/borrowck/mir/gather_moves.rs +++ b/src/librustc_borrowck/borrowck/mir/gather_moves.rs @@ -10,7 +10,7 @@ use rustc::ty::{self, TyCtxt, ParameterEnvironment}; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::util::nodemap::FnvHashMap; use rustc_data_structures::indexed_vec::{IndexVec}; @@ -173,13 +173,7 @@ impl fmt::Debug for MoveOut { /// Tables mapping from an l-value to its MovePathIndex. #[derive(Debug)] pub struct MovePathLookup<'tcx> { - vars: IndexVec, - temps: IndexVec, - args: IndexVec, - - /// The move path representing the return value is constructed - /// lazily when we first encounter it in the input MIR. 
- return_ptr: Option, + locals: IndexVec, /// projections are made from a base-lvalue and a projection /// elem. The base-lvalue will have a unique MovePathIndex; we use @@ -218,16 +212,9 @@ impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> { moves: IndexVec::new(), loc_map: LocationMap::new(mir), rev_lookup: MovePathLookup { - vars: mir.var_decls.indices().map(Lvalue::Var).map(|v| { + locals: mir.local_decls.indices().map(Lvalue::Local).map(|v| { Self::new_move_path(&mut move_paths, &mut path_map, None, v) }).collect(), - temps: mir.temp_decls.indices().map(Lvalue::Temp).map(|t| { - Self::new_move_path(&mut move_paths, &mut path_map, None, t) - }).collect(), - args: mir.arg_decls.indices().map(Lvalue::Arg).map(|a| { - Self::new_move_path(&mut move_paths, &mut path_map, None, a) - }).collect(), - return_ptr: None, projections: FnvHashMap(), }, move_paths: move_paths, @@ -272,23 +259,9 @@ impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> { { debug!("lookup({:?})", lval); match *lval { - Lvalue::Var(var) => Ok(self.data.rev_lookup.vars[var]), - Lvalue::Arg(arg) => Ok(self.data.rev_lookup.args[arg]), - Lvalue::Temp(temp) => Ok(self.data.rev_lookup.temps[temp]), + Lvalue::Local(local) => Ok(self.data.rev_lookup.locals[local]), // error: can't move out of a static Lvalue::Static(..) => Err(MovePathError::IllegalMove), - Lvalue::ReturnPointer => match self.data.rev_lookup.return_ptr { - Some(ptr) => Ok(ptr), - ref mut ptr @ None => { - let path = Self::new_move_path( - &mut self.data.move_paths, - &mut self.data.path_map, - None, - lval.clone()); - *ptr = Some(path); - Ok(path) - } - }, Lvalue::Projection(ref proj) => { self.move_path_for_projection(lval, proj) } @@ -373,11 +346,8 @@ impl<'tcx> MovePathLookup<'tcx> { // parent. pub fn find(&self, lval: &Lvalue<'tcx>) -> LookupResult { match *lval { - Lvalue::Var(var) => LookupResult::Exact(self.vars[var]), - Lvalue::Temp(temp) => LookupResult::Exact(self.temps[temp]), - Lvalue::Arg(arg) => LookupResult::Exact(self.args[arg]), + Lvalue::Local(local) => LookupResult::Exact(self.locals[local]), Lvalue::Static(..) => LookupResult::Parent(None), - Lvalue::ReturnPointer => LookupResult::Exact(self.return_ptr.unwrap()), Lvalue::Projection(ref proj) => { match self.find(&proj.base) { LookupResult::Exact(base_path) => { @@ -438,6 +408,7 @@ impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> { span_bug!(stmt.source_info.span, "SetDiscriminant should not exist during borrowck"); } + StatementKind::Nop => {} } } @@ -485,7 +456,7 @@ impl<'a, 'tcx> MoveDataBuilder<'a, 'tcx> { TerminatorKind::Unreachable => { } TerminatorKind::Return => { - self.gather_move(loc, &Lvalue::ReturnPointer); + self.gather_move(loc, &Lvalue::Local(RETURN_POINTER)); } TerminatorKind::If { .. 
} | diff --git a/src/librustc_borrowck/borrowck/mir/mod.rs b/src/librustc_borrowck/borrowck/mir/mod.rs index 5b5d782bc83a2..cea9170da9ffd 100644 --- a/src/librustc_borrowck/borrowck/mir/mod.rs +++ b/src/librustc_borrowck/borrowck/mir/mod.rs @@ -17,8 +17,7 @@ use syntax_pos::{Span, DUMMY_SP}; use rustc::hir; use rustc::hir::intravisit::{FnKind}; -use rustc::mir::repr; -use rustc::mir::repr::{BasicBlock, BasicBlockData, Mir, Statement, Terminator, Location}; +use rustc::mir::{self, BasicBlock, BasicBlockData, Mir, Statement, Terminator, Location}; use rustc::session::Session; use rustc::ty::{self, TyCtxt}; @@ -56,15 +55,13 @@ pub struct MoveDataParamEnv<'tcx> { param_env: ty::ParameterEnvironment<'tcx>, } -pub fn borrowck_mir<'a, 'tcx: 'a>( - bcx: &mut BorrowckCtxt<'a, 'tcx>, - fk: FnKind, - _decl: &hir::FnDecl, - mir: &'a Mir<'tcx>, - body: &hir::Block, - _sp: Span, - id: ast::NodeId, - attributes: &[ast::Attribute]) { +pub fn borrowck_mir(bcx: &mut BorrowckCtxt, + fk: FnKind, + _decl: &hir::FnDecl, + body: &hir::Block, + _sp: Span, + id: ast::NodeId, + attributes: &[ast::Attribute]) { match fk { FnKind::ItemFn(name, ..) | FnKind::Method(name, ..) => { @@ -76,8 +73,10 @@ pub fn borrowck_mir<'a, 'tcx: 'a>( } let tcx = bcx.tcx; - let param_env = ty::ParameterEnvironment::for_item(tcx, id); + + let mir = &tcx.item_mir(tcx.map.local_def_id(id)); + let move_data = MoveData::gather_moves(mir, tcx, ¶m_env); let mdpe = MoveDataParamEnv { move_data: move_data, param_env: param_env }; let flow_inits = @@ -171,8 +170,8 @@ pub struct MirBorrowckCtxt<'b, 'a: 'b, 'tcx: 'a> { mir: &'b Mir<'tcx>, node_id: ast::NodeId, move_data: MoveData<'tcx>, - flow_inits: DataflowResults>, - flow_uninits: DataflowResults> + flow_inits: DataflowResults>, + flow_uninits: DataflowResults> } impl<'b, 'a: 'b, 'tcx: 'a> MirBorrowckCtxt<'b, 'a, 'tcx> { @@ -214,12 +213,12 @@ fn move_path_children_matching<'tcx, F>(move_data: &MoveData<'tcx>, path: MovePathIndex, mut cond: F) -> Option - where F: FnMut(&repr::LvalueProjection<'tcx>) -> bool + where F: FnMut(&mir::LvalueProjection<'tcx>) -> bool { let mut next_child = move_data.move_paths[path].first_child; while let Some(child_index) = next_child { match move_data.move_paths[child_index].lvalue { - repr::Lvalue::Projection(ref proj) => { + mir::Lvalue::Projection(ref proj) => { if cond(proj) { return Some(child_index) } @@ -252,7 +251,7 @@ fn move_path_children_matching<'tcx, F>(move_data: &MoveData<'tcx>, /// FIXME: we have to do something for moving slice patterns. fn lvalue_contents_drop_state_cannot_differ<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mir: &Mir<'tcx>, - lv: &repr::Lvalue<'tcx>) -> bool { + lv: &mir::Lvalue<'tcx>) -> bool { let ty = lv.ty(mir, tcx).to_ty(tcx); match ty.sty { ty::TyArray(..) | ty::TySlice(..) | ty::TyRef(..) | ty::TyRawPtr(..) => { @@ -338,8 +337,8 @@ fn drop_flag_effects_for_function_entry<'a, 'tcx, F>( where F: FnMut(MovePathIndex, DropFlagState) { let move_data = &ctxt.move_data; - for (arg, _) in mir.arg_decls.iter_enumerated() { - let lvalue = repr::Lvalue::Arg(arg); + for arg in mir.args_iter() { + let lvalue = mir::Lvalue::Local(arg); let lookup_result = move_data.rev_lookup.find(&lvalue); on_lookup_result_bits(tcx, mir, move_data, lookup_result, @@ -379,22 +378,23 @@ fn drop_flag_effects_for_location<'a, 'tcx, F>( let block = &mir[loc.block]; match block.statements.get(loc.statement_index) { Some(stmt) => match stmt.kind { - repr::StatementKind::SetDiscriminant{ .. } => { + mir::StatementKind::SetDiscriminant{ .. 
} => { span_bug!(stmt.source_info.span, "SetDiscrimant should not exist during borrowck"); } - repr::StatementKind::Assign(ref lvalue, _) => { + mir::StatementKind::Assign(ref lvalue, _) => { debug!("drop_flag_effects: assignment {:?}", stmt); on_lookup_result_bits(tcx, mir, move_data, move_data.rev_lookup.find(lvalue), |moi| callback(moi, DropFlagState::Present)) } - repr::StatementKind::StorageLive(_) | - repr::StatementKind::StorageDead(_) => {} + mir::StatementKind::StorageLive(_) | + mir::StatementKind::StorageDead(_) | + mir::StatementKind::Nop => {} }, None => { debug!("drop_flag_effects: replace {:?}", block.terminator()); match block.terminator().kind { - repr::TerminatorKind::DropAndReplace { ref location, .. } => { + mir::TerminatorKind::DropAndReplace { ref location, .. } => { on_lookup_result_bits(tcx, mir, move_data, move_data.rev_lookup.find(location), |moi| callback(moi, DropFlagState::Present)) diff --git a/src/librustc_borrowck/borrowck/mir/patch.rs b/src/librustc_borrowck/borrowck/mir/patch.rs index 52cd1a9f949bf..19f240da73059 100644 --- a/src/librustc_borrowck/borrowck/mir/patch.rs +++ b/src/librustc_borrowck/borrowck/mir/patch.rs @@ -9,7 +9,7 @@ // except according to those terms. use rustc::ty::Ty; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; /// This struct represents a patch to MIR, which can add @@ -19,9 +19,9 @@ pub struct MirPatch<'tcx> { patch_map: IndexVec>>, new_blocks: Vec>, new_statements: Vec<(Location, StatementKind<'tcx>)>, - new_temps: Vec>, + new_locals: Vec>, resume_block: BasicBlock, - next_temp: usize, + next_local: usize, } impl<'tcx> MirPatch<'tcx> { @@ -29,9 +29,9 @@ impl<'tcx> MirPatch<'tcx> { let mut result = MirPatch { patch_map: IndexVec::from_elem(None, mir.basic_blocks()), new_blocks: vec![], - new_temps: vec![], new_statements: vec![], - next_temp: mir.temp_decls.len(), + new_locals: vec![], + next_local: mir.local_decls.len(), resume_block: START_BLOCK }; @@ -92,11 +92,11 @@ impl<'tcx> MirPatch<'tcx> { } } - pub fn new_temp(&mut self, ty: Ty<'tcx>) -> Temp { - let index = self.next_temp; - self.next_temp += 1; - self.new_temps.push(TempDecl { ty: ty }); - Temp::new(index as usize) + pub fn new_temp(&mut self, ty: Ty<'tcx>) -> Local { + let index = self.next_local; + self.next_local += 1; + self.new_locals.push(LocalDecl::new_temp(ty)); + Local::new(index as usize) } pub fn new_block(&mut self, data: BasicBlockData<'tcx>) -> BasicBlock { @@ -124,11 +124,11 @@ impl<'tcx> MirPatch<'tcx> { pub fn apply(self, mir: &mut Mir<'tcx>) { debug!("MirPatch: {:?} new temps, starting from index {}: {:?}", - self.new_temps.len(), mir.temp_decls.len(), self.new_temps); + self.new_locals.len(), mir.local_decls.len(), self.new_locals); debug!("MirPatch: {} new blocks, starting from index {}", self.new_blocks.len(), mir.basic_blocks().len()); mir.basic_blocks_mut().extend(self.new_blocks); - mir.temp_decls.extend(self.new_temps); + mir.local_decls.extend(self.new_locals); for (src, patch) in self.patch_map.into_iter_enumerated() { if let Some(patch) = patch { debug!("MirPatch: patching block {:?}", src); diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index 5d62629b64810..ef6936b6e7db3 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -51,8 +51,6 @@ use rustc::hir::{FnDecl, Block}; use rustc::hir::intravisit; use rustc::hir::intravisit::{Visitor, FnKind}; -use rustc::mir::mir_map::MirMap; - pub mod check_loans; 
pub mod gather_loans; @@ -102,10 +100,9 @@ impl<'a, 'tcx, 'v> Visitor<'v> for BorrowckCtxt<'a, 'tcx> { } } -pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mir_map: &MirMap<'tcx>) { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut bccx = BorrowckCtxt { tcx: tcx, - mir_map: Some(mir_map), free_region_map: FreeRegionMap::new(), stats: BorrowStats { loaned_paths_same: 0, @@ -168,12 +165,9 @@ fn borrowck_fn(this: &mut BorrowckCtxt, attributes: &[ast::Attribute]) { debug!("borrowck_fn(id={})", id); - let def_id = this.tcx.map.local_def_id(id); - if attributes.iter().any(|item| item.check_name("rustc_mir_borrowck")) { - let mir = this.mir_map.unwrap().map.get(&def_id).unwrap(); this.with_temp_region_map(id, |this| { - mir::borrowck_mir(this, fk, decl, mir, body, sp, id, attributes) + mir::borrowck_mir(this, fk, decl, body, sp, id, attributes) }); } @@ -249,7 +243,6 @@ fn build_borrowck_dataflow_data<'a, 'tcx>(this: &mut BorrowckCtxt<'a, 'tcx>, /// the `BorrowckCtxt` itself , e.g. the flowgraph visualizer. pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>( tcx: TyCtxt<'a, 'tcx, 'tcx>, - mir_map: Option<&'a MirMap<'tcx>>, fn_parts: FnParts<'a>, cfg: &cfg::CFG) -> (BorrowckCtxt<'a, 'tcx>, AnalysisData<'a, 'tcx>) @@ -257,7 +250,6 @@ pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>( let mut bccx = BorrowckCtxt { tcx: tcx, - mir_map: mir_map, free_region_map: FreeRegionMap::new(), stats: BorrowStats { loaned_paths_same: 0, @@ -297,10 +289,7 @@ pub struct BorrowckCtxt<'a, 'tcx: 'a> { free_region_map: FreeRegionMap, // Statistics: - stats: BorrowStats, - - // NodeId to MIR mapping (for methods that carry the #[rustc_mir] attribute). - mir_map: Option<&'a MirMap<'tcx>>, + stats: BorrowStats } #[derive(Clone)] @@ -1024,13 +1013,14 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } err_out_of_scope(super_scope, sub_scope, cause) => { - let (value_kind, value_msg) = match err.cmt.cat { + let (value_kind, value_msg, is_temporary) = match err.cmt.cat { mc::Categorization::Rvalue(_) => - ("temporary value", "temporary value created here"), + ("temporary value", "temporary value created here", true), _ => - ("borrowed value", "does not live long enough") + ("borrowed value", "does not live long enough", false) }; - match cause { + + let is_closure = match cause { euv::ClosureCapture(s) => { // The primary span starts out as the closure creation point. 
// Change the primary span here to highlight the use of the variable @@ -1041,21 +1031,36 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { db.span = MultiSpan::from_span(s); db.span_label(primary, &format!("capture occurs here")); db.span_label(s, &value_msg); + true } - None => () + None => false } } _ => { db.span_label(error_span, &value_msg); + false } - } + }; let sub_span = self.region_end_span(sub_scope); let super_span = self.region_end_span(super_scope); match (sub_span, super_span) { (Some(s1), Some(s2)) if s1 == s2 => { - db.span_label(s1, &format!("{} dropped before borrower", value_kind)); + if !is_temporary && !is_closure { + db.span = MultiSpan::from_span(s1); + db.span_label(error_span, &format!("borrow occurs here")); + let msg = match opt_loan_path(&err.cmt) { + None => "borrowed value".to_string(), + Some(lp) => { + format!("`{}`", self.loan_path_to_string(&lp)) + } + }; + db.span_label(s1, + &format!("{} dropped here while still borrowed", msg)); + } else { + db.span_label(s1, &format!("{} dropped before borrower", value_kind)); + } db.note("values in a scope are dropped in the opposite order \ they are created"); } diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs index e9ba406389f88..ba036f1a8b157 100644 --- a/src/librustc_borrowck/borrowck/move_data.rs +++ b/src/librustc_borrowck/borrowck/move_data.rs @@ -331,7 +331,7 @@ impl<'a, 'tcx> MoveData<'tcx> { fn existing_base_paths(&self, lp: &Rc>) -> Vec { - let mut result = vec!(); + let mut result = vec![]; self.add_existing_base_paths(lp, &mut result); result } diff --git a/src/librustc_borrowck/lib.rs b/src/librustc_borrowck/lib.rs index 22b590592fe16..da899714e9351 100644 --- a/src/librustc_borrowck/lib.rs +++ b/src/librustc_borrowck/lib.rs @@ -26,7 +26,7 @@ #![feature(staged_api)] #![feature(associated_consts)] #![feature(nonzero)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #[macro_use] extern crate log; #[macro_use] extern crate syntax; extern crate syntax_pos; @@ -50,8 +50,6 @@ pub use borrowck::{AnalysisData, BorrowckCtxt, ElaborateDrops}; pub mod diagnostics; mod borrowck; -mod bitslice; -mod indexed_set; pub mod graphviz; diff --git a/src/librustc_const_eval/Cargo.toml b/src/librustc_const_eval/Cargo.toml index 8967672548b10..0e5cbce8639be 100644 --- a/src/librustc_const_eval/Cargo.toml +++ b/src/librustc_const_eval/Cargo.toml @@ -9,11 +9,13 @@ path = "lib.rs" crate-type = ["dylib"] [dependencies] +arena = { path = "../libarena" } log = { path = "../liblog" } serialize = { path = "../libserialize" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_const_math = { path = "../librustc_const_math" } +rustc_data_structures = { path = "../librustc_data_structures" } rustc_errors = { path = "../librustc_errors" } syntax = { path = "../libsyntax" } graphviz = { path = "../libgraphviz" } diff --git a/src/librustc_const_eval/_match.rs b/src/librustc_const_eval/_match.rs new file mode 100644 index 0000000000000..7f5eb31612cb3 --- /dev/null +++ b/src/librustc_const_eval/_match.rs @@ -0,0 +1,767 @@ +// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
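The diagnostics hunk above changes what the scope-error labels point at: when a borrowed value is dropped while the borrow is still live, the drop site is now labelled "`x` dropped here while still borrowed" and the borrow site "borrow occurs here", instead of the older "borrowed value dropped before borrower". A small program to make the wording concrete (it compiles as written; the comment describes the variant that would trip the error):

```rust
fn main() {
    let longest;
    let x = String::from("hi");
    longest = &x;               // "borrow occurs here"
    println!("{}", longest);
    // If `x` were instead declared inside a nested block that closes before
    // the `println!`, borrowck would reject the program, and with this change
    // the closing brace would be labelled "`x` dropped here while still
    // borrowed" rather than the older "dropped before borrower" wording.
}
```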
+ +use self::Constructor::*; +use self::Usefulness::*; +use self::WitnessPreference::*; + +use rustc::middle::const_val::ConstVal; +use eval::{compare_const_vals}; + +use rustc_const_math::ConstInt; + +use rustc_data_structures::fnv::FnvHashMap; +use rustc_data_structures::indexed_vec::Idx; + +use pattern::{FieldPattern, Pattern, PatternKind}; +use pattern::{PatternFoldable, PatternFolder}; + +use rustc::hir::def_id::{DefId}; +use rustc::hir::pat_util::def_to_path; +use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; + +use rustc::hir; +use rustc::hir::def::CtorKind; +use rustc::hir::{Pat, PatKind}; +use rustc::util::common::ErrorReported; + +use syntax::ast::{self, DUMMY_NODE_ID}; +use syntax::codemap::Spanned; +use syntax::ptr::P; +use syntax_pos::{Span, DUMMY_SP}; + +use arena::TypedArena; + +use std::cmp::Ordering; +use std::fmt; +use std::iter::{FromIterator, IntoIterator, repeat}; + +pub fn expand_pattern<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, pat: Pattern<'tcx>) + -> &'a Pattern<'tcx> +{ + cx.pattern_arena.alloc(LiteralExpander.fold_pattern(&pat)) +} + +struct LiteralExpander; +impl<'tcx> PatternFolder<'tcx> for LiteralExpander { + fn fold_pattern(&mut self, pat: &Pattern<'tcx>) -> Pattern<'tcx> { + match (&pat.ty.sty, &*pat.kind) { + (&ty::TyRef(_, mt), &PatternKind::Constant { ref value }) => { + Pattern { + ty: pat.ty, + span: pat.span, + kind: box PatternKind::Deref { + subpattern: Pattern { + ty: mt.ty, + span: pat.span, + kind: box PatternKind::Constant { value: value.clone() }, + } + } + } + } + (_, &PatternKind::Binding { subpattern: Some(ref s), .. }) => { + s.fold_with(self) + } + _ => pat.super_fold_with(self) + } + } +} + +pub const DUMMY_WILD_PAT: &'static Pat = &Pat { + id: DUMMY_NODE_ID, + node: PatKind::Wild, + span: DUMMY_SP +}; + +impl<'tcx> Pattern<'tcx> { + fn is_wildcard(&self) -> bool { + match *self.kind { + PatternKind::Binding { subpattern: None, .. 
} | PatternKind::Wild => + true, + _ => false + } + } +} + +pub struct Matrix<'a, 'tcx: 'a>(Vec>>); + +impl<'a, 'tcx> Matrix<'a, 'tcx> { + pub fn empty() -> Self { + Matrix(vec![]) + } + + pub fn push(&mut self, row: Vec<&'a Pattern<'tcx>>) { + self.0.push(row) + } +} + +/// Pretty-printer for matrices of patterns, example: +/// ++++++++++++++++++++++++++ +/// + _ + [] + +/// ++++++++++++++++++++++++++ +/// + true + [First] + +/// ++++++++++++++++++++++++++ +/// + true + [Second(true)] + +/// ++++++++++++++++++++++++++ +/// + false + [_] + +/// ++++++++++++++++++++++++++ +/// + _ + [_, _, ..tail] + +/// ++++++++++++++++++++++++++ +impl<'a, 'tcx> fmt::Debug for Matrix<'a, 'tcx> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "\n")?; + + let &Matrix(ref m) = self; + let pretty_printed_matrix: Vec> = m.iter().map(|row| { + row.iter().map(|pat| format!("{:?}", pat)).collect() + }).collect(); + + let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0); + assert!(m.iter().all(|row| row.len() == column_count)); + let column_widths: Vec = (0..column_count).map(|col| { + pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0) + }).collect(); + + let total_width = column_widths.iter().cloned().sum::() + column_count * 3 + 1; + let br = repeat('+').take(total_width).collect::(); + write!(f, "{}\n", br)?; + for row in pretty_printed_matrix { + write!(f, "+")?; + for (column, pat_str) in row.into_iter().enumerate() { + write!(f, " ")?; + write!(f, "{:1$}", pat_str, column_widths[column])?; + write!(f, " +")?; + } + write!(f, "\n")?; + write!(f, "{}\n", br)?; + } + Ok(()) + } +} + +impl<'a, 'tcx> FromIterator>> for Matrix<'a, 'tcx> { + fn from_iter>>>(iter: T) -> Self + { + Matrix(iter.into_iter().collect()) + } +} + +//NOTE: appears to be the only place other then InferCtxt to contain a ParamEnv +pub struct MatchCheckCtxt<'a, 'tcx: 'a> { + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, + /// A wild pattern with an error type - it exists to avoid having to normalize + /// associated types to get field types. + pub wild_pattern: &'a Pattern<'tcx>, + pub pattern_arena: &'a TypedArena>, + pub byte_array_map: FnvHashMap<*const Pattern<'tcx>, Vec<&'a Pattern<'tcx>>>, +} + +impl<'a, 'tcx> MatchCheckCtxt<'a, 'tcx> { + pub fn create_and_enter( + tcx: TyCtxt<'a, 'tcx, 'tcx>, + f: F) -> R + where F: for<'b> FnOnce(MatchCheckCtxt<'b, 'tcx>) -> R + { + let wild_pattern = Pattern { + ty: tcx.types.err, + span: DUMMY_SP, + kind: box PatternKind::Wild + }; + + let pattern_arena = TypedArena::new(); + + f(MatchCheckCtxt { + tcx: tcx, + wild_pattern: &wild_pattern, + pattern_arena: &pattern_arena, + byte_array_map: FnvHashMap(), + }) + } + + // convert a byte-string pattern to a list of u8 patterns. + fn lower_byte_str_pattern(&mut self, pat: &'a Pattern<'tcx>) -> Vec<&'a Pattern<'tcx>> { + let pattern_arena = &*self.pattern_arena; + let tcx = self.tcx; + self.byte_array_map.entry(pat).or_insert_with(|| { + match pat.kind { + box PatternKind::Constant { + value: ConstVal::ByteStr(ref data) + } => { + data.iter().map(|c| &*pattern_arena.alloc(Pattern { + ty: tcx.types.u8, + span: pat.span, + kind: box PatternKind::Constant { + value: ConstVal::Integral(ConstInt::U8(*c)) + } + })).collect() + } + _ => span_bug!(pat.span, "unexpected byte array pattern {:?}", pat) + } + }).clone() + } +} + +#[derive(Clone, Debug, PartialEq)] +pub enum Constructor { + /// The constructor of all patterns that don't vary by constructor, + /// e.g. struct patterns and fixed-length arrays. 
+    Single,
+    /// Enum variants.
+    Variant(DefId),
+    /// Literal values.
+    ConstantValue(ConstVal),
+    /// Ranges of literal values (2..5).
+    ConstantRange(ConstVal, ConstVal),
+    /// Array patterns of length n.
+    Slice(usize),
+}
+
+impl Constructor {
+    fn variant_for_adt<'tcx, 'container, 'a>(&self,
+                                             adt: &'a ty::AdtDefData<'tcx, 'container>)
+                                             -> &'a ty::VariantDefData<'tcx, 'container> {
+        match self {
+            &Variant(vid) => adt.variant_with_id(vid),
+            &Single => {
+                assert_eq!(adt.variants.len(), 1);
+                &adt.variants[0]
+            }
+            _ => bug!("bad constructor {:?} for adt {:?}", self, adt)
+        }
+    }
+}
+
+#[derive(Clone, PartialEq)]
+pub enum Usefulness {
+    Useful,
+    UsefulWithWitness(Vec<Witness>),
+    NotUseful
+}
+
+#[derive(Copy, Clone)]
+pub enum WitnessPreference {
+    ConstructWitness,
+    LeaveOutWitness
+}
+
+#[derive(Copy, Clone, Debug)]
+struct PatternContext<'tcx> {
+    ty: Ty<'tcx>,
+    max_slice_length: usize,
+}
+
+
+fn const_val_to_expr(value: &ConstVal) -> P<hir::Expr> {
+    let node = match value {
+        &ConstVal::Bool(b) => ast::LitKind::Bool(b),
+        _ => bug!()
+    };
+    P(hir::Expr {
+        id: DUMMY_NODE_ID,
+        node: hir::ExprLit(P(Spanned { node: node, span: DUMMY_SP })),
+        span: DUMMY_SP,
+        attrs: ast::ThinVec::new(),
+    })
+}
+
+/// A stack of patterns in reverse order of construction
+#[derive(Clone, PartialEq, Eq)]
+pub struct Witness(Vec<P<hir::Pat>>);
+
+impl Witness {
+    pub fn single_pattern(&self) -> &Pat {
+        assert_eq!(self.0.len(), 1);
+        &self.0[0]
+    }
+
+    fn push_wild_constructor<'a, 'tcx>(
+        mut self,
+        cx: &MatchCheckCtxt<'a, 'tcx>,
+        ctor: &Constructor,
+        ty: Ty<'tcx>)
+        -> Self
+    {
+        let arity = constructor_arity(cx, ctor, ty);
+        self.0.extend(repeat(DUMMY_WILD_PAT).take(arity).map(|p| P(p.clone())));
+        self.apply_constructor(cx, ctor, ty)
+    }
+
+
+    /// Constructs a partial witness for a pattern given a list of
+    /// patterns expanded by the specialization step.
+    ///
+    /// When a pattern P is discovered to be useful, this function is used bottom-up
+    /// to reconstruct a complete witness, e.g. a pattern P' that covers a subset
+    /// of values, V, where each value in that set is not covered by any previously
+    /// used patterns and is covered by the pattern P'. Examples:
+    ///
+    /// left_ty: tuple of 3 elements
+    /// pats: [10, 20, _]           => (10, 20, _)
+    ///
+    /// left_ty: struct X { a: (bool, &'static str), b: usize}
+    /// pats: [(false, "foo"), 42]  => X { a: (false, "foo"), b: 42 }
+    fn apply_constructor<'a, 'tcx>(
+        mut self,
+        cx: &MatchCheckCtxt<'a,'tcx>,
+        ctor: &Constructor,
+        ty: Ty<'tcx>)
+        -> Self
+    {
+        let arity = constructor_arity(cx, ctor, ty);
+        let pat = {
+            let len = self.0.len();
+            let mut pats = self.0.drain(len-arity..).rev();
+
+            match ty.sty {
+                ty::TyTuple(..) => PatKind::Tuple(pats.collect(), None),
+
+                ty::TyAdt(adt, _) => {
+                    let v = ctor.variant_for_adt(adt);
+                    match v.ctor_kind {
+                        CtorKind::Fictive => {
+                            let field_pats: hir::HirVec<_> = v.fields.iter()
+                                .zip(pats)
+                                .filter(|&(_, ref pat)| pat.node != PatKind::Wild)
+                                .map(|(field, pat)| Spanned {
+                                    span: DUMMY_SP,
+                                    node: hir::FieldPat {
+                                        name: field.name,
+                                        pat: pat,
+                                        is_shorthand: false,
+                                    }
+                                }).collect();
+                            let has_more_fields = field_pats.len() < arity;
+                            PatKind::Struct(
+                                def_to_path(cx.tcx, v.did), field_pats, has_more_fields)
+                        }
+                        CtorKind::Fn => {
+                            PatKind::TupleStruct(
+                                def_to_path(cx.tcx, v.did), pats.collect(), None)
+                        }
+                        CtorKind::Const => {
+                            PatKind::Path(None, def_to_path(cx.tcx, v.did))
+                        }
+                    }
+                }
+
+                ty::TyRef(_, ty::TypeAndMut { mutbl, .. }) => {
+                    PatKind::Ref(pats.nth(0).unwrap(), mutbl)
+                }
+
+                ty::TySlice(_) | ty::TyArray(..) => {
+                    PatKind::Slice(pats.collect(), None, hir::HirVec::new())
+                }
+
+                _ => {
+                    match *ctor {
+                        ConstantValue(ref v) => PatKind::Lit(const_val_to_expr(v)),
+                        _ => PatKind::Wild,
+                    }
+                }
+            }
+        };
+
+        self.0.push(P(hir::Pat {
+            id: DUMMY_NODE_ID,
+            node: pat,
+            span: DUMMY_SP
+        }));
+
+        self
+    }
+}
+
+/// Return the set of constructors from the same type as the first column of `matrix`,
+/// that are matched only by wildcard patterns from that first column.
+///
+/// Therefore, if there is some pattern that is unmatched by `matrix`, it will
+/// still be unmatched if the first constructor is replaced by any of the constructors
+/// in the return value.
+fn missing_constructors(cx: &mut MatchCheckCtxt,
+                        matrix: &Matrix,
+                        pcx: PatternContext) -> Vec<Constructor> {
+    let used_constructors: Vec<Constructor> =
+        matrix.0.iter()
+        .flat_map(|row| pat_constructors(cx, row[0], pcx).unwrap_or(vec![]))
+        .collect();
+    debug!("used_constructors = {:?}", used_constructors);
+    all_constructors(cx, pcx).into_iter()
+        .filter(|c| !used_constructors.contains(c))
+        .collect()
+}
+
+/// This determines the set of all possible constructors of a pattern matching
+/// values of type `left_ty`. For vectors, this would normally be an infinite set,
+/// but it is instead bounded by the maximum fixed length of slice patterns in
+/// the column of patterns being analyzed.
+///
+/// This intentionally does not list ConstantValue specializations for
+/// non-booleans, because we currently assume that there is always a
+/// "non-standard constant" that matches. See issue #12483.
+fn all_constructors(_cx: &mut MatchCheckCtxt, pcx: PatternContext) -> Vec<Constructor> {
+    match pcx.ty.sty {
+        ty::TyBool =>
+            [true, false].iter().map(|b| ConstantValue(ConstVal::Bool(*b))).collect(),
+        ty::TySlice(_) =>
+            (0..pcx.max_slice_length+1).map(|length| Slice(length)).collect(),
+        ty::TyArray(_, length) => vec![Slice(length)],
+        ty::TyAdt(def, _) if def.is_enum() && def.variants.len() > 1 =>
+            def.variants.iter().map(|v| Variant(v.did)).collect(),
+        _ => vec![Single]
+    }
+}
+
+/// Algorithm from http://moscova.inria.fr/~maranget/papers/warn/index.html
+///
+/// Whether a vector `v` of patterns is 'useful' in relation to a set of such
+/// vectors `m` is defined as there being a set of inputs that will match `v`
+/// but not any of the sets in `m`.
+///
+/// This is used both for reachability checking (if a pattern isn't useful in
+/// relation to preceding patterns, it is not reachable) and exhaustiveness
+/// checking (if a wildcard pattern is useful in relation to a matrix, the
+/// matrix isn't exhaustive).
+///
+/// Note: is_useful doesn't work on empty types, as the paper notes.
+/// So it assumes that v is non-empty.
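+///
+/// For example, given the matrix
+///
+///     Some(true)
+///     None
+///
+/// the vector `[Some(false)]` is useful, because the value `Some(false)`
+/// matches it but no row of the matrix. The vector `[None]` is not useful
+/// (an arm with that pattern would be unreachable), and a wildcard vector
+/// `[_]` is useful with witness `Some(false)`, which is how a missing case
+/// is reported when checking exhaustiveness.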
+pub fn is_useful<'a, 'tcx>(cx: &mut MatchCheckCtxt<'a, 'tcx>, + matrix: &Matrix<'a, 'tcx>, + v: &[&'a Pattern<'tcx>], + witness: WitnessPreference) + -> Usefulness { + let &Matrix(ref rows) = matrix; + debug!("is_useful({:?}, {:?})", matrix, v); + if rows.is_empty() { + return match witness { + ConstructWitness => UsefulWithWitness(vec![Witness( + repeat(DUMMY_WILD_PAT).take(v.len()).map(|p| P(p.clone())).collect() + )]), + LeaveOutWitness => Useful + }; + } + if rows[0].is_empty() { + return NotUseful; + } + + let &Matrix(ref rows) = matrix; + assert!(rows.iter().all(|r| r.len() == v.len())); + let pcx = PatternContext { + ty: rows.iter().map(|r| r[0].ty).find(|ty| !ty.references_error()) + .unwrap_or(v[0].ty), + max_slice_length: rows.iter().filter_map(|row| match *row[0].kind { + PatternKind::Slice { ref prefix, slice: _, ref suffix } => + Some(prefix.len() + suffix.len()), + PatternKind::Constant { value: ConstVal::ByteStr(ref data) } => + Some(data.len()), + _ => None + }).max().map_or(0, |v| v + 1) + }; + + debug!("is_useful_expand_first_col: pcx={:?}, expanding {:?}", pcx, v[0]); + + if let Some(constructors) = pat_constructors(cx, v[0], pcx) { + debug!("is_useful - expanding constructors: {:?}", constructors); + constructors.into_iter().map(|c| + is_useful_specialized(cx, matrix, v, c.clone(), pcx.ty, witness) + ).find(|result| result != &NotUseful).unwrap_or(NotUseful) + } else { + debug!("is_useful - expanding wildcard"); + let constructors = missing_constructors(cx, matrix, pcx); + debug!("is_useful - missing_constructors = {:?}", constructors); + if constructors.is_empty() { + all_constructors(cx, pcx).into_iter().map(|c| { + is_useful_specialized(cx, matrix, v, c.clone(), pcx.ty, witness) + }).find(|result| result != &NotUseful).unwrap_or(NotUseful) + } else { + let matrix = rows.iter().filter_map(|r| { + if r[0].is_wildcard() { + Some(r[1..].to_vec()) + } else { + None + } + }).collect(); + match is_useful(cx, &matrix, &v[1..], witness) { + UsefulWithWitness(pats) => { + let cx = &*cx; + UsefulWithWitness(pats.into_iter().flat_map(|witness| { + constructors.iter().map(move |ctor| { + witness.clone().push_wild_constructor(cx, ctor, pcx.ty) + }) + }).collect()) + } + result => result + } + } + } +} + +fn is_useful_specialized<'a, 'tcx>( + cx: &mut MatchCheckCtxt<'a, 'tcx>, + &Matrix(ref m): &Matrix<'a, 'tcx>, + v: &[&'a Pattern<'tcx>], + ctor: Constructor, + lty: Ty<'tcx>, + witness: WitnessPreference) -> Usefulness +{ + let arity = constructor_arity(cx, &ctor, lty); + let matrix = Matrix(m.iter().flat_map(|r| { + specialize(cx, &r[..], &ctor, 0, arity) + }).collect()); + match specialize(cx, v, &ctor, 0, arity) { + Some(v) => match is_useful(cx, &matrix, &v[..], witness) { + UsefulWithWitness(witnesses) => UsefulWithWitness( + witnesses.into_iter() + .map(|witness| witness.apply_constructor(cx, &ctor, lty)) + .collect() + ), + result => result + }, + None => NotUseful + } +} + +/// Determines the constructors that the given pattern can be specialized to. +/// +/// In most cases, there's only one constructor that a specific pattern +/// represents, such as a specific enum variant or a specific literal value. +/// Slice patterns, however, can match slices of different lengths. For instance, +/// `[a, b, ..tail]` can match a slice of length 2, 3, 4 and so on. +/// +/// Returns None in case of a catch-all, which can't be specialized. 
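+///
+/// For example, assuming `pcx.max_slice_length` is 4, the pattern
+/// `[a, b, ..tail]` yields `Slice(2)`, `Slice(3)` and `Slice(4)`, a pattern
+/// `Some(_)` yields the `Variant` constructor for `Some`, and a wildcard or
+/// plain binding yields `None`.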
+fn pat_constructors(_cx: &mut MatchCheckCtxt,
+                    pat: &Pattern,
+                    pcx: PatternContext)
+                    -> Option<Vec<Constructor>>
+{
+    match *pat.kind {
+        PatternKind::Binding { .. } | PatternKind::Wild =>
+            None,
+        PatternKind::Leaf { .. } | PatternKind::Deref { .. } =>
+            Some(vec![Single]),
+        PatternKind::Variant { adt_def, variant_index, .. } =>
+            Some(vec![Variant(adt_def.variants[variant_index].did)]),
+        PatternKind::Constant { ref value } =>
+            Some(vec![ConstantValue(value.clone())]),
+        PatternKind::Range { ref lo, ref hi } =>
+            Some(vec![ConstantRange(lo.clone(), hi.clone())]),
+        PatternKind::Array { .. } => match pcx.ty.sty {
+            ty::TyArray(_, length) => Some(vec![Slice(length)]),
+            _ => span_bug!(pat.span, "bad ty {:?} for array pattern", pcx.ty)
+        },
+        PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
+            let pat_len = prefix.len() + suffix.len();
+            if slice.is_some() {
+                Some((pat_len..pcx.max_slice_length+1).map(Slice).collect())
+            } else {
+                Some(vec![Slice(pat_len)])
+            }
+        }
+    }
+}
+
+/// This computes the arity of a constructor. The arity of a constructor
+/// is how many subpatterns a pattern of that constructor should be expanded to.
+///
+/// For instance, a tuple pattern (_, 42, Some([])) has the arity of 3.
+/// A struct pattern's arity is the number of fields it contains, etc.
+fn constructor_arity(_cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> usize {
+    debug!("constructor_arity({:?}, {:?})", ctor, ty);
+    match ty.sty {
+        ty::TyTuple(ref fs) => fs.len(),
+        ty::TyBox(_) => 1,
+        ty::TySlice(..) | ty::TyArray(..) => match *ctor {
+            Slice(length) => length,
+            ConstantValue(_) => 0,
+            _ => bug!("bad slice pattern {:?} {:?}", ctor, ty)
+        },
+        ty::TyRef(..) => 1,
+        ty::TyAdt(adt, _) => {
+            ctor.variant_for_adt(adt).fields.len()
+        }
+        _ => 0
+    }
+}
+
+fn slice_pat_covered_by_constructor(_tcx: TyCtxt, _span: Span,
+                                    ctor: &Constructor,
+                                    prefix: &[Pattern],
+                                    slice: &Option<Pattern>,
+                                    suffix: &[Pattern])
+                                    -> Result<bool, ErrorReported> {
+    let data = match *ctor {
+        ConstantValue(ConstVal::ByteStr(ref data)) => data,
+        _ => bug!()
+    };
+
+    let pat_len = prefix.len() + suffix.len();
+    if data.len() < pat_len || (slice.is_none() && data.len() > pat_len) {
+        return Ok(false);
+    }
+
+    for (ch, pat) in
+        data[..prefix.len()].iter().zip(prefix).chain(
+            data[data.len()-suffix.len()..].iter().zip(suffix))
+    {
+        match pat.kind {
+            box PatternKind::Constant { ref value } => match *value {
+                ConstVal::Integral(ConstInt::U8(u)) => {
+                    if u != *ch {
+                        return Ok(false);
+                    }
+                },
+                _ => span_bug!(pat.span, "bad const u8 {:?}", value)
+            },
+            _ => {}
+        }
+    }
+
+    Ok(true)
+}
+
+fn range_covered_by_constructor(tcx: TyCtxt, span: Span,
+                                ctor: &Constructor,
+                                from: &ConstVal, to: &ConstVal)
+                                -> Result<bool, ErrorReported> {
+    let (c_from, c_to) = match *ctor {
+        ConstantValue(ref value) => (value, value),
+        ConstantRange(ref from, ref to) => (from, to),
+        Single => return Ok(true),
+        _ => bug!()
+    };
+    let cmp_from = compare_const_vals(tcx, span, c_from, from)?;
+    let cmp_to = compare_const_vals(tcx, span, c_to, to)?;
+    Ok(cmp_from != Ordering::Less && cmp_to != Ordering::Greater)
+}
+
+fn patterns_for_variant<'a, 'tcx>(
+    cx: &mut MatchCheckCtxt<'a, 'tcx>,
+    subpatterns: &'a [FieldPattern<'tcx>],
+    arity: usize)
+    -> Vec<&'a Pattern<'tcx>>
+{
+    let mut result = vec![cx.wild_pattern; arity];
+
+    for subpat in subpatterns {
+        result[subpat.field.index()] = &subpat.pattern;
+    }
+
+    debug!("patterns_for_variant({:?}, {:?}) = {:?}", subpatterns, arity, result);
+    result
+}
+
+/// This is the main specialization step. It expands the first pattern in the given row
+/// into `arity` patterns based on the constructor. For most patterns, the step is trivial,
+/// for instance tuple patterns are flattened and box patterns expand into their inner pattern.
+///
+/// OTOH, slice patterns with a subslice pattern (..tail) can be expanded into multiple
+/// different patterns.
+/// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing
+/// fields filled with wild patterns.
+fn specialize<'a, 'tcx>(
+    cx: &mut MatchCheckCtxt<'a, 'tcx>,
+    r: &[&'a Pattern<'tcx>],
+    constructor: &Constructor, col: usize, arity: usize)
+    -> Option<Vec<&'a Pattern<'tcx>>>
+{
+    let pat = &r[col];
+
+    let head: Option<Vec<&'a Pattern<'tcx>>> = match *pat.kind {
+        PatternKind::Binding { .. } | PatternKind::Wild =>
+            Some(vec![cx.wild_pattern; arity]),
+
+        PatternKind::Variant { adt_def, variant_index, ref subpatterns } => {
+            let ref variant = adt_def.variants[variant_index];
+            if *constructor == Variant(variant.did) {
+                Some(patterns_for_variant(cx, subpatterns, arity))
+            } else {
+                None
+            }
+        }
+
+        PatternKind::Leaf { ref subpatterns } => Some(patterns_for_variant(cx, subpatterns, arity)),
+        PatternKind::Deref { ref subpattern } => Some(vec![subpattern]),
+
+        PatternKind::Constant { ref value } => {
+            match *constructor {
+                Slice(..) => match *value {
+                    ConstVal::ByteStr(ref data) => {
+                        if arity == data.len() {
+                            Some(cx.lower_byte_str_pattern(pat))
+                        } else {
+                            None
+                        }
+                    }
+                    _ => span_bug!(pat.span,
+                        "unexpected const-val {:?} with ctor {:?}", value, constructor)
+                },
+                _ => {
+                    match range_covered_by_constructor(
+                        cx.tcx, pat.span, constructor, value, value
+                    ) {
+                        Ok(true) => Some(vec![]),
+                        Ok(false) => None,
+                        Err(ErrorReported) => None,
+                    }
+                }
+            }
+        }
+
+        PatternKind::Range { ref lo, ref hi } => {
+            match range_covered_by_constructor(
+                cx.tcx, pat.span, constructor, lo, hi
+            ) {
+                Ok(true) => Some(vec![]),
+                Ok(false) => None,
+                Err(ErrorReported) => None,
+            }
+        }
+
+        PatternKind::Array { ref prefix, ref slice, ref suffix } |
+        PatternKind::Slice { ref prefix, ref slice, ref suffix } => {
+            match *constructor {
+                Slice(..) => {
+                    let pat_len = prefix.len() + suffix.len();
+                    if let Some(slice_count) = arity.checked_sub(pat_len) {
+                        if slice_count == 0 || slice.is_some() {
+                            Some(
+                                prefix.iter().chain(
+                                repeat(cx.wild_pattern).take(slice_count).chain(
+                                suffix.iter()
+                            )).collect())
+                        } else {
+                            None
+                        }
+                    } else {
+                        None
+                    }
+                }
+                ConstantValue(..) => {
+                    match slice_pat_covered_by_constructor(
+                        cx.tcx, pat.span, constructor, prefix, slice, suffix
+                    ) {
+                        Ok(true) => Some(vec![]),
+                        Ok(false) => None,
+                        Err(ErrorReported) => None
+                    }
+                }
+                _ => span_bug!(pat.span,
+                    "unexpected ctor {:?} for slice pat", constructor)
+            }
+        }
+    };
+    debug!("specialize({:?}, {:?}) = {:?}", r[col], arity, head);
+
+    head.map(|mut head| {
+        head.extend_from_slice(&r[..col]);
+        head.extend_from_slice(&r[col + 1..]);
+        head
+    })
+}
diff --git a/src/librustc_const_eval/check_match.rs b/src/librustc_const_eval/check_match.rs
index a1e7d0a1e34de..9aa1ac62f5523 100644
--- a/src/librustc_const_eval/check_match.rs
+++ b/src/librustc_const_eval/check_match.rs
@@ -8,221 +8,210 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
-use self::Constructor::*; -use self::Usefulness::*; -use self::WitnessPreference::*; +use _match::{MatchCheckCtxt, Matrix, expand_pattern, is_useful}; +use _match::{DUMMY_WILD_PAT}; +use _match::Usefulness::*; +use _match::WitnessPreference::*; + +use pattern::{Pattern, PatternContext, PatternError}; -use rustc::dep_graph::DepNode; -use rustc::middle::const_val::ConstVal; -use ::{eval_const_expr, eval_const_expr_partial, compare_const_vals}; -use ::{const_expr_to_pat, lookup_const_by_id}; -use ::EvalHint::ExprTypeChecked; use eval::report_const_eval_err; -use rustc::hir::def::*; -use rustc::hir::def_id::{DefId}; + +use rustc::dep_graph::DepNode; + +use rustc::hir::pat_util::{pat_bindings, pat_contains_bindings}; + use rustc::middle::expr_use_visitor::{ConsumeMode, Delegate, ExprUseVisitor}; use rustc::middle::expr_use_visitor::{LoanCause, MutateMode}; use rustc::middle::expr_use_visitor as euv; use rustc::middle::mem_categorization::{cmt}; -use rustc::hir::pat_util::*; +use rustc::session::Session; use rustc::traits::Reveal; -use rustc::ty::*; -use rustc::ty; -use std::cmp::Ordering; -use std::fmt; -use std::iter::{FromIterator, IntoIterator, repeat}; +use rustc::ty::{self, TyCtxt}; +use rustc_errors::DiagnosticBuilder; -use rustc::hir; -use rustc::hir::{Pat, PatKind}; +use rustc::hir::def::*; use rustc::hir::intravisit::{self, Visitor, FnKind}; -use rustc_back::slice; - -use syntax::ast::{self, DUMMY_NODE_ID, NodeId}; -use syntax::codemap::Spanned; -use syntax_pos::{Span, DUMMY_SP}; -use rustc::hir::fold::{Folder, noop_fold_pat}; use rustc::hir::print::pat_to_string; -use syntax::ptr::P; -use rustc::util::common::ErrorReported; -use rustc::util::nodemap::FnvHashMap; +use rustc::hir::{self, Pat, PatKind}; -pub const DUMMY_WILD_PAT: &'static Pat = &Pat { - id: DUMMY_NODE_ID, - node: PatKind::Wild, - span: DUMMY_SP -}; - -struct Matrix<'a, 'tcx>(Vec>)>>); +use rustc_back::slice; -/// Pretty-printer for matrices of patterns, example: -/// ++++++++++++++++++++++++++ -/// + _ + [] + -/// ++++++++++++++++++++++++++ -/// + true + [First] + -/// ++++++++++++++++++++++++++ -/// + true + [Second(true)] + -/// ++++++++++++++++++++++++++ -/// + false + [_] + -/// ++++++++++++++++++++++++++ -/// + _ + [_, _, ..tail] + -/// ++++++++++++++++++++++++++ -impl<'a, 'tcx> fmt::Debug for Matrix<'a, 'tcx> { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "\n")?; +use syntax::ast; +use syntax::ptr::P; +use syntax_pos::Span; - let &Matrix(ref m) = self; - let pretty_printed_matrix: Vec> = m.iter().map(|row| { - row.iter() - .map(|&(pat,ty)| format!("{}: {:?}", pat_to_string(&pat), ty)) - .collect::>() - }).collect(); +struct OuterVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx> } - let column_count = m.iter().map(|row| row.len()).max().unwrap_or(0); - assert!(m.iter().all(|row| row.len() == column_count)); - let column_widths: Vec = (0..column_count).map(|col| { - pretty_printed_matrix.iter().map(|row| row[col].len()).max().unwrap_or(0) - }).collect(); +impl<'a, 'v, 'tcx> Visitor<'v> for OuterVisitor<'a, 'tcx> { + fn visit_expr(&mut self, _expr: &hir::Expr) { + return // const, static and N in [T; N] - shouldn't contain anything + } - let total_width = column_widths.iter().cloned().sum::() + column_count * 3 + 1; - let br = repeat('+').take(total_width).collect::(); - write!(f, "{}\n", br)?; - for row in pretty_printed_matrix { - write!(f, "+")?; - for (column, pat_str) in row.into_iter().enumerate() { - write!(f, " ")?; - write!(f, "{:1$}", pat_str, column_widths[column])?; - write!(f, " 
+")?; - } - write!(f, "\n")?; - write!(f, "{}\n", br)?; + fn visit_trait_item(&mut self, item: &hir::TraitItem) { + if let hir::ConstTraitItem(..) = item.node { + return // nothing worth match checking in a constant + } else { + intravisit::walk_trait_item(self, item); } - Ok(()) } -} -impl<'a, 'tcx> FromIterator>)>> for Matrix<'a, 'tcx> { - fn from_iter>)>>>(iter: T) - -> Self - { - Matrix(iter.into_iter().collect()) + fn visit_impl_item(&mut self, item: &hir::ImplItem) { + if let hir::ImplItemKind::Const(..) = item.node { + return // nothing worth match checking in a constant + } else { + intravisit::walk_impl_item(self, item); + } } -} -//NOTE: appears to be the only place other then InferCtxt to contain a ParamEnv -pub struct MatchCheckCtxt<'a, 'tcx: 'a> { - pub tcx: TyCtxt<'a, 'tcx, 'tcx>, - pub param_env: ParameterEnvironment<'tcx>, + fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v hir::FnDecl, + b: &'v hir::Block, s: Span, id: ast::NodeId) { + if let FnKind::Closure(..) = fk { + span_bug!(s, "check_match: closure outside of function") + } + + MatchVisitor { + tcx: self.tcx, + param_env: &ty::ParameterEnvironment::for_item(self.tcx, id) + }.visit_fn(fk, fd, b, s, id); + } } -#[derive(Clone, Debug, PartialEq)] -pub enum Constructor { - /// The constructor of all patterns that don't vary by constructor, - /// e.g. struct patterns and fixed-length arrays. - Single, - /// Enum variants. - Variant(DefId), - /// Literal values. - ConstantValue(ConstVal), - /// Ranges of literal values (2..5). - ConstantRange(ConstVal, ConstVal), - /// Array patterns of length n. - Slice(usize), - /// Array patterns with a subslice. - SliceWithSubslice(usize, usize) +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { + tcx.visit_all_items_in_krate(DepNode::MatchCheck, &mut OuterVisitor { tcx: tcx }); + tcx.sess.abort_if_errors(); } -#[derive(Clone, PartialEq)] -enum Usefulness { - Useful, - UsefulWithWitness(Vec>), - NotUseful +fn create_e0004<'a>(sess: &'a Session, sp: Span, error_message: String) -> DiagnosticBuilder<'a> { + struct_span_err!(sess, sp, E0004, "{}", &error_message) } -#[derive(Copy, Clone)] -enum WitnessPreference { - ConstructWitness, - LeaveOutWitness +struct MatchVisitor<'a, 'tcx: 'a> { + tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: &'a ty::ParameterEnvironment<'tcx> } -impl<'a, 'tcx, 'v> Visitor<'v> for MatchCheckCtxt<'a, 'tcx> { +impl<'a, 'tcx, 'v> Visitor<'v> for MatchVisitor<'a, 'tcx> { fn visit_expr(&mut self, ex: &hir::Expr) { - check_expr(self, ex); + intravisit::walk_expr(self, ex); + + match ex.node { + hir::ExprMatch(ref scrut, ref arms, source) => { + self.check_match(scrut, arms, source, ex.span); + } + _ => {} + } } - fn visit_local(&mut self, l: &hir::Local) { - check_local(self, l); + + fn visit_local(&mut self, loc: &hir::Local) { + intravisit::walk_local(self, loc); + + self.check_irrefutable(&loc.pat, false); + + // Check legality of move bindings and `@` patterns. 
+ self.check_patterns(false, slice::ref_slice(&loc.pat)); } + fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v hir::FnDecl, - b: &'v hir::Block, s: Span, n: NodeId) { - check_fn(self, fk, fd, b, s, n); - } -} + b: &'v hir::Block, s: Span, n: ast::NodeId) { + intravisit::walk_fn(self, fk, fd, b, s, n); -pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - tcx.visit_all_items_in_krate(DepNode::MatchCheck, &mut MatchCheckCtxt { - tcx: tcx, - param_env: tcx.empty_parameter_environment(), - }); - tcx.sess.abort_if_errors(); + for input in &fd.inputs { + self.check_irrefutable(&input.pat, true); + self.check_patterns(false, slice::ref_slice(&input.pat)); + } + } } -fn check_expr(cx: &mut MatchCheckCtxt, ex: &hir::Expr) { - intravisit::walk_expr(cx, ex); - match ex.node { - hir::ExprMatch(ref scrut, ref arms, source) => { - for arm in arms { - // First, check legality of move bindings. - check_legality_of_move_bindings(cx, - arm.guard.is_some(), - &arm.pats); +impl<'a, 'tcx> MatchVisitor<'a, 'tcx> { + fn check_patterns(&self, has_guard: bool, pats: &[P]) { + check_legality_of_move_bindings(self, has_guard, pats); + for pat in pats { + check_legality_of_bindings_in_at_patterns(self, pat); + } + } - // Second, if there is a guard on each arm, make sure it isn't - // assigning or borrowing anything mutably. - if let Some(ref guard) = arm.guard { - check_for_mutation_in_guard(cx, &guard); + fn report_inlining_errors(&self, patcx: PatternContext, pat_span: Span) { + for error in patcx.errors { + match error { + PatternError::BadConstInPattern(span, def_id) => { + self.tcx.sess.span_err( + span, + &format!("constants of the type `{}` \ + cannot be used in patterns", + self.tcx.item_path_str(def_id))); + } + PatternError::StaticInPattern(span) => { + span_err!(self.tcx.sess, span, E0158, + "statics cannot be referenced in patterns"); + } + PatternError::ConstEval(err) => { + report_const_eval_err(self.tcx, &err, pat_span, "pattern").emit(); } } + } + } - let mut static_inliner = StaticInliner::new(cx.tcx, None); - let inlined_arms = arms.iter().map(|arm| { - (arm.pats.iter().map(|pat| { - static_inliner.fold_pat((*pat).clone()) - }).collect(), arm.guard.as_ref().map(|e| &**e)) - }).collect::>, Option<&hir::Expr>)>>(); + fn check_match( + &self, + scrut: &hir::Expr, + arms: &[hir::Arm], + source: hir::MatchSource, + span: Span) + { + for arm in arms { + // First, check legality of move bindings. + self.check_patterns(arm.guard.is_some(), &arm.pats); + + // Second, if there is a guard on each arm, make sure it isn't + // assigning or borrowing anything mutably. + if let Some(ref guard) = arm.guard { + check_for_mutation_in_guard(self, &guard); + } - // Bail out early if inlining failed. - if static_inliner.failed { - return; + // Third, perform some lints. + for pat in &arm.pats { + check_for_bindings_named_the_same_as_variants(self, pat); } + } - for pat in inlined_arms - .iter() - .flat_map(|&(ref pats, _)| pats) { - // Third, check legality of move bindings. - check_legality_of_bindings_in_at_patterns(cx, &pat); + MatchCheckCtxt::create_and_enter(self.tcx, |ref mut cx| { + let mut have_errors = false; - // Fourth, check if there are any references to NaN that we should warn about. 
- check_for_static_nan(cx, &pat); + let inlined_arms : Vec<(Vec<_>, _)> = arms.iter().map(|arm| ( + arm.pats.iter().map(|pat| { + let mut patcx = PatternContext::new(self.tcx); + let pattern = expand_pattern(cx, patcx.lower_pattern(&pat)); + if !patcx.errors.is_empty() { + self.report_inlining_errors(patcx, pat.span); + have_errors = true; + } + (pattern, &**pat) + }).collect(), + arm.guard.as_ref().map(|e| &**e) + )).collect(); - // Fifth, check if for any of the patterns that match an enumerated type - // are bindings with the same name as one of the variants of said type. - check_for_bindings_named_the_same_as_variants(cx, &pat); + // Bail out early if inlining failed. + if have_errors { + return; } // Fourth, check for unreachable arms. - check_arms(cx, &inlined_arms[..], source); + check_arms(cx, &inlined_arms, source); // Finally, check if the whole match expression is exhaustive. // Check for empty enum, because is_useful only works on inhabited types. - let pat_ty = cx.tcx.node_id_to_type(scrut.id); + let pat_ty = self.tcx.node_id_to_type(scrut.id); if inlined_arms.is_empty() { - if !pat_ty.is_uninhabited(cx.tcx) { + if !pat_ty.is_uninhabited(self.tcx) { // We know the type is inhabited, so this must be wrong - let mut err = struct_span_err!(cx.tcx.sess, ex.span, E0002, - "non-exhaustive patterns: type {} is non-empty", - pat_ty); - span_help!(&mut err, ex.span, - "Please ensure that all possible cases are being handled; \ - possibly adding wildcards or more match arms."); + let mut err = create_e0004(self.tcx.sess, span, + format!("non-exhaustive patterns: type {} \ + is non-empty", + pat_ty)); + span_help!(&mut err, span, + "Please ensure that all possible cases are being handled; \ + possibly adding wildcards or more match arms."); err.emit(); } // If the type *is* uninhabited, it's vacuously exhaustive @@ -233,15 +222,44 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &hir::Expr) { .iter() .filter(|&&(_, guard)| guard.is_none()) .flat_map(|arm| &arm.0) - .map(|pat| vec![wrap_pat(cx, &pat)]) + .map(|pat| vec![pat.0]) .collect(); check_exhaustive(cx, scrut.span, &matrix, source); - }, - _ => () + }) + } + + fn check_irrefutable(&self, pat: &Pat, is_fn_arg: bool) { + let origin = if is_fn_arg { + "function argument" + } else { + "local binding" + }; + + MatchCheckCtxt::create_and_enter(self.tcx, |ref mut cx| { + let mut patcx = PatternContext::new(self.tcx); + let pats : Matrix = vec![vec![ + expand_pattern(cx, patcx.lower_pattern(pat)) + ]].into_iter().collect(); + + let witness = match is_useful(cx, &pats, &[cx.wild_pattern], ConstructWitness) { + UsefulWithWitness(witness) => witness, + NotUseful => return, + Useful => bug!() + }; + + let pattern_string = pat_to_string(witness[0].single_pattern()); + let mut diag = struct_span_err!( + self.tcx.sess, pat.span, E0005, + "refutable pattern in {}: `{}` not covered", + origin, pattern_string + ); + diag.span_label(pat.span, &format!("pattern `{}` not covered", pattern_string)); + diag.emit(); + }); } } -fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat) { +fn check_for_bindings_named_the_same_as_variants(cx: &MatchVisitor, pat: &Pat) { pat.walk(|p| { if let PatKind::Binding(hir::BindByValue(hir::MutImmutable), name, None) = p.node { let pat_ty = cx.tcx.pat_ty(p); @@ -249,7 +267,7 @@ fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat) if edef.is_enum() { if let Def::Local(..) 
= cx.tcx.expect_def(p.id) { if edef.variants.iter().any(|variant| { - variant.name == name.node && variant.kind == VariantKind::Unit + variant.name == name.node && variant.ctor_kind == CtorKind::Const }) { let ty_path = cx.tcx.item_path_str(edef.did); let mut err = struct_span_warn!(cx.tcx.sess, p.span, E0170, @@ -270,36 +288,30 @@ fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat) }); } -// Check that we do not match against a static NaN (#6804) -fn check_for_static_nan(cx: &MatchCheckCtxt, pat: &Pat) { - pat.walk(|p| { - if let PatKind::Lit(ref expr) = p.node { - match eval_const_expr_partial(cx.tcx, &expr, ExprTypeChecked, None) { - Ok(ConstVal::Float(f)) if f.is_nan() => { - span_warn!(cx.tcx.sess, p.span, E0003, - "unmatchable NaN in pattern, \ - use the is_nan method in a guard instead"); - } - Ok(_) => {} - - Err(err) => { - report_const_eval_err(cx.tcx, &err, p.span, "pattern").emit(); - } - } - } - true - }); +/// Checks for common cases of "catchall" patterns that may not be intended as such. +fn pat_is_catchall(dm: &DefMap, pat: &Pat) -> bool { + match pat.node { + PatKind::Binding(.., None) => true, + PatKind::Binding(.., Some(ref s)) => pat_is_catchall(dm, s), + PatKind::Ref(ref s, _) => pat_is_catchall(dm, s), + PatKind::Tuple(ref v, _) => v.iter().all(|p| { + pat_is_catchall(dm, &p) + }), + _ => false + } } // Check for unreachable patterns -fn check_arms(cx: &MatchCheckCtxt, - arms: &[(Vec>, Option<&hir::Expr>)], - source: hir::MatchSource) { - let mut seen = Matrix(vec![]); +fn check_arms<'a, 'tcx>(cx: &mut MatchCheckCtxt<'a, 'tcx>, + arms: &[(Vec<(&'a Pattern<'tcx>, &hir::Pat)>, Option<&hir::Expr>)], + source: hir::MatchSource) +{ + let mut seen = Matrix::empty(); + let mut catchall = None; let mut printed_if_let_err = false; for &(ref pats, guard) in arms { - for pat in pats { - let v = vec![wrap_pat(cx, &pat)]; + for &(pat, hir_pat) in pats { + let v = vec![pat]; match is_useful(cx, &seen, &v[..], LeaveOutWitness) { NotUseful => { @@ -312,7 +324,7 @@ fn check_arms(cx: &MatchCheckCtxt, // find the first arm pattern so we can use its span let &(ref first_arm_pats, _) = &arms[0]; let first_pat = &first_arm_pats[0]; - let span = first_pat.span; + let span = first_pat.0.span; struct_span_err!(cx.tcx.sess, span, E0162, "irrefutable if-let pattern") .span_label(span, &format!("irrefutable pattern")) @@ -325,7 +337,7 @@ fn check_arms(cx: &MatchCheckCtxt, // find the first arm pattern so we can use its span let &(ref first_arm_pats, _) = &arms[0]; let first_pat = &first_arm_pats[0]; - let span = first_pat.span; + let span = first_pat.0.span; struct_span_err!(cx.tcx.sess, span, E0165, "irrefutable while-let pattern") .span_label(span, &format!("irrefutable pattern")) @@ -343,13 +355,10 @@ fn check_arms(cx: &MatchCheckCtxt, hir::MatchSource::Normal => { let mut err = struct_span_err!(cx.tcx.sess, pat.span, E0001, "unreachable pattern"); - err.span_label(pat.span, &format!("this is an unreachable pattern")); + err.span_label(pat.span, &"this is an unreachable pattern"); // if we had a catchall pattern, hint at that - for row in &seen.0 { - if pat_is_catchall(&cx.tcx.def_map.borrow(), row[0].0) { - span_note!(err, row[0].0.span, - "this pattern matches any value"); - } + if let Some(catchall) = catchall { + err.span_note(catchall, "this pattern matches any value"); } err.emit(); }, @@ -363,42 +372,25 @@ fn check_arms(cx: &MatchCheckCtxt, UsefulWithWitness(_) => bug!() } if guard.is_none() { - let Matrix(mut rows) = seen; - rows.push(v); - seen = 
Matrix(rows); + seen.push(v); + if catchall.is_none() && pat_is_catchall(&cx.tcx.def_map.borrow(), hir_pat) { + catchall = Some(pat.span); + } } } } } -/// Checks for common cases of "catchall" patterns that may not be intended as such. -fn pat_is_catchall(dm: &DefMap, p: &Pat) -> bool { - match p.node { - PatKind::Binding(.., None) => true, - PatKind::Binding(.., Some(ref s)) => pat_is_catchall(dm, &s), - PatKind::Ref(ref s, _) => pat_is_catchall(dm, &s), - PatKind::Tuple(ref v, _) => v.iter().all(|p| pat_is_catchall(dm, &p)), - _ => false - } -} - -fn raw_pat(p: &Pat) -> &Pat { - match p.node { - PatKind::Binding(.., Some(ref s)) => raw_pat(&s), - _ => p - } -} - -fn check_exhaustive<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, +fn check_exhaustive<'a, 'tcx>(cx: &mut MatchCheckCtxt<'a, 'tcx>, sp: Span, matrix: &Matrix<'a, 'tcx>, source: hir::MatchSource) { - match is_useful(cx, matrix, &[(DUMMY_WILD_PAT, None)], ConstructWitness) { + match is_useful(cx, matrix, &[cx.wild_pattern], ConstructWitness) { UsefulWithWitness(pats) => { let witnesses = if pats.is_empty() { vec![DUMMY_WILD_PAT] } else { - pats.iter().map(|w| &**w).collect() + pats.iter().map(|w| w.single_pattern()).collect() }; match source { hir::MatchSource::ForLoopDesugar => { @@ -440,10 +432,11 @@ fn check_exhaustive<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, 1 => format!("pattern {} not covered", joined_patterns), _ => format!("patterns {} not covered", joined_patterns) }; - struct_span_err!(cx.tcx.sess, sp, E0004, - "non-exhaustive patterns: {} not covered", - joined_patterns - ).span_label(sp, &label_text).emit(); + create_e0004(cx.tcx.sess, sp, + format!("non-exhaustive patterns: {} not covered", + joined_patterns)) + .span_label(sp, &label_text) + .emit(); }, } } @@ -454,657 +447,8 @@ fn check_exhaustive<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, } } -fn const_val_to_expr(value: &ConstVal) -> P { - let node = match value { - &ConstVal::Bool(b) => ast::LitKind::Bool(b), - _ => bug!() - }; - P(hir::Expr { - id: 0, - node: hir::ExprLit(P(Spanned { node: node, span: DUMMY_SP })), - span: DUMMY_SP, - attrs: ast::ThinVec::new(), - }) -} - -pub struct StaticInliner<'a, 'tcx: 'a> { - pub tcx: TyCtxt<'a, 'tcx, 'tcx>, - pub failed: bool, - pub renaming_map: Option<&'a mut FnvHashMap<(NodeId, Span), NodeId>>, -} - -impl<'a, 'tcx> StaticInliner<'a, 'tcx> { - pub fn new<'b>(tcx: TyCtxt<'b, 'tcx, 'tcx>, - renaming_map: Option<&'b mut FnvHashMap<(NodeId, Span), NodeId>>) - -> StaticInliner<'b, 'tcx> { - StaticInliner { - tcx: tcx, - failed: false, - renaming_map: renaming_map - } - } -} - -struct RenamingRecorder<'map> { - substituted_node_id: NodeId, - origin_span: Span, - renaming_map: &'map mut FnvHashMap<(NodeId, Span), NodeId> -} - -impl<'v, 'map> Visitor<'v> for RenamingRecorder<'map> { - fn visit_id(&mut self, node_id: NodeId) { - let key = (node_id, self.origin_span); - self.renaming_map.insert(key, self.substituted_node_id); - } -} - -impl<'a, 'tcx> Folder for StaticInliner<'a, 'tcx> { - fn fold_pat(&mut self, pat: P) -> P { - return match pat.node { - PatKind::Path(..) 
=> { - match self.tcx.expect_def(pat.id) { - Def::AssociatedConst(did) | Def::Const(did) => { - let substs = Some(self.tcx.node_id_item_substs(pat.id).substs); - if let Some((const_expr, _)) = lookup_const_by_id(self.tcx, did, substs) { - match const_expr_to_pat(self.tcx, const_expr, pat.id, pat.span) { - Ok(new_pat) => { - if let Some(ref mut map) = self.renaming_map { - // Record any renamings we do here - record_renamings(const_expr, &pat, map); - } - new_pat - } - Err(def_id) => { - self.failed = true; - self.tcx.sess.span_err( - pat.span, - &format!("constants of the type `{}` \ - cannot be used in patterns", - self.tcx.item_path_str(def_id))); - pat - } - } - } else { - self.failed = true; - span_err!(self.tcx.sess, pat.span, E0158, - "statics cannot be referenced in patterns"); - pat - } - } - _ => noop_fold_pat(pat, self) - } - } - _ => noop_fold_pat(pat, self) - }; - - fn record_renamings(const_expr: &hir::Expr, - substituted_pat: &hir::Pat, - renaming_map: &mut FnvHashMap<(NodeId, Span), NodeId>) { - let mut renaming_recorder = RenamingRecorder { - substituted_node_id: substituted_pat.id, - origin_span: substituted_pat.span, - renaming_map: renaming_map, - }; - - renaming_recorder.visit_expr(const_expr); - } - } -} - -/// Constructs a partial witness for a pattern given a list of -/// patterns expanded by the specialization step. -/// -/// When a pattern P is discovered to be useful, this function is used bottom-up -/// to reconstruct a complete witness, e.g. a pattern P' that covers a subset -/// of values, V, where each value in that set is not covered by any previously -/// used patterns and is covered by the pattern P'. Examples: -/// -/// left_ty: tuple of 3 elements -/// pats: [10, 20, _] => (10, 20, _) -/// -/// left_ty: struct X { a: (bool, &'static str), b: usize} -/// pats: [(false, "foo"), 42] => X { a: (false, "foo"), b: 42 } -fn construct_witness<'a,'tcx>(cx: &MatchCheckCtxt<'a,'tcx>, ctor: &Constructor, - pats: Vec<&Pat>, left_ty: Ty<'tcx>) -> P { - let pats_len = pats.len(); - let mut pats = pats.into_iter().map(|p| P((*p).clone())); - let pat = match left_ty.sty { - ty::TyTuple(..) => PatKind::Tuple(pats.collect(), None), - - ty::TyAdt(adt, _) => { - let v = ctor.variant_for_adt(adt); - match v.kind { - VariantKind::Struct => { - let field_pats: hir::HirVec<_> = v.fields.iter() - .zip(pats) - .filter(|&(_, ref pat)| pat.node != PatKind::Wild) - .map(|(field, pat)| Spanned { - span: DUMMY_SP, - node: hir::FieldPat { - name: field.name, - pat: pat, - is_shorthand: false, - } - }).collect(); - let has_more_fields = field_pats.len() < pats_len; - PatKind::Struct(def_to_path(cx.tcx, v.did), field_pats, has_more_fields) - } - VariantKind::Tuple => { - PatKind::TupleStruct(def_to_path(cx.tcx, v.did), pats.collect(), None) - } - VariantKind::Unit => { - PatKind::Path(None, def_to_path(cx.tcx, v.did)) - } - } - } - - ty::TyRef(_, ty::TypeAndMut { mutbl, .. 
}) => { - assert_eq!(pats_len, 1); - PatKind::Ref(pats.nth(0).unwrap(), mutbl) - } - - ty::TySlice(_) => match ctor { - &Slice(n) => { - assert_eq!(pats_len, n); - PatKind::Vec(pats.collect(), None, hir::HirVec::new()) - }, - _ => unreachable!() - }, - - ty::TyArray(_, len) => { - assert_eq!(pats_len, len); - PatKind::Vec(pats.collect(), None, hir::HirVec::new()) - } - - _ => { - match *ctor { - ConstantValue(ref v) => PatKind::Lit(const_val_to_expr(v)), - _ => PatKind::Wild, - } - } - }; - - P(hir::Pat { - id: 0, - node: pat, - span: DUMMY_SP - }) -} - -impl Constructor { - fn variant_for_adt<'tcx, 'container, 'a>(&self, - adt: &'a ty::AdtDefData<'tcx, 'container>) - -> &'a VariantDefData<'tcx, 'container> { - match self { - &Variant(vid) => adt.variant_with_id(vid), - _ => adt.struct_variant() - } - } -} - -fn missing_constructors(cx: &MatchCheckCtxt, &Matrix(ref rows): &Matrix, - left_ty: Ty, max_slice_length: usize) -> Vec { - let used_constructors: Vec = rows.iter() - .flat_map(|row| pat_constructors(cx, row[0].0, left_ty, max_slice_length)) - .collect(); - all_constructors(cx, left_ty, max_slice_length) - .into_iter() - .filter(|c| !used_constructors.contains(c)) - .collect() -} - -/// This determines the set of all possible constructors of a pattern matching -/// values of type `left_ty`. For vectors, this would normally be an infinite set -/// but is instead bounded by the maximum fixed length of slice patterns in -/// the column of patterns being analyzed. -fn all_constructors(_cx: &MatchCheckCtxt, left_ty: Ty, - max_slice_length: usize) -> Vec { - match left_ty.sty { - ty::TyBool => - [true, false].iter().map(|b| ConstantValue(ConstVal::Bool(*b))).collect(), - ty::TySlice(_) => - (0..max_slice_length+1).map(|length| Slice(length)).collect(), - ty::TyAdt(def, _) if def.is_enum() => - def.variants.iter().map(|v| Variant(v.did)).collect(), - _ => vec![Single] - } -} - -// Algorithm from http://moscova.inria.fr/~maranget/papers/warn/index.html -// -// Whether a vector `v` of patterns is 'useful' in relation to a set of such -// vectors `m` is defined as there being a set of inputs that will match `v` -// but not any of the sets in `m`. -// -// This is used both for reachability checking (if a pattern isn't useful in -// relation to preceding patterns, it is not reachable) and exhaustiveness -// checking (if a wildcard pattern is useful in relation to a matrix, the -// matrix isn't exhaustive). - -// Note: is_useful doesn't work on empty types, as the paper notes. -// So it assumes that v is non-empty. 
-fn is_useful<'a, 'tcx>(cx: &MatchCheckCtxt<'a, 'tcx>, - matrix: &Matrix<'a, 'tcx>, - v: &[(&Pat, Option>)], - witness: WitnessPreference) - -> Usefulness { - let &Matrix(ref rows) = matrix; - debug!("is_useful({:?}, {:?})", matrix, v); - if rows.is_empty() { - return match witness { - ConstructWitness => UsefulWithWitness(vec!()), - LeaveOutWitness => Useful - }; - } - if rows[0].is_empty() { - return NotUseful; - } - assert!(rows.iter().all(|r| r.len() == v.len())); - let left_ty = match rows.iter().filter_map(|r| r[0].1).next().or_else(|| v[0].1) { - Some(ty) => ty, - None => { - // all patterns are wildcards - we can pick any type we want - cx.tcx.types.bool - } - }; - - let max_slice_length = rows.iter().filter_map(|row| match row[0].0.node { - PatKind::Vec(ref before, _, ref after) => Some(before.len() + after.len()), - _ => None - }).max().map_or(0, |v| v + 1); - - let constructors = pat_constructors(cx, v[0].0, left_ty, max_slice_length); - debug!("is_useful - pat_constructors = {:?} left_ty = {:?}", constructors, - left_ty); - if constructors.is_empty() { - let constructors = missing_constructors(cx, matrix, left_ty, max_slice_length); - debug!("is_useful - missing_constructors = {:?}", constructors); - if constructors.is_empty() { - all_constructors(cx, left_ty, max_slice_length).into_iter().map(|c| { - match is_useful_specialized(cx, matrix, v, c.clone(), left_ty, witness) { - UsefulWithWitness(pats) => UsefulWithWitness({ - let arity = constructor_arity(cx, &c, left_ty); - let mut result = { - let pat_slice = &pats[..]; - let subpats: Vec<_> = (0..arity).map(|i| { - pat_slice.get(i).map_or(DUMMY_WILD_PAT, |p| &**p) - }).collect(); - vec![construct_witness(cx, &c, subpats, left_ty)] - }; - result.extend(pats.into_iter().skip(arity)); - result - }), - result => result - } - }).find(|result| result != &NotUseful).unwrap_or(NotUseful) - } else { - let matrix = rows.iter().filter_map(|r| { - match raw_pat(r[0].0).node { - PatKind::Binding(..) | PatKind::Wild => Some(r[1..].to_vec()), - _ => None, - } - }).collect(); - match is_useful(cx, &matrix, &v[1..], witness) { - UsefulWithWitness(pats) => { - let mut new_pats: Vec<_> = constructors.into_iter().map(|constructor| { - let arity = constructor_arity(cx, &constructor, left_ty); - let wild_pats = vec![DUMMY_WILD_PAT; arity]; - construct_witness(cx, &constructor, wild_pats, left_ty) - }).collect(); - new_pats.extend(pats); - UsefulWithWitness(new_pats) - }, - result => result - } - } - } else { - constructors.into_iter().map(|c| - is_useful_specialized(cx, matrix, v, c.clone(), left_ty, witness) - ).find(|result| result != &NotUseful).unwrap_or(NotUseful) - } -} - -fn is_useful_specialized<'a, 'tcx>( - cx: &MatchCheckCtxt<'a, 'tcx>, - &Matrix(ref m): &Matrix<'a, 'tcx>, - v: &[(&Pat, Option>)], - ctor: Constructor, - lty: Ty<'tcx>, - witness: WitnessPreference) -> Usefulness -{ - let arity = constructor_arity(cx, &ctor, lty); - let matrix = Matrix(m.iter().filter_map(|r| { - specialize(cx, &r[..], &ctor, 0, arity) - }).collect()); - match specialize(cx, v, &ctor, 0, arity) { - Some(v) => is_useful(cx, &matrix, &v[..], witness), - None => NotUseful - } -} - -/// Determines the constructors that the given pattern can be specialized to. -/// -/// In most cases, there's only one constructor that a specific pattern -/// represents, such as a specific enum variant or a specific literal value. -/// Slice patterns, however, can match slices of different lengths. For instance, -/// `[a, b, ..tail]` can match a slice of length 2, 3, 4 and so on. 
-/// -/// On the other hand, a wild pattern and an identifier pattern cannot be -/// specialized in any way. -fn pat_constructors(cx: &MatchCheckCtxt, p: &Pat, - left_ty: Ty, max_slice_length: usize) -> Vec { - let pat = raw_pat(p); - match pat.node { - PatKind::Struct(..) | PatKind::TupleStruct(..) | PatKind::Path(..) => - match cx.tcx.expect_def(pat.id) { - Def::Variant(_, id) => vec![Variant(id)], - Def::Struct(..) | Def::Union(..) | - Def::TyAlias(..) | Def::AssociatedTy(..) => vec![Single], - Def::Const(..) | Def::AssociatedConst(..) => - span_bug!(pat.span, "const pattern should've been rewritten"), - def => span_bug!(pat.span, "pat_constructors: unexpected definition {:?}", def), - }, - PatKind::Lit(ref expr) => - vec![ConstantValue(eval_const_expr(cx.tcx, &expr))], - PatKind::Range(ref lo, ref hi) => - vec![ConstantRange(eval_const_expr(cx.tcx, &lo), eval_const_expr(cx.tcx, &hi))], - PatKind::Vec(ref before, ref slice, ref after) => - match left_ty.sty { - ty::TyArray(..) => vec![Single], - ty::TySlice(_) if slice.is_some() => { - (before.len() + after.len()..max_slice_length+1) - .map(|length| Slice(length)) - .collect() - } - ty::TySlice(_) => vec!(Slice(before.len() + after.len())), - _ => span_bug!(pat.span, "pat_constructors: unexpected \ - slice pattern type {:?}", left_ty) - }, - PatKind::Box(..) | PatKind::Tuple(..) | PatKind::Ref(..) => - vec![Single], - PatKind::Binding(..) | PatKind::Wild => - vec![], - } -} - -/// This computes the arity of a constructor. The arity of a constructor -/// is how many subpattern patterns of that constructor should be expanded to. -/// -/// For instance, a tuple pattern (_, 42, Some([])) has the arity of 3. -/// A struct pattern's arity is the number of fields it contains, etc. -pub fn constructor_arity(_cx: &MatchCheckCtxt, ctor: &Constructor, ty: Ty) -> usize { - debug!("constructor_arity({:?}, {:?})", ctor, ty); - match ty.sty { - ty::TyTuple(ref fs) => fs.len(), - ty::TyBox(_) => 1, - ty::TySlice(_) => match *ctor { - Slice(length) => length, - ConstantValue(_) => 0, - _ => bug!() - }, - ty::TyRef(..) => 1, - ty::TyAdt(adt, _) => { - ctor.variant_for_adt(adt).fields.len() - } - ty::TyArray(_, n) => n, - _ => 0 - } -} - -fn range_covered_by_constructor(tcx: TyCtxt, span: Span, - ctor: &Constructor, - from: &ConstVal, to: &ConstVal) - -> Result { - let (c_from, c_to) = match *ctor { - ConstantValue(ref value) => (value, value), - ConstantRange(ref from, ref to) => (from, to), - Single => return Ok(true), - _ => bug!() - }; - let cmp_from = compare_const_vals(tcx, span, c_from, from)?; - let cmp_to = compare_const_vals(tcx, span, c_to, to)?; - Ok(cmp_from != Ordering::Less && cmp_to != Ordering::Greater) -} - -fn wrap_pat<'a, 'b, 'tcx>(cx: &MatchCheckCtxt<'b, 'tcx>, - pat: &'a Pat) - -> (&'a Pat, Option>) -{ - let pat_ty = cx.tcx.pat_ty(pat); - (pat, Some(match pat.node { - PatKind::Binding(hir::BindByRef(..), ..) => { - pat_ty.builtin_deref(false, NoPreference).unwrap().ty - } - _ => pat_ty - })) -} - -/// This is the main specialization step. It expands the first pattern in the given row -/// into `arity` patterns based on the constructor. For most patterns, the step is trivial, -/// for instance tuple patterns are flattened and box patterns expand into their inner pattern. -/// -/// OTOH, slice patterns with a subslice pattern (..tail) can be expanded into multiple -/// different patterns. -/// Structure patterns with a partial wild pattern (Foo { a: 42, .. }) have their missing -/// fields filled with wild patterns. 
-pub fn specialize<'a, 'b, 'tcx>( - cx: &MatchCheckCtxt<'b, 'tcx>, - r: &[(&'a Pat, Option>)], - constructor: &Constructor, col: usize, arity: usize) - -> Option>)>> -{ - let pat = raw_pat(r[col].0); - let &Pat { - id: pat_id, ref node, span: pat_span - } = pat; - let wpat = |pat: &'a Pat| wrap_pat(cx, pat); - let dummy_pat = (DUMMY_WILD_PAT, None); - - let head: Option)>> = match *node { - PatKind::Binding(..) | PatKind::Wild => - Some(vec![dummy_pat; arity]), - - PatKind::Path(..) => { - match cx.tcx.expect_def(pat_id) { - Def::Const(..) | Def::AssociatedConst(..) => - span_bug!(pat_span, "const pattern should've \ - been rewritten"), - Def::Variant(_, id) if *constructor != Variant(id) => None, - Def::Variant(..) | Def::Struct(..) => Some(Vec::new()), - def => span_bug!(pat_span, "specialize: unexpected \ - definition {:?}", def), - } - } - - PatKind::TupleStruct(_, ref args, ddpos) => { - match cx.tcx.expect_def(pat_id) { - Def::Const(..) | Def::AssociatedConst(..) => - span_bug!(pat_span, "const pattern should've \ - been rewritten"), - Def::Variant(_, id) if *constructor != Variant(id) => None, - Def::Variant(..) | Def::Struct(..) => { - match ddpos { - Some(ddpos) => { - let mut pats: Vec<_> = args[..ddpos].iter().map(|p| { - wpat(p) - }).collect(); - pats.extend(repeat((DUMMY_WILD_PAT, None)).take(arity - args.len())); - pats.extend(args[ddpos..].iter().map(|p| wpat(p))); - Some(pats) - } - None => Some(args.iter().map(|p| wpat(p)).collect()) - } - } - _ => None - } - } - - PatKind::Struct(_, ref pattern_fields, _) => { - let adt = cx.tcx.node_id_to_type(pat_id).ty_adt_def().unwrap(); - let variant = constructor.variant_for_adt(adt); - let def_variant = adt.variant_of_def(cx.tcx.expect_def(pat_id)); - if variant.did == def_variant.did { - Some(variant.fields.iter().map(|sf| { - match pattern_fields.iter().find(|f| f.node.name == sf.name) { - Some(ref f) => wpat(&f.node.pat), - _ => dummy_pat - } - }).collect()) - } else { - None - } - } - - PatKind::Tuple(ref args, Some(ddpos)) => { - let mut pats: Vec<_> = args[..ddpos].iter().map(|p| wpat(p)).collect(); - pats.extend(repeat(dummy_pat).take(arity - args.len())); - pats.extend(args[ddpos..].iter().map(|p| wpat(p))); - Some(pats) - } - PatKind::Tuple(ref args, None) => - Some(args.iter().map(|p| wpat(&**p)).collect()), - - PatKind::Box(ref inner) | PatKind::Ref(ref inner, _) => - Some(vec![wpat(&**inner)]), - - PatKind::Lit(ref expr) => { - if let Some(&ty::TyS { sty: ty::TyRef(_, mt), .. }) = r[col].1 { - // HACK: handle string literals. A string literal pattern - // serves both as an unary reference pattern and as a - // nullary value pattern, depending on the type. - Some(vec![(pat, Some(mt.ty))]) - } else { - let expr_value = eval_const_expr(cx.tcx, &expr); - match range_covered_by_constructor( - cx.tcx, expr.span, constructor, &expr_value, &expr_value - ) { - Ok(true) => Some(vec![]), - Ok(false) => None, - Err(ErrorReported) => None, - } - } - } - - PatKind::Range(ref from, ref to) => { - let from_value = eval_const_expr(cx.tcx, &from); - let to_value = eval_const_expr(cx.tcx, &to); - match range_covered_by_constructor( - cx.tcx, pat_span, constructor, &from_value, &to_value - ) { - Ok(true) => Some(vec![]), - Ok(false) => None, - Err(ErrorReported) => None, - } - } - - PatKind::Vec(ref before, ref slice, ref after) => { - let pat_len = before.len() + after.len(); - match *constructor { - Single => { - // Fixed-length vectors. 
- Some( - before.iter().map(|p| wpat(p)).chain( - repeat(dummy_pat).take(arity - pat_len).chain( - after.iter().map(|p| wpat(p)) - )).collect()) - }, - Slice(length) if pat_len <= length && slice.is_some() => { - Some( - before.iter().map(|p| wpat(p)).chain( - repeat(dummy_pat).take(arity - pat_len).chain( - after.iter().map(|p| wpat(p)) - )).collect()) - } - Slice(length) if pat_len == length => { - Some( - before.iter().map(|p| wpat(p)).chain( - after.iter().map(|p| wpat(p)) - ).collect()) - } - SliceWithSubslice(prefix, suffix) - if before.len() == prefix - && after.len() == suffix - && slice.is_some() => { - // this is used by trans::_match only - let mut pats: Vec<_> = before.iter() - .map(|p| (&**p, None)).collect(); - pats.extend(after.iter().map(|p| (&**p, None))); - Some(pats) - } - _ => None - } - } - }; - debug!("specialize({:?}, {:?}) = {:?}", r[col], arity, head); - - head.map(|mut head| { - head.extend_from_slice(&r[..col]); - head.extend_from_slice(&r[col + 1..]); - head - }) -} - -fn check_local(cx: &mut MatchCheckCtxt, loc: &hir::Local) { - intravisit::walk_local(cx, loc); - - let pat = StaticInliner::new(cx.tcx, None).fold_pat(loc.pat.clone()); - check_irrefutable(cx, &pat, false); - - // Check legality of move bindings and `@` patterns. - check_legality_of_move_bindings(cx, false, slice::ref_slice(&loc.pat)); - check_legality_of_bindings_in_at_patterns(cx, &loc.pat); -} - -fn check_fn(cx: &mut MatchCheckCtxt, - kind: FnKind, - decl: &hir::FnDecl, - body: &hir::Block, - sp: Span, - fn_id: NodeId) { - match kind { - FnKind::Closure(_) => {} - _ => cx.param_env = ParameterEnvironment::for_item(cx.tcx, fn_id), - } - - intravisit::walk_fn(cx, kind, decl, body, sp, fn_id); - - for input in &decl.inputs { - check_irrefutable(cx, &input.pat, true); - check_legality_of_move_bindings(cx, false, slice::ref_slice(&input.pat)); - check_legality_of_bindings_in_at_patterns(cx, &input.pat); - } -} - -fn check_irrefutable(cx: &MatchCheckCtxt, pat: &Pat, is_fn_arg: bool) { - let origin = if is_fn_arg { - "function argument" - } else { - "local binding" - }; - - is_refutable(cx, pat, |uncovered_pat| { - let pattern_string = pat_to_string(uncovered_pat); - struct_span_err!(cx.tcx.sess, pat.span, E0005, - "refutable pattern in {}: `{}` not covered", - origin, - pattern_string, - ).span_label(pat.span, &format!("pattern `{}` not covered", pattern_string)).emit(); - }); -} - -fn is_refutable(cx: &MatchCheckCtxt, pat: &Pat, refutable: F) -> Option where - F: FnOnce(&Pat) -> A, -{ - let pats = Matrix(vec!(vec!(wrap_pat(cx, pat)))); - match is_useful(cx, &pats, &[(DUMMY_WILD_PAT, None)], ConstructWitness) { - UsefulWithWitness(pats) => Some(refutable(&pats[0])), - NotUseful => None, - Useful => bug!() - } -} - // Legality of move bindings checking -fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, +fn check_legality_of_move_bindings(cx: &MatchVisitor, has_guard: bool, pats: &[P]) { let mut by_ref_span = None; @@ -1143,13 +487,9 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, pat.walk(|p| { if let PatKind::Binding(hir::BindByValue(..), _, ref sub) = p.node { let pat_ty = cx.tcx.node_id_to_type(p.id); - //FIXME: (@jroesch) this code should be floated up as well - cx.tcx.infer_ctxt(None, Some(cx.param_env.clone()), - Reveal::NotSpecializable).enter(|infcx| { - if infcx.type_moves_by_default(pat_ty, pat.span) { - check_move(p, sub.as_ref().map(|p| &**p)); - } - }); + if pat_ty.moves_by_default(cx.tcx, cx.param_env, pat.span) { + check_move(p, sub.as_ref().map(|p| &**p)); + } } true 
}); @@ -1158,8 +498,9 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, /// Ensures that a pattern guard doesn't borrow by mutable reference or /// assign. -fn check_for_mutation_in_guard<'a, 'tcx>(cx: &'a MatchCheckCtxt<'a, 'tcx>, - guard: &hir::Expr) { +/// +/// FIXME: this should be done by borrowck. +fn check_for_mutation_in_guard(cx: &MatchVisitor, guard: &hir::Expr) { cx.tcx.infer_ctxt(None, Some(cx.param_env.clone()), Reveal::NotSpecializable).enter(|infcx| { let mut checker = MutationChecker { @@ -1171,32 +512,32 @@ fn check_for_mutation_in_guard<'a, 'tcx>(cx: &'a MatchCheckCtxt<'a, 'tcx>, } struct MutationChecker<'a, 'gcx: 'a> { - cx: &'a MatchCheckCtxt<'a, 'gcx>, + cx: &'a MatchVisitor<'a, 'gcx>, } impl<'a, 'gcx, 'tcx> Delegate<'tcx> for MutationChecker<'a, 'gcx> { fn matched_pat(&mut self, _: &Pat, _: cmt, _: euv::MatchMode) {} - fn consume(&mut self, _: NodeId, _: Span, _: cmt, _: ConsumeMode) {} + fn consume(&mut self, _: ast::NodeId, _: Span, _: cmt, _: ConsumeMode) {} fn consume_pat(&mut self, _: &Pat, _: cmt, _: ConsumeMode) {} fn borrow(&mut self, - _: NodeId, + _: ast::NodeId, span: Span, _: cmt, - _: &'tcx Region, - kind: BorrowKind, + _: &'tcx ty::Region, + kind:ty:: BorrowKind, _: LoanCause) { match kind { - MutBorrow => { + ty::MutBorrow => { struct_span_err!(self.cx.tcx.sess, span, E0301, "cannot mutably borrow in a pattern guard") .span_label(span, &format!("borrowed mutably in pattern guard")) .emit(); } - ImmBorrow | UniqueImmBorrow => {} + ty::ImmBorrow | ty::UniqueImmBorrow => {} } } - fn decl_without_init(&mut self, _: NodeId, _: Span) {} - fn mutate(&mut self, _: NodeId, span: Span, _: cmt, mode: MutateMode) { + fn decl_without_init(&mut self, _: ast::NodeId, _: Span) {} + fn mutate(&mut self, _: ast::NodeId, span: Span, _: cmt, mode: MutateMode) { match mode { MutateMode::JustWrite | MutateMode::WriteAndRead => { struct_span_err!(self.cx.tcx.sess, span, E0302, "cannot assign in a pattern guard") @@ -1211,12 +552,12 @@ impl<'a, 'gcx, 'tcx> Delegate<'tcx> for MutationChecker<'a, 'gcx> { /// Forbids bindings in `@` patterns. This is necessary for memory safety, /// because of the way rvalues are handled in the borrow check. (See issue /// #14587.) -fn check_legality_of_bindings_in_at_patterns(cx: &MatchCheckCtxt, pat: &Pat) { +fn check_legality_of_bindings_in_at_patterns(cx: &MatchVisitor, pat: &Pat) { AtBindingPatternVisitor { cx: cx, bindings_allowed: true }.visit_pat(pat); } struct AtBindingPatternVisitor<'a, 'b:'a, 'tcx:'b> { - cx: &'a MatchCheckCtxt<'b, 'tcx>, + cx: &'a MatchVisitor<'b, 'tcx>, bindings_allowed: bool } @@ -1225,8 +566,10 @@ impl<'a, 'b, 'tcx, 'v> Visitor<'v> for AtBindingPatternVisitor<'a, 'b, 'tcx> { match pat.node { PatKind::Binding(.., ref subpat) => { if !self.bindings_allowed { - span_err!(self.cx.tcx.sess, pat.span, E0303, - "pattern bindings are not allowed after an `@`"); + struct_span_err!(self.cx.tcx.sess, pat.span, E0303, + "pattern bindings are not allowed after an `@`") + .span_label(pat.span, &format!("not allowed after `@`")) + .emit(); } if subpat.is_some() { diff --git a/src/librustc_const_eval/diagnostics.rs b/src/librustc_const_eval/diagnostics.rs index 9cdc76f25a63f..db72057636a85 100644 --- a/src/librustc_const_eval/diagnostics.rs +++ b/src/librustc_const_eval/diagnostics.rs @@ -40,7 +40,7 @@ Ensure the ordering of the match arm is correct and remove any superfluous arms. 
"##, -E0002: r##" +/*E0002: r##" This error indicates that an empty match expression is invalid because the type it is matching on is non-empty (there exist values of this type). In safe code it is impossible to create an instance of an empty type, so empty match @@ -68,16 +68,14 @@ fn foo(x: Option) { } } ``` -"##, +"##,*/ -E0003: r##" +/*E0003: r##" Not-a-Number (NaN) values cannot be compared for equality and hence can never match the input to a match expression. So, the following will not compile: ```compile_fail -#![deny(illegal_floating_point_constant_pattern)] - const NAN: f32 = 0.0 / 0.0; let number = 0.1f32; @@ -100,7 +98,7 @@ match number { } ``` "##, - +*/ E0004: r##" This error indicates that the compiler cannot guarantee a matching pattern for @@ -456,7 +454,7 @@ loop variable, consider using a `match` or `if let` inside the loop body. For instance: ```compile_fail,E0297 -let xs : Vec> = vec!(Some(1), None); +let xs : Vec> = vec![Some(1), None]; // This fails because `None` is not covered. for Some(x) in xs { @@ -467,7 +465,7 @@ for Some(x) in xs { Match inside the loop instead: ``` -let xs : Vec> = vec!(Some(1), None); +let xs : Vec> = vec![Some(1), None]; for item in xs { match item { @@ -480,7 +478,7 @@ for item in xs { Or use `if let`: ``` -let xs : Vec> = vec!(Some(1), None); +let xs : Vec> = vec![Some(1), None]; for item in xs { if let Some(x) = item { diff --git a/src/librustc_const_eval/eval.rs b/src/librustc_const_eval/eval.rs index 4ced9d87f0a5a..c02cca0da7225 100644 --- a/src/librustc_const_eval/eval.rs +++ b/src/librustc_const_eval/eval.rs @@ -19,7 +19,7 @@ use rustc::hir::map as ast_map; use rustc::hir::map::blocks::FnLikeNode; use rustc::middle::cstore::InlinedItem; use rustc::traits; -use rustc::hir::def::{Def, PathResolution}; +use rustc::hir::def::{Def, CtorKind, PathResolution}; use rustc::hir::def_id::DefId; use rustc::hir::pat_util::def_to_path; use rustc::ty::{self, Ty, TyCtxt}; @@ -57,7 +57,6 @@ macro_rules! math { } fn lookup_variant_by_id<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - enum_def: DefId, variant_def: DefId) -> Option<&'tcx Expr> { fn variant_expr<'a>(variants: &'a [hir::Variant], id: ast::NodeId) @@ -70,8 +69,8 @@ fn lookup_variant_by_id<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, None } - if let Some(enum_node_id) = tcx.map.as_local_node_id(enum_def) { - let variant_node_id = tcx.map.as_local_node_id(variant_def).unwrap(); + if let Some(variant_node_id) = tcx.map.as_local_node_id(variant_def) { + let enum_node_id = tcx.map.get_parent(variant_node_id); match tcx.map.find(enum_node_id) { None => None, Some(ast_map::NodeItem(it)) => match it.node { @@ -288,8 +287,8 @@ pub fn const_expr_to_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, entry.insert(PathResolution::new(def)); } let path = match def { - Def::Struct(def_id) => def_to_path(tcx, def_id), - Def::Variant(_, variant_did) => def_to_path(tcx, variant_did), + Def::StructCtor(def_id, CtorKind::Fn) | + Def::VariantCtor(def_id, CtorKind::Fn) => def_to_path(tcx, def_id), Def::Fn(..) | Def::Method(..) 
=> return Ok(P(hir::Pat { id: expr.id, node: PatKind::Lit(P(expr.clone())), @@ -318,16 +317,17 @@ pub fn const_expr_to_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, PatKind::Struct(path.clone(), field_pats, false) } - hir::ExprVec(ref exprs) => { + hir::ExprArray(ref exprs) => { let pats = exprs.iter() .map(|expr| const_expr_to_pat(tcx, &expr, pat_id, span)) .collect::>()?; - PatKind::Vec(pats, None, hir::HirVec::new()) + PatKind::Slice(pats, None, hir::HirVec::new()) } hir::ExprPath(_, ref path) => { match tcx.expect_def(expr.id) { - Def::Struct(..) | Def::Variant(..) => PatKind::Path(None, path.clone()), + Def::StructCtor(_, CtorKind::Const) | + Def::VariantCtor(_, CtorKind::Const) => PatKind::Path(None, path.clone()), Def::Const(def_id) | Def::AssociatedConst(def_id) => { let substs = Some(tcx.node_id_item_substs(expr.id).substs); let (expr, _ty) = lookup_const_by_id(tcx, def_id, substs).unwrap(); @@ -392,7 +392,7 @@ pub fn note_const_eval_err<'a, 'tcx>( pub fn eval_const_expr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, e: &Expr) -> ConstVal { - match eval_const_expr_partial(tcx, e, ExprTypeChecked, None) { + match eval_const_expr_checked(tcx, e) { Ok(r) => r, // non-const path still needs to be a fatal error, because enums are funky Err(s) => { @@ -407,15 +407,21 @@ pub fn eval_const_expr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } } +pub fn eval_const_expr_checked<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + e: &Expr) -> EvalResult +{ + eval_const_expr_partial(tcx, e, ExprTypeChecked, None) +} + pub type FnArgMap<'a> = Option<&'a NodeMap>; -#[derive(Clone)] +#[derive(Clone, Debug)] pub struct ConstEvalErr { pub span: Span, pub kind: ErrKind, } -#[derive(Clone)] +#[derive(Clone, Debug)] pub enum ErrKind { CannotCast, CannotCastTo(&'static str), @@ -732,6 +738,10 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, hir::BiBitOr => a | b, hir::BiEq => a == b, hir::BiNe => a != b, + hir::BiLt => a < b, + hir::BiLe => a <= b, + hir::BiGe => a >= b, + hir::BiGt => a > b, _ => signal!(e, InvalidOpForBools(op.node)), }) } @@ -808,8 +818,8 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, signal!(e, NonConstPath); } }, - Def::Variant(enum_def, variant_def) => { - if let Some(const_expr) = lookup_variant_by_id(tcx, enum_def, variant_def) { + Def::VariantCtor(variant_def, ..) => { + if let Some(const_expr) = lookup_variant_by_id(tcx, variant_def) { match eval_const_expr_partial(tcx, const_expr, ty_hint, None) { Ok(val) => val, Err(err) => { @@ -821,10 +831,11 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, signal!(e, UnimplementedConstVal("enum variants")); } } - Def::Struct(..) => { + Def::StructCtor(..) => { ConstVal::Struct(e.id) } - Def::Local(_, id) => { + Def::Local(def_id) => { + let id = tcx.map.as_local_node_id(def_id).unwrap(); debug!("Def::Local({:?}): {:?}", id, fn_args); if let Some(val) = fn_args.and_then(|args| args.get(&id)) { val.clone() @@ -868,7 +879,7 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, debug!("const call({:?})", call_args); eval_const_expr_partial(tcx, &result, ty_hint, Some(&call_args))? 
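The new `BiLt`, `BiLe`, `BiGe` and `BiGt` arms added above extend constant evaluation of binary operators to comparisons between boolean operands, which previously fell through to the `InvalidOpForBools` error. A purely illustrative example of the kind of expression this accepts (hypothetical user code, not part of the patch):

```rust
// Hypothetical user code: boolean comparisons inside constant expressions.
// With the new arms, the evaluator folds these instead of reporting an
// invalid operation on bools.
const A: bool = true;
const B: bool = false;

const ORDERED: bool = B < A;   // false < true == true
const AT_MOST: bool = A <= A;  // true
const AT_LEAST: bool = A >= B; // true

fn main() {
    assert!(ORDERED && AT_MOST && AT_LEAST);
}
```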
}, - hir::ExprLit(ref lit) => match lit_to_const(&lit.node, tcx, ety, lit.span) { + hir::ExprLit(ref lit) => match lit_to_const(&lit.node, tcx, ety) { Ok(val) => val, Err(err) => signal!(e, err), }, @@ -898,7 +909,7 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, Array(_, n) if idx >= n => { signal!(e, IndexOutOfBounds { len: n, index: idx }) } - Array(v, n) => if let hir::ExprVec(ref v) = tcx.map.expect_expr(v).node { + Array(v, n) => if let hir::ExprArray(ref v) = tcx.map.expect_expr(v).node { assert_eq!(n as usize as u64, n); eval_const_expr_partial(tcx, &v[idx as usize], ty_hint, fn_args)? } else { @@ -925,7 +936,7 @@ pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, _ => signal!(e, IndexedNonVec), } } - hir::ExprVec(ref v) => Array(e.id, v.len() as u64), + hir::ExprArray(ref v) => Array(e.id, v.len() as u64), hir::ExprRepeat(_, ref n) => { let len_hint = ty_hint.checked_or(tcx.types.usize); Repeat( @@ -1079,8 +1090,14 @@ fn resolve_trait_associated_const<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // when constructing the inference context above. match selection { traits::VtableImpl(ref impl_data) => { - match tcx.associated_consts(impl_data.impl_def_id) - .iter().find(|ic| ic.name == ti.name) { + let ac = tcx.impl_or_trait_items(impl_data.impl_def_id) + .iter().filter_map(|&def_id| { + match tcx.impl_or_trait_item(def_id) { + ty::ConstTraitItem(ic) => Some(ic), + _ => None + } + }).find(|ic| ic.name == ti.name); + match ac { Some(ic) => lookup_const_by_id(tcx, ic.def_id, None), None => match ti.node { hir::ConstTraitItem(ref ty, Some(ref expr)) => { @@ -1204,8 +1221,7 @@ fn cast_const<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, val: ConstVal, ty: ty::Ty) fn lit_to_const<'a, 'tcx>(lit: &ast::LitKind, tcx: TyCtxt<'a, 'tcx, 'tcx>, - ty_hint: Option>, - span: Span) + ty_hint: Option>) -> Result { use syntax::ast::*; use syntax::ast::LitIntType::*; @@ -1239,21 +1255,22 @@ fn lit_to_const<'a, 'tcx>(lit: &ast::LitKind, }, LitKind::Float(ref n, fty) => { - Ok(Float(parse_float(n, Some(fty), span))) + parse_float(n, Some(fty)).map(Float) } LitKind::FloatUnsuffixed(ref n) => { let fty_hint = match ty_hint.map(|t| &t.sty) { Some(&ty::TyFloat(fty)) => Some(fty), _ => None }; - Ok(Float(parse_float(n, fty_hint, span))) + parse_float(n, fty_hint).map(Float) } LitKind::Bool(b) => Ok(Bool(b)), LitKind::Char(c) => Ok(Char(c)), } } -fn parse_float(num: &str, fty_hint: Option, span: Span) -> ConstFloat { +fn parse_float(num: &str, fty_hint: Option) + -> Result { let val = match fty_hint { Some(ast::FloatTy::F32) => num.parse::().map(F32), Some(ast::FloatTy::F64) => num.parse::().map(F64), @@ -1265,9 +1282,9 @@ fn parse_float(num: &str, fty_hint: Option, span: Span) -> ConstFl }) } }; - val.unwrap_or_else(|_| { + val.map_err(|_| { // FIXME(#31407) this is only necessary because float parsing is buggy - span_bug!(span, "could not evaluate float literal (see issue #31407)"); + UnimplementedConstVal("could not evaluate float literal (see issue #31407)") }) } diff --git a/src/librustc_const_eval/lib.rs b/src/librustc_const_eval/lib.rs index f926fef065ea6..1a07ece044ff2 100644 --- a/src/librustc_const_eval/lib.rs +++ b/src/librustc_const_eval/lib.rs @@ -27,15 +27,17 @@ #![feature(staged_api)] #![feature(rustc_diagnostic_macros)] #![feature(slice_patterns)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #![feature(box_patterns)] #![feature(box_syntax)] +extern crate arena; #[macro_use] extern crate syntax; #[macro_use] extern crate log; 
#[macro_use] extern crate rustc; extern crate rustc_back; extern crate rustc_const_math; +extern crate rustc_data_structures; extern crate rustc_errors; extern crate graphviz; extern crate syntax_pos; @@ -46,7 +48,9 @@ extern crate serialize as rustc_serialize; // used by deriving pub mod diagnostics; mod eval; +mod _match; pub mod check_match; +pub mod pattern; pub use eval::*; diff --git a/src/librustc_const_eval/pattern.rs b/src/librustc_const_eval/pattern.rs new file mode 100644 index 0000000000000..946a397474765 --- /dev/null +++ b/src/librustc_const_eval/pattern.rs @@ -0,0 +1,612 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use eval; + +use rustc::middle::const_val::ConstVal; +use rustc::mir::{Field, BorrowKind, Mutability}; +use rustc::ty::{self, TyCtxt, AdtDef, Ty, Region}; +use rustc::hir::{self, PatKind}; +use rustc::hir::def::Def; +use rustc::hir::def_id::DefId; +use rustc::hir::pat_util::EnumerateAndAdjustIterator; + +use rustc_data_structures::indexed_vec::Idx; + +use syntax::ast; +use syntax::ptr::P; +use syntax_pos::Span; + +#[derive(Clone, Debug)] +pub enum PatternError { + StaticInPattern(Span), + BadConstInPattern(Span, DefId), + ConstEval(eval::ConstEvalErr), +} + +#[derive(Copy, Clone, Debug)] +pub enum BindingMode<'tcx> { + ByValue, + ByRef(&'tcx Region, BorrowKind), +} + +#[derive(Clone, Debug)] +pub struct FieldPattern<'tcx> { + pub field: Field, + pub pattern: Pattern<'tcx>, +} + +#[derive(Clone, Debug)] +pub struct Pattern<'tcx> { + pub ty: Ty<'tcx>, + pub span: Span, + pub kind: Box>, +} + +#[derive(Clone, Debug)] +pub enum PatternKind<'tcx> { + Wild, + + /// x, ref x, x @ P, etc + Binding { + mutability: Mutability, + name: ast::Name, + mode: BindingMode<'tcx>, + var: ast::NodeId, + ty: Ty<'tcx>, + subpattern: Option>, + }, + + /// Foo(...) 
or Foo{...} or Foo, where `Foo` is a variant name from an adt with >1 variants + Variant { + adt_def: AdtDef<'tcx>, + variant_index: usize, + subpatterns: Vec>, + }, + + /// (...), Foo(...), Foo{...}, or Foo, where `Foo` is a variant name from an adt with 1 variant + Leaf { + subpatterns: Vec>, + }, + + /// box P, &P, &mut P, etc + Deref { + subpattern: Pattern<'tcx>, + }, + + Constant { + value: ConstVal, + }, + + Range { + lo: ConstVal, + hi: ConstVal, + }, + + /// matches against a slice, checking the length and extracting elements + Slice { + prefix: Vec>, + slice: Option>, + suffix: Vec>, + }, + + /// fixed match against an array, irrefutable + Array { + prefix: Vec>, + slice: Option>, + suffix: Vec>, + }, +} + +pub struct PatternContext<'a, 'gcx: 'tcx, 'tcx: 'a> { + pub tcx: TyCtxt<'a, 'gcx, 'tcx>, + pub errors: Vec, +} + +impl<'a, 'gcx, 'tcx> Pattern<'tcx> { + pub fn from_hir(tcx: TyCtxt<'a, 'gcx, 'tcx>, pat: &hir::Pat) -> Self { + let mut pcx = PatternContext::new(tcx); + let result = pcx.lower_pattern(pat); + if !pcx.errors.is_empty() { + span_bug!(pat.span, "encountered errors lowering pattern: {:?}", pcx.errors) + } + debug!("Pattern::from_hir({:?}) = {:?}", pat, result); + result + } +} + +impl<'a, 'gcx, 'tcx> PatternContext<'a, 'gcx, 'tcx> { + pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self { + PatternContext { tcx: tcx, errors: vec![] } + } + + pub fn lower_pattern(&mut self, pat: &hir::Pat) -> Pattern<'tcx> { + let mut ty = self.tcx.node_id_to_type(pat.id); + + let kind = match pat.node { + PatKind::Wild => PatternKind::Wild, + + PatKind::Lit(ref value) => { + match eval::eval_const_expr_checked(self.tcx.global_tcx(), value) { + Ok(value) => { + PatternKind::Constant { value: value } + } + Err(e) => { + self.errors.push(PatternError::ConstEval(e)); + PatternKind::Wild + } + } + } + + PatKind::Range(ref lo, ref hi) => { + let r_lo = eval::eval_const_expr_checked(self.tcx.global_tcx(), lo); + if let Err(ref e_lo) = r_lo { + self.errors.push(PatternError::ConstEval(e_lo.clone())); + } + + let r_hi = eval::eval_const_expr_checked(self.tcx.global_tcx(), hi); + if let Err(ref e_hi) = r_hi { + self.errors.push(PatternError::ConstEval(e_hi.clone())); + } + + if let (Ok(lo), Ok(hi)) = (r_lo, r_hi) { + PatternKind::Range { lo: lo, hi: hi } + } else { + PatternKind::Wild + } + } + + PatKind::Path(..) => { + match self.tcx.expect_def(pat.id) { + Def::Const(def_id) | Def::AssociatedConst(def_id) => { + let tcx = self.tcx.global_tcx(); + let substs = Some(self.tcx.node_id_item_substs(pat.id).substs); + match eval::lookup_const_by_id(tcx, def_id, substs) { + Some((const_expr, _const_ty)) => { + match eval::const_expr_to_pat( + tcx, const_expr, pat.id, pat.span) + { + Ok(pat) => return self.lower_pattern(&pat), + Err(_) => { + self.errors.push(PatternError::BadConstInPattern( + pat.span, def_id)); + PatternKind::Wild + } + } + } + None => { + self.errors.push(PatternError::StaticInPattern(pat.span)); + PatternKind::Wild + } + } + } + _ => self.lower_variant_or_leaf(pat, vec![]) + } + } + + PatKind::Ref(ref subpattern, _) | + PatKind::Box(ref subpattern) => { + PatternKind::Deref { subpattern: self.lower_pattern(subpattern) } + } + + PatKind::Slice(ref prefix, ref slice, ref suffix) => { + let ty = self.tcx.node_id_to_type(pat.id); + match ty.sty { + ty::TyRef(_, mt) => + PatternKind::Deref { + subpattern: Pattern { + ty: mt.ty, + span: pat.span, + kind: Box::new(self.slice_or_array_pattern( + pat.span, mt.ty, prefix, slice, suffix)) + }, + }, + + ty::TySlice(..) | + ty::TyArray(..) 
=> + self.slice_or_array_pattern(pat.span, ty, prefix, slice, suffix), + + ref sty => + span_bug!( + pat.span, + "unexpanded type for vector pattern: {:?}", + sty), + } + } + + PatKind::Tuple(ref subpatterns, ddpos) => { + match self.tcx.node_id_to_type(pat.id).sty { + ty::TyTuple(ref tys) => { + let subpatterns = + subpatterns.iter() + .enumerate_and_adjust(tys.len(), ddpos) + .map(|(i, subpattern)| FieldPattern { + field: Field::new(i), + pattern: self.lower_pattern(subpattern) + }) + .collect(); + + PatternKind::Leaf { subpatterns: subpatterns } + } + + ref sty => span_bug!(pat.span, "unexpected type for tuple pattern: {:?}", sty), + } + } + + PatKind::Binding(bm, ref ident, ref sub) => { + let def_id = self.tcx.expect_def(pat.id).def_id(); + let id = self.tcx.map.as_local_node_id(def_id).unwrap(); + let var_ty = self.tcx.node_id_to_type(pat.id); + let region = match var_ty.sty { + ty::TyRef(r, _) => Some(r), + _ => None, + }; + let (mutability, mode) = match bm { + hir::BindByValue(hir::MutMutable) => + (Mutability::Mut, BindingMode::ByValue), + hir::BindByValue(hir::MutImmutable) => + (Mutability::Not, BindingMode::ByValue), + hir::BindByRef(hir::MutMutable) => + (Mutability::Not, BindingMode::ByRef(region.unwrap(), BorrowKind::Mut)), + hir::BindByRef(hir::MutImmutable) => + (Mutability::Not, BindingMode::ByRef(region.unwrap(), BorrowKind::Shared)), + }; + + // A ref x pattern is the same node used for x, and as such it has + // x's type, which is &T, where we want T (the type being matched). + if let hir::BindByRef(_) = bm { + if let ty::TyRef(_, mt) = ty.sty { + ty = mt.ty; + } else { + bug!("`ref {}` has wrong type {}", ident.node, ty); + } + } + + PatternKind::Binding { + mutability: mutability, + mode: mode, + name: ident.node, + var: id, + ty: var_ty, + subpattern: self.lower_opt_pattern(sub), + } + } + + PatKind::TupleStruct(_, ref subpatterns, ddpos) => { + let pat_ty = self.tcx.node_id_to_type(pat.id); + let adt_def = match pat_ty.sty { + ty::TyAdt(adt_def, _) => adt_def, + _ => span_bug!(pat.span, "tuple struct pattern not applied to an ADT"), + }; + let variant_def = adt_def.variant_of_def(self.tcx.expect_def(pat.id)); + + let subpatterns = + subpatterns.iter() + .enumerate_and_adjust(variant_def.fields.len(), ddpos) + .map(|(i, field)| FieldPattern { + field: Field::new(i), + pattern: self.lower_pattern(field), + }) + .collect(); + self.lower_variant_or_leaf(pat, subpatterns) + } + + PatKind::Struct(_, ref fields, _) => { + let pat_ty = self.tcx.node_id_to_type(pat.id); + let adt_def = match pat_ty.sty { + ty::TyAdt(adt_def, _) => adt_def, + _ => { + span_bug!( + pat.span, + "struct pattern not applied to an ADT"); + } + }; + let variant_def = adt_def.variant_of_def(self.tcx.expect_def(pat.id)); + + let subpatterns = + fields.iter() + .map(|field| { + let index = variant_def.index_of_field_named(field.node.name); + let index = index.unwrap_or_else(|| { + span_bug!( + pat.span, + "no field with name {:?}", + field.node.name); + }); + FieldPattern { + field: Field::new(index), + pattern: self.lower_pattern(&field.node.pat), + } + }) + .collect(); + + self.lower_variant_or_leaf(pat, subpatterns) + } + }; + + Pattern { + span: pat.span, + ty: ty, + kind: Box::new(kind), + } + } + + fn lower_patterns(&mut self, pats: &[P]) -> Vec> { + pats.iter().map(|p| self.lower_pattern(p)).collect() + } + + fn lower_opt_pattern(&mut self, pat: &Option>) -> Option> + { + pat.as_ref().map(|p| self.lower_pattern(p)) + } + + fn flatten_nested_slice_patterns( + &mut self, + prefix: Vec>, + 
slice: Option>, + suffix: Vec>) + -> (Vec>, Option>, Vec>) + { + let orig_slice = match slice { + Some(orig_slice) => orig_slice, + None => return (prefix, slice, suffix) + }; + let orig_prefix = prefix; + let orig_suffix = suffix; + + // dance because of intentional borrow-checker stupidity. + let kind = *orig_slice.kind; + match kind { + PatternKind::Slice { prefix, slice, mut suffix } | + PatternKind::Array { prefix, slice, mut suffix } => { + let mut orig_prefix = orig_prefix; + + orig_prefix.extend(prefix); + suffix.extend(orig_suffix); + + (orig_prefix, slice, suffix) + } + _ => { + (orig_prefix, Some(Pattern { + kind: box kind, ..orig_slice + }), orig_suffix) + } + } + } + + fn slice_or_array_pattern( + &mut self, + span: Span, + ty: Ty<'tcx>, + prefix: &[P], + slice: &Option>, + suffix: &[P]) + -> PatternKind<'tcx> + { + let prefix = self.lower_patterns(prefix); + let slice = self.lower_opt_pattern(slice); + let suffix = self.lower_patterns(suffix); + let (prefix, slice, suffix) = + self.flatten_nested_slice_patterns(prefix, slice, suffix); + + match ty.sty { + ty::TySlice(..) => { + // matching a slice or fixed-length array + PatternKind::Slice { prefix: prefix, slice: slice, suffix: suffix } + } + + ty::TyArray(_, len) => { + // fixed-length array + assert!(len >= prefix.len() + suffix.len()); + PatternKind::Array { prefix: prefix, slice: slice, suffix: suffix } + } + + _ => { + span_bug!(span, "bad slice pattern type {:?}", ty); + } + } + } + + fn lower_variant_or_leaf( + &mut self, + pat: &hir::Pat, + subpatterns: Vec>) + -> PatternKind<'tcx> + { + match self.tcx.expect_def(pat.id) { + Def::Variant(variant_id) | Def::VariantCtor(variant_id, ..) => { + let enum_id = self.tcx.parent_def_id(variant_id).unwrap(); + let adt_def = self.tcx.lookup_adt_def(enum_id); + if adt_def.variants.len() > 1 { + PatternKind::Variant { + adt_def: adt_def, + variant_index: adt_def.variant_index_with_id(variant_id), + subpatterns: subpatterns, + } + } else { + PatternKind::Leaf { subpatterns: subpatterns } + } + } + + Def::Struct(..) | Def::StructCtor(..) | Def::Union(..) | + Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => { + PatternKind::Leaf { subpatterns: subpatterns } + } + + def => { + span_bug!(pat.span, "inappropriate def for pattern: {:?}", def); + } + } + } +} + +pub trait PatternFoldable<'tcx> : Sized { + fn fold_with>(&self, folder: &mut F) -> Self { + self.super_fold_with(folder) + } + + fn super_fold_with>(&self, folder: &mut F) -> Self; +} + +pub trait PatternFolder<'tcx> : Sized { + fn fold_pattern(&mut self, pattern: &Pattern<'tcx>) -> Pattern<'tcx> { + pattern.super_fold_with(self) + } + + fn fold_pattern_kind(&mut self, kind: &PatternKind<'tcx>) -> PatternKind<'tcx> { + kind.super_fold_with(self) + } +} + + +impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Box { + fn super_fold_with>(&self, folder: &mut F) -> Self { + let content: T = (**self).fold_with(folder); + box content + } +} + +impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Vec { + fn super_fold_with>(&self, folder: &mut F) -> Self { + self.iter().map(|t| t.fold_with(folder)).collect() + } +} + +impl<'tcx, T: PatternFoldable<'tcx>> PatternFoldable<'tcx> for Option { + fn super_fold_with>(&self, folder: &mut F) -> Self{ + self.as_ref().map(|t| t.fold_with(folder)) + } +} + +macro_rules! CopyImpls { + ($($ty:ty),+) => { + $( + impl<'tcx> PatternFoldable<'tcx> for $ty { + fn super_fold_with>(&self, _: &mut F) -> Self { + self.clone() + } + } + )+ + } +} + +macro_rules! 
TcxCopyImpls { + ($($ty:ident),+) => { + $( + impl<'tcx> PatternFoldable<'tcx> for $ty<'tcx> { + fn super_fold_with>(&self, _: &mut F) -> Self { + *self + } + } + )+ + } +} + +CopyImpls!{ Span, Field, Mutability, ast::Name, ast::NodeId, usize, ConstVal } +TcxCopyImpls!{ Ty, BindingMode, AdtDef } + +impl<'tcx> PatternFoldable<'tcx> for FieldPattern<'tcx> { + fn super_fold_with>(&self, folder: &mut F) -> Self { + FieldPattern { + field: self.field.fold_with(folder), + pattern: self.pattern.fold_with(folder) + } + } +} + +impl<'tcx> PatternFoldable<'tcx> for Pattern<'tcx> { + fn fold_with>(&self, folder: &mut F) -> Self { + folder.fold_pattern(self) + } + + fn super_fold_with>(&self, folder: &mut F) -> Self { + Pattern { + ty: self.ty.fold_with(folder), + span: self.span.fold_with(folder), + kind: self.kind.fold_with(folder) + } + } +} + +impl<'tcx> PatternFoldable<'tcx> for PatternKind<'tcx> { + fn fold_with>(&self, folder: &mut F) -> Self { + folder.fold_pattern_kind(self) + } + + fn super_fold_with>(&self, folder: &mut F) -> Self { + match *self { + PatternKind::Wild => PatternKind::Wild, + PatternKind::Binding { + mutability, + name, + mode, + var, + ty, + ref subpattern, + } => PatternKind::Binding { + mutability: mutability.fold_with(folder), + name: name.fold_with(folder), + mode: mode.fold_with(folder), + var: var.fold_with(folder), + ty: ty.fold_with(folder), + subpattern: subpattern.fold_with(folder), + }, + PatternKind::Variant { + adt_def, + variant_index, + ref subpatterns, + } => PatternKind::Variant { + adt_def: adt_def.fold_with(folder), + variant_index: variant_index.fold_with(folder), + subpatterns: subpatterns.fold_with(folder) + }, + PatternKind::Leaf { + ref subpatterns, + } => PatternKind::Leaf { + subpatterns: subpatterns.fold_with(folder), + }, + PatternKind::Deref { + ref subpattern, + } => PatternKind::Deref { + subpattern: subpattern.fold_with(folder), + }, + PatternKind::Constant { + ref value + } => PatternKind::Constant { + value: value.fold_with(folder) + }, + PatternKind::Range { + ref lo, + ref hi + } => PatternKind::Range { + lo: lo.fold_with(folder), + hi: hi.fold_with(folder) + }, + PatternKind::Slice { + ref prefix, + ref slice, + ref suffix, + } => PatternKind::Slice { + prefix: prefix.fold_with(folder), + slice: slice.fold_with(folder), + suffix: suffix.fold_with(folder) + }, + PatternKind::Array { + ref prefix, + ref slice, + ref suffix + } => PatternKind::Array { + prefix: prefix.fold_with(folder), + slice: slice.fold_with(folder), + suffix: suffix.fold_with(folder) + }, + } + } +} diff --git a/src/librustc_const_math/lib.rs b/src/librustc_const_math/lib.rs index 741dd4107e001..31fccb41ce573 100644 --- a/src/librustc_const_math/lib.rs +++ b/src/librustc_const_math/lib.rs @@ -25,7 +25,7 @@ #![feature(rustc_private)] #![feature(staged_api)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #[macro_use] extern crate log; #[macro_use] extern crate syntax; diff --git a/src/librustc_data_structures/accumulate_vec.rs b/src/librustc_data_structures/accumulate_vec.rs new file mode 100644 index 0000000000000..3894db40277a1 --- /dev/null +++ b/src/librustc_data_structures/accumulate_vec.rs @@ -0,0 +1,52 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +//! A vector type intended to be used for collecting from iterators onto the stack. +//! +//! Space for up to N elements is provided on the stack. If more elements are collected, Vec is +//! used to store the values on the heap. This type does not support re-allocating onto the heap, +//! and there is no way to push more elements onto the existing storage. +//! +//! The N above is determined by Array's implementor, by way of an associatated constant. + +use std::ops::Deref; +use std::iter::{IntoIterator, FromIterator}; + +use array_vec::{Array, ArrayVec}; + +#[derive(Debug)] +pub enum AccumulateVec { + Array(ArrayVec), + Heap(Vec) +} + +impl Deref for AccumulateVec { + type Target = [A::Element]; + fn deref(&self) -> &Self::Target { + match *self { + AccumulateVec::Array(ref v) => &v[..], + AccumulateVec::Heap(ref v) => &v[..], + } + } +} + +impl FromIterator for AccumulateVec { + fn from_iter(iter: I) -> AccumulateVec where I: IntoIterator { + let iter = iter.into_iter(); + if iter.size_hint().1.map_or(false, |n| n <= A::LEN) { + let mut v = ArrayVec::new(); + v.extend(iter); + AccumulateVec::Array(v) + } else { + AccumulateVec::Heap(iter.collect()) + } + } +} + diff --git a/src/librustc_data_structures/array_vec.rs b/src/librustc_data_structures/array_vec.rs new file mode 100644 index 0000000000000..f87426cee59ea --- /dev/null +++ b/src/librustc_data_structures/array_vec.rs @@ -0,0 +1,106 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! A stack-allocated vector, allowing storage of N elements on the stack. +//! +//! Currently, only the N = 8 case is supported (due to Array only being impl-ed for [T; 8]). 
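Taken together, `AccumulateVec` and `ArrayVec` give a collect-into-stack-storage primitive that spills to a `Vec` only when the iterator reports more than `LEN` elements up front. The sketch below is not part of the patch: it assumes the crate paths from the diff headers and the single `[T; 8]` `Array` impl described above (the generic parameters in the quoted code were stripped by formatting, so the types are spelled out in full here); `ArrayVec`'s constructor and `Extend` impl appear just below.

```rust
// Usage sketch (assumptions: rustc_data_structures crate paths as in the diff
// headers, and [T; 8] as the only Array impl, per the module docs above).
extern crate rustc_data_structures;

use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::array_vec::ArrayVec;

fn main() {
    // Four elements fit in the stack storage, so this stays the Array variant.
    let small: AccumulateVec<[u32; 8]> = (0u32..4).collect();
    assert_eq!(&small[..], &[0u32, 1, 2, 3][..]);

    // The size hint reports more than LEN elements, so this collects on the heap.
    let large: AccumulateVec<[u32; 8]> = (0u32..100).collect();
    assert_eq!(large.len(), 100); // slice methods come from Deref

    // ArrayVec can also be used directly as a fixed-capacity stack buffer.
    let mut buf: ArrayVec<[u32; 8]> = ArrayVec::new();
    buf.extend(0u32..3);
    assert_eq!(buf.len(), 3);
}
```

The `FromIterator` impl decides between the two variants from `size_hint` alone, which is why, as the module docs note, the collection cannot later be grown past the stack capacity.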
+ +use std::marker::Unsize; +use std::iter::Extend; +use std::ptr::drop_in_place; +use std::ops::{Deref, DerefMut}; +use std::slice; +use std::fmt; + +pub unsafe trait Array { + type Element; + type PartialStorage: Default + Unsize<[ManuallyDrop]>; + const LEN: usize; +} + +unsafe impl Array for [T; 8] { + type Element = T; + type PartialStorage = [ManuallyDrop; 8]; + const LEN: usize = 8; +} + +pub struct ArrayVec { + count: usize, + values: A::PartialStorage +} + +impl ArrayVec { + pub fn new() -> Self { + ArrayVec { + count: 0, + values: Default::default(), + } + } +} + +impl fmt::Debug for ArrayVec + where A: Array, + A::Element: fmt::Debug { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self[..].fmt(f) + } +} + +impl Deref for ArrayVec { + type Target = [A::Element]; + fn deref(&self) -> &Self::Target { + unsafe { + slice::from_raw_parts(&self.values as *const _ as *const A::Element, self.count) + } + } +} + +impl DerefMut for ArrayVec { + fn deref_mut(&mut self) -> &mut [A::Element] { + unsafe { + slice::from_raw_parts_mut(&mut self.values as *mut _ as *mut A::Element, self.count) + } + } +} + +impl Drop for ArrayVec { + fn drop(&mut self) { + unsafe { + drop_in_place(&mut self[..]) + } + } +} + +impl Extend for ArrayVec { + fn extend(&mut self, iter: I) where I: IntoIterator { + for el in iter { + unsafe { + let arr = &mut self.values as &mut [ManuallyDrop<_>]; + arr[self.count].value = el; + } + self.count += 1; + } + } +} + +// FIXME: This should use repr(transparent) from rust-lang/rfcs#1758. +#[allow(unions_with_drop_fields)] +pub union ManuallyDrop { + value: T, + #[allow(dead_code)] + empty: (), +} + +impl Default for ManuallyDrop { + fn default() -> Self { + ManuallyDrop { empty: () } + } +} + diff --git a/src/librustc_borrowck/bitslice.rs b/src/librustc_data_structures/bitslice.rs similarity index 98% rename from src/librustc_borrowck/bitslice.rs rename to src/librustc_data_structures/bitslice.rs index 80fa86a007ed3..ba53578e57918 100644 --- a/src/librustc_borrowck/bitslice.rs +++ b/src/librustc_data_structures/bitslice.rs @@ -8,8 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// FIXME: move this to `rustc_data_structures` and potentially merge -// with `bitvec` there. +// FIXME: merge with `bitvec` use std::mem; diff --git a/src/librustc_data_structures/blake2b.rs b/src/librustc_data_structures/blake2b.rs new file mode 100644 index 0000000000000..8c82c135dc426 --- /dev/null +++ b/src/librustc_data_structures/blake2b.rs @@ -0,0 +1,338 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +// An implementation of the Blake2b cryptographic hash function. +// The implementation closely follows: https://tools.ietf.org/html/rfc7693 +// +// "BLAKE2 is a cryptographic hash function faster than MD5, SHA-1, SHA-2, and +// SHA-3, yet is at least as secure as the latest standard SHA-3." +// according to their own website :) +// +// Indeed this implementation is two to three times as fast as our SHA-256 +// implementation. If you have the luxury of being able to use crates from +// crates.io, you can go there and find still faster implementations. 
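As a complement to the description above, here is a short usage sketch (not part of the patch) driving the two entry points this file adds: `blake2b` for one-shot hashing and `Blake2bHasher` for incremental hashing. The crate path is assumed from the diff header; the signatures match the functions defined later in the file.

```rust
// Usage sketch (assumed crate path; signatures as defined later in this file:
// blake2b(out, key, data), Blake2bHasher::new(outlen, key), finalize()).
extern crate rustc_data_structures;

use rustc_data_structures::blake2b::{blake2b, Blake2bHasher};
use std::hash::Hasher;

fn main() {
    // One-shot, unkeyed: the caller picks the digest length (1 to 64 bytes)
    // through the size of the output buffer.
    let mut digest = [0u8; 32];
    blake2b(&mut digest, &[], b"hello world");

    // Incremental: feed data in pieces, then finalize to read the digest.
    let mut hasher = Blake2bHasher::new(32, &[]);
    hasher.write(b"hello ");
    hasher.write(b"world");
    let streamed = hasher.finalize();

    // Both paths hash the same bytes, so the digests agree.
    assert_eq!(streamed, &digest[..]);
}
```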
+ +use std::mem; +use std::slice; + +pub struct Blake2bCtx { + b: [u8; 128], + h: [u64; 8], + t: [u64; 2], + c: usize, + outlen: u16, + finalized: bool +} + +impl ::std::fmt::Debug for Blake2bCtx { + fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + try!(write!(fmt, "hash: ")); + for v in &self.h[..] { + try!(write!(fmt, "{:x}", v)); + } + Ok(()) + } +} + +#[inline(always)] +fn b2b_g(v: &mut [u64; 16], + a: usize, + b: usize, + c: usize, + d: usize, + x: u64, + y: u64) +{ + v[a] = v[a].wrapping_add(v[b]).wrapping_add(x); + v[d] = (v[d] ^ v[a]).rotate_right(32); + v[c] = v[c].wrapping_add(v[d]); + v[b] = (v[b] ^ v[c]).rotate_right(24); + v[a] = v[a].wrapping_add(v[b]).wrapping_add(y); + v[d] = (v[d] ^ v[a]).rotate_right(16); + v[c] = v[c].wrapping_add(v[d]); + v[b] = (v[b] ^ v[c]).rotate_right(63); +} + +// Initialization vector +const BLAKE2B_IV: [u64; 8] = [ + 0x6A09E667F3BCC908, 0xBB67AE8584CAA73B, + 0x3C6EF372FE94F82B, 0xA54FF53A5F1D36F1, + 0x510E527FADE682D1, 0x9B05688C2B3E6C1F, + 0x1F83D9ABFB41BD6B, 0x5BE0CD19137E2179 +]; + +fn blake2b_compress(ctx: &mut Blake2bCtx, last: bool) { + + const SIGMA: [[usize; 16]; 12] = [ + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ], + [14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 ], + [11, 8, 12, 0, 5, 2, 15, 13, 10, 14, 3, 6, 7, 1, 9, 4 ], + [7, 9, 3, 1, 13, 12, 11, 14, 2, 6, 5, 10, 4, 0, 15, 8 ], + [9, 0, 5, 7, 2, 4, 10, 15, 14, 1, 11, 12, 6, 8, 3, 13 ], + [2, 12, 6, 10, 0, 11, 8, 3, 4, 13, 7, 5, 15, 14, 1, 9 ], + [12, 5, 1, 15, 14, 13, 4, 10, 0, 7, 6, 3, 9, 2, 8, 11 ], + [13, 11, 7, 14, 12, 1, 3, 9, 5, 0, 15, 4, 8, 6, 2, 10 ], + [6, 15, 14, 9, 11, 3, 0, 8, 12, 2, 13, 7, 1, 4, 10, 5 ], + [10, 2, 8, 4, 7, 6, 1, 5, 15, 11, 9, 14, 3, 12, 13, 0 ], + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 ], + [14, 10, 4, 8, 9, 15, 13, 6, 1, 12, 0, 2, 11, 7, 5, 3 ] + ]; + + let mut v: [u64; 16] = [ + ctx.h[0], + ctx.h[1], + ctx.h[2], + ctx.h[3], + ctx.h[4], + ctx.h[5], + ctx.h[6], + ctx.h[7], + + BLAKE2B_IV[0], + BLAKE2B_IV[1], + BLAKE2B_IV[2], + BLAKE2B_IV[3], + BLAKE2B_IV[4], + BLAKE2B_IV[5], + BLAKE2B_IV[6], + BLAKE2B_IV[7], + ]; + + v[12] ^= ctx.t[0]; // low 64 bits of offset + v[13] ^= ctx.t[1]; // high 64 bits + if last { + v[14] = !v[14]; + } + + { + // Re-interpret the input buffer in the state as u64s + let m: &mut [u64; 16] = unsafe { + let b: &mut [u8; 128] = &mut ctx.b; + ::std::mem::transmute(b) + }; + + // It's OK to modify the buffer in place since this is the last time + // this data will be accessed before it's overwritten + if cfg!(target_endian = "big") { + for word in &mut m[..] { + *word = word.to_be(); + } + } + + for i in 0 .. 12 { + b2b_g(&mut v, 0, 4, 8, 12, m[SIGMA[i][ 0]], m[SIGMA[i][ 1]]); + b2b_g(&mut v, 1, 5, 9, 13, m[SIGMA[i][ 2]], m[SIGMA[i][ 3]]); + b2b_g(&mut v, 2, 6, 10, 14, m[SIGMA[i][ 4]], m[SIGMA[i][ 5]]); + b2b_g(&mut v, 3, 7, 11, 15, m[SIGMA[i][ 6]], m[SIGMA[i][ 7]]); + b2b_g(&mut v, 0, 5, 10, 15, m[SIGMA[i][ 8]], m[SIGMA[i][ 9]]); + b2b_g(&mut v, 1, 6, 11, 12, m[SIGMA[i][10]], m[SIGMA[i][11]]); + b2b_g(&mut v, 2, 7, 8, 13, m[SIGMA[i][12]], m[SIGMA[i][13]]); + b2b_g(&mut v, 3, 4, 9, 14, m[SIGMA[i][14]], m[SIGMA[i][15]]); + } + } + + for i in 0 .. 
8 { + ctx.h[i] ^= v[i] ^ v[i + 8]; + } +} + +fn blake2b_new(outlen: usize, key: &[u8]) -> Blake2bCtx { + assert!(outlen > 0 && outlen <= 64 && key.len() <= 64); + + let mut ctx = Blake2bCtx { + b: [0; 128], + h: BLAKE2B_IV, + t: [0; 2], + c: 0, + outlen: outlen as u16, + finalized: false, + }; + + ctx.h[0] ^= 0x01010000 ^ ((key.len() << 8) as u64) ^ (outlen as u64); + + if key.len() > 0 { + blake2b_update(&mut ctx, key); + ctx.c = ctx.b.len(); + } + + ctx +} + +fn blake2b_update(ctx: &mut Blake2bCtx, mut data: &[u8]) { + assert!(!ctx.finalized, "Blake2bCtx already finalized"); + + let mut bytes_to_copy = data.len(); + let mut space_in_buffer = ctx.b.len() - ctx.c; + + while bytes_to_copy > space_in_buffer { + checked_mem_copy(data, &mut ctx.b[ctx.c .. ], space_in_buffer); + + ctx.t[0] = ctx.t[0].wrapping_add(ctx.b.len() as u64); + if ctx.t[0] < (ctx.b.len() as u64) { + ctx.t[1] += 1; + } + blake2b_compress(ctx, false); + ctx.c = 0; + + data = &data[space_in_buffer .. ]; + bytes_to_copy -= space_in_buffer; + space_in_buffer = ctx.b.len(); + } + + if bytes_to_copy > 0 { + checked_mem_copy(data, &mut ctx.b[ctx.c .. ], bytes_to_copy); + ctx.c += bytes_to_copy; + } +} + +fn blake2b_final(ctx: &mut Blake2bCtx) +{ + assert!(!ctx.finalized, "Blake2bCtx already finalized"); + + ctx.t[0] = ctx.t[0].wrapping_add(ctx.c as u64); + if ctx.t[0] < ctx.c as u64 { + ctx.t[1] += 1; + } + + while ctx.c < 128 { + ctx.b[ctx.c] = 0; + ctx.c += 1; + } + + blake2b_compress(ctx, true); + + if cfg!(target_endian = "big") { + // Make sure that the data is in memory in little endian format, as is + // demanded by BLAKE2 + for word in &mut ctx.h { + *word = word.to_le(); + } + } + + ctx.finalized = true; +} + +#[inline(always)] +fn checked_mem_copy(from: &[T1], to: &mut [T2], byte_count: usize) { + let from_size = from.len() * mem::size_of::(); + let to_size = to.len() * mem::size_of::(); + assert!(from_size >= byte_count); + assert!(to_size >= byte_count); + let from_byte_ptr = from.as_ptr() as * const u8; + let to_byte_ptr = to.as_mut_ptr() as * mut u8; + unsafe { + ::std::ptr::copy_nonoverlapping(from_byte_ptr, to_byte_ptr, byte_count); + } +} + +pub fn blake2b(out: &mut [u8], key: &[u8], data: &[u8]) +{ + let mut ctx = blake2b_new(out.len(), key); + blake2b_update(&mut ctx, data); + blake2b_final(&mut ctx); + checked_mem_copy(&ctx.h, out, ctx.outlen as usize); +} + +pub struct Blake2bHasher(Blake2bCtx); + +impl ::std::hash::Hasher for Blake2bHasher { + fn write(&mut self, bytes: &[u8]) { + blake2b_update(&mut self.0, bytes); + } + + fn finish(&self) -> u64 { + assert!(self.0.outlen == 8, + "Hasher initialized with incompatible output length"); + u64::from_le(self.0.h[0]) + } +} + +impl Blake2bHasher { + pub fn new(outlen: usize, key: &[u8]) -> Blake2bHasher { + Blake2bHasher(blake2b_new(outlen, key)) + } + + pub fn finalize(&mut self) -> &[u8] { + if !self.0.finalized { + blake2b_final(&mut self.0); + } + debug_assert!(mem::size_of_val(&self.0.h) >= self.0.outlen as usize); + let raw_ptr = (&self.0.h[..]).as_ptr() as * const u8; + unsafe { + slice::from_raw_parts(raw_ptr, self.0.outlen as usize) + } + } +} + +impl ::std::fmt::Debug for Blake2bHasher { + fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + write!(fmt, "{:?}", self.0) + } +} + +#[cfg(test)] +fn selftest_seq(out: &mut [u8], seed: u32) +{ + let mut a: u32 = 0xDEAD4BADu32.wrapping_mul(seed); + let mut b: u32 = 1; + + for i in 0 .. 
out.len() { + let t: u32 = a.wrapping_add(b); + a = b; + b = t; + out[i] = ((t >> 24) & 0xFF) as u8; + } +} + +#[test] +fn blake2b_selftest() +{ + use std::hash::Hasher; + + // grand hash of hash results + const BLAKE2B_RES: [u8; 32] = [ + 0xC2, 0x3A, 0x78, 0x00, 0xD9, 0x81, 0x23, 0xBD, + 0x10, 0xF5, 0x06, 0xC6, 0x1E, 0x29, 0xDA, 0x56, + 0x03, 0xD7, 0x63, 0xB8, 0xBB, 0xAD, 0x2E, 0x73, + 0x7F, 0x5E, 0x76, 0x5A, 0x7B, 0xCC, 0xD4, 0x75 + ]; + + // parameter sets + const B2B_MD_LEN: [usize; 4] = [20, 32, 48, 64]; + const B2B_IN_LEN: [usize; 6] = [0, 3, 128, 129, 255, 1024]; + + let mut data = [0u8; 1024]; + let mut md = [0u8; 64]; + let mut key = [0u8; 64]; + + let mut hasher = Blake2bHasher::new(32, &[]); + + for i in 0 .. 4 { + let outlen = B2B_MD_LEN[i]; + for j in 0 .. 6 { + let inlen = B2B_IN_LEN[j]; + + selftest_seq(&mut data[.. inlen], inlen as u32); // unkeyed hash + blake2b(&mut md[.. outlen], &[], &data[.. inlen]); + hasher.write(&md[.. outlen]); // hash the hash + + selftest_seq(&mut key[0 .. outlen], outlen as u32); // keyed hash + blake2b(&mut md[.. outlen], &key[.. outlen], &data[.. inlen]); + hasher.write(&md[.. outlen]); // hash the hash + } + } + + // compute and compare the hash of hashes + let md = hasher.finalize(); + for i in 0 .. 32 { + assert_eq!(md[i], BLAKE2B_RES[i]); + } +} diff --git a/src/librustc_data_structures/control_flow_graph/dominators/mod.rs b/src/librustc_data_structures/control_flow_graph/dominators/mod.rs index 250b89d12ed05..ab675db21503e 100644 --- a/src/librustc_data_structures/control_flow_graph/dominators/mod.rs +++ b/src/librustc_data_structures/control_flow_graph/dominators/mod.rs @@ -57,9 +57,9 @@ pub fn dominators_given_rpo(graph: &G, // (*) // (*) dominators for `pred` have been calculated new_idom = intersect_opt(&post_order_rank, - &immediate_dominators, - new_idom, - Some(pred)); + &immediate_dominators, + new_idom, + Some(pred)); } } @@ -77,10 +77,10 @@ pub fn dominators_given_rpo(graph: &G, } fn intersect_opt(post_order_rank: &IndexVec, - immediate_dominators: &IndexVec>, - node1: Option, - node2: Option) - -> Option { + immediate_dominators: &IndexVec>, + node1: Option, + node2: Option) + -> Option { match (node1, node2) { (None, None) => None, (Some(n), None) | (None, Some(n)) => Some(n), @@ -89,10 +89,10 @@ fn intersect_opt(post_order_rank: &IndexVec, } fn intersect(post_order_rank: &IndexVec, - immediate_dominators: &IndexVec>, - mut node1: Node, - mut node2: Node) - -> Node { + immediate_dominators: &IndexVec>, + mut node1: Node, + mut node2: Node) + -> Node { while node1 != node2 { while post_order_rank[node1] < post_order_rank[node2] { node1 = immediate_dominators[node1].unwrap(); @@ -142,9 +142,9 @@ impl Dominators { "node {:?} is not reachable", node2); intersect::(&self.post_order_rank, - &self.immediate_dominators, - node1, - node2) + &self.immediate_dominators, + node1, + node2) } pub fn mutual_dominator(&self, iter: I) -> Option diff --git a/src/librustc_data_structures/control_flow_graph/dominators/test.rs b/src/librustc_data_structures/control_flow_graph/dominators/test.rs index a6db5f2fe3ea1..0af878cac2df1 100644 --- a/src/librustc_data_structures/control_flow_graph/dominators/test.rs +++ b/src/librustc_data_structures/control_flow_graph/dominators/test.rs @@ -14,12 +14,7 @@ use super::*; #[test] fn diamond() { - let graph = TestGraph::new(0, &[ - (0, 1), - (0, 2), - (1, 3), - (2, 3), - ]); + let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]); let dominators = dominators(&graph); let immediate_dominators = 
dominators.all_immediate_dominators(); @@ -32,17 +27,9 @@ fn diamond() { #[test] fn paper() { // example from the paper: - let graph = TestGraph::new(6, &[ - (6, 5), - (6, 4), - (5, 1), - (4, 2), - (4, 3), - (1, 2), - (2, 3), - (3, 2), - (2, 1), - ]); + let graph = TestGraph::new(6, + &[(6, 5), (6, 4), (5, 1), (4, 2), (4, 3), (1, 2), (2, 3), (3, 2), + (2, 1)]); let dominators = dominators(&graph); let immediate_dominators = dominators.all_immediate_dominators(); @@ -54,4 +41,3 @@ fn paper() { assert_eq!(immediate_dominators[5], Some(6)); assert_eq!(immediate_dominators[6], Some(6)); } - diff --git a/src/librustc_data_structures/control_flow_graph/iterate/test.rs b/src/librustc_data_structures/control_flow_graph/iterate/test.rs index 28297d55bdf32..dca45602f17c4 100644 --- a/src/librustc_data_structures/control_flow_graph/iterate/test.rs +++ b/src/librustc_data_structures/control_flow_graph/iterate/test.rs @@ -15,12 +15,7 @@ use super::*; #[test] fn diamond_post_order() { - let graph = TestGraph::new(0, &[ - (0, 1), - (0, 2), - (1, 3), - (2, 3), - ]); + let graph = TestGraph::new(0, &[(0, 1), (0, 2), (1, 3), (2, 3)]); let result = post_order_from(&graph, 0); assert_eq!(result, vec![3, 1, 2, 0]); @@ -33,16 +28,8 @@ fn rev_post_order_inner_loop() { // ^ ^ v | // | 6 <- 4 | // +-----------------+ - let graph = TestGraph::new(0, &[ - (0, 1), - (1, 2), - (2, 3), - (3, 5), - (3, 1), - (2, 4), - (4, 6), - (6, 2), - ]); + let graph = TestGraph::new(0, + &[(0, 1), (1, 2), (2, 3), (3, 5), (3, 1), (2, 4), (4, 6), (6, 2)]); let rev_graph = TransposedGraph::new(&graph); @@ -52,4 +39,3 @@ fn rev_post_order_inner_loop() { let result = post_order_from_to(&rev_graph, 3, Some(1)); assert_eq!(result, vec![4, 6, 2, 3]); } - diff --git a/src/librustc_data_structures/control_flow_graph/mod.rs b/src/librustc_data_structures/control_flow_graph/mod.rs index f9e75b12e0358..eb6839df6274f 100644 --- a/src/librustc_data_structures/control_flow_graph/mod.rs +++ b/src/librustc_data_structures/control_flow_graph/mod.rs @@ -36,10 +36,10 @@ pub trait ControlFlowGraph pub trait GraphPredecessors<'graph> { type Item; - type Iter: Iterator; + type Iter: Iterator; } pub trait GraphSuccessors<'graph> { type Item; - type Iter: Iterator; -} \ No newline at end of file + type Iter: Iterator; +} diff --git a/src/librustc_data_structures/control_flow_graph/reachable/mod.rs b/src/librustc_data_structures/control_flow_graph/reachable/mod.rs index e520e23f3afb8..24210ebb95d3d 100644 --- a/src/librustc_data_structures/control_flow_graph/reachable/mod.rs +++ b/src/librustc_data_structures/control_flow_graph/reachable/mod.rs @@ -19,8 +19,7 @@ use super::super::indexed_vec::{IndexVec, Idx}; #[cfg(test)] mod test; -pub fn reachable(graph: &G) - -> Reachability { +pub fn reachable(graph: &G) -> Reachability { let reverse_post_order = reverse_post_order(graph, graph.start_node()); reachable_given_rpo(graph, &reverse_post_order) } @@ -53,12 +52,10 @@ pub struct Reachability { impl Reachability { fn new(graph: &G) -> Self { let num_nodes = graph.num_nodes(); - Reachability { - bits: IndexVec::from_elem_n(BitVector::new(num_nodes), num_nodes), - } + Reachability { bits: IndexVec::from_elem_n(BitVector::new(num_nodes), num_nodes) } } - pub fn can_reach(&self, source: Node, target: Node)-> bool { + pub fn can_reach(&self, source: Node, target: Node) -> bool { let bit: usize = target.index(); self.bits[source].contains(bit) } diff --git a/src/librustc_data_structures/control_flow_graph/reachable/test.rs 
b/src/librustc_data_structures/control_flow_graph/reachable/test.rs index 6aa906a0804e2..ef45deeaafc78 100644 --- a/src/librustc_data_structures/control_flow_graph/reachable/test.rs +++ b/src/librustc_data_structures/control_flow_graph/reachable/test.rs @@ -17,15 +17,7 @@ fn test1() { // 0 -> 1 -> 2 -> 3 // ^ v // 6 <- 4 -> 5 - let graph = TestGraph::new(0, &[ - (0, 1), - (1, 2), - (2, 3), - (2, 4), - (4, 5), - (4, 6), - (6, 1), - ]); + let graph = TestGraph::new(0, &[(0, 1), (1, 2), (2, 3), (2, 4), (4, 5), (4, 6), (6, 1)]); let reachable = reachable(&graph); assert!((0..6).all(|i| reachable.can_reach(0, i))); assert!((1..6).all(|i| reachable.can_reach(1, i))); @@ -43,15 +35,9 @@ fn test2() { // 30 -> 31 -> 32 -> 33 // ^ v // 36 <- 34 -> 35 - let graph = TestGraph::new(30, &[ - (30, 31), - (31, 32), - (32, 33), - (32, 34), - (34, 35), - (34, 36), - (36, 31), - ]); + let graph = TestGraph::new(30, + &[(30, 31), (31, 32), (32, 33), (32, 34), (34, 35), (34, 36), + (36, 31)]); let reachable = reachable(&graph); assert!((30..36).all(|i| reachable.can_reach(30, i))); assert!((31..36).all(|i| reachable.can_reach(31, i))); diff --git a/src/librustc_data_structures/control_flow_graph/reference.rs b/src/librustc_data_structures/control_flow_graph/reference.rs index d735be1ed2f54..3b8b01f2ff43b 100644 --- a/src/librustc_data_structures/control_flow_graph/reference.rs +++ b/src/librustc_data_structures/control_flow_graph/reference.rs @@ -21,13 +21,13 @@ impl<'graph, G: ControlFlowGraph> ControlFlowGraph for &'graph G { (**self).start_node() } - fn predecessors<'iter>(&'iter self, node: Self::Node) - -> >::Iter { + fn predecessors<'iter>(&'iter self, + node: Self::Node) + -> >::Iter { (**self).predecessors(node) } - fn successors<'iter>(&'iter self, node: Self::Node) - -> >::Iter { + fn successors<'iter>(&'iter self, node: Self::Node) -> >::Iter { (**self).successors(node) } } diff --git a/src/librustc_data_structures/control_flow_graph/test.rs b/src/librustc_data_structures/control_flow_graph/test.rs index 57b2a858de568..d48a6e684ad8e 100644 --- a/src/librustc_data_structures/control_flow_graph/test.rs +++ b/src/librustc_data_structures/control_flow_graph/test.rs @@ -28,7 +28,7 @@ impl TestGraph { num_nodes: start_node + 1, start_node: start_node, successors: HashMap::new(), - predecessors: HashMap::new() + predecessors: HashMap::new(), }; for &(source, target) in edges { graph.num_nodes = max(graph.num_nodes, source + 1); @@ -55,13 +55,13 @@ impl ControlFlowGraph for TestGraph { self.num_nodes } - fn predecessors<'graph>(&'graph self, node: usize) + fn predecessors<'graph>(&'graph self, + node: usize) -> >::Iter { - self.predecessors[&node].iter().cloned() + self.predecessors[&node].iter().cloned() } - fn successors<'graph>(&'graph self, node: usize) - -> >::Iter { + fn successors<'graph>(&'graph self, node: usize) -> >::Iter { self.successors[&node].iter().cloned() } } @@ -75,4 +75,3 @@ impl<'graph> GraphSuccessors<'graph> for TestGraph { type Item = usize; type Iter = iter::Cloned>; } - diff --git a/src/librustc_data_structures/control_flow_graph/transpose.rs b/src/librustc_data_structures/control_flow_graph/transpose.rs index 792e079c28c85..a1a117edb94fc 100644 --- a/src/librustc_data_structures/control_flow_graph/transpose.rs +++ b/src/librustc_data_structures/control_flow_graph/transpose.rs @@ -22,7 +22,10 @@ impl TransposedGraph { } pub fn with_start(base_graph: G, start_node: G::Node) -> Self { - TransposedGraph { base_graph: base_graph, start_node: start_node } + TransposedGraph { + 
base_graph: base_graph, + start_node: start_node, + } } } @@ -37,12 +40,14 @@ impl ControlFlowGraph for TransposedGraph { self.start_node } - fn predecessors<'graph>(&'graph self, node: Self::Node) + fn predecessors<'graph>(&'graph self, + node: Self::Node) -> >::Iter { self.base_graph.successors(node) } - fn successors<'graph>(&'graph self, node: Self::Node) + fn successors<'graph>(&'graph self, + node: Self::Node) -> >::Iter { self.base_graph.predecessors(node) } diff --git a/src/librustc_data_structures/flock.rs b/src/librustc_data_structures/flock.rs index 4a184d3174dff..510c9ceef0960 100644 --- a/src/librustc_data_structures/flock.rs +++ b/src/librustc_data_structures/flock.rs @@ -94,6 +94,27 @@ mod imp { pub const F_SETLKW: libc::c_int = 9; } + #[cfg(target_os = "haiku")] + mod os { + use libc; + + pub struct flock { + pub l_type: libc::c_short, + pub l_whence: libc::c_short, + pub l_start: libc::off_t, + pub l_len: libc::off_t, + pub l_pid: libc::pid_t, + + // not actually here, but brings in line with freebsd + pub l_sysid: libc::c_int, + } + + pub const F_UNLCK: libc::c_short = 0x0200; + pub const F_WRLCK: libc::c_short = 0x0400; + pub const F_SETLK: libc::c_int = 0x0080; + pub const F_SETLKW: libc::c_int = 0x0100; + } + #[cfg(any(target_os = "macos", target_os = "ios"))] mod os { use libc; diff --git a/src/librustc_data_structures/fmt_wrap.rs b/src/librustc_data_structures/fmt_wrap.rs new file mode 100644 index 0000000000000..50fd1d802b7ff --- /dev/null +++ b/src/librustc_data_structures/fmt_wrap.rs @@ -0,0 +1,31 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::fmt; + +// Provide some more formatting options for some data types (at the moment +// that's just `{:x}` for slices of u8). + +pub struct FmtWrap(pub T); + +impl<'a> fmt::LowerHex for FmtWrap<&'a [u8]> { + fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { + for byte in self.0.iter() { + try!(write!(formatter, "{:02x}", byte)); + } + Ok(()) + } +} + +#[test] +fn test_lower_hex() { + let bytes: &[u8] = &[0x01, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]; + assert_eq!("0123456789abcdef", &format!("{:x}", FmtWrap(bytes))); +} diff --git a/src/librustc_data_structures/graph/mod.rs b/src/librustc_data_structures/graph/mod.rs index 4561a3d084c20..fdb629ca5a578 100644 --- a/src/librustc_data_structures/graph/mod.rs +++ b/src/librustc_data_structures/graph/mod.rs @@ -380,7 +380,7 @@ impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> { graph: graph, stack: vec![], visited: visited, - direction: direction + direction: direction, } } @@ -394,7 +394,7 @@ impl<'g, N: Debug, E: Debug> DepthFirstTraversal<'g, N, E> { graph: graph, stack: vec![start_node], visited: visited, - direction: direction + direction: direction, } } diff --git a/src/librustc_borrowck/indexed_set.rs b/src/librustc_data_structures/indexed_set.rs similarity index 98% rename from src/librustc_borrowck/indexed_set.rs rename to src/librustc_data_structures/indexed_set.rs index 671aff97d20aa..2e9e054e97eaf 100644 --- a/src/librustc_borrowck/indexed_set.rs +++ b/src/librustc_data_structures/indexed_set.rs @@ -8,16 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// FIXME: move this to `rustc_data_structures` - use std::fmt; use std::marker::PhantomData; use std::mem; use std::ops::{Deref, DerefMut, Range}; use bitslice::{BitSlice, Word}; use bitslice::{bitwise, Union, Subtract}; - -use rustc_data_structures::indexed_vec::Idx; +use indexed_vec::Idx; /// Represents a set (or packed family of sets), of some element type /// E, where each E is identified by some unique index type `T`. diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index e7da18cef10f9..fc963dac9495f 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -30,6 +30,9 @@ #![feature(staged_api)] #![feature(unboxed_closures)] #![feature(fn_traits)] +#![feature(untagged_unions)] +#![feature(associated_consts)] +#![feature(unsize)] #![cfg_attr(unix, feature(libc))] #![cfg_attr(test, feature(test))] @@ -41,9 +44,15 @@ extern crate serialize as rustc_serialize; // used by deriving #[cfg(unix)] extern crate libc; +pub mod array_vec; +pub mod accumulate_vec; +pub mod bitslice; +pub mod blake2b; pub mod bitvec; +pub mod fmt_wrap; pub mod graph; pub mod ivar; +pub mod indexed_set; pub mod indexed_vec; pub mod obligation_forest; pub mod snapshot_map; diff --git a/src/librustc_data_structures/obligation_forest/mod.rs b/src/librustc_data_structures/obligation_forest/mod.rs index c079146edbf42..5e590cb445f79 100644 --- a/src/librustc_data_structures/obligation_forest/mod.rs +++ b/src/librustc_data_structures/obligation_forest/mod.rs @@ -333,6 +333,7 @@ impl ObligationForest { } } Err(err) => { + stalled = false; let backtrace = self.error_at(index); errors.push(Error { error: err, @@ -342,6 +343,16 @@ impl ObligationForest { } } + if stalled { + // There's no need to perform marking, cycle processing and compression when nothing + // changed. 
+ return Outcome { + completed: vec![], + errors: errors, + stalled: stalled, + }; + } + self.mark_as_waiting(); self.process_cycles(processor); diff --git a/src/librustc_data_structures/snapshot_map/mod.rs b/src/librustc_data_structures/snapshot_map/mod.rs index b3989013d2114..a4e6166032d81 100644 --- a/src/librustc_data_structures/snapshot_map/mod.rs +++ b/src/librustc_data_structures/snapshot_map/mod.rs @@ -11,6 +11,7 @@ use fnv::FnvHashMap; use std::hash::Hash; use std::ops; +use std::mem; #[cfg(test)] mod test; @@ -23,7 +24,7 @@ pub struct SnapshotMap } pub struct Snapshot { - len: usize + len: usize, } enum UndoLog { @@ -31,6 +32,7 @@ enum UndoLog { CommittedSnapshot, Inserted(K), Overwrite(K, V), + Noop, } impl SnapshotMap @@ -39,7 +41,7 @@ impl SnapshotMap pub fn new() -> Self { SnapshotMap { map: FnvHashMap(), - undo_log: vec![] + undo_log: vec![], } } @@ -68,9 +70,7 @@ impl SnapshotMap } true } - None => { - false - } + None => false, } } @@ -88,7 +88,7 @@ impl SnapshotMap assert!(snapshot.len < self.undo_log.len()); assert!(match self.undo_log[snapshot.len] { UndoLog::OpenSnapshot => true, - _ => false + _ => false, }); } @@ -102,29 +102,61 @@ impl SnapshotMap } } + pub fn partial_rollback(&mut self, + snapshot: &Snapshot, + should_revert_key: &F) + where F: Fn(&K) -> bool + { + self.assert_open_snapshot(snapshot); + for i in (snapshot.len + 1..self.undo_log.len()).rev() { + let reverse = match self.undo_log[i] { + UndoLog::OpenSnapshot => false, + UndoLog::CommittedSnapshot => false, + UndoLog::Noop => false, + UndoLog::Inserted(ref k) => should_revert_key(k), + UndoLog::Overwrite(ref k, _) => should_revert_key(k), + }; + + if reverse { + let entry = mem::replace(&mut self.undo_log[i], UndoLog::Noop); + self.reverse(entry); + } + } + } + pub fn rollback_to(&mut self, snapshot: Snapshot) { self.assert_open_snapshot(&snapshot); while self.undo_log.len() > snapshot.len + 1 { - match self.undo_log.pop().unwrap() { - UndoLog::OpenSnapshot => { - panic!("cannot rollback an uncommitted snapshot"); - } + let entry = self.undo_log.pop().unwrap(); + self.reverse(entry); + } + + let v = self.undo_log.pop().unwrap(); + assert!(match v { + UndoLog::OpenSnapshot => true, + _ => false, + }); + assert!(self.undo_log.len() == snapshot.len); + } - UndoLog::CommittedSnapshot => { } + fn reverse(&mut self, entry: UndoLog) { + match entry { + UndoLog::OpenSnapshot => { + panic!("cannot rollback an uncommitted snapshot"); + } - UndoLog::Inserted(key) => { - self.map.remove(&key); - } + UndoLog::CommittedSnapshot => {} - UndoLog::Overwrite(key, old_value) => { - self.map.insert(key, old_value); - } + UndoLog::Inserted(key) => { + self.map.remove(&key); } - } - let v = self.undo_log.pop().unwrap(); - assert!(match v { UndoLog::OpenSnapshot => true, _ => false }); - assert!(self.undo_log.len() == snapshot.len); + UndoLog::Overwrite(key, old_value) => { + self.map.insert(key, old_value); + } + + UndoLog::Noop => {} + } } } diff --git a/src/librustc_data_structures/unify/mod.rs b/src/librustc_data_structures/unify/mod.rs index 3feea3218d013..1f4d09a92247e 100644 --- a/src/librustc_data_structures/unify/mod.rs +++ b/src/librustc_data_structures/unify/mod.rs @@ -27,7 +27,7 @@ mod tests; /// /// Clients are expected to provide implementations of this trait; you /// can see some examples in the `test` module. 
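The `partial_rollback` method added to `SnapshotMap` above walks the undo log backwards from the snapshot point, reverts only the entries whose key satisfies the caller's predicate, and swaps each reverted entry for the new `Noop` variant so that later log indices stay valid. A self-contained toy version of that idea, with illustrative names rather than the rustc types:

```rust
use std::collections::HashMap;
use std::hash::Hash;
use std::mem;

// Toy undo-log map illustrating partial rollback: revert only entries whose
// key matches a predicate, leaving Noop markers so log positions stay valid.
enum Undo<K, V> {
    Inserted(K),
    Overwrite(K, V),
    Noop,
}

struct ToyMap<K, V> {
    map: HashMap<K, V>,
    log: Vec<Undo<K, V>>,
}

impl<K: Hash + Eq + Clone, V> ToyMap<K, V> {
    fn insert(&mut self, k: K, v: V) {
        match self.map.insert(k.clone(), v) {
            None => self.log.push(Undo::Inserted(k)),
            Some(old) => self.log.push(Undo::Overwrite(k, old)),
        }
    }

    fn partial_rollback<F: Fn(&K) -> bool>(&mut self, mark: usize, should_revert: F) {
        for i in (mark..self.log.len()).rev() {
            let revert = match self.log[i] {
                Undo::Inserted(ref k) | Undo::Overwrite(ref k, _) => should_revert(k),
                Undo::Noop => false,
            };
            if revert {
                // Replace the entry with Noop, then undo its effect on the map.
                match mem::replace(&mut self.log[i], Undo::Noop) {
                    Undo::Inserted(k) => { self.map.remove(&k); }
                    Undo::Overwrite(k, old) => { self.map.insert(k, old); }
                    Undo::Noop => {}
                }
            }
        }
    }
}

fn main() {
    let mut m = ToyMap { map: HashMap::new(), log: Vec::new() };
    let mark = m.log.len(); // "snapshot": remember the log length
    m.insert("a", 1);
    m.insert("b", 2);
    m.partial_rollback(mark, |k| *k == "a");
    assert!(!m.map.contains_key("a"));
    assert_eq!(m.map.get("b"), Some(&2));
}
```

Replacing reverted entries with `Noop` rather than removing them is what keeps the rest of the log, and any outer snapshots that point into it, consistent.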
-pub trait UnifyKey : Copy + Clone + Debug + PartialEq { +pub trait UnifyKey: Copy + Clone + Debug + PartialEq { type Value: Clone + PartialEq + Debug; fn index(&self) -> u32; @@ -115,11 +115,7 @@ impl VarValue { } fn if_not_self(&self, key: K, self_key: K) -> Option { - if key == self_key { - None - } else { - Some(key) - } + if key == self_key { None } else { Some(key) } } } @@ -236,7 +232,8 @@ impl UnificationTable { new_rank: u32, old_root: VarValue, new_root: VarValue, - new_value: K::Value) -> K { + new_value: K::Value) + -> K { let old_root_key = old_root.key(); let new_root_key = new_root.key(); self.set(old_root_key, old_root.redirect(new_root_key)); @@ -306,7 +303,8 @@ impl<'tcx, K, V> UnificationTable let combined = { match (&node_a.value, &node_b.value) { (&None, &None) => None, - (&Some(ref v), &None) | (&None, &Some(ref v)) => Some(v.clone()), + (&Some(ref v), &None) | + (&None, &Some(ref v)) => Some(v.clone()), (&Some(ref v1), &Some(ref v2)) => { if *v1 != *v2 { return Err((v1.clone(), v2.clone())); diff --git a/src/librustc_driver/Cargo.toml b/src/librustc_driver/Cargo.toml index 772d83eb2cfad..99d3e155e8936 100644 --- a/src/librustc_driver/Cargo.toml +++ b/src/librustc_driver/Cargo.toml @@ -13,25 +13,26 @@ arena = { path = "../libarena" } flate = { path = "../libflate" } graphviz = { path = "../libgraphviz" } log = { path = "../liblog" } +proc_macro_plugin = { path = "../libproc_macro_plugin" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } rustc_borrowck = { path = "../librustc_borrowck" } rustc_const_eval = { path = "../librustc_const_eval" } +rustc_data_structures = { path = "../librustc_data_structures" } rustc_errors = { path = "../librustc_errors" } +rustc_incremental = { path = "../librustc_incremental" } rustc_lint = { path = "../librustc_lint" } rustc_llvm = { path = "../librustc_llvm" } +rustc_metadata = { path = "../librustc_metadata" } rustc_mir = { path = "../librustc_mir" } -rustc_plugin = { path = "../librustc_plugin" } rustc_passes = { path = "../librustc_passes" } +rustc_plugin = { path = "../librustc_plugin" } rustc_privacy = { path = "../librustc_privacy" } -rustc_incremental = { path = "../librustc_incremental" } rustc_resolve = { path = "../librustc_resolve" } rustc_save_analysis = { path = "../librustc_save_analysis" } rustc_trans = { path = "../librustc_trans" } rustc_typeck = { path = "../librustc_typeck" } -rustc_metadata = { path = "../librustc_metadata" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } syntax_ext = { path = "../libsyntax_ext" } syntax_pos = { path = "../libsyntax_pos" } -proc_macro = { path = "../libproc_macro" } diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 36e9fccdf5fd8..da1d5ad2c4a9b 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -12,8 +12,10 @@ use rustc::hir; use rustc::hir::{map as hir_map, FreevarMap, TraitMap}; use rustc::hir::def::DefMap; use rustc::hir::lowering::lower_crate; +use rustc_data_structures::blake2b::Blake2bHasher; +use rustc_data_structures::fmt_wrap::FmtWrap; +use rustc::ty::util::ArchIndependentHasher; use rustc_mir as mir; -use rustc::mir::mir_map::MirMap; use rustc::session::{Session, CompileResult, compile_result_from_err_count}; use rustc::session::config::{self, Input, OutputFilenames, OutputType, OutputTypes}; @@ -24,12 +26,10 @@ use rustc::middle::privacy::AccessLevels; use rustc::ty::{self, TyCtxt}; use rustc::util::common::time; use rustc::util::nodemap::NodeSet; -use 
rustc_back::sha2::{Sha256, Digest}; use rustc_borrowck as borrowck; use rustc_incremental::{self, IncrementalHashesMap}; use rustc_resolve::{MakeGlobMap, Resolver}; -use rustc_metadata::macro_import; -use rustc_metadata::creader::read_local_crates; +use rustc_metadata::creader::CrateLoader; use rustc_metadata::cstore::CStore; use rustc_trans::back::{link, write}; use rustc_trans as trans; @@ -44,6 +44,7 @@ use super::Compilation; use serialize::json; use std::env; +use std::mem; use std::ffi::{OsString, OsStr}; use std::fs; use std::io::{self, Write}; @@ -68,7 +69,6 @@ pub struct Resolutions { pub fn compile_input(sess: &Session, cstore: &CStore, - cfg: ast::CrateConfig, input: &Input, outdir: &Option, output: &Option, @@ -92,7 +92,7 @@ pub fn compile_input(sess: &Session, // large chunks of memory alive and we want to free them as soon as // possible to keep the peak memory usage low let (outputs, trans) = { - let krate = match phase_1_parse_input(sess, cfg, input) { + let krate = match phase_1_parse_input(sess, input) { Ok(krate) => krate, Err(mut parse_error) => { parse_error.emit(); @@ -175,7 +175,7 @@ pub fn compile_input(sess: &Session, resolutions, &arenas, &crate_name, - |tcx, mir_map, analysis, incremental_hashes_map, result| { + |tcx, analysis, incremental_hashes_map, result| { { // Eventually, we will want to track plugins. let _ignore = tcx.dep_graph.in_ignore(); @@ -187,7 +187,6 @@ pub fn compile_input(sess: &Session, opt_crate, tcx.map.krate(), &analysis, - mir_map.as_ref(), tcx, &crate_name); (control.after_analysis.callback)(&mut state); @@ -203,10 +202,7 @@ pub fn compile_input(sess: &Session, println!("Pre-trans"); tcx.print_debug_stats(); } - let trans = phase_4_translate_to_llvm(tcx, - mir_map.unwrap(), - analysis, - &incremental_hashes_map); + let trans = phase_4_translate_to_llvm(tcx, analysis, &incremental_hashes_map); if log_enabled!(::log::INFO) { println!("Post-trans"); @@ -348,7 +344,6 @@ pub struct CompileState<'a, 'b, 'ast: 'a, 'tcx: 'b> where 'ast: 'tcx { pub hir_crate: Option<&'a hir::Crate>, pub ast_map: Option<&'a hir_map::Map<'ast>>, pub resolutions: Option<&'a Resolutions>, - pub mir_map: Option<&'b MirMap<'tcx>>, pub analysis: Option<&'a ty::CrateAnalysis<'a>>, pub tcx: Option>, pub trans: Option<&'a trans::CrateTranslation>, @@ -375,7 +370,6 @@ impl<'a, 'b, 'ast, 'tcx> CompileState<'a, 'b, 'ast, 'tcx> { ast_map: None, resolutions: None, analysis: None, - mir_map: None, tcx: None, trans: None, } @@ -449,13 +443,11 @@ impl<'a, 'b, 'ast, 'tcx> CompileState<'a, 'b, 'ast, 'tcx> { krate: Option<&'a ast::Crate>, hir_crate: &'a hir::Crate, analysis: &'a ty::CrateAnalysis<'a>, - mir_map: Option<&'b MirMap<'tcx>>, tcx: TyCtxt<'b, 'tcx, 'tcx>, crate_name: &'a str) -> CompileState<'a, 'b, 'ast, 'tcx> { CompileState { analysis: Some(analysis), - mir_map: mir_map, tcx: Some(tcx), expanded_crate: krate, hir_crate: Some(hir_crate), @@ -491,23 +483,17 @@ impl<'a, 'b, 'ast, 'tcx> CompileState<'a, 'b, 'ast, 'tcx> { } } -pub fn phase_1_parse_input<'a>(sess: &'a Session, - cfg: ast::CrateConfig, - input: &Input) - -> PResult<'a, ast::Crate> { +pub fn phase_1_parse_input<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> { let continue_after_error = sess.opts.debugging_opts.continue_parse_after_error; sess.diagnostic().set_continue_after_error(continue_after_error); let krate = time(sess.time_passes(), "parsing", || { match *input { Input::File(ref file) => { - parse::parse_crate_from_file(file, cfg.clone(), &sess.parse_sess) + 
parse::parse_crate_from_file(file, &sess.parse_sess) } Input::Str { ref input, ref name } => { - parse::parse_crate_from_source_str(name.clone(), - input.clone(), - cfg.clone(), - &sess.parse_sess) + parse::parse_crate_from_source_str(name.clone(), input.clone(), &sess.parse_sess) } } })?; @@ -639,12 +625,18 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session, } sess.track_errors(|| sess.lint_store.borrow_mut().process_command_line(sess))?; - let mut macro_loader = - macro_import::MacroLoader::new(sess, &cstore, crate_name, krate.config.clone()); - + // Currently, we ignore the name resolution data structures for the purposes of dependency + // tracking. Instead we will run name resolution and include its output in the hash of each + // item, much like we do for macro expansion. In other words, the hash reflects not just + // its contents but the results of name resolution on those contents. Hopefully we'll push + // this back at some point. + let _ignore = sess.dep_graph.in_ignore(); + let mut crate_loader = CrateLoader::new(sess, &cstore, crate_name); + crate_loader.preprocess(&krate); let resolver_arenas = Resolver::arenas(); - let mut resolver = Resolver::new(sess, make_glob_map, &mut macro_loader, &resolver_arenas); - syntax_ext::register_builtins(&mut resolver, sess.features.borrow().quote); + let mut resolver = + Resolver::new(sess, &krate, make_glob_map, &mut crate_loader, &resolver_arenas); + syntax_ext::register_builtins(&mut resolver, syntax_exts, sess.features.borrow().quote); krate = time(time_passes, "expansion", || { // Windows dlls do not have rpaths, so they don't know how to find their @@ -674,20 +666,28 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session, } let features = sess.features.borrow(); let cfg = syntax::ext::expand::ExpansionConfig { - crate_name: crate_name.to_string(), features: Some(&features), recursion_limit: sess.recursion_limit.get(), trace_mac: sess.opts.debugging_opts.trace_macros, should_test: sess.opts.test, + ..syntax::ext::expand::ExpansionConfig::default(crate_name.to_string()) }; - let mut ecx = ExtCtxt::new(&sess.parse_sess, krate.config.clone(), cfg, &mut resolver); - let ret = syntax::ext::expand::expand_crate(&mut ecx, syntax_exts, krate); + let mut ecx = ExtCtxt::new(&sess.parse_sess, cfg, &mut resolver); + let err_count = ecx.parse_sess.span_diagnostic.err_count(); + + let krate = ecx.monotonic_expander().expand_crate(krate); + + if ecx.parse_sess.span_diagnostic.err_count() - ecx.resolve_err_count > err_count { + ecx.parse_sess.span_diagnostic.abort_if_errors(); + } if cfg!(windows) { env::set_var("PATH", &old_path); } - ret + krate }); + krate.exported_macros = mem::replace(&mut resolver.exported_macros, Vec::new()); + krate = time(time_passes, "maybe building test harness", || { syntax::test::modify_for_testing(&sess.parse_sess, &mut resolver, @@ -696,18 +696,23 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session, sess.diagnostic()) }); - krate = time(time_passes, "maybe creating a macro crate", || { - let crate_types = sess.crate_types.borrow(); - let is_rustc_macro_crate = crate_types.contains(&config::CrateTypeRustcMacro); - let num_crate_types = crate_types.len(); - syntax_ext::rustc_macro_registrar::modify(&sess.parse_sess, - &mut resolver, - krate, - is_rustc_macro_crate, - num_crate_types, - sess.diagnostic(), - &sess.features.borrow()) - }); + // If we're in rustdoc we're always compiling as an rlib, but that'll trip a + // bunch of checks in the `modify` function below. 
For now just skip this + // step entirely if we're rustdoc as it's not too useful anyway. + if !sess.opts.actually_rustdoc { + krate = time(time_passes, "maybe creating a macro crate", || { + let crate_types = sess.crate_types.borrow(); + let num_crate_types = crate_types.len(); + let is_proc_macro_crate = crate_types.contains(&config::CrateTypeProcMacro); + syntax_ext::proc_macro_registrar::modify(&sess.parse_sess, + &mut resolver, + krate, + is_proc_macro_crate, + num_crate_types, + sess.diagnostic(), + &sess.features.borrow()) + }); + } if sess.opts.debugging_opts.input_stats { println!("Post-expansion node count: {}", count_nodes(&krate)); @@ -732,14 +737,6 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session, }) })?; - // Collect defintions for def ids. - time(sess.time_passes(), "collecting defs", || resolver.definitions.collect(&krate)); - - time(sess.time_passes(), "external crate/lib resolution", || { - let defs = &resolver.definitions; - read_local_crates(sess, &cstore, defs, &krate, crate_name, &sess.dep_graph) - }); - time(sess.time_passes(), "early lint checks", || lint::check_ast_crate(sess, &krate)); @@ -749,13 +746,6 @@ pub fn phase_2_configure_and_expand<'a, F>(sess: &Session, || ast_validation::check_crate(sess, &krate)); time(sess.time_passes(), "name resolution", || -> CompileResult { - // Currently, we ignore the name resolution data structures for the purposes of dependency - // tracking. Instead we will run name resolution and include its output in the hash of each - // item, much like we do for macro expansion. In other words, the hash reflects not just - // its contents but the results of name resolution on those contents. Hopefully we'll push - // this back at some point. - let _ignore = sess.dep_graph.in_ignore(); - resolver.build_reduced_graph(&krate); resolver.resolve_imports(); // Since import resolution will eventually happen in expansion, @@ -808,17 +798,16 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, f: F) -> Result where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>, - Option>, ty::CrateAnalysis, IncrementalHashesMap, CompileResult) -> R { macro_rules! try_with_f { - ($e: expr, ($t: expr, $m: expr, $a: expr, $h: expr)) => { + ($e: expr, ($t: expr, $a: expr, $h: expr)) => { match $e { Ok(x) => x, Err(x) => { - f($t, $m, $a, $h, Err(x)); + f($t, $a, $h, Err(x)); return Err(x); } } @@ -884,7 +873,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, || rustc_incremental::load_dep_graph(tcx, &incremental_hashes_map)); // passes are timed inside typeck - try_with_f!(typeck::check_crate(tcx), (tcx, None, analysis, incremental_hashes_map)); + try_with_f!(typeck::check_crate(tcx), (tcx, analysis, incremental_hashes_map)); time(time_passes, "const checking", @@ -924,28 +913,28 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, "rvalue checking", || rvalues::check_crate(tcx)); - let mut mir_map = - time(time_passes, - "MIR dump", - || mir::mir_map::build_mir_for_crate(tcx)); + time(time_passes, + "MIR dump", + || mir::mir_map::build_mir_for_crate(tcx)); time(time_passes, "MIR passes", || { let mut passes = sess.mir_passes.borrow_mut(); // Push all the built-in passes. 
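// [Editor's sketch, not part of the patch] Toy version of the pass pipeline
// registered in the `push_pass` calls below (hypothetical `Pass`/`Context`
// types, not the real rustc_mir API). The point of the surrounding change is
// that `run_passes` now takes only the shared context, since MIR is stored in
// `tcx.mir_map` rather than being threaded through as a separate `MirMap`.
trait SketchPass {
    fn run(&self, cx: &mut SketchContext);
}

struct SketchContext {
    // Stand-in for the MIR bodies owned by the type context.
    mir_bodies: Vec<String>,
}

#[derive(Default)]
struct SketchPasses {
    passes: Vec<Box<dyn SketchPass>>,
}

impl SketchPasses {
    fn push_pass(&mut self, pass: Box<dyn SketchPass>) {
        self.passes.push(pass);
    }
    fn run_passes(&mut self, cx: &mut SketchContext) {
        for pass in &self.passes {
            pass.run(cx);
        }
    }
}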
passes.push_hook(box mir::transform::dump_mir::DumpMir); passes.push_pass(box mir::transform::simplify_cfg::SimplifyCfg::new("initial")); - passes.push_pass(box mir::transform::qualify_consts::QualifyAndPromoteConstants); + passes.push_pass( + box mir::transform::qualify_consts::QualifyAndPromoteConstants::default()); passes.push_pass(box mir::transform::type_check::TypeckMir); passes.push_pass( box mir::transform::simplify_branches::SimplifyBranches::new("initial")); passes.push_pass(box mir::transform::simplify_cfg::SimplifyCfg::new("qualify-consts")); // And run everything. - passes.run_passes(tcx, &mut mir_map); + passes.run_passes(tcx); }); time(time_passes, "borrow checking", - || borrowck::check_crate(tcx, &mir_map)); + || borrowck::check_crate(tcx)); // Avoid overwhelming user with errors if type checking failed. // I'm not sure how helpful this is, to be honest, but it avoids @@ -954,11 +943,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, // lint warnings and so on -- kindck used to do this abort, but // kindck is gone now). -nmatsakis if sess.err_count() > 0 { - return Ok(f(tcx, - Some(mir_map), - analysis, - incremental_hashes_map, - Err(sess.err_count()))); + return Ok(f(tcx, analysis, incremental_hashes_map, Err(sess.err_count()))); } analysis.reachable = @@ -986,20 +971,15 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, // The above three passes generate errors w/o aborting if sess.err_count() > 0 { - return Ok(f(tcx, - Some(mir_map), - analysis, - incremental_hashes_map, - Err(sess.err_count()))); + return Ok(f(tcx, analysis, incremental_hashes_map, Err(sess.err_count()))); } - Ok(f(tcx, Some(mir_map), analysis, incremental_hashes_map, Ok(()))) + Ok(f(tcx, analysis, incremental_hashes_map, Ok(()))) }) } /// Run the translation phase to LLVM, after which the AST and analysis can pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - mut mir_map: MirMap<'tcx>, analysis: ty::CrateAnalysis, incremental_hashes_map: &IncrementalHashesMap) -> trans::CrateTranslation { @@ -1017,6 +997,7 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, passes.push_pass(box mir::transform::no_landing_pads::NoLandingPads); passes.push_pass(box mir::transform::simplify_cfg::SimplifyCfg::new("no-landing-pads")); + // From here on out, regions are gone. passes.push_pass(box mir::transform::erase_regions::EraseRegions); passes.push_pass(box mir::transform::add_call_guards::AddCallGuards); @@ -1024,18 +1005,21 @@ pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, passes.push_pass(box mir::transform::no_landing_pads::NoLandingPads); passes.push_pass(box mir::transform::simplify_cfg::SimplifyCfg::new("elaborate-drops")); + // No lifetime analysis based on borrowing can be done from here on out. 
+ passes.push_pass(box mir::transform::instcombine::InstCombine::new()); passes.push_pass(box mir::transform::deaggregator::Deaggregator); + passes.push_pass(box mir::transform::copy_prop::CopyPropagation); passes.push_pass(box mir::transform::add_call_guards::AddCallGuards); passes.push_pass(box mir::transform::dump_mir::Marker("PreTrans")); - passes.run_passes(tcx, &mut mir_map); + passes.run_passes(tcx); }); let translation = time(time_passes, "translation", - move || trans::trans_crate(tcx, &mir_map, analysis, &incremental_hashes_map)); + move || trans::trans_crate(tcx, analysis, &incremental_hashes_map)); time(time_passes, "assert dep graph", @@ -1180,8 +1164,8 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec { Some(config::CrateTypeStaticlib) } - Some(ref n) if *n == "rustc-macro" => { - Some(config::CrateTypeRustcMacro) + Some(ref n) if *n == "proc-macro" => { + Some(config::CrateTypeProcMacro) } Some(ref n) if *n == "bin" => Some(config::CrateTypeExecutable), Some(_) => { @@ -1239,7 +1223,16 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec String { - let mut hasher = Sha256::new(); + use std::hash::Hasher; + + // The crate_disambiguator is a 128 bit hash. The disambiguator is fed + // into various other hashes quite a bit (symbol hashes, incr. comp. hashes, + // debuginfo type IDs, etc), so we don't want it to be too wide. 128 bits + // should still be safe enough to avoid collisions in practice. + // FIXME(mw): It seems that the crate_disambiguator is used everywhere as + // a hex-string instead of raw bytes. We should really use the + // smaller representation. + let mut hasher = ArchIndependentHasher::new(Blake2bHasher::new(128 / 8, &[])); let mut metadata = session.opts.cg.metadata.clone(); // We don't want the crate_disambiguator to dependent on the order @@ -1248,24 +1241,23 @@ pub fn compute_crate_disambiguator(session: &Session) -> String { // Every distinct -C metadata value is only incorporated once: metadata.dedup(); - hasher.input_str("metadata"); + hasher.write(b"metadata"); for s in &metadata { // Also incorporate the length of a metadata string, so that we generate // different values for `-Cmetadata=ab -Cmetadata=c` and // `-Cmetadata=a -Cmetadata=bc` - hasher.input_str(&format!("{}", s.len())[..]); - hasher.input_str(&s[..]); + hasher.write_usize(s.len()); + hasher.write(s.as_bytes()); } - let mut hash = hasher.result_str(); + let mut hash_state = hasher.into_inner(); + let hash_bytes = hash_state.finalize(); // If this is an executable, add a special suffix, so that we don't get // symbol conflicts when linking against a library of the same name. 
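// [Editor's sketch, not part of the patch] The same length-prefixing scheme as
// `compute_crate_disambiguator` above, with std's `DefaultHasher` standing in
// for the architecture-independent Blake2b hasher used by the patch. The sort
// and dedup mirror the real function's goal of being order-independent and
// counting each distinct -C metadata value once.
use std::collections::hash_map::DefaultHasher;
use std::hash::Hasher;

fn disambiguator_sketch(mut metadata: Vec<String>, is_exe: bool) -> String {
    metadata.sort();
    metadata.dedup();

    let mut hasher = DefaultHasher::new();
    hasher.write(b"metadata");
    for s in &metadata {
        // Length-prefix each value so `-Cmetadata=ab -Cmetadata=c` and
        // `-Cmetadata=a -Cmetadata=bc` produce different hashes.
        hasher.write_usize(s.len());
        hasher.write(s.as_bytes());
    }
    format!("{:x}{}", hasher.finish(), if is_exe { "-exe" } else { "" })
}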
- if session.crate_types.borrow().contains(&config::CrateTypeExecutable) { - hash.push_str("-exe"); - } + let is_exe = session.crate_types.borrow().contains(&config::CrateTypeExecutable); - hash + format!("{:x}{}", FmtWrap(hash_bytes), if is_exe { "-exe" } else {""}) } pub fn build_output_filenames(input: &Input, diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index fbd48fc42c92d..cb78baa12a6ad 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -31,7 +31,7 @@ #![feature(rustc_private)] #![feature(set_stdio)] #![feature(staged_api)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] extern crate arena; extern crate flate; @@ -42,6 +42,7 @@ extern crate rustc; extern crate rustc_back; extern crate rustc_borrowck; extern crate rustc_const_eval; +extern crate rustc_data_structures; extern crate rustc_errors as errors; extern crate rustc_passes; extern crate rustc_lint; @@ -73,10 +74,11 @@ use rustc_trans::back::write::{create_target_machine, RELOC_MODEL_ARGS, CODE_GEN use rustc::dep_graph::DepGraph; use rustc::session::{self, config, Session, build_session, CompileResult}; use rustc::session::config::{Input, PrintRequest, OutputType, ErrorOutputType}; -use rustc::session::config::{get_unstable_features_setting, nightly_options}; +use rustc::session::config::nightly_options; +use rustc::session::early_error; use rustc::lint::Lint; use rustc::lint; -use rustc_metadata::loader; +use rustc_metadata::locator; use rustc_metadata::cstore::CStore; use rustc::util::common::time; @@ -93,8 +95,6 @@ use std::str; use std::sync::{Arc, Mutex}; use std::thread; -use rustc::session::early_error; - use syntax::{ast, json}; use syntax::codemap::{CodeMap, FileLoader, RealFileLoader}; use syntax::feature_gate::{GatedCfg, UnstableFeatures}; @@ -131,17 +131,18 @@ pub fn abort_on_err(result: Result, sess: &Session) -> T { } } -pub fn run(args: Vec) -> isize { +pub fn run(run_compiler: F) -> isize + where F: FnOnce() -> (CompileResult, Option) + Send + 'static +{ monitor(move || { - let (result, session) = run_compiler(&args, &mut RustcDefaultCalls); + let (result, session) = run_compiler(); if let Err(err_count) = result { if err_count > 0 { match session { Some(sess) => sess.fatal(&abort_msg(err_count)), None => { let emitter = - errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto, - None); + errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto, None); let handler = errors::Handler::with_emitter(true, false, Box::new(emitter)); handler.emit(&MultiSpan::new(), &abort_msg(err_count), @@ -155,20 +156,15 @@ pub fn run(args: Vec) -> isize { 0 } -pub fn run_compiler<'a>(args: &[String], - callbacks: &mut CompilerCalls<'a>) - -> (CompileResult, Option) { - run_compiler_with_file_loader(args, callbacks, box RealFileLoader) -} - // Parse args and run the compiler. This is the primary entry point for rustc. // See comments on CompilerCalls below for details about the callbacks argument. // The FileLoader provides a way to load files from sources other than the file system. -pub fn run_compiler_with_file_loader<'a, L>(args: &[String], - callbacks: &mut CompilerCalls<'a>, - loader: Box) - -> (CompileResult, Option) - where L: FileLoader + 'static { +pub fn run_compiler<'a>(args: &[String], + callbacks: &mut CompilerCalls<'a>, + file_loader: Option>, + emitter_dest: Option>) + -> (CompileResult, Option) +{ macro_rules! 
do_or_return {($expr: expr, $sess: expr) => { match $expr { Compilation::Stop => return (Ok(()), $sess), @@ -207,24 +203,23 @@ pub fn run_compiler_with_file_loader<'a, L>(args: &[String], let dep_graph = DepGraph::new(sopts.build_dep_graph()); let cstore = Rc::new(CStore::new(&dep_graph)); + + let loader = file_loader.unwrap_or(box RealFileLoader); let codemap = Rc::new(CodeMap::with_file_loader(loader)); - let sess = session::build_session_with_codemap(sopts, - &dep_graph, - input_file_path, - descriptions, - cstore.clone(), - codemap); + let mut sess = session::build_session_with_codemap( + sopts, &dep_graph, input_file_path, descriptions, cstore.clone(), codemap, emitter_dest, + ); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); + let mut cfg = config::build_configuration(&sess, cfg); target_features::add_configuration(&mut cfg, &sess); + sess.parse_sess.config = cfg; - do_or_return!(callbacks.late_callback(&matches, &sess, &cfg, &input, &odir, &ofile), - Some(sess)); + do_or_return!(callbacks.late_callback(&matches, &sess, &input, &odir, &ofile), Some(sess)); let plugins = sess.opts.debugging_opts.extra_plugins.clone(); let control = callbacks.build_controller(&sess, &matches); - (driver::compile_input(&sess, &cstore, cfg, &input, &odir, &ofile, - Some(plugins), &control), + (driver::compile_input(&sess, &cstore, &input, &odir, &ofile, Some(plugins), &control), Some(sess)) } @@ -312,7 +307,6 @@ pub trait CompilerCalls<'a> { fn late_callback(&mut self, _: &getopts::Matches, _: &Session, - _: &ast::CrateConfig, _: &Input, _: &Option, _: &Option) @@ -441,7 +435,7 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { } let dep_graph = DepGraph::new(sopts.build_dep_graph()); let cstore = Rc::new(CStore::new(&dep_graph)); - let sess = build_session(sopts.clone(), + let mut sess = build_session(sopts.clone(), &dep_graph, None, descriptions.clone(), @@ -449,11 +443,10 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let mut cfg = config::build_configuration(&sess, cfg.clone()); target_features::add_configuration(&mut cfg, &sess); - let should_stop = RustcDefaultCalls::print_crate_info(&sess, - &cfg, - None, - odir, - ofile); + sess.parse_sess.config = cfg; + let should_stop = + RustcDefaultCalls::print_crate_info(&sess, None, odir, ofile); + if should_stop == Compilation::Stop { return None; } @@ -469,12 +462,11 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { fn late_callback(&mut self, matches: &getopts::Matches, sess: &Session, - cfg: &ast::CrateConfig, input: &Input, odir: &Option, ofile: &Option) -> Compilation { - RustcDefaultCalls::print_crate_info(sess, cfg, Some(input), odir, ofile) + RustcDefaultCalls::print_crate_info(sess, Some(input), odir, ofile) .and_then(|| RustcDefaultCalls::list_metadata(sess, matches, input)) } @@ -580,8 +572,7 @@ impl RustcDefaultCalls { &Input::File(ref ifile) => { let path = &(*ifile); let mut v = Vec::new(); - loader::list_file_metadata(&sess.target.target, path, &mut v) - .unwrap(); + locator::list_file_metadata(&sess.target.target, path, &mut v).unwrap(); println!("{}", String::from_utf8(v).unwrap()); } &Input::Str { .. 
} => { @@ -596,7 +587,6 @@ impl RustcDefaultCalls { fn print_crate_info(sess: &Session, - cfg: &ast::CrateConfig, input: Option<&Input>, odir: &Option, ofile: &Option) @@ -649,13 +639,11 @@ impl RustcDefaultCalls { } } PrintRequest::Cfg => { - let allow_unstable_cfg = match get_unstable_features_setting() { - UnstableFeatures::Disallow => false, - _ => true, - }; + let allow_unstable_cfg = UnstableFeatures::from_environment() + .is_nightly_build(); - for cfg in cfg { - if !allow_unstable_cfg && GatedCfg::gate(&*cfg).is_some() { + for cfg in &sess.parse_sess.config { + if !allow_unstable_cfg && GatedCfg::gate(cfg).is_some() { continue; } @@ -733,6 +721,10 @@ pub fn version(binary: &str, matches: &getopts::Matches) { println!("commit-date: {}", unw(commit_date_str())); println!("host: {}", config::host_triple()); println!("release: {}", unw(release_str())); + unsafe { + println!("LLVM version: {}.{}", + llvm::LLVMRustVersionMajor(), llvm::LLVMRustVersionMinor()); + } } } @@ -1037,13 +1029,10 @@ pub fn handle_options(args: &[String]) -> Option { fn parse_crate_attrs<'a>(sess: &'a Session, input: &Input) -> PResult<'a, Vec> { match *input { Input::File(ref ifile) => { - parse::parse_crate_attrs_from_file(ifile, Vec::new(), &sess.parse_sess) + parse::parse_crate_attrs_from_file(ifile, &sess.parse_sess) } Input::Str { ref name, ref input } => { - parse::parse_crate_attrs_from_source_str(name.clone(), - input.clone(), - Vec::new(), - &sess.parse_sess) + parse::parse_crate_attrs_from_source_str(name.clone(), input.clone(), &sess.parse_sess) } } } @@ -1054,7 +1043,8 @@ fn parse_crate_attrs<'a>(sess: &'a Session, input: &Input) -> PResult<'a, Vec(f: F) { - const STACK_SIZE: usize = 8 * 1024 * 1024; // 8MB + // Temporarily have stack size set to 16MB to deal with nom-using crates failing + const STACK_SIZE: usize = 16 * 1024 * 1024; // 16MB struct Sink(Arc>>); impl Write for Sink { @@ -1078,7 +1068,7 @@ pub fn monitor(f: F) { } let thread = cfg.spawn(move || { - io::set_panic(box err); + io::set_panic(Some(box err)); f() }); @@ -1113,7 +1103,7 @@ pub fn monitor(f: F) { errors::Level::Note); } - println!("{}", str::from_utf8(&data.lock().unwrap()).unwrap()); + writeln!(io::stderr(), "{}", str::from_utf8(&data.lock().unwrap()).unwrap()).unwrap(); } exit_on_err(); @@ -1124,7 +1114,7 @@ fn exit_on_err() -> ! { // Panic so the process returns a failure code, but don't pollute the // output with some unnecessary panic messages, we've already // printed everything that we needed to. 
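// [Editor's sketch, not part of the patch] Minimal std-only version of the
// `monitor` pattern above: run the real work on a thread with a larger stack
// (16MB, matching the new STACK_SIZE) and turn an escaped panic into a
// non-zero exit. The real function additionally captures panic output into a
// shared buffer and prints an internal-compiler-error message.
use std::thread;

fn monitor_sketch<F: FnOnce() + Send + 'static>(f: F) {
    const STACK_SIZE: usize = 16 * 1024 * 1024; // 16MB

    let handle = thread::Builder::new()
        .stack_size(STACK_SIZE)
        .spawn(f)
        .expect("failed to spawn compiler thread");

    if handle.join().is_err() {
        eprintln!("error: internal compiler error: the compiler thread panicked");
        std::process::exit(101);
    }
}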
- io::set_panic(box io::sink()); + io::set_panic(Some(box io::sink())); panic!(); } @@ -1139,11 +1129,15 @@ pub fn diagnostics_registry() -> errors::registry::Registry { all_errors.extend_from_slice(&rustc_privacy::DIAGNOSTICS); all_errors.extend_from_slice(&rustc_trans::DIAGNOSTICS); all_errors.extend_from_slice(&rustc_const_eval::DIAGNOSTICS); + all_errors.extend_from_slice(&rustc_metadata::DIAGNOSTICS); Registry::new(&all_errors) } pub fn main() { - let result = run(env::args().collect()); + let result = run(|| run_compiler(&env::args().collect::>(), + &mut RustcDefaultCalls, + None, + None)); process::exit(result as i32); } diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index 1ffeaf322bf57..10ff7dc89f9a3 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -52,8 +52,6 @@ use rustc::hir::map::{blocks, NodePrinter}; use rustc::hir; use rustc::hir::print as pprust_hir; -use rustc::mir::mir_map::MirMap; - #[derive(Copy, Clone, PartialEq, Debug)] pub enum PpSourceMode { PpmNormal, @@ -234,7 +232,7 @@ impl PpSourceMode { resolutions.clone(), arenas, id, - |tcx, _, _, _, _| { + |tcx, _, _, _| { let annotation = TypedAnnotation { tcx: tcx, }; @@ -539,6 +537,7 @@ impl FromStr for UserIdentifiedItem { type Err = (); fn from_str(s: &str) -> Result { Ok(s.parse() + .map(ast::NodeId::new) .map(ItemViaNode) .unwrap_or_else(|_| ItemViaPath(s.split("::").map(|s| s.to_string()).collect()))) } @@ -694,7 +693,6 @@ impl fold::Folder for ReplaceBodyWithLoop { fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec, tcx: TyCtxt<'a, 'tcx, 'tcx>, - mir_map: Option<&MirMap<'tcx>>, code: blocks::Code, mode: PpFlowGraphMode, mut out: W) @@ -724,7 +722,6 @@ fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec, blocks::FnLikeCode(fn_like) => { let (bccx, analysis_data) = borrowck::build_borrowck_dataflow_data_for_fn(tcx, - mir_map, fn_like.to_fn_parts(), &cfg); @@ -951,32 +948,28 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session, resolutions.clone(), arenas, crate_name, - |tcx, mir_map, _, _, _| { + |tcx, _, _, _| { match ppm { PpmMir | PpmMirCFG => { - if let Some(mir_map) = mir_map { - if let Some(nodeid) = nodeid { - let def_id = tcx.map.local_def_id(nodeid); - match ppm { - PpmMir => write_mir_pretty(tcx, iter::once(def_id), &mir_map, &mut out), - PpmMirCFG => { - write_mir_graphviz(tcx, iter::once(def_id), &mir_map, &mut out) - } - _ => unreachable!(), - }?; - } else { - match ppm { - PpmMir => write_mir_pretty(tcx, - mir_map.map.keys().into_iter(), - &mir_map, - &mut out), - PpmMirCFG => write_mir_graphviz(tcx, - mir_map.map.keys().into_iter(), - &mir_map, - &mut out), - _ => unreachable!(), - }?; - } + if let Some(nodeid) = nodeid { + let def_id = tcx.map.local_def_id(nodeid); + match ppm { + PpmMir => write_mir_pretty(tcx, iter::once(def_id), &mut out), + PpmMirCFG => { + write_mir_graphviz(tcx, iter::once(def_id), &mut out) + } + _ => unreachable!(), + }?; + } else { + match ppm { + PpmMir => write_mir_pretty(tcx, + tcx.mir_map.borrow().keys().into_iter(), + &mut out), + PpmMirCFG => write_mir_graphviz(tcx, + tcx.mir_map.borrow().keys().into_iter(), + &mut out), + _ => unreachable!(), + }?; } Ok(()) } @@ -994,12 +987,7 @@ fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session, let out: &mut Write = &mut out; - print_flowgraph(variants, - tcx, - mir_map.as_ref(), - code, - mode, - out) + print_flowgraph(variants, tcx, code, mode, out) } None => { let message = format!("--pretty=flowgraph needs block, fn, or method; got \ diff --git 
a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index 8569ff64f9100..50903c89a5890 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -20,14 +20,13 @@ use rustc::middle::region::{self, CodeExtent}; use rustc::middle::region::CodeExtentData; use rustc::middle::resolve_lifetime; use rustc::middle::stability; -use rustc::ty::subst::{Kind, Subst, Substs}; +use rustc::ty::subst::{Kind, Subst}; use rustc::traits::Reveal; use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; use rustc::infer::{self, InferOk, InferResult, TypeOrigin}; use rustc_metadata::cstore::CStore; use rustc::hir::map as hir_map; use rustc::session::{self, config}; -use std::iter; use std::rc::Rc; use syntax::ast; use syntax::abi::Abi; @@ -107,12 +106,11 @@ fn test_env(source_string: &str, let sess = session::build_session_(options, &dep_graph, None, diagnostic_handler, Rc::new(CodeMap::new()), cstore.clone()); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); - let krate_config = Vec::new(); let input = config::Input::Str { name: driver::anon_src(), input: source_string.to_string(), }; - let krate = driver::phase_1_parse_input(&sess, krate_config, &input).unwrap(); + let krate = driver::phase_1_parse_input(&sess, &input).unwrap(); let driver::ExpansionResult { defs, resolutions, mut hir_forest, .. } = { driver::phase_2_configure_and_expand( &sess, &cstore, krate, None, "test", None, MakeGlobMap::No, |_| Ok(()), @@ -166,16 +164,17 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { pub fn create_simple_region_hierarchy(&self) { // creates a region hierarchy where 1 is root, 10 and 11 are // children of 1, etc + + let node = ast::NodeId::from_u32; let dscope = self.infcx .tcx .region_maps - .intern_code_extent(CodeExtentData::DestructionScope(1), + .intern_code_extent(CodeExtentData::DestructionScope(node(1)), region::ROOT_CODE_EXTENT); self.create_region_hierarchy(&RH { - id: 1, - sub: &[RH { id: 10, sub: &[] }, RH { id: 11, sub: &[] }], - }, - dscope); + id: node(1), + sub: &[RH { id: node(10), sub: &[] }, RH { id: node(11), sub: &[] }], + }, dscope); } #[allow(dead_code)] // this seems like it could be useful, even if we don't use it now @@ -274,7 +273,7 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { } pub fn t_pair(&self, ty1: Ty<'tcx>, ty2: Ty<'tcx>) -> Ty<'tcx> { - self.infcx.tcx.mk_tup(vec![ty1, ty2]) + self.infcx.tcx.intern_tup(&[ty1, ty2]) } pub fn t_param(&self, index: u32) -> Ty<'tcx> { @@ -315,8 +314,8 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { self.infcx.tcx.mk_imm_ref(r, self.tcx().types.isize) } - pub fn t_rptr_scope(&self, id: ast::NodeId) -> Ty<'tcx> { - let r = ty::ReScope(self.tcx().region_maps.node_extent(id)); + pub fn t_rptr_scope(&self, id: u32) -> Ty<'tcx> { + let r = ty::ReScope(self.tcx().region_maps.node_extent(ast::NodeId::from_u32(id))); self.infcx.tcx.mk_imm_ref(self.infcx.tcx.mk_region(r), self.tcx().types.isize) } @@ -327,8 +326,8 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { })) } - pub fn t_rptr_free(&self, nid: ast::NodeId, id: u32) -> Ty<'tcx> { - let r = self.re_free(nid, id); + pub fn t_rptr_free(&self, nid: u32, id: u32) -> Ty<'tcx> { + let r = self.re_free(ast::NodeId::from_u32(nid), id); self.infcx.tcx.mk_imm_ref(r, self.tcx().types.isize) } @@ -678,7 +677,7 @@ fn subst_ty_renumber_bound() { env.t_fn(&[t_param], env.t_nil()) }; - let substs = Substs::new(env.infcx.tcx, iter::once(Kind::from(t_rptr_bound1))); + let substs = env.infcx.tcx.intern_substs(&[Kind::from(t_rptr_bound1)]); let t_substituted = 
t_source.subst(env.infcx.tcx, substs); // t_expected = fn(&'a isize) @@ -713,7 +712,7 @@ fn subst_ty_renumber_some_bounds() { env.t_pair(t_param, env.t_fn(&[t_param], env.t_nil())) }; - let substs = Substs::new(env.infcx.tcx, iter::once(Kind::from(t_rptr_bound1))); + let substs = env.infcx.tcx.intern_substs(&[Kind::from(t_rptr_bound1)]); let t_substituted = t_source.subst(env.infcx.tcx, substs); // t_expected = (&'a isize, fn(&'a isize)) @@ -775,7 +774,7 @@ fn subst_region_renumber_region() { env.t_fn(&[env.t_rptr(re_early)], env.t_nil()) }; - let substs = Substs::new(env.infcx.tcx, iter::once(Kind::from(re_bound1))); + let substs = env.infcx.tcx.intern_substs(&[Kind::from(re_bound1)]); let t_substituted = t_source.subst(env.infcx.tcx, substs); // t_expected = fn(&'a isize) @@ -802,8 +801,8 @@ fn walk_ty() { let tcx = env.infcx.tcx; let int_ty = tcx.types.isize; let uint_ty = tcx.types.usize; - let tup1_ty = tcx.mk_tup(vec![int_ty, uint_ty, int_ty, uint_ty]); - let tup2_ty = tcx.mk_tup(vec![tup1_ty, tup1_ty, uint_ty]); + let tup1_ty = tcx.intern_tup(&[int_ty, uint_ty, int_ty, uint_ty]); + let tup2_ty = tcx.intern_tup(&[tup1_ty, tup1_ty, uint_ty]); let uniq_ty = tcx.mk_box(tup2_ty); let walked: Vec<_> = uniq_ty.walk().collect(); assert_eq!(walked, @@ -818,8 +817,8 @@ fn walk_ty_skip_subtree() { let tcx = env.infcx.tcx; let int_ty = tcx.types.isize; let uint_ty = tcx.types.usize; - let tup1_ty = tcx.mk_tup(vec![int_ty, uint_ty, int_ty, uint_ty]); - let tup2_ty = tcx.mk_tup(vec![tup1_ty, tup1_ty, uint_ty]); + let tup1_ty = tcx.intern_tup(&[int_ty, uint_ty, int_ty, uint_ty]); + let tup2_ty = tcx.intern_tup(&[tup1_ty, tup1_ty, uint_ty]); let uniq_ty = tcx.mk_box(tup2_ty); // types we expect to see (in order), plus a boolean saying diff --git a/src/librustc_errors/emitter.rs b/src/librustc_errors/emitter.rs index 1bdc9ef30881f..a307e9b696def 100644 --- a/src/librustc_errors/emitter.rs +++ b/src/librustc_errors/emitter.rs @@ -51,8 +51,8 @@ impl ColorConfig { fn use_color(&self) -> bool { match *self { ColorConfig::Always => true, - ColorConfig::Never => false, - ColorConfig::Auto => stderr_isatty(), + ColorConfig::Never => false, + ColorConfig::Auto => stderr_isatty(), } } } @@ -83,31 +83,33 @@ macro_rules! 
println_maybe_styled { } impl EmitterWriter { - pub fn stderr(color_config: ColorConfig, - code_map: Option>) - -> EmitterWriter { + pub fn stderr(color_config: ColorConfig, code_map: Option>) -> EmitterWriter { if color_config.use_color() { let dst = Destination::from_stderr(); - EmitterWriter { dst: dst, - cm: code_map} + EmitterWriter { + dst: dst, + cm: code_map, + } } else { - EmitterWriter { dst: Raw(Box::new(io::stderr())), - cm: code_map} + EmitterWriter { + dst: Raw(Box::new(io::stderr())), + cm: code_map, + } } } - pub fn new(dst: Box, - code_map: Option>) - -> EmitterWriter { - EmitterWriter { dst: Raw(dst), - cm: code_map} + pub fn new(dst: Box, code_map: Option>) -> EmitterWriter { + EmitterWriter { + dst: Raw(dst), + cm: code_map, + } } fn preprocess_annotations(&self, msp: &MultiSpan) -> Vec { fn add_annotation_to_file(file_vec: &mut Vec, - file: Rc, - line_index: usize, - ann: Annotation) { + file: Rc, + line_index: usize, + ann: Annotation) { for slot in file_vec.iter_mut() { // Look through each of our files for the one we're adding to @@ -166,15 +168,15 @@ impl EmitterWriter { } add_annotation_to_file(&mut output, - lo.file, - lo.line, - Annotation { - start_col: lo.col.0, - end_col: hi.col.0, - is_primary: span_label.is_primary, - is_minimized: is_minimized, - label: span_label.label.clone(), - }); + lo.file, + lo.line, + Annotation { + start_col: lo.col.0, + end_col: hi.col.0, + is_primary: span_label.is_primary, + is_minimized: is_minimized, + label: span_label.label.clone(), + }); } } output @@ -235,9 +237,7 @@ impl EmitterWriter { '^', Style::UnderlinePrimary); if !annotation.is_minimized { - buffer.set_style(line_offset, - width_offset + p, - Style::UnderlinePrimary); + buffer.set_style(line_offset, width_offset + p, Style::UnderlinePrimary); } } else { buffer.putc(line_offset + 1, @@ -245,9 +245,7 @@ impl EmitterWriter { '-', Style::UnderlineSecondary); if !annotation.is_minimized { - buffer.set_style(line_offset, - width_offset + p, - Style::UnderlineSecondary); + buffer.set_style(line_offset, width_offset + p, Style::UnderlineSecondary); } } } @@ -427,8 +425,7 @@ impl EmitterWriter { } // Check to make sure we're not in any <*macros> if !cm.span_to_filename(def_site).contains("macros>") && - !trace.macro_decl_name.starts_with("#[") - { + !trace.macro_decl_name.starts_with("#[") { new_labels.push((trace.call_site, "in this macro invocation".to_string())); break; @@ -473,9 +470,10 @@ impl EmitterWriter { if spans_updated { children.push(SubDiagnostic { level: Level::Note, - message: "this error originates in a macro from the standard library".to_string(), + message: "this error originates in a macro outside of the current crate" + .to_string(), span: MultiSpan::new(), - render_span: None + render_span: None, }); } } @@ -499,8 +497,7 @@ impl EmitterWriter { buffer.append(0, &level.to_string(), Style::HeaderMsg); buffer.append(0, ": ", Style::NoStyle); buffer.append(0, msg, Style::NoStyle); - } - else { + } else { buffer.append(0, &level.to_string(), Style::Level(level.clone())); match code { &Some(ref code) => { @@ -519,23 +516,21 @@ impl EmitterWriter { let mut annotated_files = self.preprocess_annotations(msp); // Make sure our primary file comes first - let primary_lo = - if let (Some(ref cm), Some(ref primary_span)) = (self.cm.as_ref(), - msp.primary_span().as_ref()) { - if primary_span != &&DUMMY_SP && primary_span != &&COMMAND_LINE_SP { - cm.lookup_char_pos(primary_span.lo) - } - else { - emit_to_destination(&buffer.render(), level, &mut self.dst)?; - return 
Ok(()); - } + let primary_lo = if let (Some(ref cm), Some(ref primary_span)) = + (self.cm.as_ref(), msp.primary_span().as_ref()) { + if primary_span != &&DUMMY_SP && primary_span != &&COMMAND_LINE_SP { + cm.lookup_char_pos(primary_span.lo) } else { - // If we don't have span information, emit and exit emit_to_destination(&buffer.render(), level, &mut self.dst)?; return Ok(()); - }; + } + } else { + // If we don't have span information, emit and exit + emit_to_destination(&buffer.render(), level, &mut self.dst)?; + return Ok(()); + }; if let Ok(pos) = - annotated_files.binary_search_by(|x| x.file.name.cmp(&primary_lo.file.name)) { + annotated_files.binary_search_by(|x| x.file.name.cmp(&primary_lo.file.name)) { annotated_files.swap(0, pos); } @@ -551,8 +546,8 @@ impl EmitterWriter { buffer.prepend(buffer_msg_line_offset, "--> ", Style::LineNumber); let loc = primary_lo.clone(); buffer.append(buffer_msg_line_offset, - &format!("{}:{}:{}", loc.file.name, loc.line, loc.col.0 + 1), - Style::LineAndColumn); + &format!("{}:{}:{}", loc.file.name, loc.line, loc.col.0 + 1), + Style::LineAndColumn); for _ in 0..max_line_num_len { buffer.prepend(buffer_msg_line_offset, " ", Style::NoStyle); } @@ -566,8 +561,8 @@ impl EmitterWriter { // Then, the secondary file indicator buffer.prepend(buffer_msg_line_offset + 1, "::: ", Style::LineNumber); buffer.append(buffer_msg_line_offset + 1, - &annotated_file.file.name, - Style::LineAndColumn); + &annotated_file.file.name, + Style::LineAndColumn); for _ in 0..max_line_num_len { buffer.prepend(buffer_msg_line_offset + 1, " ", Style::NoStyle); } @@ -588,7 +583,7 @@ impl EmitterWriter { // this annotated line and the next one if line_idx < (annotated_file.lines.len() - 1) { let line_idx_delta = annotated_file.lines[line_idx + 1].line_index - - annotated_file.lines[line_idx].line_index; + annotated_file.lines[line_idx].line_index; if line_idx_delta > 2 { let last_buffer_line_num = buffer.num_lines(); buffer.puts(last_buffer_line_num, 0, "...", Style::LineNumber); @@ -669,12 +664,7 @@ impl EmitterWriter { let max_line_num = self.get_max_line_num(span, children); let max_line_num_len = max_line_num.to_string().len(); - match self.emit_message_default(span, - message, - code, - level, - max_line_num_len, - false) { + match self.emit_message_default(span, message, code, level, max_line_num_len, false) { Ok(()) => { if !children.is_empty() { let mut buffer = StyledBuffer::new(); @@ -720,13 +710,15 @@ impl EmitterWriter { } } } - Err(e) => panic!("failed to emit error: {}", e) + Err(e) => panic!("failed to emit error: {}", e), } match write!(&mut self.dst, "\n") { Err(e) => panic!("failed to emit error: {}", e), - _ => match self.dst.flush() { - Err(e) => panic!("failed to emit error: {}", e), - _ => () + _ => { + match self.dst.flush() { + Err(e) => panic!("failed to emit error: {}", e), + _ => (), + } } } } @@ -750,8 +742,9 @@ fn overlaps(a1: &Annotation, a2: &Annotation) -> bool { } fn emit_to_destination(rendered_buffer: &Vec>, - lvl: &Level, - dst: &mut Destination) -> io::Result<()> { + lvl: &Level, + dst: &mut Destination) + -> io::Result<()> { use lock; // In order to prevent error message interleaving, where multiple error lines get intermixed @@ -792,8 +785,7 @@ fn stderr_isatty() -> bool { const STD_ERROR_HANDLE: DWORD = -12i32 as DWORD; extern "system" { fn GetStdHandle(which: DWORD) -> HANDLE; - fn GetConsoleMode(hConsoleHandle: HANDLE, - lpMode: *mut DWORD) -> BOOL; + fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: *mut DWORD) -> BOOL; } unsafe { let handle 
= GetStdHandle(STD_ERROR_HANDLE); @@ -821,9 +813,7 @@ impl BufferedWriter { // note: we use _new because the conditional compilation at its use site may make this // this function unused on some platforms fn _new() -> BufferedWriter { - BufferedWriter { - buffer: vec![] - } + BufferedWriter { buffer: vec![] } } } @@ -850,35 +840,34 @@ impl Destination { /// When not on Windows, prefer the buffered terminal so that we can buffer an entire error /// to be emitted at one time. fn from_stderr() -> Destination { - let stderr: Option> = + let stderr: Option> = term::TerminfoTerminal::new(BufferedWriter::_new()) .map(|t| Box::new(t) as Box); match stderr { Some(t) => BufferedTerminal(t), - None => Raw(Box::new(io::stderr())), + None => Raw(Box::new(io::stderr())), } } #[cfg(windows)] /// Return a normal, unbuffered terminal when on Windows. fn from_stderr() -> Destination { - let stderr: Option> = - term::TerminfoTerminal::new(io::stderr()) - .map(|t| Box::new(t) as Box) - .or_else(|| term::WinConsole::new(io::stderr()).ok() - .map(|t| Box::new(t) as Box)); + let stderr: Option> = term::TerminfoTerminal::new(io::stderr()) + .map(|t| Box::new(t) as Box) + .or_else(|| { + term::WinConsole::new(io::stderr()) + .ok() + .map(|t| Box::new(t) as Box) + }); match stderr { Some(t) => Terminal(t), - None => Raw(Box::new(io::stderr())), + None => Raw(Box::new(io::stderr())), } } - fn apply_style(&mut self, - lvl: Level, - style: Style) - -> io::Result<()> { + fn apply_style(&mut self, lvl: Level, style: Style) -> io::Result<()> { match style { Style::FileNameStyle | Style::LineAndColumn => {} Style::LineNumber => { @@ -928,18 +917,26 @@ impl Destination { fn start_attr(&mut self, attr: term::Attr) -> io::Result<()> { match *self { - Terminal(ref mut t) => { t.attr(attr)?; } - BufferedTerminal(ref mut t) => { t.attr(attr)?; } - Raw(_) => { } + Terminal(ref mut t) => { + t.attr(attr)?; + } + BufferedTerminal(ref mut t) => { + t.attr(attr)?; + } + Raw(_) => {} } Ok(()) } fn reset_attrs(&mut self) -> io::Result<()> { match *self { - Terminal(ref mut t) => { t.reset()?; } - BufferedTerminal(ref mut t) => { t.reset()?; } - Raw(_) => { } + Terminal(ref mut t) => { + t.reset()?; + } + BufferedTerminal(ref mut t) => { + t.reset()?; + } + Raw(_) => {} } Ok(()) } diff --git a/src/librustc_errors/lib.rs b/src/librustc_errors/lib.rs index d2f3eea85f228..25b314256b092 100644 --- a/src/librustc_errors/lib.rs +++ b/src/librustc_errors/lib.rs @@ -21,15 +21,17 @@ #![allow(unused_attributes)] #![feature(rustc_private)] #![feature(staged_api)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #![feature(range_contains)] #![feature(libc)] #![feature(unicode)] extern crate serialize; extern crate term; -#[macro_use] extern crate log; -#[macro_use] extern crate libc; +#[macro_use] +extern crate log; +#[macro_use] +extern crate libc; extern crate rustc_unicode; extern crate serialize as rustc_serialize; // used by deriving extern crate syntax_pos; @@ -52,8 +54,8 @@ pub mod registry; pub mod styled_buffer; mod lock; -use syntax_pos::{BytePos, Loc, FileLinesResult, FileName, MultiSpan, Span, NO_EXPANSION }; -use syntax_pos::{MacroBacktrace}; +use syntax_pos::{BytePos, Loc, FileLinesResult, FileName, MultiSpan, Span, NO_EXPANSION}; +use syntax_pos::MacroBacktrace; #[derive(Clone)] pub enum RenderSpan { @@ -81,6 +83,7 @@ pub trait CodeMapper { fn span_to_string(&self, sp: Span) -> String; fn span_to_filename(&self, sp: Span) -> FileName; fn macro_backtrace(&self, span: Span) -> Vec; + fn merge_spans(&self, 
sp_lhs: Span, sp_rhs: Span) -> Option; } impl CodeSuggestion { @@ -88,9 +91,11 @@ impl CodeSuggestion { pub fn splice_lines(&self, cm: &CodeMapper) -> String { use syntax_pos::{CharPos, Loc, Pos}; - fn push_trailing(buf: &mut String, line_opt: Option<&str>, - lo: &Loc, hi_opt: Option<&Loc>) { - let (lo, hi_opt) = (lo.col.to_usize(), hi_opt.map(|hi|hi.col.to_usize())); + fn push_trailing(buf: &mut String, + line_opt: Option<&str>, + lo: &Loc, + hi_opt: Option<&Loc>) { + let (lo, hi_opt) = (lo.col.to_usize(), hi_opt.map(|hi| hi.col.to_usize())); if let Some(line) = line_opt { if line.len() > lo { buf.push_str(match hi_opt { @@ -118,7 +123,11 @@ impl CodeSuggestion { // Find the bounding span. let lo = primary_spans.iter().map(|sp| sp.lo).min().unwrap(); let hi = primary_spans.iter().map(|sp| sp.hi).min().unwrap(); - let bounding_span = Span { lo: lo, hi: hi, expn_id: NO_EXPANSION }; + let bounding_span = Span { + lo: lo, + hi: hi, + expn_id: NO_EXPANSION, + }; let lines = cm.span_to_lines(bounding_span).unwrap(); assert!(!lines.lines.is_empty()); @@ -152,7 +161,7 @@ impl CodeSuggestion { } } if let Some(cur_line) = fm.get_line(cur_lo.line - 1) { - buf.push_str(&cur_line[.. cur_lo.col.to_usize()]); + buf.push_str(&cur_line[..cur_lo.col.to_usize()]); } } buf.push_str(substitute); @@ -262,8 +271,7 @@ impl<'a> DiagnosticBuilder<'a> { /// all, and you just supplied a `Span` to create the diagnostic, /// then the snippet will just include that `Span`, which is /// called the primary span. - pub fn span_label(&mut self, span: Span, label: &fmt::Display) - -> &mut DiagnosticBuilder<'a> { + pub fn span_label(&mut self, span: Span, label: &fmt::Display) -> &mut DiagnosticBuilder<'a> { self.span.push_span_label(span, format!("{}", label)); self } @@ -272,8 +280,7 @@ impl<'a> DiagnosticBuilder<'a> { label: &fmt::Display, expected: &fmt::Display, found: &fmt::Display) - -> &mut DiagnosticBuilder<'a> - { + -> &mut DiagnosticBuilder<'a> { self.note_expected_found_extra(label, expected, found, &"", &"") } @@ -283,8 +290,7 @@ impl<'a> DiagnosticBuilder<'a> { found: &fmt::Display, expected_extra: &fmt::Display, found_extra: &fmt::Display) - -> &mut DiagnosticBuilder<'a> - { + -> &mut DiagnosticBuilder<'a> { // For now, just attach these as notes self.note(&format!("expected {} `{}`{}", label, expected, expected_extra)); self.note(&format!(" found {} `{}`{}", label, found, found_extra)); @@ -313,7 +319,7 @@ impl<'a> DiagnosticBuilder<'a> { self.sub(Level::Warning, msg, sp.into(), None); self } - pub fn help(&mut self , msg: &str) -> &mut DiagnosticBuilder<'a> { + pub fn help(&mut self, msg: &str) -> &mut DiagnosticBuilder<'a> { self.sub(Level::Help, msg, MultiSpan::new(), None); self } @@ -332,10 +338,13 @@ impl<'a> DiagnosticBuilder<'a> { msg: &str, suggestion: String) -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Help, msg, MultiSpan::new(), Some(Suggestion(CodeSuggestion { - msp: sp.into(), - substitutes: vec![suggestion], - }))); + self.sub(Level::Help, + msg, + MultiSpan::new(), + Some(Suggestion(CodeSuggestion { + msp: sp.into(), + substitutes: vec![suggestion], + }))); self } @@ -359,18 +368,17 @@ impl<'a> DiagnosticBuilder<'a> { /// Convenience function for internal use, clients should use one of the /// struct_* methods on Handler. 
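// [Editor's sketch, not part of the patch] A self-contained toy of the
// "emit or cancel before drop" guard that `DiagnosticBuilder` enforces through
// its Drop impl a little further down in this hunk (toy type; the real builder
// also tracks a level, an optional error code and spans).
use std::thread::panicking;

struct SketchDiag {
    message: String,
    consumed: bool,
}

impl SketchDiag {
    fn new(message: &str) -> SketchDiag {
        SketchDiag { message: message.to_string(), consumed: false }
    }
    fn emit(&mut self) {
        eprintln!("error: {}", self.message);
        self.consumed = true;
    }
    fn cancel(&mut self) {
        self.consumed = true;
    }
}

impl Drop for SketchDiag {
    fn drop(&mut self) {
        // Mirrors DiagnosticBuilder::drop: a diagnostic that was constructed
        // but never emitted (or cancelled) is a bug in the calling code.
        if !panicking() && !self.consumed {
            panic!("Error constructed but not emitted");
        }
    }
}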
- fn new(handler: &'a Handler, - level: Level, - message: &str) -> DiagnosticBuilder<'a> { + fn new(handler: &'a Handler, level: Level, message: &str) -> DiagnosticBuilder<'a> { DiagnosticBuilder::new_with_code(handler, level, None, message) } /// Convenience function for internal use, clients should use one of the /// struct_* methods on Handler. fn new_with_code(handler: &'a Handler, - level: Level, - code: Option, - message: &str) -> DiagnosticBuilder<'a> { + level: Level, + code: Option, + message: &str) + -> DiagnosticBuilder<'a> { DiagnosticBuilder { handler: handler, level: level, @@ -409,9 +417,8 @@ impl<'a> fmt::Debug for DiagnosticBuilder<'a> { impl<'a> Drop for DiagnosticBuilder<'a> { fn drop(&mut self) { if !panicking() && !self.cancelled() { - let mut db = DiagnosticBuilder::new(self.handler, - Bug, - "Error constructed but not emitted"); + let mut db = + DiagnosticBuilder::new(self.handler, Bug, "Error constructed but not emitted"); db.emit(); panic!(); } @@ -442,7 +449,8 @@ impl Handler { pub fn with_emitter(can_emit_warnings: bool, treat_err_as_bug: bool, - e: Box) -> Handler { + e: Box) + -> Handler { Handler { err_count: Cell::new(0), emitter: RefCell::new(e), @@ -543,10 +551,10 @@ impl Handler { pub fn cancel(&self, err: &mut DiagnosticBuilder) { if err.level == Level::Error || err.level == Level::Fatal { - self.err_count.set( - self.err_count.get().checked_sub(1) - .expect("cancelled an error but err_count is 0") - ); + self.err_count.set(self.err_count + .get() + .checked_sub(1) + .expect("cancelled an error but err_count is 0")); } err.cancel(); } @@ -557,14 +565,16 @@ impl Handler { } } - pub fn span_fatal>(&self, sp: S, msg: &str) - -> FatalError { + pub fn span_fatal>(&self, sp: S, msg: &str) -> FatalError { self.emit(&sp.into(), msg, Fatal); self.bump_err_count(); self.panic_if_treat_err_as_bug(); return FatalError; } - pub fn span_fatal_with_code>(&self, sp: S, msg: &str, code: &str) + pub fn span_fatal_with_code>(&self, + sp: S, + msg: &str, + code: &str) -> FatalError { self.emit_with_code(&sp.into(), msg, code, Fatal); self.bump_err_count(); @@ -618,9 +628,7 @@ impl Handler { if self.treat_err_as_bug { self.bug(msg); } - let mut db = DiagnosticBuilder::new(self, - Fatal, - msg); + let mut db = DiagnosticBuilder::new(self, Fatal, msg); db.emit(); self.bump_err_count(); FatalError @@ -629,28 +637,20 @@ impl Handler { if self.treat_err_as_bug { self.bug(msg); } - let mut db = DiagnosticBuilder::new(self, - Error, - msg); + let mut db = DiagnosticBuilder::new(self, Error, msg); db.emit(); self.bump_err_count(); } pub fn warn(&self, msg: &str) { - let mut db = DiagnosticBuilder::new(self, - Warning, - msg); + let mut db = DiagnosticBuilder::new(self, Warning, msg); db.emit(); } pub fn note_without_error(&self, msg: &str) { - let mut db = DiagnosticBuilder::new(self, - Note, - msg); + let mut db = DiagnosticBuilder::new(self, Note, msg); db.emit(); } pub fn bug(&self, msg: &str) -> ! 
{ - let mut db = DiagnosticBuilder::new(self, - Bug, - msg); + let mut db = DiagnosticBuilder::new(self, Bug, msg); db.emit(); panic!(ExplicitBug); } @@ -677,44 +677,41 @@ impl Handler { match *delayed_bug { Some((ref span, ref errmsg)) => { self.span_bug(span.clone(), errmsg); - }, + } _ => {} } return; } 1 => s = "aborting due to previous error".to_string(), - _ => { - s = format!("aborting due to {} previous errors", - self.err_count.get()); + _ => { + s = format!("aborting due to {} previous errors", self.err_count.get()); } } panic!(self.fatal(&s)); } - pub fn emit(&self, - msp: &MultiSpan, - msg: &str, - lvl: Level) { - if lvl == Warning && !self.can_emit_warnings { return } + pub fn emit(&self, msp: &MultiSpan, msg: &str, lvl: Level) { + if lvl == Warning && !self.can_emit_warnings { + return; + } let mut db = DiagnosticBuilder::new(self, lvl, msg); db.set_span(msp.clone()); db.emit(); - if !self.continue_after_error.get() { self.abort_if_errors(); } - } - pub fn emit_with_code(&self, - msp: &MultiSpan, - msg: &str, - code: &str, - lvl: Level) { - if lvl == Warning && !self.can_emit_warnings { return } - let mut db = DiagnosticBuilder::new_with_code(self, - lvl, - Some(code.to_owned()), - msg); + if !self.continue_after_error.get() { + self.abort_if_errors(); + } + } + pub fn emit_with_code(&self, msp: &MultiSpan, msg: &str, code: &str, lvl: Level) { + if lvl == Warning && !self.can_emit_warnings { + return; + } + let mut db = DiagnosticBuilder::new_with_code(self, lvl, Some(code.to_owned()), msg); db.set_span(msp.clone()); db.emit(); - if !self.continue_after_error.get() { self.abort_if_errors(); } + if !self.continue_after_error.get() { + self.abort_if_errors(); + } } } @@ -749,7 +746,7 @@ impl Level { } else { term::color::YELLOW } - }, + } Note => term::color::BRIGHT_GREEN, Help => term::color::BRIGHT_CYAN, Cancelled => unreachable!(), @@ -768,8 +765,8 @@ impl Level { } } -pub fn expect(diag: &Handler, opt: Option, msg: M) -> T where - M: FnOnce() -> String, +pub fn expect(diag: &Handler, opt: Option, msg: M) -> T + where M: FnOnce() -> String { match opt { Some(t) => t, diff --git a/src/librustc_errors/lock.rs b/src/librustc_errors/lock.rs index 0a9e0c4bbefb3..4c298228c37c7 100644 --- a/src/librustc_errors/lock.rs +++ b/src/librustc_errors/lock.rs @@ -40,9 +40,9 @@ pub fn acquire_global_lock(name: &str) -> Box { extern "system" { fn CreateMutexA(lpMutexAttributes: LPSECURITY_ATTRIBUTES, bInitialOwner: BOOL, - lpName: LPCSTR) -> HANDLE; - fn WaitForSingleObject(hHandle: HANDLE, - dwMilliseconds: DWORD) -> DWORD; + lpName: LPCSTR) + -> HANDLE; + fn WaitForSingleObject(hHandle: HANDLE, dwMilliseconds: DWORD) -> DWORD; fn ReleaseMutex(hMutex: HANDLE) -> BOOL; fn CloseHandle(hObject: HANDLE) -> BOOL; } @@ -76,7 +76,8 @@ pub fn acquire_global_lock(name: &str) -> Box { // open up a handle to one if it already exists. 
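// [Editor's sketch, not part of the patch] The WinAPI code above takes a named,
// cross-process mutex so that concurrent rustc processes do not interleave
// their error output (see the comment in emit_to_destination earlier in this
// patch). Within a single process the same discipline looks like this with a
// plain std `Mutex`: take the lock, write the fully rendered message in one
// go, then release.
use std::io::Write;
use std::sync::Mutex;

static ERROR_OUTPUT_LOCK: Mutex<()> = Mutex::new(());

fn emit_whole_error(rendered: &str) {
    let _guard = ERROR_OUTPUT_LOCK.lock().unwrap();
    let mut stderr = std::io::stderr().lock();
    let _ = stderr.write_all(rendered.as_bytes());
    let _ = stderr.write_all(b"\n");
}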
let mutex = CreateMutexA(0 as *mut _, 0, cname.as_ptr() as *const u8); if mutex.is_null() { - panic!("failed to create global mutex named `{}`: {}", name, + panic!("failed to create global mutex named `{}`: {}", + name, io::Error::last_os_error()); } let mutex = Handle(mutex); @@ -96,8 +97,10 @@ pub fn acquire_global_lock(name: &str) -> Box { WAIT_OBJECT_0 | WAIT_ABANDONED => {} code => { panic!("WaitForSingleObject failed on global mutex named \ - `{}`: {} (ret={:x})", name, - io::Error::last_os_error(), code); + `{}`: {} (ret={:x})", + name, + io::Error::last_os_error(), + code); } } diff --git a/src/librustc_errors/registry.rs b/src/librustc_errors/registry.rs index a6cfd1a5a9ac3..83737681471e2 100644 --- a/src/librustc_errors/registry.rs +++ b/src/librustc_errors/registry.rs @@ -12,7 +12,7 @@ use std::collections::HashMap; #[derive(Clone)] pub struct Registry { - descriptions: HashMap<&'static str, &'static str> + descriptions: HashMap<&'static str, &'static str>, } impl Registry { diff --git a/src/librustc_errors/snippet.rs b/src/librustc_errors/snippet.rs index 5ade8cd9bad82..abfb71c861b25 100644 --- a/src/librustc_errors/snippet.rs +++ b/src/librustc_errors/snippet.rs @@ -13,12 +13,12 @@ use syntax_pos::{Span, FileMap}; use CodeMapper; use std::rc::Rc; -use {Level}; +use Level; #[derive(Clone)] pub struct SnippetData { codemap: Rc, - files: Vec + files: Vec, } #[derive(Clone)] @@ -84,4 +84,4 @@ pub enum Style { NoStyle, ErrorCode, Level(Level), -} \ No newline at end of file +} diff --git a/src/librustc_errors/styled_buffer.rs b/src/librustc_errors/styled_buffer.rs index 9768b68619e79..dfc7c64de0197 100644 --- a/src/librustc_errors/styled_buffer.rs +++ b/src/librustc_errors/styled_buffer.rs @@ -28,10 +28,9 @@ impl StyledBuffer { pub fn copy_tabs(&mut self, row: usize) { if row < self.text.len() { - for i in row+1..self.text.len() { + for i in row + 1..self.text.len() { for j in 0..self.text[i].len() { - if self.text[row].len() > j && - self.text[row][j] == '\t' && + if self.text[row].len() > j && self.text[row][j] == '\t' && self.text[i][j] == ' ' { self.text[i][j] = '\t'; } @@ -44,7 +43,7 @@ impl StyledBuffer { let mut output: Vec> = vec![]; let mut styled_vec: Vec = vec![]; - //before we render, do a little patch-up work to support tabs + // before we render, do a little patch-up work to support tabs self.copy_tabs(3); for (row, row_style) in self.text.iter().zip(&self.styles) { diff --git a/src/librustc_incremental/Cargo.toml b/src/librustc_incremental/Cargo.toml index 7db1a6348b27c..e3ee752754504 100644 --- a/src/librustc_incremental/Cargo.toml +++ b/src/librustc_incremental/Cargo.toml @@ -10,10 +10,9 @@ crate-type = ["dylib"] [dependencies] graphviz = { path = "../libgraphviz" } -rbml = { path = "../librbml" } rustc = { path = "../librustc" } rustc_data_structures = { path = "../librustc_data_structures" } serialize = { path = "../libserialize" } log = { path = "../liblog" } syntax = { path = "../libsyntax" } -syntax_pos = { path = "../libsyntax_pos" } \ No newline at end of file +syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/librustc_incremental/assert_dep_graph.rs b/src/librustc_incremental/assert_dep_graph.rs index b28454cddb247..28aab1fdd4167 100644 --- a/src/librustc_incremental/assert_dep_graph.rs +++ b/src/librustc_incremental/assert_dep_graph.rs @@ -59,9 +59,7 @@ use std::io::Write; use syntax::ast; use syntax::parse::token::InternedString; use syntax_pos::Span; - -const IF_THIS_CHANGED: &'static str = "rustc_if_this_changed"; -const THEN_THIS_WOULD_NEED: 
&'static str = "rustc_then_this_would_need"; +use {ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED}; pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let _ignore = tcx.dep_graph.in_ignore(); @@ -91,7 +89,7 @@ pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { assert!(tcx.sess.opts.debugging_opts.query_dep_graph, "cannot use the `#[{}]` or `#[{}]` annotations \ without supplying `-Z query-dep-graph`", - IF_THIS_CHANGED, THEN_THIS_WOULD_NEED); + ATTR_IF_THIS_CHANGED, ATTR_THEN_THIS_WOULD_NEED); } // Check paths. @@ -125,7 +123,7 @@ impl<'a, 'tcx> IfThisChanged<'a, 'tcx> { fn process_attrs(&mut self, node_id: ast::NodeId, attrs: &[ast::Attribute]) { let def_id = self.tcx.map.local_def_id(node_id); for attr in attrs { - if attr.check_name(IF_THIS_CHANGED) { + if attr.check_name(ATTR_IF_THIS_CHANGED) { let dep_node_interned = self.argument(attr); let dep_node = match dep_node_interned { None => DepNode::Hir(def_id), @@ -141,7 +139,7 @@ impl<'a, 'tcx> IfThisChanged<'a, 'tcx> { } }; self.if_this_changed.push((attr.span, def_id, dep_node)); - } else if attr.check_name(THEN_THIS_WOULD_NEED) { + } else if attr.check_name(ATTR_THEN_THIS_WOULD_NEED) { let dep_node_interned = self.argument(attr); let dep_node = match dep_node_interned { Some(ref n) => { diff --git a/src/librustc_incremental/calculate_svh/hasher.rs b/src/librustc_incremental/calculate_svh/hasher.rs new file mode 100644 index 0000000000000..49683a81227b1 --- /dev/null +++ b/src/librustc_incremental/calculate_svh/hasher.rs @@ -0,0 +1,52 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::mem; +use rustc_data_structures::blake2b::Blake2bHasher; +use rustc::ty::util::ArchIndependentHasher; +use ich::Fingerprint; + +#[derive(Debug)] +pub struct IchHasher { + state: ArchIndependentHasher, + bytes_hashed: u64, +} + +impl IchHasher { + pub fn new() -> IchHasher { + let hash_size = mem::size_of::(); + IchHasher { + state: ArchIndependentHasher::new(Blake2bHasher::new(hash_size, &[])), + bytes_hashed: 0 + } + } + + pub fn bytes_hashed(&self) -> u64 { + self.bytes_hashed + } + + pub fn finish(self) -> Fingerprint { + let mut fingerprint = Fingerprint::zero(); + fingerprint.0.copy_from_slice(self.state.into_inner().finalize()); + fingerprint + } +} + +impl ::std::hash::Hasher for IchHasher { + fn finish(&self) -> u64 { + bug!("Use other finish() implementation to get the full 128-bit hash."); + } + + #[inline] + fn write(&mut self, bytes: &[u8]) { + self.state.write(bytes); + self.bytes_hashed += bytes.len() as u64; + } +} diff --git a/src/librustc_incremental/calculate_svh/mod.rs b/src/librustc_incremental/calculate_svh/mod.rs index c54fe2114517e..3b0b37bb01ce3 100644 --- a/src/librustc_incremental/calculate_svh/mod.rs +++ b/src/librustc_incremental/calculate_svh/mod.rs @@ -28,7 +28,8 @@ //! at the beginning. 
use syntax::ast; -use std::hash::{Hash, SipHasher, Hasher}; +use std::cell::RefCell; +use std::hash::Hash; use rustc::dep_graph::DepNode; use rustc::hir; use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; @@ -41,12 +42,55 @@ use rustc::session::config::DebugInfoLevel::NoDebugInfo; use self::def_path_hash::DefPathHashes; use self::svh_visitor::StrictVersionHashVisitor; use self::caching_codemap_view::CachingCodemapView; +use self::hasher::IchHasher; +use ich::Fingerprint; mod def_path_hash; mod svh_visitor; mod caching_codemap_view; +pub mod hasher; + +pub struct IncrementalHashesMap { + hashes: FnvHashMap, Fingerprint>, + + // These are the metadata hashes for the current crate as they were stored + // during the last compilation session. They are only loaded if + // -Z query-dep-graph was specified and are needed for auto-tests using + // the #[rustc_metadata_dirty] and #[rustc_metadata_clean] attributes to + // check whether some metadata hash has changed in between two revisions. + pub prev_metadata_hashes: RefCell>, +} + +impl IncrementalHashesMap { + pub fn new() -> IncrementalHashesMap { + IncrementalHashesMap { + hashes: FnvHashMap(), + prev_metadata_hashes: RefCell::new(FnvHashMap()), + } + } + + pub fn insert(&mut self, k: DepNode, v: Fingerprint) -> Option { + self.hashes.insert(k, v) + } + + pub fn iter<'a>(&'a self) + -> ::std::collections::hash_map::Iter<'a, DepNode, Fingerprint> { + self.hashes.iter() + } + + pub fn len(&self) -> usize { + self.hashes.len() + } +} + +impl<'a> ::std::ops::Index<&'a DepNode> for IncrementalHashesMap { + type Output = Fingerprint; + + fn index(&self, index: &'a DepNode) -> &Fingerprint { + &self.hashes[index] + } +} -pub type IncrementalHashesMap = FnvHashMap, u64>; pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> IncrementalHashesMap { @@ -55,7 +99,7 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) let hash_spans = tcx.sess.opts.debuginfo != NoDebugInfo; let mut visitor = HashItemsVisitor { tcx: tcx, - hashes: FnvHashMap(), + hashes: IncrementalHashesMap::new(), def_path_hashes: DefPathHashes::new(tcx), codemap: CachingCodemapView::new(tcx), hash_spans: hash_spans, @@ -65,6 +109,9 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) |v| visit::walk_crate(v, krate)); krate.visit_all_items(&mut visitor); }); + + tcx.sess.perf_stats.incr_comp_hashes_count.set(visitor.hashes.len() as u64); + record_time(&tcx.sess.perf_stats.svh_time, || visitor.compute_crate_hash()); visitor.hashes } @@ -90,23 +137,26 @@ impl<'a, 'tcx> HashItemsVisitor<'a, 'tcx> { { assert!(def_id.is_local()); debug!("HashItemsVisitor::calculate(def_id={:?})", def_id); - // FIXME: this should use SHA1, not SipHash. SipHash is not - // built to avoid collisions. 
- let mut state = SipHasher::new(); + let mut state = IchHasher::new(); walk_op(&mut StrictVersionHashVisitor::new(&mut state, self.tcx, &mut self.def_path_hashes, &mut self.codemap, self.hash_spans)); + let bytes_hashed = state.bytes_hashed(); let item_hash = state.finish(); self.hashes.insert(DepNode::Hir(def_id), item_hash); debug!("calculate_item_hash: def_id={:?} hash={:?}", def_id, item_hash); + + let bytes_hashed = self.tcx.sess.perf_stats.incr_comp_bytes_hashed.get() + + bytes_hashed; + self.tcx.sess.perf_stats.incr_comp_bytes_hashed.set(bytes_hashed); } fn compute_crate_hash(&mut self) { let krate = self.tcx.map.krate(); - let mut crate_state = SipHasher::new(); + let mut crate_state = IchHasher::new(); let crate_disambiguator = self.tcx.sess.local_crate_disambiguator(); "crate_disambiguator".hash(&mut crate_state); diff --git a/src/librustc_incremental/calculate_svh/svh_visitor.rs b/src/librustc_incremental/calculate_svh/svh_visitor.rs index af8ec6c62578f..51c894e1b78f0 100644 --- a/src/librustc_incremental/calculate_svh/svh_visitor.rs +++ b/src/librustc_incremental/calculate_svh/svh_visitor.rs @@ -15,6 +15,11 @@ use self::SawExprComponent::*; use self::SawAbiComponent::*; +use self::SawItemComponent::*; +use self::SawPatComponent::*; +use self::SawTyComponent::*; +use self::SawTraitOrImplItemComponent::*; +use syntax::abi::Abi; use syntax::ast::{self, Name, NodeId}; use syntax::parse::token; use syntax_pos::{Span, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; @@ -25,18 +30,25 @@ use rustc::hir::def_id::DefId; use rustc::hir::intravisit as visit; use rustc::ty::TyCtxt; use rustc_data_structures::fnv; -use std::hash::{Hash, SipHasher}; +use std::hash::Hash; use super::def_path_hash::DefPathHashes; use super::caching_codemap_view::CachingCodemapView; - -const IGNORED_ATTRIBUTES: &'static [&'static str] = &["cfg", - "rustc_clean", - "rustc_dirty"]; +use super::hasher::IchHasher; + +const IGNORED_ATTRIBUTES: &'static [&'static str] = &[ + "cfg", + ::ATTR_IF_THIS_CHANGED, + ::ATTR_THEN_THIS_WOULD_NEED, + ::ATTR_DIRTY, + ::ATTR_CLEAN, + ::ATTR_DIRTY_METADATA, + ::ATTR_CLEAN_METADATA +]; pub struct StrictVersionHashVisitor<'a, 'hash: 'a, 'tcx: 'hash> { pub tcx: TyCtxt<'hash, 'tcx, 'tcx>, - pub st: &'a mut SipHasher, + pub st: &'a mut IchHasher, // collect a deterministic hash of def-ids that we have seen def_path_hashes: &'a mut DefPathHashes<'hash, 'tcx>, hash_spans: bool, @@ -44,7 +56,7 @@ pub struct StrictVersionHashVisitor<'a, 'hash: 'a, 'tcx: 'hash> { } impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { - pub fn new(st: &'a mut SipHasher, + pub fn new(st: &'a mut IchHasher, tcx: TyCtxt<'hash, 'tcx, 'tcx>, def_path_hashes: &'a mut DefPathHashes<'hash, 'tcx>, codemap: &'a mut CachingCodemapView<'tcx>, @@ -148,11 +160,11 @@ enum SawAbiComponent<'a> { SawMod, SawForeignItem, - SawItem, - SawTy, + SawItem(SawItemComponent), + SawTy(SawTyComponent), SawGenerics, - SawTraitItem, - SawImplItem, + SawTraitItem(SawTraitOrImplItemComponent), + SawImplItem(SawTraitOrImplItemComponent), SawStructField, SawVariant, SawPath(bool), @@ -160,7 +172,7 @@ enum SawAbiComponent<'a> { SawPathParameters, SawPathListItem, SawBlock, - SawPat, + SawPat(SawPatComponent), SawLocal, SawArm, SawExpr(SawExprComponent<'a>), @@ -191,6 +203,9 @@ enum SawAbiComponent<'a> { /// because the SVH is just a developer convenience; there is no /// guarantee of collision-freedom, hash collisions are just /// (hopefully) unlikely.) 
+/// +/// The xxxComponent enums and saw_xxx functions for Item, Pat, +/// Ty, TraitItem and ImplItem follow the same methodology. #[derive(Hash)] enum SawExprComponent<'a> { @@ -201,7 +216,7 @@ enum SawExprComponent<'a> { SawExprAgain(Option), SawExprBox, - SawExprVec, + SawExprArray, SawExprCall, SawExprMethodCall, SawExprTup, @@ -229,7 +244,7 @@ enum SawExprComponent<'a> { fn saw_expr<'a>(node: &'a Expr_) -> SawExprComponent<'a> { match *node { ExprBox(..) => SawExprBox, - ExprVec(..) => SawExprVec, + ExprArray(..) => SawExprArray, ExprCall(..) => SawExprCall, ExprMethodCall(..) => SawExprMethodCall, ExprTup(..) => SawExprTup, @@ -260,6 +275,134 @@ fn saw_expr<'a>(node: &'a Expr_) -> SawExprComponent<'a> { } } +#[derive(Hash)] +enum SawItemComponent { + SawItemExternCrate, + SawItemUse, + SawItemStatic(Mutability), + SawItemConst, + SawItemFn(Unsafety, Constness, Abi), + SawItemMod, + SawItemForeignMod, + SawItemTy, + SawItemEnum, + SawItemStruct, + SawItemUnion, + SawItemTrait(Unsafety), + SawItemDefaultImpl(Unsafety), + SawItemImpl(Unsafety, ImplPolarity) +} + +fn saw_item(node: &Item_) -> SawItemComponent { + match *node { + ItemExternCrate(..) => SawItemExternCrate, + ItemUse(..) => SawItemUse, + ItemStatic(_, mutability, _) => SawItemStatic(mutability), + ItemConst(..) =>SawItemConst, + ItemFn(_, unsafety, constness, abi, _, _) => SawItemFn(unsafety, constness, abi), + ItemMod(..) => SawItemMod, + ItemForeignMod(..) => SawItemForeignMod, + ItemTy(..) => SawItemTy, + ItemEnum(..) => SawItemEnum, + ItemStruct(..) => SawItemStruct, + ItemUnion(..) => SawItemUnion, + ItemTrait(unsafety, ..) => SawItemTrait(unsafety), + ItemDefaultImpl(unsafety, _) => SawItemDefaultImpl(unsafety), + ItemImpl(unsafety, implpolarity, ..) => SawItemImpl(unsafety, implpolarity) + } +} + +#[derive(Hash)] +enum SawPatComponent { + SawPatWild, + SawPatBinding(BindingMode), + SawPatStruct, + SawPatTupleStruct, + SawPatPath, + SawPatTuple, + SawPatBox, + SawPatRef(Mutability), + SawPatLit, + SawPatRange, + SawPatSlice +} + +fn saw_pat(node: &PatKind) -> SawPatComponent { + match *node { + PatKind::Wild => SawPatWild, + PatKind::Binding(bindingmode, ..) => SawPatBinding(bindingmode), + PatKind::Struct(..) => SawPatStruct, + PatKind::TupleStruct(..) => SawPatTupleStruct, + PatKind::Path(..) => SawPatPath, + PatKind::Tuple(..) => SawPatTuple, + PatKind::Box(..) => SawPatBox, + PatKind::Ref(_, mutability) => SawPatRef(mutability), + PatKind::Lit(..) => SawPatLit, + PatKind::Range(..) => SawPatRange, + PatKind::Slice(..) => SawPatSlice + } +} + +#[derive(Hash)] +enum SawTyComponent { + SawTySlice, + SawTyArray, + SawTyPtr(Mutability), + SawTyRptr(Mutability), + SawTyBareFn(Unsafety, Abi), + SawTyNever, + SawTyTup, + SawTyPath, + SawTyObjectSum, + SawTyPolyTraitRef, + SawTyImplTrait, + SawTyTypeof, + SawTyInfer +} + +fn saw_ty(node: &Ty_) -> SawTyComponent { + match *node { + TySlice(..) => SawTySlice, + TyArray(..) => SawTyArray, + TyPtr(ref mty) => SawTyPtr(mty.mutbl), + TyRptr(_, ref mty) => SawTyRptr(mty.mutbl), + TyBareFn(ref barefnty) => SawTyBareFn(barefnty.unsafety, barefnty.abi), + TyNever => SawTyNever, + TyTup(..) => SawTyTup, + TyPath(..) => SawTyPath, + TyObjectSum(..) => SawTyObjectSum, + TyPolyTraitRef(..) => SawTyPolyTraitRef, + TyImplTrait(..) => SawTyImplTrait, + TyTypeof(..) 
=> SawTyTypeof, + TyInfer => SawTyInfer + } +} + +#[derive(Hash)] +enum SawTraitOrImplItemComponent { + SawTraitOrImplItemConst, + SawTraitOrImplItemMethod(Unsafety, Constness, Abi), + SawTraitOrImplItemType +} + +fn saw_trait_item(ti: &TraitItem_) -> SawTraitOrImplItemComponent { + match *ti { + ConstTraitItem(..) => SawTraitOrImplItemConst, + MethodTraitItem(ref sig, _) => + SawTraitOrImplItemMethod(sig.unsafety, sig.constness, sig.abi), + TypeTraitItem(..) => SawTraitOrImplItemType + } +} + +fn saw_impl_item(ii: &ImplItemKind) -> SawTraitOrImplItemComponent { + match *ii { + ImplItemKind::Const(..) => SawTraitOrImplItemConst, + ImplItemKind::Method(ref sig, _) => + SawTraitOrImplItemMethod(sig.unsafety, sig.constness, sig.abi), + ImplItemKind::Type(..) => SawTraitOrImplItemType + } +} + #[derive(Clone, Copy, Hash, Eq, PartialEq)] enum SawSpanExpnKind { NoExpansion, @@ -376,10 +519,7 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has fn visit_item(&mut self, i: &'tcx Item) { debug!("visit_item: {:?} st={:?}", i, self.st); - - SawItem.hash(self.st); - // Hash the value of the discriminant of the Item variant. - self.hash_discriminant(&i.node); + SawItem(saw_item(&i.node)).hash(self.st); hash_span!(self, i.span); hash_attrs!(self, &i.attrs); visit::walk_item(self, i) @@ -392,7 +532,7 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has fn visit_ty(&mut self, t: &'tcx Ty) { debug!("visit_ty: st={:?}", self.st); - SawTy.hash(self.st); + SawTy(saw_ty(&t.node)).hash(self.st); hash_span!(self, t.span); visit::walk_ty(self, t) } @@ -405,8 +545,7 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has fn visit_trait_item(&mut self, ti: &'tcx TraitItem) { debug!("visit_trait_item: st={:?}", self.st); - SawTraitItem.hash(self.st); - self.hash_discriminant(&ti.node); + SawTraitItem(saw_trait_item(&ti.node)).hash(self.st); hash_span!(self, ti.span); hash_attrs!(self, &ti.attrs); visit::walk_trait_item(self, ti) @@ -414,8 +553,7 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has fn visit_impl_item(&mut self, ii: &'tcx ImplItem) { debug!("visit_impl_item: st={:?}", self.st); - SawImplItem.hash(self.st); - self.hash_discriminant(&ii.node); + SawImplItem(saw_impl_item(&ii.node)).hash(self.st); hash_span!(self, ii.span); hash_attrs!(self, &ii.attrs); visit::walk_impl_item(self, ii) @@ -445,8 +583,7 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has fn visit_pat(&mut self, p: &'tcx Pat) { debug!("visit_pat: st={:?}", self.st); - SawPat.hash(self.st); - self.hash_discriminant(&p.node); + SawPat(saw_pat(&p.node)).hash(self.st); hash_span!(self, p.span); visit::walk_pat(self, p) } @@ -538,13 +675,11 @@ impl<'a, 'hash, 'tcx> visit::Visitor<'tcx> for StrictVersionHashVisitor<'a, 'has fn visit_macro_def(&mut self, macro_def: &'tcx MacroDef) { debug!("visit_macro_def: st={:?}", self.st); - if macro_def.export { - SawMacroDef.hash(self.st); - hash_attrs!(self, ¯o_def.attrs); - visit::walk_macro_def(self, macro_def) - // FIXME(mw): We should hash the body of the macro too but we don't - // have a stable way of doing so yet. - } + SawMacroDef.hash(self.st); + hash_attrs!(self, ¯o_def.attrs); + visit::walk_macro_def(self, macro_def) + // FIXME(mw): We should hash the body of the macro too but we don't + // have a stable way of doing so yet. 
} } @@ -602,14 +737,15 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> { // def-id is the same, so it suffices to hash the def-id Def::Fn(..) | Def::Mod(..) | - Def::ForeignMod(..) | Def::Static(..) | Def::Variant(..) | + Def::VariantCtor(..) | Def::Enum(..) | Def::TyAlias(..) | Def::AssociatedTy(..) | Def::TyParam(..) | Def::Struct(..) | + Def::StructCtor(..) | Def::Union(..) | Def::Trait(..) | Def::Method(..) | diff --git a/src/librustc_incremental/ich/fingerprint.rs b/src/librustc_incremental/ich/fingerprint.rs new file mode 100644 index 0000000000000..005ac3896ce4c --- /dev/null +++ b/src/librustc_incremental/ich/fingerprint.rs @@ -0,0 +1,81 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use rustc_serialize::{Encodable, Decodable, Encoder, Decoder}; + +const FINGERPRINT_LENGTH: usize = 16; + +#[derive(Eq, PartialEq, Ord, PartialOrd, Hash, Debug, Clone, Copy)] +pub struct Fingerprint(pub [u8; FINGERPRINT_LENGTH]); + +impl Fingerprint { + #[inline] + pub fn zero() -> Fingerprint { + Fingerprint([0; FINGERPRINT_LENGTH]) + } + + pub fn from_smaller_hash(hash: u64) -> Fingerprint { + let mut result = Fingerprint::zero(); + result.0[0] = (hash >> 0) as u8; + result.0[1] = (hash >> 8) as u8; + result.0[2] = (hash >> 16) as u8; + result.0[3] = (hash >> 24) as u8; + result.0[4] = (hash >> 32) as u8; + result.0[5] = (hash >> 40) as u8; + result.0[6] = (hash >> 48) as u8; + result.0[7] = (hash >> 56) as u8; + result + } + + pub fn to_smaller_hash(&self) -> u64 { + ((self.0[0] as u64) << 0) | + ((self.0[1] as u64) << 8) | + ((self.0[2] as u64) << 16) | + ((self.0[3] as u64) << 24) | + ((self.0[4] as u64) << 32) | + ((self.0[5] as u64) << 40) | + ((self.0[6] as u64) << 48) | + ((self.0[7] as u64) << 56) + } +} + +impl Encodable for Fingerprint { + #[inline] + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + for &byte in &self.0[..] { + s.emit_u8(byte)?; + } + Ok(()) + } +} + +impl Decodable for Fingerprint { + #[inline] + fn decode(d: &mut D) -> Result { + let mut result = Fingerprint([0u8; FINGERPRINT_LENGTH]); + for byte in &mut result.0[..] { + *byte = d.read_u8()?; + } + Ok(result) + } +} + +impl ::std::fmt::Display for Fingerprint { + fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> Result<(), ::std::fmt::Error> { + for i in 0 .. self.0.len() { + if i > 0 { + write!(formatter, "::")?; + } + + write!(formatter, "{}", self.0[i])?; + } + Ok(()) + } +} diff --git a/src/librustc_incremental/ich/mod.rs b/src/librustc_incremental/ich/mod.rs new file mode 100644 index 0000000000000..8edd04322d7f6 --- /dev/null +++ b/src/librustc_incremental/ich/mod.rs @@ -0,0 +1,13 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
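// Illustrative sketch (not part of the diff): the Fingerprint added in
// ich/fingerprint.rs above is a 16-byte (128-bit) hash. from_smaller_hash and
// to_smaller_hash pack a u64 into the first eight bytes in little-endian
// order and read it back, which is how a crate SVH (a u64) gets widened into
// a Fingerprint elsewhere in this patch. Assumes Fingerprint is in scope.
fn fingerprint_roundtrip_sketch() {
    let hash: u64 = 0x1122_3344_5566_7788;
    let fingerprint = Fingerprint::from_smaller_hash(hash);
    assert_eq!(fingerprint.0[0], 0x88); // least-significant byte goes first
    assert_eq!(fingerprint.0[7], 0x11); // most-significant byte of the u64
    assert!(fingerprint.0[8..].iter().all(|&b| b == 0)); // upper half stays zero
    assert_eq!(fingerprint.to_smaller_hash(), hash); // exact round-trip
}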
+ +pub use self::fingerprint::Fingerprint; + +mod fingerprint; diff --git a/src/librustc_incremental/lib.rs b/src/librustc_incremental/lib.rs index 1f823eedda0dd..577e50699bffd 100644 --- a/src/librustc_incremental/lib.rs +++ b/src/librustc_incremental/lib.rs @@ -20,14 +20,13 @@ #![cfg_attr(not(stage0), deny(warnings))] #![feature(dotdot_in_tuple_patterns)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #![feature(rustc_private)] #![feature(staged_api)] #![feature(rand)] #![feature(core_intrinsics)] extern crate graphviz; -extern crate rbml; #[macro_use] extern crate rustc; extern crate rustc_data_structures; extern crate serialize as rustc_serialize; @@ -36,9 +35,17 @@ extern crate serialize as rustc_serialize; #[macro_use] extern crate syntax; extern crate syntax_pos; +const ATTR_DIRTY: &'static str = "rustc_dirty"; +const ATTR_CLEAN: &'static str = "rustc_clean"; +const ATTR_DIRTY_METADATA: &'static str = "rustc_metadata_dirty"; +const ATTR_CLEAN_METADATA: &'static str = "rustc_metadata_clean"; +const ATTR_IF_THIS_CHANGED: &'static str = "rustc_if_this_changed"; +const ATTR_THEN_THIS_WOULD_NEED: &'static str = "rustc_then_this_would_need"; + mod assert_dep_graph; mod calculate_svh; mod persist; +pub mod ich; pub use assert_dep_graph::assert_dep_graph; pub use calculate_svh::compute_incremental_hashes_map; diff --git a/src/librustc_incremental/persist/data.rs b/src/librustc_incremental/persist/data.rs index 12f3ed8ae2bd4..734ffe6a94412 100644 --- a/src/librustc_incremental/persist/data.rs +++ b/src/librustc_incremental/persist/data.rs @@ -13,6 +13,8 @@ use rustc::dep_graph::{DepNode, WorkProduct, WorkProductId}; use rustc::hir::def_id::DefIndex; use std::sync::Arc; +use rustc_data_structures::fnv::FnvHashMap; +use ich::Fingerprint; use super::directory::DefPathIndex; @@ -59,7 +61,7 @@ pub struct SerializedHash { /// the hash as of previous compilation, computed by code in /// `hash` module - pub hash: u64, + pub hash: Fingerprint, } #[derive(Debug, RustcEncodable, RustcDecodable)] @@ -93,6 +95,18 @@ pub struct SerializedMetadataHashes { /// a `DefPathIndex` that gets retracted to the current `DefId` /// (matching the one found in this structure). pub hashes: Vec, + + /// For each DefIndex (as it occurs in SerializedMetadataHash), this + /// map stores the DefPathIndex (as it occurs in DefIdDirectory), so + /// that we can find the new DefId for a SerializedMetadataHash in a + /// subsequent compilation session. + /// + /// This map is only needed for running auto-tests using the + /// #[rustc_metadata_dirty] and #[rustc_metadata_clean] attributes, and + /// is only populated if -Z query-dep-graph is specified. It will be + /// empty otherwise. Importing crates are perfectly happy with just having + /// the DefIndex. 
+ pub index_map: FnvHashMap } /// The hash for some metadata that (when saving) will be exported @@ -103,5 +117,5 @@ pub struct SerializedMetadataHash { pub def_index: DefIndex, /// the hash itself, computed by `calculate_item_hash` - pub hash: u64, + pub hash: Fingerprint, } diff --git a/src/librustc_incremental/persist/directory.rs b/src/librustc_incremental/persist/directory.rs index 89a79d1a487e0..d238121872be6 100644 --- a/src/librustc_incremental/persist/directory.rs +++ b/src/librustc_incremental/persist/directory.rs @@ -15,13 +15,12 @@ use rustc::dep_graph::DepNode; use rustc::hir::map::DefPath; -use rustc::hir::def_id::DefId; -use rustc::middle::cstore::LOCAL_CRATE; +use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc::ty::TyCtxt; use rustc::util::nodemap::DefIdMap; use std::fmt::{self, Debug}; use std::iter::once; -use syntax::ast; +use std::collections::HashMap; /// Index into the DefIdDirectory #[derive(Copy, Clone, Debug, PartialOrd, Ord, Hash, PartialEq, Eq, @@ -43,7 +42,7 @@ pub struct DefIdDirectory { #[derive(Debug, RustcEncodable, RustcDecodable)] pub struct CrateInfo { - krate: ast::CrateNum, + krate: CrateNum, name: String, disambiguator: String, } @@ -53,7 +52,7 @@ impl DefIdDirectory { DefIdDirectory { paths: vec![], krates: krates } } - fn max_current_crate(&self, tcx: TyCtxt) -> ast::CrateNum { + fn max_current_crate(&self, tcx: TyCtxt) -> CrateNum { tcx.sess.cstore.crates() .into_iter() .max() @@ -72,8 +71,8 @@ impl DefIdDirectory { pub fn krate_still_valid(&self, tcx: TyCtxt, - max_current_crate: ast::CrateNum, - krate: ast::CrateNum) -> bool { + max_current_crate: CrateNum, + krate: CrateNum) -> bool { // Check that the crate-number still matches. For now, if it // doesn't, just return None. We could do better, such as // finding the new number. 
@@ -81,7 +80,7 @@ impl DefIdDirectory { if krate > max_current_crate { false } else { - let old_info = &self.krates[krate as usize]; + let old_info = &self.krates[krate.as_usize()]; assert_eq!(old_info.krate, krate); let old_name: &str = &old_info.name; let old_disambiguator: &str = &old_info.disambiguator; @@ -92,18 +91,29 @@ impl DefIdDirectory { } pub fn retrace(&self, tcx: TyCtxt) -> RetracedDefIdDirectory { - let max_current_crate = self.max_current_crate(tcx); + + fn make_key(name: &str, disambiguator: &str) -> String { + format!("{}/{}", name, disambiguator) + } + + let new_krates: HashMap<_, _> = + once(LOCAL_CRATE) + .chain(tcx.sess.cstore.crates()) + .map(|krate| (make_key(&tcx.crate_name(krate), + &tcx.crate_disambiguator(krate)), krate)) + .collect(); let ids = self.paths.iter() .map(|path| { - if self.krate_still_valid(tcx, max_current_crate, path.krate) { - tcx.retrace_path(path) + let old_krate_id = path.krate.as_usize(); + assert!(old_krate_id < self.krates.len()); + let old_crate_info = &self.krates[old_krate_id]; + let old_crate_key = make_key(&old_crate_info.name, + &old_crate_info.disambiguator); + if let Some(&new_crate_key) = new_krates.get(&old_crate_key) { + tcx.retrace_path(new_crate_key, &path.data) } else { - debug!("crate {} changed from {:?} to {:?}/{:?}", - path.krate, - self.krates[path.krate as usize], - tcx.crate_name(path.krate), - tcx.crate_disambiguator(path.krate)); + debug!("crate {:?} no longer exists", old_crate_key); None } }) @@ -180,7 +190,6 @@ impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> { &self.directory.paths[id.index as usize] } - pub fn map(&mut self, node: &DepNode) -> DepNode { node.map_def(|&def_id| Some(self.add(def_id))).unwrap() } diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs index fda7ef207a344..94478f6603a6e 100644 --- a/src/librustc_incremental/persist/dirty_clean.rs +++ b/src/librustc_incremental/persist/dirty_clean.rs @@ -9,10 +9,10 @@ // except according to those terms. //! Debugging code to test the state of the dependency graph just -//! after it is loaded from disk. For each node marked with -//! `#[rustc_clean]` or `#[rustc_dirty]`, we will check that a -//! suitable node for that item either appears or does not appear in -//! the dep-graph, as appropriate: +//! after it is loaded from disk and just after it has been saved. +//! For each node marked with `#[rustc_clean]` or `#[rustc_dirty]`, +//! we will check that a suitable node for that item either appears +//! or does not appear in the dep-graph, as appropriate: //! //! - `#[rustc_dirty(label="TypeckItemBody", cfg="rev2")]` if we are //! in `#[cfg(rev2)]`, then there MUST NOT be a node @@ -23,6 +23,22 @@ //! //! Errors are reported if we are in the suitable configuration but //! the required condition is not met. +//! +//! The `#[rustc_metadata_dirty]` and `#[rustc_metadata_clean]` attributes +//! can be used to check the incremental compilation hash (ICH) values of +//! metadata exported in rlibs. +//! +//! - If a node is marked with `#[rustc_metadata_clean(cfg="rev2")]` we +//! check that the metadata hash for that node is the same for "rev2" +//! it was for "rev1". +//! - If a node is marked with `#[rustc_metadata_dirty(cfg="rev2")]` we +//! check that the metadata hash for that node is *different* for "rev2" +//! than it was for "rev1". +//! +//! Note that the metadata-testing attributes must never specify the +//! first revision. This would lead to a crash since there is no +//! 
previous revision to compare things to. +//! use super::directory::RetracedDefIdDirectory; use super::load::DirtyNodes; @@ -30,13 +46,15 @@ use rustc::dep_graph::{DepGraphQuery, DepNode}; use rustc::hir; use rustc::hir::def_id::DefId; use rustc::hir::intravisit::Visitor; -use rustc_data_structures::fnv::FnvHashSet; use syntax::ast::{self, Attribute, NestedMetaItem}; +use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap}; use syntax::parse::token::InternedString; +use syntax_pos::Span; use rustc::ty::TyCtxt; +use ich::Fingerprint; + +use {ATTR_DIRTY, ATTR_CLEAN, ATTR_DIRTY_METADATA, ATTR_CLEAN_METADATA}; -const DIRTY: &'static str = "rustc_dirty"; -const CLEAN: &'static str = "rustc_clean"; const LABEL: &'static str = "label"; const CFG: &'static str = "cfg"; @@ -70,50 +88,11 @@ pub struct DirtyCleanVisitor<'a, 'tcx:'a> { } impl<'a, 'tcx> DirtyCleanVisitor<'a, 'tcx> { - fn expect_associated_value(&self, item: &NestedMetaItem) -> InternedString { - if let Some(value) = item.value_str() { - value - } else { - let msg = if let Some(name) = item.name() { - format!("associated value expected for `{}`", name) - } else { - "expected an associated value".to_string() - }; - - self.tcx.sess.span_fatal(item.span, &msg); - } - } - - /// Given a `#[rustc_dirty]` or `#[rustc_clean]` attribute, scan - /// for a `cfg="foo"` attribute and check whether we have a cfg - /// flag called `foo`. - fn check_config(&self, attr: &ast::Attribute) -> bool { - debug!("check_config(attr={:?})", attr); - let config = &self.tcx.map.krate().config; - debug!("check_config: config={:?}", config); - for item in attr.meta_item_list().unwrap_or(&[]) { - if item.check_name(CFG) { - let value = self.expect_associated_value(item); - debug!("check_config: searching for cfg {:?}", value); - for cfg in &config[..] 
{ - if cfg.check_name(&value[..]) { - debug!("check_config: matched {:?}", cfg); - return true; - } - } - return false; - } - } - - self.tcx.sess.span_fatal( - attr.span, - &format!("no cfg attribute")); - } fn dep_node(&self, attr: &Attribute, def_id: DefId) -> DepNode { for item in attr.meta_item_list().unwrap_or(&[]) { if item.check_name(LABEL) { - let value = self.expect_associated_value(item); + let value = expect_associated_value(self.tcx, item); match DepNode::from_label_string(&value[..], def_id) { Ok(def_id) => return def_id, Err(()) => { @@ -194,12 +173,12 @@ impl<'a, 'tcx> Visitor<'tcx> for DirtyCleanVisitor<'a, 'tcx> { fn visit_item(&mut self, item: &'tcx hir::Item) { let def_id = self.tcx.map.local_def_id(item.id); for attr in self.tcx.get_attrs(def_id).iter() { - if attr.check_name(DIRTY) { - if self.check_config(attr) { + if attr.check_name(ATTR_DIRTY) { + if check_config(self.tcx, attr) { self.assert_dirty(item, self.dep_node(attr, def_id)); } - } else if attr.check_name(CLEAN) { - if self.check_config(attr) { + } else if attr.check_name(ATTR_CLEAN) { + if check_config(self.tcx, attr) { self.assert_clean(item, self.dep_node(attr, def_id)); } } @@ -207,3 +186,115 @@ impl<'a, 'tcx> Visitor<'tcx> for DirtyCleanVisitor<'a, 'tcx> { } } +pub fn check_dirty_clean_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + prev_metadata_hashes: &FnvHashMap, + current_metadata_hashes: &FnvHashMap) { + if !tcx.sess.opts.debugging_opts.query_dep_graph { + return; + } + + tcx.dep_graph.with_ignore(||{ + let krate = tcx.map.krate(); + krate.visit_all_items(&mut DirtyCleanMetadataVisitor { + tcx: tcx, + prev_metadata_hashes: prev_metadata_hashes, + current_metadata_hashes: current_metadata_hashes, + }); + }); +} + +pub struct DirtyCleanMetadataVisitor<'a, 'tcx:'a, 'm> { + tcx: TyCtxt<'a, 'tcx, 'tcx>, + prev_metadata_hashes: &'m FnvHashMap, + current_metadata_hashes: &'m FnvHashMap, +} + +impl<'a, 'tcx, 'm> Visitor<'tcx> for DirtyCleanMetadataVisitor<'a, 'tcx, 'm> { + fn visit_item(&mut self, item: &'tcx hir::Item) { + let def_id = self.tcx.map.local_def_id(item.id); + + for attr in self.tcx.get_attrs(def_id).iter() { + if attr.check_name(ATTR_DIRTY_METADATA) { + if check_config(self.tcx, attr) { + self.assert_state(false, def_id, item.span); + } + } else if attr.check_name(ATTR_CLEAN_METADATA) { + if check_config(self.tcx, attr) { + self.assert_state(true, def_id, item.span); + } + } + } + } +} + +impl<'a, 'tcx, 'm> DirtyCleanMetadataVisitor<'a, 'tcx, 'm> { + + fn assert_state(&self, should_be_clean: bool, def_id: DefId, span: Span) { + let item_path = self.tcx.item_path_str(def_id); + debug!("assert_state({})", item_path); + + if let Some(&prev_hash) = self.prev_metadata_hashes.get(&def_id) { + let hashes_are_equal = prev_hash == self.current_metadata_hashes[&def_id]; + + if should_be_clean && !hashes_are_equal { + self.tcx.sess.span_err( + span, + &format!("Metadata hash of `{}` is dirty, but should be clean", + item_path)); + } + + let should_be_dirty = !should_be_clean; + if should_be_dirty && hashes_are_equal { + self.tcx.sess.span_err( + span, + &format!("Metadata hash of `{}` is clean, but should be dirty", + item_path)); + } + } else { + self.tcx.sess.span_err( + span, + &format!("Could not find previous metadata hash of `{}`", + item_path)); + } + } +} + +/// Given a `#[rustc_dirty]` or `#[rustc_clean]` attribute, scan +/// for a `cfg="foo"` attribute and check whether we have a cfg +/// flag called `foo`. 
+fn check_config(tcx: TyCtxt, attr: &ast::Attribute) -> bool { + debug!("check_config(attr={:?})", attr); + let config = &tcx.sess.parse_sess.config; + debug!("check_config: config={:?}", config); + for item in attr.meta_item_list().unwrap_or(&[]) { + if item.check_name(CFG) { + let value = expect_associated_value(tcx, item); + debug!("check_config: searching for cfg {:?}", value); + for cfg in &config[..] { + if cfg.check_name(&value[..]) { + debug!("check_config: matched {:?}", cfg); + return true; + } + } + return false; + } + } + + tcx.sess.span_fatal( + attr.span, + &format!("no cfg attribute")); +} + +fn expect_associated_value(tcx: TyCtxt, item: &NestedMetaItem) -> InternedString { + if let Some(value) = item.value_str() { + value + } else { + let msg = if let Some(name) = item.name() { + format!("associated value expected for `{}`", name) + } else { + "expected an associated value".to_string() + }; + + tcx.sess.span_fatal(item.span, &msg); + } +} diff --git a/src/librustc_incremental/persist/file_format.rs b/src/librustc_incremental/persist/file_format.rs new file mode 100644 index 0000000000000..7c2b69e762b93 --- /dev/null +++ b/src/librustc_incremental/persist/file_format.rs @@ -0,0 +1,122 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! This module defines a generic file format that allows to check if a given +//! file generated by incremental compilation was generated by a compatible +//! compiler version. This file format is used for the on-disk version of the +//! dependency graph and the exported metadata hashes. +//! +//! In practice "compatible compiler version" means "exactly the same compiler +//! version", since the header encodes the git commit hash of the compiler. +//! Since we can always just ignore the incremental compilation cache and +//! compiler versions don't change frequently for the typical user, being +//! conservative here practically has no downside. + +use std::io::{self, Read}; +use std::path::Path; +use std::fs::File; +use std::env; + +use rustc::session::config::nightly_options; + +/// The first few bytes of files generated by incremental compilation +const FILE_MAGIC: &'static [u8] = b"RSIC"; + +/// Change this if the header format changes +const HEADER_FORMAT_VERSION: u16 = 0; + +/// A version string that hopefully is always different for compiler versions +/// with different encodings of incremental compilation artifacts. Contains +/// the git commit hash. +const RUSTC_VERSION: Option<&'static str> = option_env!("CFG_VERSION"); + +pub fn write_file_header(stream: &mut W) -> io::Result<()> { + stream.write_all(FILE_MAGIC)?; + stream.write_all(&[(HEADER_FORMAT_VERSION >> 0) as u8, + (HEADER_FORMAT_VERSION >> 8) as u8])?; + + let rustc_version = rustc_version(); + assert_eq!(rustc_version.len(), (rustc_version.len() as u8) as usize); + stream.write_all(&[rustc_version.len() as u8])?; + stream.write_all(rustc_version.as_bytes())?; + + Ok(()) +} + +/// Reads the contents of a file with a file header as defined in this module. +/// +/// - Returns `Ok(Some(data))` if the file existed and was generated by a +/// compatible compiler version. `data` is the entire contents of the file +/// *after* the header. 
+/// - Returns `Ok(None)` if the file did not exist or was generated by an +/// incompatible version of the compiler. +/// - Returns `Err(..)` if some kind of IO error occurred while reading the +/// file. +pub fn read_file(path: &Path) -> io::Result>> { + if !path.exists() { + return Ok(None); + } + + let mut file = File::open(path)?; + + // Check FILE_MAGIC + { + debug_assert!(FILE_MAGIC.len() == 4); + let mut file_magic = [0u8; 4]; + file.read_exact(&mut file_magic)?; + if file_magic != FILE_MAGIC { + return Ok(None) + } + } + + // Check HEADER_FORMAT_VERSION + { + debug_assert!(::std::mem::size_of_val(&HEADER_FORMAT_VERSION) == 2); + let mut header_format_version = [0u8; 2]; + file.read_exact(&mut header_format_version)?; + let header_format_version = (header_format_version[0] as u16) | + ((header_format_version[1] as u16) << 8); + + if header_format_version != HEADER_FORMAT_VERSION { + return Ok(None) + } + } + + // Check RUSTC_VERSION + { + let mut rustc_version_str_len = [0u8; 1]; + file.read_exact(&mut rustc_version_str_len)?; + let rustc_version_str_len = rustc_version_str_len[0] as usize; + let mut buffer = Vec::with_capacity(rustc_version_str_len); + buffer.resize(rustc_version_str_len, 0); + file.read_exact(&mut buffer[..])?; + + if &buffer[..] != rustc_version().as_bytes() { + return Ok(None); + } + } + + let mut data = vec![]; + file.read_to_end(&mut data)?; + + Ok(Some(data)) +} + +fn rustc_version() -> String { + if nightly_options::is_nightly_build() { + if let Some(val) = env::var_os("RUSTC_FORCE_INCR_COMP_ARTIFACT_HEADER") { + return val.to_string_lossy().into_owned() + } + } + + RUSTC_VERSION.expect("Cannot use rustc without explicit version for \ + incremental compilation") + .to_string() +} diff --git a/src/librustc_incremental/persist/fs.rs b/src/librustc_incremental/persist/fs.rs index 8166045be5f6f..ff7c3d0512e4f 100644 --- a/src/librustc_incremental/persist/fs.rs +++ b/src/librustc_incremental/persist/fs.rs @@ -114,8 +114,8 @@ //! unsupported file system and emit a warning in that case. This is not yet //! implemented. +use rustc::hir::def_id::{CrateNum, LOCAL_CRATE}; use rustc::hir::svh::Svh; -use rustc::middle::cstore::LOCAL_CRATE; use rustc::session::Session; use rustc::ty::TyCtxt; use rustc::util::fs as fs_util; @@ -129,7 +129,6 @@ use std::mem; use std::path::{Path, PathBuf}; use std::time::{UNIX_EPOCH, SystemTime, Duration}; use std::__rand::{thread_rng, Rng}; -use syntax::ast; const LOCK_FILE_EXT: &'static str = ".lock"; const DEP_GRAPH_FILENAME: &'static str = "dep-graph.bin"; @@ -235,10 +234,21 @@ pub fn prepare_session_directory(tcx: TyCtxt) -> Result { let print_file_copy_stats = tcx.sess.opts.debugging_opts.incremental_info; // Try copying over all files from the source directory - if copy_files(&session_dir, &source_directory, print_file_copy_stats).is_ok() { + if let Ok(allows_links) = copy_files(&session_dir, &source_directory, + print_file_copy_stats) { debug!("successfully copied data from: {}", source_directory.display()); + if !allows_links { + tcx.sess.warn(&format!("Hard linking files in the incremental \ + compilation cache failed. Copying files \ + instead. 
Consider moving the cache \ + directory to a file system which supports \ + hard linking in session dir `{}`", + session_dir.display()) + ); + } + tcx.sess.init_incr_comp_session(session_dir, directory_lock); return Ok(true) } else { @@ -346,10 +356,19 @@ pub fn finalize_session_directory(sess: &Session, svh: Svh) { let _ = garbage_collect_session_directories(sess); } +pub fn delete_all_session_dir_contents(sess: &Session) -> io::Result<()> { + let sess_dir_iterator = sess.incr_comp_session_dir().read_dir()?; + for entry in sess_dir_iterator { + let entry = entry?; + safe_remove_file(&entry.path())? + } + Ok(()) +} + fn copy_files(target_dir: &Path, source_dir: &Path, print_stats_on_success: bool) - -> Result<(), ()> { + -> Result { // We acquire a shared lock on the lock file of the directory, so that // nobody deletes it out from under us while we are reading from it. let lock_file_path = lock_file_path(source_dir); @@ -401,7 +420,7 @@ fn copy_files(target_dir: &Path, println!("incr. comp. session directory: {} files copied", files_copied); } - Ok(()) + Ok(files_linked > 0 || files_copied == 0) } /// Generate unique directory path of the form: @@ -580,7 +599,7 @@ fn string_to_timestamp(s: &str) -> Result { Ok(UNIX_EPOCH + duration) } -fn crate_path_tcx(tcx: TyCtxt, cnum: ast::CrateNum) -> PathBuf { +fn crate_path_tcx(tcx: TyCtxt, cnum: CrateNum) -> PathBuf { crate_path(tcx.sess, &tcx.crate_name(cnum), &tcx.crate_disambiguator(cnum)) } @@ -592,7 +611,7 @@ fn crate_path_tcx(tcx: TyCtxt, cnum: ast::CrateNum) -> PathBuf { /// crate's (name, disambiguator) pair. The metadata hashes are only valid for /// the exact version of the binary we are reading from now (i.e. the hashes /// are part of the dependency graph of a specific compilation session). -pub fn find_metadata_hashes_for(tcx: TyCtxt, cnum: ast::CrateNum) -> Option { +pub fn find_metadata_hashes_for(tcx: TyCtxt, cnum: CrateNum) -> Option { let crate_directory = crate_path_tcx(tcx, cnum); if !crate_directory.exists() { @@ -648,13 +667,14 @@ fn crate_path(sess: &Session, crate_name: &str, crate_disambiguator: &str) -> PathBuf { - use std::hash::{SipHasher, Hasher, Hash}; + use std::hash::{Hasher, Hash}; + use std::collections::hash_map::DefaultHasher; let incr_dir = sess.opts.incremental.as_ref().unwrap().clone(); // The full crate disambiguator is really long. A hash of it should be // sufficient. - let mut hasher = SipHasher::new(); + let mut hasher = DefaultHasher::new(); crate_disambiguator.hash(&mut hasher); let crate_name = format!("{}-{}", crate_name, encode_base_36(hasher.finish())); diff --git a/src/librustc_incremental/persist/hash.rs b/src/librustc_incremental/persist/hash.rs index bafaafd4afa0e..e365cbbd3a9a1 100644 --- a/src/librustc_incremental/persist/hash.rs +++ b/src/librustc_incremental/persist/hash.rs @@ -8,28 +8,26 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use rbml::Error; -use rbml::opaque::Decoder; use rustc::dep_graph::DepNode; -use rustc::hir::def_id::DefId; +use rustc::hir::def_id::{CrateNum, DefId}; use rustc::hir::svh::Svh; use rustc::ty::TyCtxt; use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::flock; use rustc_serialize::Decodable; -use std::io::{ErrorKind, Read}; -use std::fs::File; -use syntax::ast; +use rustc_serialize::opaque::Decoder; use IncrementalHashesMap; +use ich::Fingerprint; use super::data::*; use super::fs::*; +use super::file_format; pub struct HashContext<'a, 'tcx: 'a> { pub tcx: TyCtxt<'a, 'tcx, 'tcx>, incremental_hashes_map: &'a IncrementalHashesMap, - item_metadata_hashes: FnvHashMap, - crate_hashes: FnvHashMap, + item_metadata_hashes: FnvHashMap, + crate_hashes: FnvHashMap, } impl<'a, 'tcx> HashContext<'a, 'tcx> { @@ -53,7 +51,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> { } } - pub fn hash(&mut self, dep_node: &DepNode) -> Option { + pub fn hash(&mut self, dep_node: &DepNode) -> Option { match *dep_node { DepNode::Krate => { Some(self.incremental_hashes_map[dep_node]) @@ -92,7 +90,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> { } } - fn metadata_hash(&mut self, def_id: DefId) -> u64 { + fn metadata_hash(&mut self, def_id: DefId) -> Fingerprint { debug!("metadata_hash(def_id={:?})", def_id); assert!(!def_id.is_local()); @@ -105,14 +103,15 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> { // check whether we did not find detailed metadata for this // krate; in that case, we just use the krate's overall hash - if let Some(&hash) = self.crate_hashes.get(&def_id.krate) { - debug!("metadata_hash: def_id={:?} crate_hash={:?}", def_id, hash); + if let Some(&svh) = self.crate_hashes.get(&def_id.krate) { + debug!("metadata_hash: def_id={:?} crate_hash={:?}", def_id, svh); // micro-"optimization": avoid a cache miss if we ask // for metadata from this particular def-id again. - self.item_metadata_hashes.insert(def_id, hash.as_u64()); + let fingerprint = svh_to_fingerprint(svh); + self.item_metadata_hashes.insert(def_id, fingerprint); - return hash.as_u64(); + return fingerprint; } // otherwise, load the data and repeat. @@ -121,7 +120,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> { } } - fn load_data(&mut self, cnum: ast::CrateNum) { + fn load_data(&mut self, cnum: CrateNum) { debug!("load_data(cnum={})", cnum); let svh = self.tcx.sess.cstore.crate_hash(cnum); @@ -155,12 +154,9 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> { let hashes_file_path = metadata_hash_import_path(&session_dir); - let mut data = vec![]; - match - File::open(&hashes_file_path) - .and_then(|mut file| file.read_to_end(&mut data)) + match file_format::read_file(&hashes_file_path) { - Ok(_) => { + Ok(Some(data)) => { match self.load_from_data(cnum, &data, svh) { Ok(()) => { } Err(err) => { @@ -169,27 +165,22 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> { } } } + Ok(None) => { + // If the file is not found, that's ok. + } Err(err) => { - match err.kind() { - ErrorKind::NotFound => { - // If the file is not found, that's ok. - } - _ => { - self.tcx.sess.err( - &format!("could not load dep information from `{}`: {}", - hashes_file_path.display(), err)); - return; - } - } + self.tcx.sess.err( + &format!("could not load dep information from `{}`: {}", + hashes_file_path.display(), err)); } } } } fn load_from_data(&mut self, - cnum: ast::CrateNum, + cnum: CrateNum, data: &[u8], - expected_svh: Svh) -> Result<(), Error> { + expected_svh: Svh) -> Result<(), String> { debug!("load_from_data(cnum={})", cnum); // Load up the hashes for the def-ids from this crate. 
@@ -217,3 +208,7 @@ impl<'a, 'tcx> HashContext<'a, 'tcx> { Ok(()) } } + +fn svh_to_fingerprint(svh: Svh) -> Fingerprint { + Fingerprint::from_smaller_hash(svh.as_u64()) +} diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs index 6e6464e49683a..1f43e79ace3ae 100644 --- a/src/librustc_incremental/persist/load.rs +++ b/src/librustc_incremental/persist/load.rs @@ -10,24 +10,25 @@ //! Code to save/load the dep-graph from files. -use rbml::Error; -use rbml::opaque::Decoder; use rustc::dep_graph::DepNode; use rustc::hir::def_id::DefId; +use rustc::hir::svh::Svh; use rustc::session::Session; use rustc::ty::TyCtxt; -use rustc_data_structures::fnv::FnvHashSet; +use rustc_data_structures::fnv::{FnvHashSet, FnvHashMap}; use rustc_serialize::Decodable as RustcDecodable; -use std::io::Read; -use std::fs::{self, File}; +use rustc_serialize::opaque::Decoder; +use std::fs; use std::path::{Path}; use IncrementalHashesMap; +use ich::Fingerprint; use super::data::*; use super::directory::*; use super::dirty_clean; use super::hash::*; use super::fs::*; +use super::file_format; pub type DirtyNodes = FnvHashSet>; @@ -94,25 +95,26 @@ fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } fn load_data(sess: &Session, path: &Path) -> Option> { - if !path.exists() { - return None; - } - - let mut data = vec![]; - match - File::open(path) - .and_then(|mut file| file.read_to_end(&mut data)) - { - Ok(_) => { - Some(data) + match file_format::read_file(path) { + Ok(Some(data)) => return Some(data), + Ok(None) => { + // The file either didn't exist or was produced by an incompatible + // compiler version. Neither is an error. } Err(err) => { sess.err( &format!("could not load dep-graph from `{}`: {}", path.display(), err)); - None } } + + if let Err(err) = delete_all_session_dir_contents(sess) { + sess.err(&format!("could not clear incompatible incremental \ + compilation session directory `{}`: {}", + path.display(), err)); + } + + None } /// Decode the dep graph and load the edges/nodes that are still clean @@ -121,7 +123,7 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, incremental_hashes_map: &IncrementalHashesMap, dep_graph_data: &[u8], work_products_data: &[u8]) - -> Result<(), Error> + -> Result<(), String> { // Decode the list of work_products let mut work_product_decoder = Decoder::new(work_products_data, 0); @@ -225,6 +227,9 @@ pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, dirty_clean::check_dirty_clean_annotations(tcx, &dirty_raw_source_nodes, &retraced); + load_prev_metadata_hashes(tcx, + &retraced, + &mut *incremental_hashes_map.prev_metadata_hashes.borrow_mut()); Ok(()) } @@ -242,6 +247,9 @@ fn dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, if let Some(dep_node) = retraced.map(&hash.dep_node) { let current_hash = hcx.hash(&dep_node).unwrap(); if current_hash == hash.hash { + debug!("initial_dirty_nodes: {:?} is clean (hash={:?})", + dep_node.map_def(|&def_id| Some(tcx.def_path(def_id))).unwrap(), + current_hash); continue; } debug!("initial_dirty_nodes: {:?} is dirty as hash is {:?}, was {:?}", @@ -305,3 +313,57 @@ fn delete_dirty_work_product(tcx: TyCtxt, } } } + +fn load_prev_metadata_hashes(tcx: TyCtxt, + retraced: &RetracedDefIdDirectory, + output: &mut FnvHashMap) { + if !tcx.sess.opts.debugging_opts.query_dep_graph { + return + } + + debug!("load_prev_metadata_hashes() - Loading previous metadata hashes"); + + let file_path = metadata_hash_export_path(tcx.sess); + + if !file_path.exists() { + 
debug!("load_prev_metadata_hashes() - Couldn't find file containing \ + hashes at `{}`", file_path.display()); + return + } + + debug!("load_prev_metadata_hashes() - File: {}", file_path.display()); + + let data = match file_format::read_file(&file_path) { + Ok(Some(data)) => data, + Ok(None) => { + debug!("load_prev_metadata_hashes() - File produced by incompatible \ + compiler version: {}", file_path.display()); + return + } + Err(err) => { + debug!("load_prev_metadata_hashes() - Error reading file `{}`: {}", + file_path.display(), err); + return + } + }; + + debug!("load_prev_metadata_hashes() - Decoding hashes"); + let mut decoder = Decoder::new(&data, 0); + let _ = Svh::decode(&mut decoder).unwrap(); + let serialized_hashes = SerializedMetadataHashes::decode(&mut decoder).unwrap(); + + debug!("load_prev_metadata_hashes() - Mapping DefIds"); + + assert_eq!(serialized_hashes.index_map.len(), serialized_hashes.hashes.len()); + for serialized_hash in serialized_hashes.hashes { + let def_path_index = serialized_hashes.index_map[&serialized_hash.def_index]; + if let Some(def_id) = retraced.def_id(def_path_index) { + let old = output.insert(def_id, serialized_hash.hash); + assert!(old.is_none(), "already have hash for {:?}", def_id); + } + } + + debug!("load_prev_metadata_hashes() - successfully loaded {} hashes", + serialized_hashes.index_map.len()); +} + diff --git a/src/librustc_incremental/persist/mod.rs b/src/librustc_incremental/persist/mod.rs index ba0f71971bb45..26fcde05868b9 100644 --- a/src/librustc_incremental/persist/mod.rs +++ b/src/librustc_incremental/persist/mod.rs @@ -21,6 +21,7 @@ mod load; mod preds; mod save; mod work_product; +mod file_format; pub use self::fs::finalize_session_directory; pub use self::fs::in_incr_comp_dir; diff --git a/src/librustc_incremental/persist/preds.rs b/src/librustc_incremental/persist/preds.rs index af13484e4288d..fe1d627253f28 100644 --- a/src/librustc_incremental/persist/preds.rs +++ b/src/librustc_incremental/persist/preds.rs @@ -14,6 +14,7 @@ use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::graph::{DepthFirstTraversal, INCOMING, NodeIndex}; use super::hash::*; +use ich::Fingerprint; /// A data-structure that makes it easy to enumerate the hashable /// predecessors of any given dep-node. @@ -26,7 +27,7 @@ pub struct Predecessors<'query> { // - Keys: some hashable node // - Values: the hash thereof - pub hashes: FnvHashMap<&'query DepNode, u64>, + pub hashes: FnvHashMap<&'query DepNode, Fingerprint>, } impl<'q> Predecessors<'q> { diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs index 41212d8e138da..bc156b0e8913b 100644 --- a/src/librustc_incremental/persist/save.rs +++ b/src/librustc_incremental/persist/save.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use rbml::opaque::Encoder; use rustc::dep_graph::DepNode; use rustc::hir::def_id::DefId; use rustc::hir::svh::Svh; @@ -16,17 +15,22 @@ use rustc::session::Session; use rustc::ty::TyCtxt; use rustc_data_structures::fnv::FnvHashMap; use rustc_serialize::Encodable as RustcEncodable; -use std::hash::{Hash, Hasher, SipHasher}; +use rustc_serialize::opaque::Encoder; +use std::hash::Hash; use std::io::{self, Cursor, Write}; use std::fs::{self, File}; use std::path::PathBuf; use IncrementalHashesMap; +use ich::Fingerprint; use super::data::*; use super::directory::*; use super::hash::*; use super::preds::*; use super::fs::*; +use super::dirty_clean; +use super::file_format; +use calculate_svh::hasher::IchHasher; pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, incremental_hashes_map: &IncrementalHashesMap, @@ -37,16 +41,32 @@ pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, if sess.opts.incremental.is_none() { return; } - let mut hcx = HashContext::new(tcx, incremental_hashes_map); + let mut builder = DefIdDirectoryBuilder::new(tcx); let query = tcx.dep_graph.query(); + let mut hcx = HashContext::new(tcx, incremental_hashes_map); let preds = Predecessors::new(&query, &mut hcx); + let mut current_metadata_hashes = FnvHashMap(); + + // IMPORTANT: We are saving the metadata hashes *before* the dep-graph, + // since metadata-encoding might add new entries to the + // DefIdDirectory (which is saved in the dep-graph file). + save_in(sess, + metadata_hash_export_path(sess), + |e| encode_metadata_hashes(tcx, + svh, + &preds, + &mut builder, + &mut current_metadata_hashes, + e)); save_in(sess, dep_graph_path(sess), |e| encode_dep_graph(&preds, &mut builder, e)); - save_in(sess, - metadata_hash_export_path(sess), - |e| encode_metadata_hashes(tcx, svh, &preds, &mut builder, e)); + + let prev_metadata_hashes = incremental_hashes_map.prev_metadata_hashes.borrow(); + dirty_clean::check_dirty_clean_metadata(tcx, + &*prev_metadata_hashes, + ¤t_metadata_hashes); } pub fn save_work_products(sess: &Session) { @@ -63,13 +83,17 @@ pub fn save_work_products(sess: &Session) { fn save_in(sess: &Session, path_buf: PathBuf, encode: F) where F: FnOnce(&mut Encoder) -> io::Result<()> { + debug!("save: storing data in {}", path_buf.display()); + // delete the old dep-graph, if any // Note: It's important that we actually delete the old file and not just // truncate and overwrite it, since it might be a shared hard-link, the // underlying data of which we don't want to modify if path_buf.exists() { match fs::remove_file(&path_buf) { - Ok(()) => {} + Ok(()) => { + debug!("save: remove old file"); + } Err(err) => { sess.err(&format!("unable to delete old dep-graph at `{}`: {}", path_buf.display(), @@ -81,6 +105,7 @@ fn save_in(sess: &Session, path_buf: PathBuf, encode: F) // generate the data in a memory buffer let mut wr = Cursor::new(Vec::new()); + file_format::write_file_header(&mut wr).unwrap(); match encode(&mut Encoder::new(&mut wr)) { Ok(()) => {} Err(err) => { @@ -94,7 +119,9 @@ fn save_in(sess: &Session, path_buf: PathBuf, encode: F) // write the data out let data = wr.into_inner(); match File::create(&path_buf).and_then(|mut file| file.write_all(&data)) { - Ok(_) => {} + Ok(_) => { + debug!("save: data written to disk successfully"); + } Err(err) => { sess.err(&format!("failed to write dep-graph to `{}`: {}", path_buf.display(), @@ -159,18 +186,9 @@ pub fn encode_metadata_hashes(tcx: TyCtxt, svh: Svh, preds: &Predecessors, builder: &mut DefIdDirectoryBuilder, + current_metadata_hashes: &mut 
FnvHashMap, encoder: &mut Encoder) -> io::Result<()> { - let mut def_id_hashes = FnvHashMap(); - let mut def_id_hash = |def_id: DefId| -> u64 { - *def_id_hashes.entry(def_id) - .or_insert_with(|| { - let index = builder.add(def_id); - let path = builder.lookup_def_path(index); - path.deterministic_hash(tcx) - }) - }; - // For each `MetaData(X)` node where `X` is local, accumulate a // hash. These are the metadata items we export. Downstream // crates will want to see a hash that tells them whether we might @@ -178,7 +196,13 @@ pub fn encode_metadata_hashes(tcx: TyCtxt, // compiled. // // (I initially wrote this with an iterator, but it seemed harder to read.) - let mut serialized_hashes = SerializedMetadataHashes { hashes: vec![] }; + let mut serialized_hashes = SerializedMetadataHashes { + hashes: vec![], + index_map: FnvHashMap() + }; + + let mut def_id_hashes = FnvHashMap(); + for (&target, sources) in &preds.inputs { let def_id = match *target { DepNode::MetaData(def_id) => { @@ -188,6 +212,15 @@ pub fn encode_metadata_hashes(tcx: TyCtxt, _ => continue, }; + let mut def_id_hash = |def_id: DefId| -> u64 { + *def_id_hashes.entry(def_id) + .or_insert_with(|| { + let index = builder.add(def_id); + let path = builder.lookup_def_path(index); + path.deterministic_hash(tcx) + }) + }; + // To create the hash for each item `X`, we don't hash the raw // bytes of the metadata (though in principle we // could). Instead, we walk the predecessors of `MetaData(X)` @@ -201,7 +234,7 @@ pub fn encode_metadata_hashes(tcx: TyCtxt, // is the det. hash of the def-path. This is convenient // because we can sort this to get a stable ordering across // compilations, even if the def-ids themselves have changed. - let mut hashes: Vec<(DepNode, u64)> = sources.iter() + let mut hashes: Vec<(DepNode, Fingerprint)> = sources.iter() .map(|dep_node| { let hash_dep_node = dep_node.map_def(|&def_id| Some(def_id_hash(def_id))).unwrap(); let hash = preds.hashes[dep_node]; @@ -210,7 +243,7 @@ pub fn encode_metadata_hashes(tcx: TyCtxt, .collect(); hashes.sort(); - let mut state = SipHasher::new(); + let mut state = IchHasher::new(); hashes.hash(&mut state); let hash = state.finish(); @@ -221,6 +254,22 @@ pub fn encode_metadata_hashes(tcx: TyCtxt, }); } + if tcx.sess.opts.debugging_opts.query_dep_graph { + for serialized_hash in &serialized_hashes.hashes { + let def_id = DefId::local(serialized_hash.def_index); + + // Store entry in the index_map + let def_path_index = builder.add(def_id); + serialized_hashes.index_map.insert(def_id.index, def_path_index); + + // Record hash in current_metadata_hashes + current_metadata_hashes.insert(def_id, serialized_hash.hash); + } + + debug!("save: stored index_map (len={}) for serialized hashes", + serialized_hashes.index_map.len()); + } + // Encode everything. svh.encode(encoder)?; serialized_hashes.encode(encoder)?; diff --git a/src/librustc_lint/bad_style.rs b/src/librustc_lint/bad_style.rs index 84d65308f952f..fea3de59520ca 100644 --- a/src/librustc_lint/bad_style.rs +++ b/src/librustc_lint/bad_style.rs @@ -24,19 +24,21 @@ use rustc::hir::intravisit::FnKind; pub enum MethodLateContext { TraitDefaultImpl, TraitImpl, - PlainImpl + PlainImpl, } pub fn method_context(cx: &LateContext, id: ast::NodeId, span: Span) -> MethodLateContext { let def_id = cx.tcx.map.local_def_id(id); match cx.tcx.impl_or_trait_items.borrow().get(&def_id) { None => span_bug!(span, "missing method descriptor?!"), - Some(item) => match item.container() { - ty::TraitContainer(..) 
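// The per-item metadata hash above is made stable across compilations by keying each
// predecessor on the deterministic hash of its def-path and sorting before hashing,
// so the result does not depend on DefId numbering. A minimal sketch of that idea,
// with plain u64 pairs standing in for (def-path hash, Fingerprint) and std's
// DefaultHasher standing in for the compiler's IchHasher:
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn stable_combined_hash(mut preds: Vec<(u64, u64)>) -> u64 {
    // (deterministic def-path hash, fingerprint of the predecessor node)
    preds.sort(); // stable ordering even if the def-ids themselves have changed
    let mut state = DefaultHasher::new();
    preds.hash(&mut state);
    state.finish()
}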
=> MethodLateContext::TraitDefaultImpl, - ty::ImplContainer(cid) => { - match cx.tcx.impl_trait_ref(cid) { - Some(_) => MethodLateContext::TraitImpl, - None => MethodLateContext::PlainImpl + Some(item) => { + match item.container() { + ty::TraitContainer(..) => MethodLateContext::TraitDefaultImpl, + ty::ImplContainer(cid) => { + match cx.tcx.impl_trait_ref(cid) { + Some(_) => MethodLateContext::TraitImpl, + None => MethodLateContext::PlainImpl, + } } } } @@ -63,19 +65,20 @@ impl NonCamelCaseTypes { // start with a non-lowercase letter rather than non-uppercase // ones (some scripts don't have a concept of upper/lowercase) - !name.is_empty() && - !name.chars().next().unwrap().is_lowercase() && - !name.contains('_') + !name.is_empty() && !name.chars().next().unwrap().is_lowercase() && !name.contains('_') } fn to_camel_case(s: &str) -> String { - s.split('_').flat_map(|word| word.chars().enumerate().map(|(i, c)| - if i == 0 { - c.to_uppercase().collect::() - } else { - c.to_lowercase().collect() - } - )).collect::>().concat() + s.split('_') + .flat_map(|word| { + word.chars().enumerate().map(|(i, c)| if i == 0 { + c.to_uppercase().collect::() + } else { + c.to_lowercase().collect() + }) + }) + .collect::>() + .concat() } let s = name.as_str(); @@ -83,9 +86,14 @@ impl NonCamelCaseTypes { if !is_camel_case(name) { let c = to_camel_case(&s); let m = if c.is_empty() { - format!("{} `{}` should have a camel case name such as `CamelCase`", sort, s) + format!("{} `{}` should have a camel case name such as `CamelCase`", + sort, + s) } else { - format!("{} `{}` should have a camel case name such as `{}`", sort, s, c) + format!("{} `{}` should have a camel case name such as `{}`", + sort, + s, + c) }; cx.span_lint(NON_CAMEL_CASE_TYPES, span, &m[..]); } @@ -100,10 +108,14 @@ impl LintPass for NonCamelCaseTypes { impl LateLintPass for NonCamelCaseTypes { fn check_item(&mut self, cx: &LateContext, it: &hir::Item) { - let extern_repr_count = it.attrs.iter().filter(|attr| { - attr::find_repr_attrs(cx.tcx.sess.diagnostic(), attr).iter() - .any(|r| r == &attr::ReprExtern) - }).count(); + let extern_repr_count = it.attrs + .iter() + .filter(|attr| { + attr::find_repr_attrs(cx.tcx.sess.diagnostic(), attr) + .iter() + .any(|r| r == &attr::ReprExtern) + }) + .count(); let has_extern_repr = extern_repr_count > 0; if has_extern_repr { @@ -111,12 +123,10 @@ impl LateLintPass for NonCamelCaseTypes { } match it.node { - hir::ItemTy(..) | hir::ItemStruct(..) | hir::ItemUnion(..) => { - self.check_case(cx, "type", it.name, it.span) - } - hir::ItemTrait(..) => { - self.check_case(cx, "trait", it.name, it.span) - } + hir::ItemTy(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) => self.check_case(cx, "type", it.name, it.span), + hir::ItemTrait(..) 
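// Self-contained version of the camel-case helpers shown above (the turbofish type was
// lost in this diff's rendering; `String` is what the surrounding collect/concat implies):
fn is_camel_case(name: &str) -> bool {
    // A name passes if it doesn't start with a lowercase letter and has no underscores
    // (scripts without an upper/lowercase distinction still pass).
    !name.is_empty() && !name.chars().next().unwrap().is_lowercase() && !name.contains('_')
}

fn to_camel_case(s: &str) -> String {
    s.split('_')
        .flat_map(|word| {
            word.chars().enumerate().map(|(i, c)| if i == 0 {
                c.to_uppercase().collect::<String>()
            } else {
                c.to_lowercase().collect()
            })
        })
        .collect::<Vec<_>>()
        .concat()
}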
=> self.check_case(cx, "trait", it.name, it.span), hir::ItemEnum(ref enum_definition, _) => { if has_extern_repr { return; @@ -126,7 +136,7 @@ impl LateLintPass for NonCamelCaseTypes { self.check_case(cx, "variant", variant.node.name, variant.span); } } - _ => () + _ => (), } } @@ -165,9 +175,7 @@ impl NonSnakeCase { continue; } for ch in s.chars() { - if !buf.is_empty() && buf != "'" - && ch.is_uppercase() - && !last_upper { + if !buf.is_empty() && buf != "'" && ch.is_uppercase() && !last_upper { words.push(buf); buf = String::new(); } @@ -205,10 +213,11 @@ impl NonSnakeCase { let sc = NonSnakeCase::to_snake_case(name); let msg = if sc != name { format!("{} `{}` should have a snake case name such as `{}`", - sort, name, sc) + sort, + name, + sc) } else { - format!("{} `{}` should have a snake case name", - sort, name) + format!("{} `{}` should have a snake case name", sort, name) }; match span { Some(span) => cx.span_lint(NON_SNAKE_CASE, span, &msg), @@ -226,8 +235,10 @@ impl LintPass for NonSnakeCase { impl LateLintPass for NonSnakeCase { fn check_crate(&mut self, cx: &LateContext, cr: &hir::Crate) { - let attr_crate_name = cr.attrs.iter().find(|at| at.check_name("crate_name")) - .and_then(|at| at.value_str().map(|s| (at, s))); + let attr_crate_name = cr.attrs + .iter() + .find(|at| at.check_name("crate_name")) + .and_then(|at| at.value_str().map(|s| (at, s))); if let Some(ref name) = cx.tcx.sess.opts.crate_name { self.check_snake_case(cx, "crate", name, None); } else if let Some((attr, ref name)) = attr_crate_name { @@ -235,22 +246,28 @@ impl LateLintPass for NonSnakeCase { } } - fn check_fn(&mut self, cx: &LateContext, - fk: FnKind, _: &hir::FnDecl, - _: &hir::Block, span: Span, id: ast::NodeId) { + fn check_fn(&mut self, + cx: &LateContext, + fk: FnKind, + _: &hir::FnDecl, + _: &hir::Block, + span: Span, + id: ast::NodeId) { match fk { - FnKind::Method(name, ..) => match method_context(cx, id, span) { - MethodLateContext::PlainImpl => { - self.check_snake_case(cx, "method", &name.as_str(), Some(span)) - }, - MethodLateContext::TraitDefaultImpl => { - self.check_snake_case(cx, "trait method", &name.as_str(), Some(span)) - }, - _ => (), - }, + FnKind::Method(name, ..) => { + match method_context(cx, id, span) { + MethodLateContext::PlainImpl => { + self.check_snake_case(cx, "method", &name.as_str(), Some(span)) + } + MethodLateContext::TraitDefaultImpl => { + self.check_snake_case(cx, "trait method", &name.as_str(), Some(span)) + } + _ => (), + } + } FnKind::ItemFn(name, ..) 
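// Self-contained sketch of the snake-case rewrite the non_snake_case lint suggests:
// split at lowercase-to-uppercase boundaries, then join the lowercased words with
// underscores, e.g. to_snake_case("MyStructField") == "my_struct_field".
fn to_snake_case(ident: &str) -> String {
    let mut words = vec![];
    for s in ident.split('_') {
        if s.is_empty() {
            continue;
        }
        let mut last_upper = false;
        let mut buf = String::new();
        for ch in s.chars() {
            if !buf.is_empty() && buf != "'" && ch.is_uppercase() && !last_upper {
                words.push(buf);
                buf = String::new();
            }
            last_upper = ch.is_uppercase();
            buf.extend(ch.to_lowercase());
        }
        words.push(buf);
    }
    words.join("_")
}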
=> { self.check_snake_case(cx, "function", &name.as_str(), Some(span)) - }, + } FnKind::Closure(_) => (), } } @@ -263,13 +280,17 @@ impl LateLintPass for NonSnakeCase { fn check_trait_item(&mut self, cx: &LateContext, trait_item: &hir::TraitItem) { if let hir::MethodTraitItem(_, None) = trait_item.node { - self.check_snake_case(cx, "trait method", &trait_item.name.as_str(), + self.check_snake_case(cx, + "trait method", + &trait_item.name.as_str(), Some(trait_item.span)); } } fn check_lifetime_def(&mut self, cx: &LateContext, t: &hir::LifetimeDef) { - self.check_snake_case(cx, "lifetime", &t.lifetime.name.as_str(), + self.check_snake_case(cx, + "lifetime", + &t.lifetime.name.as_str(), Some(t.lifetime.span)); } @@ -282,8 +303,12 @@ impl LateLintPass for NonSnakeCase { } } - fn check_struct_def(&mut self, cx: &LateContext, s: &hir::VariantData, - _: ast::Name, _: &hir::Generics, _: ast::NodeId) { + fn check_struct_def(&mut self, + cx: &LateContext, + s: &hir::VariantData, + _: ast::Name, + _: &hir::Generics, + _: ast::NodeId) { for sf in s.fields() { self.check_snake_case(cx, "structure field", &sf.name.as_str(), Some(sf.span)); } @@ -306,13 +331,16 @@ impl NonUpperCaseGlobals { if s.chars().any(|c| c.is_lowercase()) { let uc = NonSnakeCase::to_snake_case(&s).to_uppercase(); if uc != &s[..] { - cx.span_lint(NON_UPPER_CASE_GLOBALS, span, - &format!("{} `{}` should have an upper case name such as `{}`", - sort, s, uc)); + cx.span_lint(NON_UPPER_CASE_GLOBALS, + span, + &format!("{} `{}` should have an upper case name such as `{}`", + sort, + s, + uc)); } else { - cx.span_lint(NON_UPPER_CASE_GLOBALS, span, - &format!("{} `{}` should have an upper case name", - sort, s)); + cx.span_lint(NON_UPPER_CASE_GLOBALS, + span, + &format!("{} `{}` should have an upper case name", sort, s)); } } } @@ -327,9 +355,8 @@ impl LintPass for NonUpperCaseGlobals { impl LateLintPass for NonUpperCaseGlobals { fn check_item(&mut self, cx: &LateContext, it: &hir::Item) { match it.node { - // only check static constants - hir::ItemStatic(_, hir::MutImmutable, _) => { - NonUpperCaseGlobals::check_upper_case(cx, "static constant", it.name, it.span); + hir::ItemStatic(..) => { + NonUpperCaseGlobals::check_upper_case(cx, "static variable", it.name, it.span); } hir::ItemConst(..) => { NonUpperCaseGlobals::check_upper_case(cx, "constant", it.name, it.span); @@ -341,8 +368,7 @@ impl LateLintPass for NonUpperCaseGlobals { fn check_trait_item(&mut self, cx: &LateContext, ti: &hir::TraitItem) { match ti.node { hir::ConstTraitItem(..) => { - NonUpperCaseGlobals::check_upper_case(cx, "associated constant", - ti.name, ti.span); + NonUpperCaseGlobals::check_upper_case(cx, "associated constant", ti.name, ti.span); } _ => {} } @@ -351,8 +377,7 @@ impl LateLintPass for NonUpperCaseGlobals { fn check_impl_item(&mut self, cx: &LateContext, ii: &hir::ImplItem) { match ii.node { hir::ImplItemKind::Const(..) => { - NonUpperCaseGlobals::check_upper_case(cx, "associated constant", - ii.name, ii.span); + NonUpperCaseGlobals::check_upper_case(cx, "associated constant", ii.name, ii.span); } _ => {} } @@ -363,8 +388,10 @@ impl LateLintPass for NonUpperCaseGlobals { if let PatKind::Path(None, ref path) = p.node { if !path.global && path.segments.len() == 1 && path.segments[0].parameters.is_empty() { if let Def::Const(..) 
= cx.tcx.expect_def(p.id) { - NonUpperCaseGlobals::check_upper_case(cx, "constant in pattern", - path.segments[0].name, path.span); + NonUpperCaseGlobals::check_upper_case(cx, + "constant in pattern", + path.segments[0].name, + path.span); } } } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index b610a924a3396..eee34324a6583 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -37,15 +37,16 @@ use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::adjustment; use rustc::traits::{self, Reveal}; use rustc::hir::map as hir_map; -use util::nodemap::{NodeSet}; +use util::nodemap::NodeSet; use lint::{Level, LateContext, LintContext, LintArray, Lint}; -use lint::{LintPass, LateLintPass}; +use lint::{LintPass, LateLintPass, EarlyLintPass, EarlyContext}; use std::collections::HashSet; -use syntax::{ast}; +use syntax::ast; use syntax::attr; -use syntax_pos::{Span}; +use syntax::feature_gate::{AttributeGate, AttributeType, Stability, deprecated_attributes}; +use syntax_pos::Span; use rustc::hir::{self, PatKind}; use rustc::hir::intravisit::FnKind; @@ -75,7 +76,8 @@ impl LateLintPass for WhileTrue { if let hir::ExprWhile(ref cond, ..) = e.node { if let hir::ExprLit(ref lit) = cond.node { if let ast::LitKind::Bool(true) = lit.node { - cx.span_lint(WHILE_TRUE, e.span, + cx.span_lint(WHILE_TRUE, + e.span, "denote infinite loops with loop { ... }"); } } @@ -93,8 +95,7 @@ declare_lint! { pub struct BoxPointers; impl BoxPointers { - fn check_heap_type<'a, 'tcx>(&self, cx: &LateContext<'a, 'tcx>, - span: Span, ty: Ty<'tcx>) { + fn check_heap_type<'a, 'tcx>(&self, cx: &LateContext<'a, 'tcx>, span: Span, ty: Ty<'tcx>) { for leaf_ty in ty.walk() { if let ty::TyBox(_) = leaf_ty.sty { let m = format!("type uses owned (Box type) pointers: {}", ty); @@ -117,10 +118,8 @@ impl LateLintPass for BoxPointers { hir::ItemTy(..) | hir::ItemEnum(..) | hir::ItemStruct(..) | - hir::ItemUnion(..) => - self.check_heap_type(cx, it.span, - cx.tcx.node_id_to_type(it.id)), - _ => () + hir::ItemUnion(..) => self.check_heap_type(cx, it.span, cx.tcx.node_id_to_type(it.id)), + _ => (), } // If it's a struct, we also have to check the fields' types @@ -128,11 +127,12 @@ impl LateLintPass for BoxPointers { hir::ItemStruct(ref struct_def, _) | hir::ItemUnion(ref struct_def, _) => { for struct_field in struct_def.fields() { - self.check_heap_type(cx, struct_field.span, + self.check_heap_type(cx, + struct_field.span, cx.tcx.node_id_to_type(struct_field.id)); } } - _ => () + _ => (), } } @@ -166,9 +166,11 @@ impl LateLintPass for NonShorthandFieldPatterns { } if let PatKind::Binding(_, ident, None) = fieldpat.node.pat.node { if ident.node == fieldpat.node.name { - cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, + cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, + fieldpat.span, &format!("the `{}:` in this pattern is redundant and can \ - be removed", ident.node)) + be removed", + ident.node)) } } } @@ -203,27 +205,35 @@ impl LateLintPass for UnsafeCode { fn check_item(&mut self, cx: &LateContext, it: &hir::Item) { match it.node { - hir::ItemTrait(hir::Unsafety::Unsafe, ..) => - cx.span_lint(UNSAFE_CODE, it.span, "declaration of an `unsafe` trait"), + hir::ItemTrait(hir::Unsafety::Unsafe, ..) => { + cx.span_lint(UNSAFE_CODE, it.span, "declaration of an `unsafe` trait") + } - hir::ItemImpl(hir::Unsafety::Unsafe, ..) => - cx.span_lint(UNSAFE_CODE, it.span, "implementation of an `unsafe` trait"), + hir::ItemImpl(hir::Unsafety::Unsafe, ..) 
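// A tiny example of what the while_true lint in builtin.rs (below) fires on: a literal
// `true` condition, for which `loop { ... }` is the idiomatic spelling.
#[allow(while_true)]
fn spin() {
    while true {   // the lint's suggestion: denote infinite loops with `loop { ... }`
        break;
    }
}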
=> { + cx.span_lint(UNSAFE_CODE, it.span, "implementation of an `unsafe` trait") + } _ => return, } } - fn check_fn(&mut self, cx: &LateContext, fk: FnKind, _: &hir::FnDecl, - _: &hir::Block, span: Span, _: ast::NodeId) { + fn check_fn(&mut self, + cx: &LateContext, + fk: FnKind, + _: &hir::FnDecl, + _: &hir::Block, + span: Span, + _: ast::NodeId) { match fk { - FnKind::ItemFn(_, _, hir::Unsafety::Unsafe, ..) => - cx.span_lint(UNSAFE_CODE, span, "declaration of an `unsafe` function"), + FnKind::ItemFn(_, _, hir::Unsafety::Unsafe, ..) => { + cx.span_lint(UNSAFE_CODE, span, "declaration of an `unsafe` function") + } FnKind::Method(_, sig, ..) => { if sig.unsafety == hir::Unsafety::Unsafe { cx.span_lint(UNSAFE_CODE, span, "implementation of an `unsafe` method") } - }, + } _ => (), } @@ -232,7 +242,8 @@ impl LateLintPass for UnsafeCode { fn check_trait_item(&mut self, cx: &LateContext, trait_item: &hir::TraitItem) { if let hir::MethodTraitItem(ref sig, None) = trait_item.node { if sig.unsafety == hir::Unsafety::Unsafe { - cx.span_lint(UNSAFE_CODE, trait_item.span, + cx.span_lint(UNSAFE_CODE, + trait_item.span, "declaration of an `unsafe` method") } } @@ -263,9 +274,9 @@ pub struct MissingDoc { impl MissingDoc { pub fn new() -> MissingDoc { MissingDoc { - struct_def_stack: vec!(), + struct_def_stack: vec![], in_variant: false, - doc_hidden_stack: vec!(false), + doc_hidden_stack: vec![false], private_traits: HashSet::new(), } } @@ -275,11 +286,11 @@ impl MissingDoc { } fn check_missing_docs_attrs(&self, - cx: &LateContext, - id: Option, - attrs: &[ast::Attribute], - sp: Span, - desc: &'static str) { + cx: &LateContext, + id: Option, + attrs: &[ast::Attribute], + sp: Span, + desc: &'static str) { // If we're building a test harness, then warning about // documentation is probably not really relevant right now. 
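// The unsafe_code lint above is allow-by-default; a crate that denies it rejects
// declarations like the commented ones (the messages match the span_lint calls above):
#[deny(unsafe_code)]
mod no_unsafe {
    // unsafe trait PodLike {}         // error: declaration of an `unsafe` trait
    // unsafe impl PodLike for u8 {}   // error: implementation of an `unsafe` trait
    // unsafe fn poke(_p: *mut u8) {}  // error: declaration of an `unsafe` function
}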
if cx.sess().opts.test { @@ -302,7 +313,8 @@ impl MissingDoc { let has_doc = attrs.iter().any(|a| a.is_value_str() && a.name() == "doc"); if !has_doc { - cx.span_lint(MISSING_DOCS, sp, + cx.span_lint(MISSING_DOCS, + sp, &format!("missing documentation for {}", desc)); } } @@ -316,8 +328,10 @@ impl LintPass for MissingDoc { impl LateLintPass for MissingDoc { fn enter_lint_attrs(&mut self, _: &LateContext, attrs: &[ast::Attribute]) { - let doc_hidden = self.doc_hidden() || attrs.iter().any(|attr| { - attr.check_name("doc") && match attr.meta_item_list() { + let doc_hidden = self.doc_hidden() || + attrs.iter().any(|attr| { + attr.check_name("doc") && + match attr.meta_item_list() { None => false, Some(l) => attr::list_contains_name(&l[..], "hidden"), } @@ -329,13 +343,21 @@ impl LateLintPass for MissingDoc { self.doc_hidden_stack.pop().expect("empty doc_hidden_stack"); } - fn check_struct_def(&mut self, _: &LateContext, _: &hir::VariantData, - _: ast::Name, _: &hir::Generics, item_id: ast::NodeId) { + fn check_struct_def(&mut self, + _: &LateContext, + _: &hir::VariantData, + _: ast::Name, + _: &hir::Generics, + item_id: ast::NodeId) { self.struct_def_stack.push(item_id); } - fn check_struct_def_post(&mut self, _: &LateContext, _: &hir::VariantData, - _: ast::Name, _: &hir::Generics, item_id: ast::NodeId) { + fn check_struct_def_post(&mut self, + _: &LateContext, + _: &hir::VariantData, + _: ast::Name, + _: &hir::Generics, + item_id: ast::NodeId) { let popped = self.struct_def_stack.pop().expect("empty struct_def_stack"); assert!(popped == item_id); } @@ -358,10 +380,10 @@ impl LateLintPass for MissingDoc { for itm in items { self.private_traits.insert(itm.id); } - return + return; } "a trait" - }, + } hir::ItemTy(..) => "a type alias", hir::ItemImpl(.., Some(ref trait_ref), _, ref impl_items) => { // If the trait is private, add the impl items to private_traits so they don't get @@ -369,26 +391,30 @@ impl LateLintPass for MissingDoc { let real_trait = cx.tcx.expect_def(trait_ref.ref_id).def_id(); if let Some(node_id) = cx.tcx.map.as_local_node_id(real_trait) { match cx.tcx.map.find(node_id) { - Some(hir_map::NodeItem(item)) => if item.vis == hir::Visibility::Inherited { - for itm in impl_items { - self.private_traits.insert(itm.id); + Some(hir_map::NodeItem(item)) => { + if item.vis == hir::Visibility::Inherited { + for itm in impl_items { + self.private_traits.insert(itm.id); + } } - }, - _ => { } + } + _ => {} } } - return - }, + return; + } hir::ItemConst(..) => "a constant", hir::ItemStatic(..) => "a static", - _ => return + _ => return, }; self.check_missing_docs_attrs(cx, Some(it.id), &it.attrs, it.span, desc); } fn check_trait_item(&mut self, cx: &LateContext, trait_item: &hir::TraitItem) { - if self.private_traits.contains(&trait_item.id) { return } + if self.private_traits.contains(&trait_item.id) { + return; + } let desc = match trait_item.node { hir::ConstTraitItem(..) => "an associated constant", @@ -396,9 +422,11 @@ impl LateLintPass for MissingDoc { hir::TypeTraitItem(..) => "an associated type", }; - self.check_missing_docs_attrs(cx, Some(trait_item.id), + self.check_missing_docs_attrs(cx, + Some(trait_item.id), &trait_item.attrs, - trait_item.span, desc); + trait_item.span, + desc); } fn check_impl_item(&mut self, cx: &LateContext, impl_item: &hir::ImplItem) { @@ -412,26 +440,34 @@ impl LateLintPass for MissingDoc { hir::ImplItemKind::Method(..) 
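// The doc_hidden stack and the `is_value_str`/"doc" check above boil down to behaviour
// like this when the allow-by-default lint is enabled at the crate root:
#![warn(missing_docs)]
//! Crate-level documentation.

/// Documented item: no warning.
pub struct Documented;

#[doc(hidden)]
pub struct Hidden; // skipped: doc(hidden) is pushed onto the hidden stack

pub struct Bare; // warning: missing documentation for a struct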
=> "a method", hir::ImplItemKind::Type(_) => "an associated type", }; - self.check_missing_docs_attrs(cx, Some(impl_item.id), + self.check_missing_docs_attrs(cx, + Some(impl_item.id), &impl_item.attrs, - impl_item.span, desc); + impl_item.span, + desc); } fn check_struct_field(&mut self, cx: &LateContext, sf: &hir::StructField) { if !sf.is_positional() { if sf.vis == hir::Public || self.in_variant { - let cur_struct_def = *self.struct_def_stack.last() + let cur_struct_def = *self.struct_def_stack + .last() .expect("empty struct_def_stack"); - self.check_missing_docs_attrs(cx, Some(cur_struct_def), - &sf.attrs, sf.span, + self.check_missing_docs_attrs(cx, + Some(cur_struct_def), + &sf.attrs, + sf.span, "a struct field") } } } fn check_variant(&mut self, cx: &LateContext, v: &hir::Variant, _: &hir::Generics) { - self.check_missing_docs_attrs(cx, Some(v.node.data.id()), - &v.node.attrs, v.span, "a variant"); + self.check_missing_docs_attrs(cx, + Some(v.node.data.id()), + &v.node.attrs, + v.span, + "a variant"); assert!(!self.in_variant); self.in_variant = true; } @@ -468,25 +504,27 @@ impl LateLintPass for MissingCopyImplementations { return; } let def = cx.tcx.lookup_adt_def(cx.tcx.map.local_def_id(item.id)); - (def, cx.tcx.mk_adt(def, Substs::empty(cx.tcx))) + (def, cx.tcx.mk_adt(def, cx.tcx.intern_substs(&[]))) } hir::ItemUnion(_, ref ast_generics) => { if ast_generics.is_parameterized() { return; } let def = cx.tcx.lookup_adt_def(cx.tcx.map.local_def_id(item.id)); - (def, cx.tcx.mk_adt(def, Substs::empty(cx.tcx))) + (def, cx.tcx.mk_adt(def, cx.tcx.intern_substs(&[]))) } hir::ItemEnum(_, ref ast_generics) => { if ast_generics.is_parameterized() { return; } let def = cx.tcx.lookup_adt_def(cx.tcx.map.local_def_id(item.id)); - (def, cx.tcx.mk_adt(def, Substs::empty(cx.tcx))) + (def, cx.tcx.mk_adt(def, cx.tcx.intern_substs(&[]))) } _ => return, }; - if def.has_dtor() { return; } + if def.has_dtor() { + return; + } let parameter_environment = cx.tcx.empty_parameter_environment(); // FIXME (@jroesch) should probably inver this so that the parameter env still impls this // method @@ -514,9 +552,7 @@ pub struct MissingDebugImplementations { impl MissingDebugImplementations { pub fn new() -> MissingDebugImplementations { - MissingDebugImplementations { - impling_types: None, - } + MissingDebugImplementations { impling_types: None } } } @@ -533,7 +569,9 @@ impl LateLintPass for MissingDebugImplementations { } match item.node { - hir::ItemStruct(..) | hir::ItemUnion(..) | hir::ItemEnum(..) => {}, + hir::ItemStruct(..) | + hir::ItemUnion(..) | + hir::ItemEnum(..) => {} _ => return, } @@ -585,12 +623,13 @@ pub struct Deprecated { impl Deprecated { pub fn new() -> Deprecated { - Deprecated { - current_item: ast::CRATE_NODE_ID, - } + Deprecated { current_item: ast::CRATE_NODE_ID } } - fn lint(&self, cx: &LateContext, _id: DefId, span: Span, + fn lint(&self, + cx: &LateContext, + _id: DefId, + span: Span, stability: &Option<&attr::Stability>, deprecation: &Option) { // Deprecated attributes apply in-crate and cross-crate. 
@@ -641,9 +680,10 @@ impl LintPass for Deprecated { impl LateLintPass for Deprecated { fn check_item(&mut self, cx: &LateContext, item: &hir::Item) { self.push_item(item.id); - stability::check_item(cx.tcx, item, false, - &mut |id, sp, stab, depr| - self.lint(cx, id, sp, &stab, &depr)); + stability::check_item(cx.tcx, + item, + false, + &mut |id, sp, stab, depr| self.lint(cx, id, sp, &stab, &depr)); } fn check_item_post(&mut self, cx: &LateContext, item: &hir::Item) { @@ -651,27 +691,30 @@ impl LateLintPass for Deprecated { } fn check_expr(&mut self, cx: &LateContext, e: &hir::Expr) { - stability::check_expr(cx.tcx, e, - &mut |id, sp, stab, depr| - self.lint(cx, id, sp, &stab, &depr)); + stability::check_expr(cx.tcx, + e, + &mut |id, sp, stab, depr| self.lint(cx, id, sp, &stab, &depr)); } fn check_path(&mut self, cx: &LateContext, path: &hir::Path, id: ast::NodeId) { - stability::check_path(cx.tcx, path, id, - &mut |id, sp, stab, depr| - self.lint(cx, id, sp, &stab, &depr)); + stability::check_path(cx.tcx, + path, + id, + &mut |id, sp, stab, depr| self.lint(cx, id, sp, &stab, &depr)); } fn check_path_list_item(&mut self, cx: &LateContext, item: &hir::PathListItem) { - stability::check_path_list_item(cx.tcx, item, - &mut |id, sp, stab, depr| - self.lint(cx, id, sp, &stab, &depr)); + stability::check_path_list_item(cx.tcx, + item, + &mut |id, sp, stab, depr| { + self.lint(cx, id, sp, &stab, &depr) + }); } fn check_pat(&mut self, cx: &LateContext, pat: &hir::Pat) { - stability::check_pat(cx.tcx, pat, - &mut |id, sp, stab, depr| - self.lint(cx, id, sp, &stab, &depr)); + stability::check_pat(cx.tcx, + pat, + &mut |id, sp, stab, depr| self.lint(cx, id, sp, &stab, &depr)); } fn check_impl_item(&mut self, _: &LateContext, item: &hir::ImplItem) { @@ -699,6 +742,54 @@ impl LateLintPass for Deprecated { } } +declare_lint! { + DEPRECATED_ATTR, + Warn, + "detects use of deprecated attributes" +} + +/// Checks for use of attributes which have been deprecated. +#[derive(Clone)] +pub struct DeprecatedAttr { + // This is not free to compute, so we want to keep it around, rather than + // compute it for every attribute. + depr_attrs: Vec<&'static (&'static str, AttributeType, AttributeGate)>, +} + +impl DeprecatedAttr { + pub fn new() -> DeprecatedAttr { + DeprecatedAttr { + depr_attrs: deprecated_attributes(), + } + } +} + +impl LintPass for DeprecatedAttr { + fn get_lints(&self) -> LintArray { + lint_array!(DEPRECATED_ATTR) + } +} + +impl EarlyLintPass for DeprecatedAttr { + fn check_attribute(&mut self, cx: &EarlyContext, attr: &ast::Attribute) { + let name = &*attr.name(); + for &&(n, _, ref g) in &self.depr_attrs { + if n == name { + if let &AttributeGate::Gated(Stability::Deprecated(link), + ref name, + ref reason, + _) = g { + cx.span_lint(DEPRECATED, + attr.span, + &format!("use of deprecated attribute `{}`: {}. See {}", + name, reason, link)); + } + return; + } + } + } +} + declare_lint! { pub UNCONDITIONAL_RECURSION, Warn, @@ -716,15 +807,20 @@ impl LintPass for UnconditionalRecursion { } impl LateLintPass for UnconditionalRecursion { - fn check_fn(&mut self, cx: &LateContext, fn_kind: FnKind, _: &hir::FnDecl, - blk: &hir::Block, sp: Span, id: ast::NodeId) { + fn check_fn(&mut self, + cx: &LateContext, + fn_kind: FnKind, + _: &hir::FnDecl, + blk: &hir::Block, + sp: Span, + id: ast::NodeId) { let method = match fn_kind { FnKind::ItemFn(..) => None, FnKind::Method(..) => { cx.tcx.impl_or_trait_item(cx.tcx.map.local_def_id(id)).as_opt_method() } // closures can't recur, so they don't matter. 
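// Sketch of what the new DeprecatedAttr early pass above does: the list of gated,
// deprecated attributes is computed once and cached, and every attribute seen is
// compared against it by name. The types here are simplified stand-ins for the real
// feature_gate tables.
struct DeprecatedEntry {
    name: &'static str,
    reason: &'static str,
    link: &'static str,
}

struct DeprecatedAttrCheck {
    // Not free to compute, so keep it around rather than rebuilding per attribute.
    depr_attrs: Vec<DeprecatedEntry>,
}

impl DeprecatedAttrCheck {
    fn check(&self, attr_name: &str) -> Option<String> {
        for entry in &self.depr_attrs {
            if entry.name == attr_name {
                return Some(format!("use of deprecated attribute `{}`: {}. See {}",
                                    entry.name, entry.reason, entry.link));
            }
        }
        None
    }
}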
- FnKind::Closure(_) => return + FnKind::Closure(_) => return, }; // Walk through this function (say `f`) looking to see if @@ -779,10 +875,8 @@ impl LateLintPass for UnconditionalRecursion { // is this a recursive call? let self_recursive = if node_id != ast::DUMMY_NODE_ID { match method { - Some(ref method) => { - expr_refers_to_this_method(cx.tcx, method, node_id) - } - None => expr_refers_to_this_fn(cx.tcx, id, node_id) + Some(ref method) => expr_refers_to_this_method(cx.tcx, method, node_id), + None => expr_refers_to_this_fn(cx.tcx, id, node_id), } } else { false @@ -808,7 +902,8 @@ impl LateLintPass for UnconditionalRecursion { // no break */ }`) shouldn't be linted unless it actually // recurs. if !reached_exit_without_self_call && !self_call_spans.is_empty() { - let mut db = cx.struct_span_lint(UNCONDITIONAL_RECURSION, sp, + let mut db = cx.struct_span_lint(UNCONDITIONAL_RECURSION, + sp, "function cannot return without recurring"); // FIXME #19668: these could be span_lint_note's instead of this manual guard. @@ -829,23 +924,21 @@ impl LateLintPass for UnconditionalRecursion { // Functions for identifying if the given Expr NodeId `id` // represents a call to the function `fn_id`/method `method`. - fn expr_refers_to_this_fn(tcx: TyCtxt, - fn_id: ast::NodeId, - id: ast::NodeId) -> bool { + fn expr_refers_to_this_fn(tcx: TyCtxt, fn_id: ast::NodeId, id: ast::NodeId) -> bool { match tcx.map.get(id) { hir_map::NodeExpr(&hir::Expr { node: hir::ExprCall(ref callee, _), .. }) => { - tcx.expect_def_or_none(callee.id).map_or(false, |def| { - def.def_id() == tcx.map.local_def_id(fn_id) - }) + tcx.expect_def_or_none(callee.id) + .map_or(false, |def| def.def_id() == tcx.map.local_def_id(fn_id)) } - _ => false + _ => false, } } // Check if the expression `id` performs a call to `method`. fn expr_refers_to_this_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, method: &ty::Method, - id: ast::NodeId) -> bool { + id: ast::NodeId) + -> bool { // Check for method calls and overloaded operators. let opt_m = tcx.tables.borrow().method_map.get(&ty::MethodCall::expr(id)).cloned(); if let Some(m) = opt_m { @@ -859,9 +952,11 @@ impl LateLintPass for UnconditionalRecursion { if let Some(adjustment::AdjustDerefRef(adj)) = opt_adj { for i in 0..adj.autoderefs { let method_call = ty::MethodCall::autoderef(id, i as u32); - if let Some(m) = tcx.tables.borrow().method_map - .get(&method_call) - .cloned() { + if let Some(m) = tcx.tables + .borrow() + .method_map + .get(&method_call) + .cloned() { if method_call_refers_to_method(tcx, method, m.def_id, m.substs, id) { return true; } @@ -877,13 +972,16 @@ impl LateLintPass for UnconditionalRecursion { match tcx.expect_def_or_none(callee.id) { Some(Def::Method(def_id)) => { let item_substs = tcx.node_id_item_substs(callee.id); - method_call_refers_to_method( - tcx, method, def_id, &item_substs.substs, id) + method_call_refers_to_method(tcx, + method, + def_id, + &item_substs.substs, + id) } - _ => false + _ => false, } } - _ => false + _ => false, } } @@ -893,15 +991,14 @@ impl LateLintPass for UnconditionalRecursion { method: &ty::Method, callee_id: DefId, callee_substs: &Substs<'tcx>, - expr_id: ast::NodeId) -> bool { + expr_id: ast::NodeId) + -> bool { let callee_item = tcx.impl_or_trait_item(callee_id); match callee_item.container() { // This is an inherent method, so the `def_id` refers // directly to the method definition. 
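// The analysis above walks every exit path of a function looking for a call back into
// itself; these are the two canonical shapes it distinguishes:
fn forever(n: u32) -> u32 {
    forever(n + 1) // warning: function cannot return without recurring
}

fn countdown(n: u32) -> u32 {
    if n == 0 { 0 } else { countdown(n - 1) } // fine: one path returns without recurring
}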
- ty::ImplContainer(_) => { - callee_id == method.def_id - } + ty::ImplContainer(_) => callee_id == method.def_id, // A trait method, from any number of possible sources. // Attempt to select a concrete impl before checking. @@ -939,13 +1036,12 @@ impl LateLintPass for UnconditionalRecursion { let container = ty::ImplContainer(vtable_impl.impl_def_id); // It matches if it comes from the same impl, // and has the same method name. - container == method.container - && callee_item.name() == method.name + container == method.container && callee_item.name() == method.name } // There's no way to know if this call is // recursive, so we assume it's not. - _ => false + _ => false, } }) } @@ -992,7 +1088,8 @@ impl LateLintPass for PluginAsLibrary { }; if prfn.is_some() { - cx.span_lint(PLUGIN_AS_LIBRARY, it.span, + cx.span_lint(PLUGIN_AS_LIBRARY, + it.span, "compiler plugin used as an ordinary library"); } } @@ -1050,15 +1147,15 @@ impl LateLintPass for InvalidNoMangleItems { "generic functions must be mangled"); } } - }, + } hir::ItemStatic(..) => { if attr::contains_name(&it.attrs, "no_mangle") && - !cx.access_levels.is_reachable(it.id) { + !cx.access_levels.is_reachable(it.id) { let msg = format!("static {} is marked #[no_mangle], but not exported", it.name); cx.span_lint(PRIVATE_NO_MANGLE_STATICS, it.span, &msg); } - }, + } hir::ItemConst(..) => { if attr::contains_name(&it.attrs, "no_mangle") { // Const items do not refer to a particular location in memory, and therefore @@ -1068,7 +1165,7 @@ impl LateLintPass for InvalidNoMangleItems { cx.span_lint(NO_MANGLE_CONST_ITEMS, it.span, msg); } } - _ => {}, + _ => {} } } } @@ -1096,19 +1193,21 @@ impl LateLintPass for MutableTransmutes { consider instead using an UnsafeCell"; match get_transmute_from_to(cx, expr) { Some((&ty::TyRef(_, from_mt), &ty::TyRef(_, to_mt))) => { - if to_mt.mutbl == hir::Mutability::MutMutable - && from_mt.mutbl == hir::Mutability::MutImmutable { + if to_mt.mutbl == hir::Mutability::MutMutable && + from_mt.mutbl == hir::Mutability::MutImmutable { cx.span_lint(MUTABLE_TRANSMUTES, expr.span, msg); } } - _ => () + _ => (), } - fn get_transmute_from_to<'a, 'tcx>(cx: &LateContext<'a, 'tcx>, expr: &hir::Expr) - -> Option<(&'tcx ty::TypeVariants<'tcx>, &'tcx ty::TypeVariants<'tcx>)> { + fn get_transmute_from_to<'a, 'tcx> + (cx: &LateContext<'a, 'tcx>, + expr: &hir::Expr) + -> Option<(&'tcx ty::TypeVariants<'tcx>, &'tcx ty::TypeVariants<'tcx>)> { match expr.node { hir::ExprPath(..) 
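// The mutable_transmutes lint (deny-by-default) rejects conjuring `&mut T` out of `&T`;
// its message points at UnsafeCell as the supported form of interior mutability:
use std::cell::UnsafeCell;

#[allow(mutable_transmutes)] // allowed here only so the sketch compiles; normally an error
fn wrong(x: &u32) -> &mut u32 {
    // "mutating transmuted &mut T from &T may cause undefined behavior,
    //  consider instead using an UnsafeCell"
    unsafe { std::mem::transmute(x) }
}

fn right(x: &UnsafeCell<u32>) {
    unsafe { *x.get() += 1 } // interior mutability without transmute
}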
=> (), - _ => return None + _ => return None, } if let Def::Fn(did) = cx.tcx.expect_def(expr.id) { if !def_id_is_transmute(cx, did) { @@ -1120,8 +1219,8 @@ impl LateLintPass for MutableTransmutes { let from = bare_fn.sig.0.inputs[0]; let to = bare_fn.sig.0.output; return Some((&from.sty, &to.sty)); - }, - _ => () + } + _ => (), } } None @@ -1130,7 +1229,7 @@ impl LateLintPass for MutableTransmutes { fn def_id_is_transmute(cx: &LateContext, def_id: DefId) -> bool { match cx.tcx.lookup_item_type(def_id).ty.sty { ty::TyFnDef(.., ref bfty) if bfty.abi == RustIntrinsic => (), - _ => return false + _ => return false, } cx.tcx.item_name(def_id).as_str() == "transmute" } diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index bc2979c806f65..6f114e09a6c7d 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -37,6 +37,7 @@ #![feature(rustc_private)] #![feature(slice_patterns)] #![feature(staged_api)] +#![feature(dotdot_in_tuple_patterns)] #[macro_use] extern crate syntax; @@ -48,10 +49,10 @@ extern crate rustc_back; extern crate rustc_const_eval; extern crate syntax_pos; -pub use rustc::lint as lint; -pub use rustc::middle as middle; -pub use rustc::session as session; -pub use rustc::util as util; +pub use rustc::lint; +pub use rustc::middle; +pub use rustc::session; +pub use rustc::util; use session::Session; use lint::LintId; @@ -95,6 +96,14 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { ) } + macro_rules! add_early_builtin_with_new { + ($sess:ident, $($name:ident),*,) => ( + {$( + store.register_early_pass($sess, false, box $name::new()); + )*} + ) + } + macro_rules! add_lint_group { ($sess:ident, $name:expr, $($lint:ident),*) => ( store.register_group($sess, false, $name, vec![$(LintId::of($lint)),*]); @@ -105,6 +114,10 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { UnusedParens, ); + add_early_builtin_with_new!(sess, + DeprecatedAttr, + ); + add_builtin!(sess, HardwiredLints, WhileTrue, @@ -139,13 +152,24 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { MissingDebugImplementations, ); - add_lint_group!(sess, "bad_style", - NON_CAMEL_CASE_TYPES, NON_SNAKE_CASE, NON_UPPER_CASE_GLOBALS); - - add_lint_group!(sess, "unused", - UNUSED_IMPORTS, UNUSED_VARIABLES, UNUSED_ASSIGNMENTS, DEAD_CODE, - UNUSED_MUT, UNREACHABLE_CODE, UNUSED_MUST_USE, - UNUSED_UNSAFE, PATH_STATEMENTS, UNUSED_ATTRIBUTES); + add_lint_group!(sess, + "bad_style", + NON_CAMEL_CASE_TYPES, + NON_SNAKE_CASE, + NON_UPPER_CASE_GLOBALS); + + add_lint_group!(sess, + "unused", + UNUSED_IMPORTS, + UNUSED_VARIABLES, + UNUSED_ASSIGNMENTS, + DEAD_CODE, + UNUSED_MUT, + UNREACHABLE_CODE, + UNUSED_MUST_USE, + UNUSED_UNSAFE, + PATH_STATEMENTS, + UNUSED_ATTRIBUTES); // Guidelines for creating a future incompatibility lint: // @@ -155,27 +179,23 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { // and include the full URL. 
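// The add_lint_group! registrations below ("bad_style", "unused") are what make a single
// attribute control all of the member lints at once:
#[allow(bad_style)] // covers non_camel_case_types, non_snake_case and non_upper_case_globals
mod generated_bindings {
    pub static not_upper: i32 = 0; // would otherwise trip non_upper_case_globals
    pub fn OddlyNamedFn() {}       // would otherwise trip non_snake_case
}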
// - Later, change lint to error // - Eventually, remove lint - store.register_future_incompatible(sess, vec![ + store.register_future_incompatible(sess, + vec![ FutureIncompatibleInfo { id: LintId::of(PRIVATE_IN_PUBLIC), reference: "issue #34537 ", }, FutureIncompatibleInfo { id: LintId::of(INACCESSIBLE_EXTERN_CRATE), - reference: "PR 31362 ", + reference: "issue #36886 ", }, FutureIncompatibleInfo { id: LintId::of(INVALID_TYPE_PARAM_DEFAULT), - reference: "PR 30724 ", + reference: "issue #36887 ", }, FutureIncompatibleInfo { id: LintId::of(SUPER_OR_SELF_IN_GLOBAL_PATH), - reference: "PR #32403 ", - }, - FutureIncompatibleInfo { - id: LintId::of(MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT), - reference: "RFC 218 ", + reference: "issue #36888 ", }, FutureIncompatibleInfo { id: LintId::of(TRANSMUTE_FROM_FN_ITEM_TYPES), @@ -183,15 +203,15 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { }, FutureIncompatibleInfo { id: LintId::of(OVERLAPPING_INHERENT_IMPLS), - reference: "issue #22889 ", + reference: "issue #36889 ", }, FutureIncompatibleInfo { id: LintId::of(ILLEGAL_FLOATING_POINT_CONSTANT_PATTERN), - reference: "RFC 1445 ", + reference: "issue #36890 ", }, FutureIncompatibleInfo { id: LintId::of(ILLEGAL_STRUCT_OR_ENUM_CONSTANT_PATTERN), - reference: "RFC 1445 ", + reference: "issue #36891 ", }, FutureIncompatibleInfo { id: LintId::of(HR_LIFETIME_IN_ASSOC_TYPE), @@ -199,21 +219,27 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { }, FutureIncompatibleInfo { id: LintId::of(LIFETIME_UNDERSCORE), - reference: "RFC 1177 ", + reference: "issue #36892 ", }, FutureIncompatibleInfo { id: LintId::of(SAFE_EXTERN_STATICS), - reference: "issue 36247 ", + reference: "issue #36247 ", + }, + FutureIncompatibleInfo { + id: LintId::of(PATTERNS_IN_FNS_WITHOUT_BODY), + reference: "issue #35203 ", }, ]); // Register renamed and removed lints store.register_renamed("unknown_features", "unused_features"); - store.register_removed("unsigned_negation", "replaced by negate_unsigned feature gate"); + store.register_removed("unsigned_negation", + "replaced by negate_unsigned feature gate"); store.register_removed("negate_unsigned", "cast a signed value instead"); store.register_removed("raw_pointer_derive", "using derive with raw pointers is ok"); // This was renamed to raw_pointer_derive, which was then removed, // so it is also considered removed - store.register_removed("raw_pointer_deriving", "using derive with raw pointers is ok"); + store.register_removed("raw_pointer_deriving", + "using derive with raw pointers is ok"); store.register_removed("drop_with_repr_extern", "drop flags have been removed"); } diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index e8d9e90456efc..9464bf30b693f 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -18,7 +18,7 @@ use rustc::traits::Reveal; use middle::const_val::ConstVal; use rustc_const_eval::eval_const_expr_partial; use rustc_const_eval::EvalHint::ExprTypeChecked; -use util::nodemap::{FnvHashSet}; +use util::nodemap::FnvHashSet; use lint::{LateContext, LintContext, LintArray}; use lint::{LintPass, LateLintPass}; @@ -91,15 +91,15 @@ pub struct TypeLimits { impl TypeLimits { pub fn new() -> TypeLimits { - TypeLimits { - negated_expr_id: !0, - } + TypeLimits { negated_expr_id: ast::DUMMY_NODE_ID } } } impl LintPass for TypeLimits { fn get_lints(&self) -> LintArray { - lint_array!(UNUSED_COMPARISONS, OVERFLOWING_LITERALS, EXCEEDING_BITSHIFTS) + 
lint_array!(UNUSED_COMPARISONS, + OVERFLOWING_LITERALS, + EXCEEDING_BITSHIFTS) } } @@ -111,13 +111,13 @@ impl LateLintPass for TypeLimits { match lit.node { ast::LitKind::Int(_, ast::LitIntType::Unsigned(_)) => { forbid_unsigned_negation(cx, e.span); - }, + } ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => { if let ty::TyUint(_) = cx.tcx.node_id_to_type(e.id).sty { forbid_unsigned_negation(cx, e.span); } - }, - _ => () + } + _ => (), } } else { let t = cx.tcx.node_id_to_type(expr.id); @@ -129,10 +129,11 @@ impl LateLintPass for TypeLimits { if self.negated_expr_id != e.id { self.negated_expr_id = expr.id; } - }, + } hir::ExprBinary(binop, ref l, ref r) => { if is_comparison(binop) && !check_limits(cx.tcx, binop, &l, &r) { - cx.span_lint(UNUSED_COMPARISONS, e.span, + cx.span_lint(UNUSED_COMPARISONS, + e.span, "comparison is useless due to type limits"); } @@ -140,30 +141,35 @@ impl LateLintPass for TypeLimits { let opt_ty_bits = match cx.tcx.node_id_to_type(l.id).sty { ty::TyInt(t) => Some(int_ty_bits(t, cx.sess().target.int_type)), ty::TyUint(t) => Some(uint_ty_bits(t, cx.sess().target.uint_type)), - _ => None + _ => None, }; if let Some(bits) = opt_ty_bits { let exceeding = if let hir::ExprLit(ref lit) = r.node { - if let ast::LitKind::Int(shift, _) = lit.node { shift >= bits } - else { false } + if let ast::LitKind::Int(shift, _) = lit.node { + shift >= bits + } else { + false + } } else { match eval_const_expr_partial(cx.tcx, &r, ExprTypeChecked, None) { Ok(ConstVal::Integral(i)) => { - i.is_negative() || i.to_u64() - .map(|i| i >= bits) - .unwrap_or(true) - }, - _ => { false } + i.is_negative() || + i.to_u64() + .map(|i| i >= bits) + .unwrap_or(true) + } + _ => false, } }; if exceeding { - cx.span_lint(EXCEEDING_BITSHIFTS, e.span, + cx.span_lint(EXCEEDING_BITSHIFTS, + e.span, "bitshift exceeds the type's number of bits"); } }; } - }, + } hir::ExprLit(ref lit) => { match cx.tcx.node_id_to_type(e.id).sty { ty::TyInt(t) => { @@ -182,14 +188,15 @@ impl LateLintPass for TypeLimits { // avoiding use of -min to prevent overflow/panic if (negative && v > max as u64 + 1) || (!negative && v > max as u64) { - cx.span_lint(OVERFLOWING_LITERALS, e.span, + cx.span_lint(OVERFLOWING_LITERALS, + e.span, &format!("literal out of range for {:?}", t)); return; } } - _ => bug!() + _ => bug!(), }; - }, + } ty::TyUint(t) => { let uint_type = if let ast::UintTy::Us = t { cx.sess().target.uint_type @@ -201,13 +208,14 @@ impl LateLintPass for TypeLimits { // _v is u8, within range by definition ast::LitKind::Byte(_v) => return, ast::LitKind::Int(v, _) => v, - _ => bug!() + _ => bug!(), }; if lit_val < min || lit_val > max { - cx.span_lint(OVERFLOWING_LITERALS, e.span, + cx.span_lint(OVERFLOWING_LITERALS, + e.span, &format!("literal out of range for {:?}", t)); } - }, + } ty::TyFloat(t) => { let (min, max) = float_ty_range(t); let lit_val: f64 = match lit.node { @@ -215,70 +223,71 @@ impl LateLintPass for TypeLimits { ast::LitKind::FloatUnsuffixed(ref v) => { match v.parse() { Ok(f) => f, - Err(_) => return + Err(_) => return, } } - _ => bug!() + _ => bug!(), }; if lit_val < min || lit_val > max { - cx.span_lint(OVERFLOWING_LITERALS, e.span, + cx.span_lint(OVERFLOWING_LITERALS, + e.span, &format!("literal out of range for {:?}", t)); } - }, - _ => () + } + _ => (), }; - }, - _ => () + } + _ => (), }; - fn is_valid(binop: hir::BinOp, v: T, - min: T, max: T) -> bool { + fn is_valid(binop: hir::BinOp, v: T, min: T, max: T) -> bool { match binop.node { - hir::BiLt => v > min && v <= max, - hir::BiLe => v >= min 
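// Examples of what the TypeLimits checks above produce:
fn checks(x: u8) -> bool {
    // 1u8 << 9;  // exceeding_bitshifts (deny-by-default): bitshift exceeds the type's number of bits
    x >= 0        // unused_comparisons: comparison is useless due to type limits (u8 is never < 0)
}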
&& v < max, - hir::BiGt => v >= min && v < max, - hir::BiGe => v > min && v <= max, + hir::BiLt => v > min && v <= max, + hir::BiLe => v >= min && v < max, + hir::BiGt => v >= min && v < max, + hir::BiGe => v > min && v <= max, hir::BiEq | hir::BiNe => v >= min && v <= max, - _ => bug!() + _ => bug!(), } } fn rev_binop(binop: hir::BinOp) -> hir::BinOp { - codemap::respan(binop.span, match binop.node { - hir::BiLt => hir::BiGt, - hir::BiLe => hir::BiGe, - hir::BiGt => hir::BiLt, - hir::BiGe => hir::BiLe, - _ => return binop - }) + codemap::respan(binop.span, + match binop.node { + hir::BiLt => hir::BiGt, + hir::BiLe => hir::BiGe, + hir::BiGt => hir::BiLt, + hir::BiGe => hir::BiLe, + _ => return binop, + }) } // for isize & usize, be conservative with the warnings, so that the // warnings are consistent between 32- and 64-bit platforms fn int_ty_range(int_ty: ast::IntTy) -> (i64, i64) { match int_ty { - ast::IntTy::Is => (i64::MIN, i64::MAX), - ast::IntTy::I8 => (i8::MIN as i64, i8::MAX as i64), - ast::IntTy::I16 => (i16::MIN as i64, i16::MAX as i64), - ast::IntTy::I32 => (i32::MIN as i64, i32::MAX as i64), - ast::IntTy::I64 => (i64::MIN, i64::MAX) + ast::IntTy::Is => (i64::MIN, i64::MAX), + ast::IntTy::I8 => (i8::MIN as i64, i8::MAX as i64), + ast::IntTy::I16 => (i16::MIN as i64, i16::MAX as i64), + ast::IntTy::I32 => (i32::MIN as i64, i32::MAX as i64), + ast::IntTy::I64 => (i64::MIN, i64::MAX), } } fn uint_ty_range(uint_ty: ast::UintTy) -> (u64, u64) { match uint_ty { - ast::UintTy::Us => (u64::MIN, u64::MAX), - ast::UintTy::U8 => (u8::MIN as u64, u8::MAX as u64), - ast::UintTy::U16 => (u16::MIN as u64, u16::MAX as u64), - ast::UintTy::U32 => (u32::MIN as u64, u32::MAX as u64), - ast::UintTy::U64 => (u64::MIN, u64::MAX) + ast::UintTy::Us => (u64::MIN, u64::MAX), + ast::UintTy::U8 => (u8::MIN as u64, u8::MAX as u64), + ast::UintTy::U16 => (u16::MIN as u64, u16::MAX as u64), + ast::UintTy::U32 => (u32::MIN as u64, u32::MAX as u64), + ast::UintTy::U64 => (u64::MIN, u64::MAX), } } fn float_ty_range(float_ty: ast::FloatTy) -> (f64, f64) { match float_ty { ast::FloatTy::F32 => (f32::MIN as f64, f32::MAX as f64), - ast::FloatTy::F64 => (f64::MIN, f64::MAX) + ast::FloatTy::F64 => (f64::MIN, f64::MAX), } } @@ -305,60 +314,60 @@ impl LateLintPass for TypeLimits { fn check_limits<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, binop: hir::BinOp, l: &hir::Expr, - r: &hir::Expr) -> bool { + r: &hir::Expr) + -> bool { let (lit, expr, swap) = match (&l.node, &r.node) { (&hir::ExprLit(_), _) => (l, r, true), (_, &hir::ExprLit(_)) => (r, l, false), - _ => return true + _ => return true, }; // Normalize the binop so that the literal is always on the RHS in // the comparison - let norm_binop = if swap { - rev_binop(binop) - } else { - binop - }; + let norm_binop = if swap { rev_binop(binop) } else { binop }; match tcx.node_id_to_type(expr.id).sty { ty::TyInt(int_ty) => { let (min, max) = int_ty_range(int_ty); let lit_val: i64 = match lit.node { - hir::ExprLit(ref li) => match li.node { - ast::LitKind::Int(v, ast::LitIntType::Signed(_)) | - ast::LitKind::Int(v, ast::LitIntType::Unsuffixed) => v as i64, - _ => return true - }, - _ => bug!() + hir::ExprLit(ref li) => { + match li.node { + ast::LitKind::Int(v, ast::LitIntType::Signed(_)) | + ast::LitKind::Int(v, ast::LitIntType::Unsuffixed) => v as i64, + _ => return true, + } + } + _ => bug!(), }; is_valid(norm_binop, lit_val, min, max) } ty::TyUint(uint_ty) => { let (min, max): (u64, u64) = uint_ty_range(uint_ty); let lit_val: u64 = match lit.node { - hir::ExprLit(ref 
li) => match li.node { - ast::LitKind::Int(v, _) => v, - _ => return true - }, - _ => bug!() + hir::ExprLit(ref li) => { + match li.node { + ast::LitKind::Int(v, _) => v, + _ => return true, + } + } + _ => bug!(), }; is_valid(norm_binop, lit_val, min, max) } - _ => true + _ => true, } } fn is_comparison(binop: hir::BinOp) -> bool { match binop.node { - hir::BiEq | hir::BiLt | hir::BiLe | - hir::BiNe | hir::BiGe | hir::BiGt => true, - _ => false + hir::BiEq | hir::BiLt | hir::BiLe | hir::BiNe | hir::BiGe | hir::BiGt => true, + _ => false, } } fn forbid_unsigned_negation(cx: &LateContext, span: Span) { cx.sess() - .struct_span_err_with_code(span, "unary negation of unsigned integer", "E0519") - .span_help(span, "use a cast or the `!` operator") - .emit(); + .struct_span_err_with_code(span, "unary negation of unsigned integer", "E0519") + .span_help(span, "use a cast or the `!` operator") + .emit(); } } } @@ -370,7 +379,7 @@ declare_lint! { } struct ImproperCTypesVisitor<'a, 'tcx: 'a> { - cx: &'a LateContext<'a, 'tcx> + cx: &'a LateContext<'a, 'tcx>, } enum FfiResult { @@ -403,9 +412,13 @@ fn is_repr_nullable_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, if def.variants[data_idx].fields.len() == 1 { match def.variants[data_idx].fields[0].ty(tcx, substs).sty { - ty::TyFnPtr(_) => { return true; } - ty::TyRef(..) => { return true; } - _ => { } + ty::TyFnPtr(_) => { + return true; + } + ty::TyRef(..) => { + return true; + } + _ => {} } } } @@ -415,10 +428,7 @@ fn is_repr_nullable_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { /// Check if the given type is "ffi-safe" (has a stable, well-defined /// representation which can be exported to C code). - fn check_type_for_ffi(&self, - cache: &mut FnvHashSet>, - ty: Ty<'tcx>) - -> FfiResult { + fn check_type_for_ffi(&self, cache: &mut FnvHashSet>, ty: Ty<'tcx>) -> FfiResult { use self::FfiResult::*; let cx = self.cx.tcx; @@ -431,112 +441,118 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { } match ty.sty { - ty::TyAdt(def, substs) => match def.adt_kind() { - AdtKind::Struct => { - if !cx.lookup_repr_hints(def.did).contains(&attr::ReprExtern) { - return FfiUnsafe( - "found struct without foreign-function-safe \ - representation annotation in foreign module, \ - consider adding a #[repr(C)] attribute to \ - the type"); - } + ty::TyAdt(def, substs) => { + match def.adt_kind() { + AdtKind::Struct => { + if !cx.lookup_repr_hints(def.did).contains(&attr::ReprExtern) { + return FfiUnsafe("found struct without foreign-function-safe \ + representation annotation in foreign module, \ + consider adding a #[repr(C)] attribute to the type"); + } - // We can't completely trust repr(C) markings; make sure the - // fields are actually safe. - if def.struct_variant().fields.is_empty() { - return FfiUnsafe( - "found zero-size struct in foreign module, consider \ - adding a member to this struct"); - } + // We can't completely trust repr(C) markings; make sure the + // fields are actually safe. + if def.struct_variant().fields.is_empty() { + return FfiUnsafe("found zero-size struct in foreign module, consider \ + adding a member to this struct"); + } - for field in &def.struct_variant().fields { - let field_ty = cx.normalize_associated_type(&field.ty(cx, substs)); - let r = self.check_type_for_ffi(cache, field_ty); - match r { - FfiSafe => {} - FfiBadStruct(..) | FfiBadUnion(..) | FfiBadEnum(..) 
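// is_repr_nullable_ptr above is what lets the classic "nullable pointer" layouts through
// improper_ctypes: a two-variant enum whose only payload is a reference or function
// pointer, i.e. shapes like Option<&T> and Option<extern "C" fn(..)>.
extern "C" {
    // NULL on the C side maps to None; a valid pointer maps to Some(..).
    fn set_callback(cb: Option<extern "C" fn(i32)>);
    fn lookup(key: *const u8) -> Option<&'static u32>;
}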
=> { return r; } - FfiUnsafe(s) => { return FfiBadStruct(def.did, s); } + for field in &def.struct_variant().fields { + let field_ty = cx.normalize_associated_type(&field.ty(cx, substs)); + let r = self.check_type_for_ffi(cache, field_ty); + match r { + FfiSafe => {} + FfiBadStruct(..) | FfiBadUnion(..) | FfiBadEnum(..) => { + return r; + } + FfiUnsafe(s) => { + return FfiBadStruct(def.did, s); + } + } } + FfiSafe } - FfiSafe - } - AdtKind::Union => { - if !cx.lookup_repr_hints(def.did).contains(&attr::ReprExtern) { - return FfiUnsafe( - "found union without foreign-function-safe \ - representation annotation in foreign module, \ - consider adding a #[repr(C)] attribute to \ - the type"); - } + AdtKind::Union => { + if !cx.lookup_repr_hints(def.did).contains(&attr::ReprExtern) { + return FfiUnsafe("found union without foreign-function-safe \ + representation annotation in foreign module, \ + consider adding a #[repr(C)] attribute to the type"); + } - for field in &def.struct_variant().fields { - let field_ty = cx.normalize_associated_type(&field.ty(cx, substs)); - let r = self.check_type_for_ffi(cache, field_ty); - match r { - FfiSafe => {} - FfiBadStruct(..) | FfiBadUnion(..) | FfiBadEnum(..) => { return r; } - FfiUnsafe(s) => { return FfiBadUnion(def.did, s); } + for field in &def.struct_variant().fields { + let field_ty = cx.normalize_associated_type(&field.ty(cx, substs)); + let r = self.check_type_for_ffi(cache, field_ty); + match r { + FfiSafe => {} + FfiBadStruct(..) | FfiBadUnion(..) | FfiBadEnum(..) => { + return r; + } + FfiUnsafe(s) => { + return FfiBadUnion(def.did, s); + } + } } + FfiSafe } - FfiSafe - } - AdtKind::Enum => { - if def.variants.is_empty() { - // Empty enums are okay... although sort of useless. - return FfiSafe - } + AdtKind::Enum => { + if def.variants.is_empty() { + // Empty enums are okay... although sort of useless. + return FfiSafe; + } - // Check for a repr() attribute to specify the size of the - // discriminant. - let repr_hints = cx.lookup_repr_hints(def.did); - match &repr_hints[..] { - &[] => { - // Special-case types like `Option`. - if !is_repr_nullable_ptr(cx, def, substs) { - return FfiUnsafe( - "found enum without foreign-function-safe \ - representation annotation in foreign module, \ - consider adding a #[repr(...)] attribute to \ - the type") + // Check for a repr() attribute to specify the size of the + // discriminant. + let repr_hints = cx.lookup_repr_hints(def.did); + match &repr_hints[..] { + &[] => { + // Special-case types like `Option`. + if !is_repr_nullable_ptr(cx, def, substs) { + return FfiUnsafe("found enum without foreign-function-safe \ + representation annotation in foreign \ + module, consider adding a #[repr(...)] \ + attribute to the type"); + } } - } - &[ref hint] => { - if !hint.is_ffi_safe() { + &[ref hint] => { + if !hint.is_ffi_safe() { + // FIXME: This shouldn't be reachable: we should check + // this earlier. + return FfiUnsafe("enum has unexpected #[repr(...)] attribute"); + } + + // Enum with an explicitly sized discriminant; either + // a C-style enum or a discriminated union. + + // The layout of enum variants is implicitly repr(C). + // FIXME: Is that correct? + } + _ => { // FIXME: This shouldn't be reachable: we should check // this earlier. - return FfiUnsafe( - "enum has unexpected #[repr(...)] attribute") + return FfiUnsafe("enum has too many #[repr(...)] attributes"); } - - // Enum with an explicitly sized discriminant; either - // a C-style enum or a discriminated union. 
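// What the struct/union branches above enforce in practice: aggregates crossing an
// `extern` boundary need an explicit #[repr(C)] annotation (and must not be zero-sized),
// and their fields are checked recursively.
#[repr(C)]
pub struct Point {
    pub x: f64,
    pub y: f64,
}

extern "C" {
    fn translate(p: Point, dx: f64, dy: f64) -> Point; // fine: repr(C) with FFI-safe fields
    // fn broken(v: Vec<u8>); // would warn: struct without foreign-function-safe representation
}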
- - // The layout of enum variants is implicitly repr(C). - // FIXME: Is that correct? } - _ => { - // FIXME: This shouldn't be reachable: we should check - // this earlier. - return FfiUnsafe( - "enum has too many #[repr(...)] attributes"); - } - } - // Check the contained variants. - for variant in &def.variants { - for field in &variant.fields { - let arg = cx.normalize_associated_type(&field.ty(cx, substs)); - let r = self.check_type_for_ffi(cache, arg); - match r { - FfiSafe => {} - FfiBadStruct(..) | FfiBadUnion(..) | FfiBadEnum(..) => { return r; } - FfiUnsafe(s) => { return FfiBadEnum(def.did, s); } + // Check the contained variants. + for variant in &def.variants { + for field in &variant.fields { + let arg = cx.normalize_associated_type(&field.ty(cx, substs)); + let r = self.check_type_for_ffi(cache, arg); + match r { + FfiSafe => {} + FfiBadStruct(..) | FfiBadUnion(..) | FfiBadEnum(..) => { + return r; + } + FfiUnsafe(s) => { + return FfiBadEnum(def.did, s); + } + } } } + FfiSafe } - FfiSafe } - }, + } ty::TyChar => { FfiUnsafe("found Rust type `char` in foreign module, while \ @@ -544,8 +560,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { } // Primitive types with a stable representation. - ty::TyBool | ty::TyInt(..) | ty::TyUint(..) | - ty::TyFloat(..) | ty::TyNever => FfiSafe, + ty::TyBool | ty::TyInt(..) | ty::TyUint(..) | ty::TyFloat(..) | ty::TyNever => FfiSafe, ty::TyBox(..) => { FfiUnsafe("found Rust type Box<_> in foreign module, \ @@ -572,24 +587,17 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { consider using a struct instead") } - ty::TyRawPtr(ref m) | ty::TyRef(_, ref m) => { - self.check_type_for_ffi(cache, m.ty) - } + ty::TyRawPtr(ref m) | + ty::TyRef(_, ref m) => self.check_type_for_ffi(cache, m.ty), - ty::TyArray(ty, _) => { - self.check_type_for_ffi(cache, ty) - } + ty::TyArray(ty, _) => self.check_type_for_ffi(cache, ty), ty::TyFnPtr(bare_fn) => { match bare_fn.abi { - Abi::Rust | - Abi::RustIntrinsic | - Abi::PlatformIntrinsic | - Abi::RustCall => { - return FfiUnsafe( - "found function pointer with Rust calling \ - convention in foreign module; consider using an \ - `extern` function pointer") + Abi::Rust | Abi::RustIntrinsic | Abi::PlatformIntrinsic | Abi::RustCall => { + return FfiUnsafe("found function pointer with Rust calling convention in \ + foreign module; consider using an `extern` function \ + pointer") } _ => {} } @@ -599,24 +607,30 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { let r = self.check_type_for_ffi(cache, sig.output); match r { FfiSafe => {} - _ => { return r; } + _ => { + return r; + } } } for arg in sig.inputs { let r = self.check_type_for_ffi(cache, arg); match r { FfiSafe => {} - _ => { return r; } + _ => { + return r; + } } } FfiSafe } - ty::TyParam(..) | ty::TyInfer(..) | ty::TyError | - ty::TyClosure(..) | ty::TyProjection(..) | ty::TyAnon(..) | - ty::TyFnDef(..) => { - bug!("Unexpected type in foreign function") - } + ty::TyParam(..) | + ty::TyInfer(..) | + ty::TyError | + ty::TyClosure(..) | + ty::TyProjection(..) | + ty::TyAnon(..) | + ty::TyFnDef(..) => bug!("Unexpected type in foreign function"), } } @@ -633,23 +647,28 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { FfiResult::FfiBadStruct(_, s) => { // FIXME: This diagnostic is difficult to read, and doesn't // point at the relevant field. 
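// The TyFnPtr arm above is about the pointer's calling convention, not just its
// signature: a plain `fn(..)` pointer has the Rust ABI and is rejected, while an
// `extern "C" fn(..)` pointer is accepted.
extern "C" {
    // fn bad(cb: fn(i32) -> i32);           // warn: function pointer with Rust calling convention
    fn good(cb: extern "C" fn(i32) -> i32);  // FFI-safe
}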
- self.cx.span_lint(IMPROPER_CTYPES, sp, - &format!("found non-foreign-function-safe member in \ - struct marked #[repr(C)]: {}", s)); + self.cx.span_lint(IMPROPER_CTYPES, + sp, + &format!("found non-foreign-function-safe member in struct \ + marked #[repr(C)]: {}", + s)); } FfiResult::FfiBadUnion(_, s) => { // FIXME: This diagnostic is difficult to read, and doesn't // point at the relevant field. - self.cx.span_lint(IMPROPER_CTYPES, sp, - &format!("found non-foreign-function-safe member in \ - union marked #[repr(C)]: {}", s)); + self.cx.span_lint(IMPROPER_CTYPES, + sp, + &format!("found non-foreign-function-safe member in union \ + marked #[repr(C)]: {}", + s)); } FfiResult::FfiBadEnum(_, s) => { // FIXME: This diagnostic is difficult to read, and doesn't // point at the relevant variant. - self.cx.span_lint(IMPROPER_CTYPES, sp, - &format!("found non-foreign-function-safe member in \ - enum: {}", s)); + self.cx.span_lint(IMPROPER_CTYPES, + sp, + &format!("found non-foreign-function-safe member in enum: {}", + s)); } } } @@ -719,13 +738,13 @@ impl LintPass for VariantSizeDifferences { impl LateLintPass for VariantSizeDifferences { fn check_item(&mut self, cx: &LateContext, it: &hir::Item) { if let hir::ItemEnum(ref enum_definition, ref gens) = it.node { - if gens.ty_params.is_empty() { // sizes only make sense for non-generic types + if gens.ty_params.is_empty() { + // sizes only make sense for non-generic types let t = cx.tcx.node_id_to_type(it.id); let layout = cx.tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| { let ty = cx.tcx.erase_regions(&t); - ty.layout(&infcx).unwrap_or_else(|e| { - bug!("failed to get layout for `{}`: {}", t, e) - }) + ty.layout(&infcx) + .unwrap_or_else(|e| bug!("failed to get layout for `{}`: {}", t, e)) }); if let Layout::General { ref variants, ref size, discr, .. } = *layout { @@ -738,23 +757,21 @@ impl LateLintPass for VariantSizeDifferences { .zip(variants) .map(|(variant, variant_layout)| { // Subtract the size of the enum discriminant - let bytes = variant_layout.min_size().bytes() - .saturating_sub(discr_size); + let bytes = variant_layout.min_size + .bytes() + .saturating_sub(discr_size); debug!("- variant `{}` is {} bytes large", variant.node.name, bytes); bytes }) .enumerate() - .fold((0, 0, 0), - |(l, s, li), (idx, size)| - if size > l { - (size, l, idx) - } else if size > s { - (l, size, li) - } else { - (l, s, li) - } - ); + .fold((0, 0, 0), |(l, s, li), (idx, size)| if size > l { + (size, l, idx) + } else if size > s { + (l, size, li) + } else { + (l, s, li) + }); // we only warn if the largest variant is at least thrice as large as // the second-largest. 
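// Illustrative aside (not part of this patch): an enum that trips the
// three-times-larger rule described in the comment above. After the
// discriminant is subtracted, `Blob` carries roughly 1024 bytes of payload
// versus 8 for `Header`, so the lint below points at `Blob`.
#![warn(variant_size_differences)]   // the lint is allow-by-default

enum Packet {
    Header(u64),       // ~8 bytes of payload
    Blob([u8; 1024]),  // ~1024 bytes of payload
}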
@@ -762,7 +779,8 @@ impl LateLintPass for VariantSizeDifferences { cx.span_lint(VARIANT_SIZE_DIFFERENCES, enum_definition.variants[largest_index].span, &format!("enum variant is more than three times larger \ - ({} bytes) than the next largest", largest)); + ({} bytes) than the next largest", + largest)); } } } diff --git a/src/librustc_lint/unused.rs b/src/librustc_lint/unused.rs index d31f16df69356..a29ff18ab5318 100644 --- a/src/librustc_lint/unused.rs +++ b/src/librustc_lint/unused.rs @@ -49,8 +49,12 @@ impl UnusedMut { if let hir::BindByValue(hir::MutMutable) = mode { if !name.as_str().starts_with("_") { match mutables.entry(name.0 as usize) { - Vacant(entry) => { entry.insert(vec![id]); }, - Occupied(mut entry) => { entry.get_mut().push(id); }, + Vacant(entry) => { + entry.insert(vec![id]); + } + Occupied(mut entry) => { + entry.get_mut().push(id); + } } } } @@ -60,7 +64,8 @@ impl UnusedMut { let used_mutables = cx.tcx.used_mut_nodes.borrow(); for (_, v) in &mutables { if !v.iter().any(|e| used_mutables.contains(e)) { - cx.span_lint(UNUSED_MUT, cx.tcx.map.span(v[0]), + cx.span_lint(UNUSED_MUT, + cx.tcx.map.span(v[0]), "variable does not need to be mutable"); } } @@ -90,9 +95,13 @@ impl LateLintPass for UnusedMut { } } - fn check_fn(&mut self, cx: &LateContext, - _: FnKind, decl: &hir::FnDecl, - _: &hir::Block, _: Span, _: ast::NodeId) { + fn check_fn(&mut self, + cx: &LateContext, + _: FnKind, + decl: &hir::FnDecl, + _: &hir::Block, + _: Span, + _: ast::NodeId) { for a in &decl.inputs { self.check_unused_mut_pat(cx, slice::ref_slice(&a.pat)); } @@ -124,7 +133,7 @@ impl LateLintPass for UnusedResults { fn check_stmt(&mut self, cx: &LateContext, s: &hir::Stmt) { let expr = match s.node { hir::StmtSemi(ref expr, _) => &**expr, - _ => return + _ => return, }; if let hir::ExprRet(..) = expr.node { @@ -184,8 +193,8 @@ impl LateLintPass for UnusedUnsafe { if let hir::ExprBlock(ref blk) = e.node { // Don't warn about generated blocks, that'll just pollute the output. if blk.rules == hir::UnsafeBlock(hir::UserProvided) && - !cx.tcx.used_unsafe.borrow().contains(&blk.id) { - cx.span_lint(UNUSED_UNSAFE, blk.span, "unnecessary `unsafe` block"); + !cx.tcx.used_unsafe.borrow().contains(&blk.id) { + cx.span_lint(UNUSED_UNSAFE, blk.span, "unnecessary `unsafe` block"); } } } @@ -210,8 +219,7 @@ impl LateLintPass for PathStatements { fn check_stmt(&mut self, cx: &LateContext, s: &hir::Stmt) { if let hir::StmtSemi(ref expr, _) = s.node { if let hir::ExprPath(..) = expr.node { - cx.span_lint(PATH_STATEMENTS, s.span, - "path statement with no effect"); + cx.span_lint(PATH_STATEMENTS, s.span, "path statement with no effect"); } } } @@ -242,8 +250,8 @@ impl LateLintPass for UnusedAttributes { AttributeType::Whitelisted if attr.check_name(name) => { debug!("{:?} is Whitelisted", name); break; - }, - _ => () + } + _ => (), } } @@ -259,24 +267,22 @@ impl LateLintPass for UnusedAttributes { debug!("Emitting warning for: {:?}", attr); cx.span_lint(UNUSED_ATTRIBUTES, attr.span, "unused attribute"); // Is it a builtin attribute that must be used at the crate level? - let known_crate = KNOWN_ATTRIBUTES.iter().find(|&&(name, ty, _)| { - attr.name() == name && - ty == AttributeType::CrateLevel - }).is_some(); + let known_crate = KNOWN_ATTRIBUTES.iter() + .find(|&&(name, ty, _)| attr.name() == name && ty == AttributeType::CrateLevel) + .is_some(); // Has a plugin registered this attribute as one which must be used at // the crate level? 
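// Illustrative aside (not part of this patch; `crate_type` is assumed to be
// registered as a CrateLevel attribute): the known_crate/plugin_crate check
// above is what turns a misplaced crate-level attribute into the
// "add an exclamation mark" message built just below.
#[crate_type = "lib"]   // unused_attributes fires here, and because the
mod m {}                // attribute is crate-level, so does the
                        // "crate-level attribute should be an inner attribute:
                        //  add an exclamation mark: #![foo]" message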
let plugin_crate = plugin_attributes.iter() - .find(|&&(ref x, t)| { - &*attr.name() == x && - AttributeType::CrateLevel == t - }).is_some(); - if known_crate || plugin_crate { + .find(|&&(ref x, t)| &*attr.name() == x && AttributeType::CrateLevel == t) + .is_some(); + if known_crate || plugin_crate { let msg = match attr.node.style { - ast::AttrStyle::Outer => "crate-level attribute should be an inner \ - attribute: add an exclamation mark: #![foo]", - ast::AttrStyle::Inner => "crate-level attribute should be in the \ - root module", + ast::AttrStyle::Outer => { + "crate-level attribute should be an inner attribute: add an exclamation \ + mark: #![foo]" + } + ast::AttrStyle::Inner => "crate-level attribute should be in the root module", }; cx.span_lint(UNUSED_ATTRIBUTES, attr.span, msg); } @@ -296,12 +302,16 @@ declare_lint! { pub struct UnusedParens; impl UnusedParens { - fn check_unused_parens_core(&self, cx: &EarlyContext, value: &ast::Expr, msg: &str, + fn check_unused_parens_core(&self, + cx: &EarlyContext, + value: &ast::Expr, + msg: &str, struct_lit_needs_parens: bool) { if let ast::ExprKind::Paren(ref inner) = value.node { let necessary = struct_lit_needs_parens && contains_exterior_struct_lit(&inner); if !necessary { - cx.span_lint(UNUSED_PARENS, value.span, + cx.span_lint(UNUSED_PARENS, + value.span, &format!("unnecessary parentheses around {}", msg)) } } @@ -319,8 +329,7 @@ impl UnusedParens { ast::ExprKind::AssignOp(_, ref lhs, ref rhs) | ast::ExprKind::Binary(_, ref lhs, ref rhs) => { // X { y: 1 } + X { y: 2 } - contains_exterior_struct_lit(&lhs) || - contains_exterior_struct_lit(&rhs) + contains_exterior_struct_lit(&lhs) || contains_exterior_struct_lit(&rhs) } ast::ExprKind::Unary(_, ref x) | ast::ExprKind::Cast(ref x, _) | @@ -337,7 +346,7 @@ impl UnusedParens { contains_exterior_struct_lit(&exprs[0]) } - _ => false + _ => false, } } } @@ -363,18 +372,20 @@ impl EarlyLintPass for UnusedParens { Assign(_, ref value) => (value, "assigned value", false), AssignOp(.., ref value) => (value, "assigned value", false), InPlace(_, ref value) => (value, "emplacement value", false), - _ => return + _ => return, }; self.check_unused_parens_core(cx, &value, msg, struct_lit_needs_parens); } fn check_stmt(&mut self, cx: &EarlyContext, s: &ast::Stmt) { let (value, msg) = match s.node { - ast::StmtKind::Local(ref local) => match local.init { - Some(ref value) => (value, "assigned value"), - None => return - }, - _ => return + ast::StmtKind::Local(ref local) => { + match local.init { + Some(ref value) => (value, "assigned value"), + None => return, + } + } + _ => return, }; self.check_unused_parens_core(cx, &value, msg, false); } @@ -427,23 +438,24 @@ impl LateLintPass for UnusedAllocation { fn check_expr(&mut self, cx: &LateContext, e: &hir::Expr) { match e.node { hir::ExprBox(_) => {} - _ => return + _ => return, } if let Some(adjustment) = cx.tcx.tables.borrow().adjustments.get(&e.id) { - if let adjustment::AdjustDerefRef(adjustment::AutoDerefRef { - ref autoref, .. - }) = *adjustment { + if let adjustment::AdjustDerefRef(adjustment::AutoDerefRef { ref autoref, .. 
}) = + *adjustment { match autoref { &Some(adjustment::AutoPtr(_, hir::MutImmutable)) => { - cx.span_lint(UNUSED_ALLOCATION, e.span, + cx.span_lint(UNUSED_ALLOCATION, + e.span, "unnecessary allocation, use & instead"); } &Some(adjustment::AutoPtr(_, hir::MutMutable)) => { - cx.span_lint(UNUSED_ALLOCATION, e.span, + cx.span_lint(UNUSED_ALLOCATION, + e.span, "unnecessary allocation, use &mut instead"); } - _ => () + _ => (), } } } diff --git a/src/librustc_llvm/build.rs b/src/librustc_llvm/build.rs index ac83d860b6e90..35140d5ab4ae6 100644 --- a/src/librustc_llvm/build.rs +++ b/src/librustc_llvm/build.rs @@ -20,24 +20,23 @@ use build_helper::output; fn main() { println!("cargo:rustc-cfg=cargobuild"); - let target = env::var("TARGET").unwrap(); + let target = env::var("TARGET").expect("TARGET was not set"); let llvm_config = env::var_os("LLVM_CONFIG") - .map(PathBuf::from) - .unwrap_or_else(|| { - if let Some(dir) = env::var_os("CARGO_TARGET_DIR") - .map(PathBuf::from) { - let to_test = dir.parent() - .unwrap() - .parent() - .unwrap() - .join(&target) - .join("llvm/bin/llvm-config"); - if Command::new(&to_test).output().is_ok() { - return to_test; - } - } - PathBuf::from("llvm-config") - }); + .map(PathBuf::from) + .unwrap_or_else(|| { + if let Some(dir) = env::var_os("CARGO_TARGET_DIR").map(PathBuf::from) { + let to_test = dir.parent() + .unwrap() + .parent() + .unwrap() + .join(&target) + .join("llvm/bin/llvm-config"); + if Command::new(&to_test).output().is_ok() { + return to_test; + } + } + PathBuf::from("llvm-config") + }); println!("cargo:rerun-if-changed={}", llvm_config.display()); @@ -62,11 +61,12 @@ fn main() { // can't trust all the output of llvm-config becaues it might be targeted // for the host rather than the target. As a result a bunch of blocks below // are gated on `if !is_crossed` - let target = env::var("TARGET").unwrap(); - let host = env::var("HOST").unwrap(); + let target = env::var("TARGET").expect("TARGET was not set"); + let host = env::var("HOST").expect("HOST was not set"); let is_crossed = target != host; - let optional_components = ["x86", "arm", "aarch64", "mips", "powerpc", "pnacl", "systemz"]; + let optional_components = + ["x86", "arm", "aarch64", "mips", "powerpc", "pnacl", "systemz", "jsbackend"]; // FIXME: surely we don't need all these components, right? Stuff like mcjit // or interpreter the compiler itself never uses. 
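// Aside (illustrative, not part of this patch): the
// `cargo:rustc-cfg=cargobuild` line printed at the top of this build script
// enables a `cargobuild` cfg for the crate being compiled, which is what lets
// ffi.rs guard its empty `#[link]` block with `#[cfg(not(cargobuild))]` later
// in this diff. Downstream code consumes it like any other cfg:
#[cfg(cargobuild)]
fn llvm_linked_by_build_script() {}

#[cfg(not(cargobuild))]
fn llvm_linked_by_makefiles() {}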
@@ -148,7 +148,7 @@ fn main() { // that off lib.trim_right_matches(".lib") } else { - continue + continue; }; // Don't need or want this library, but LLVM's CMake build system @@ -157,7 +157,7 @@ fn main() { // library and it otherwise may just pull in extra dependencies on // libedit which we don't want if name == "LLVMLineEditor" { - continue + continue; } let kind = if name.starts_with("LLVM") { @@ -178,7 +178,7 @@ fn main() { cmd.arg("--ldflags"); for lib in output(&mut cmd).split_whitespace() { if lib.starts_with("-LIBPATH:") { - println!("cargo:rustc-link-search=native={}", &lib[9..]); + println!("cargo:rustc-link-search=native={}", &lib[9..]); } else if is_crossed { if lib.starts_with("-L") { println!("cargo:rustc-link-search=native={}", diff --git a/src/librustc_llvm/diagnostic.rs b/src/librustc_llvm/diagnostic.rs index 8520ae1df60dd..8767f03b3e756 100644 --- a/src/librustc_llvm/diagnostic.rs +++ b/src/librustc_llvm/diagnostic.rs @@ -33,8 +33,7 @@ pub enum OptimizationDiagnosticKind { impl OptimizationDiagnosticKind { pub fn describe(self) -> &'static str { match self { - OptimizationRemark | - OptimizationRemarkOther => "remark", + OptimizationRemark | OptimizationRemarkOther => "remark", OptimizationMissed => "missed", OptimizationAnalysis => "analysis", OptimizationAnalysisFPCommute => "floating-point", @@ -130,18 +129,14 @@ impl Diagnostic { Optimization(OptimizationDiagnostic::unpack(OptimizationAnalysis, di)) } - Dk::OptimizationRemarkAnalysisFPCommute => { - Optimization(OptimizationDiagnostic::unpack( - OptimizationAnalysisFPCommute, di)) + Optimization(OptimizationDiagnostic::unpack(OptimizationAnalysisFPCommute, di)) } Dk::OptimizationRemarkAnalysisAliasing => { - Optimization(OptimizationDiagnostic::unpack( - OptimizationAnalysisAliasing, di)) + Optimization(OptimizationDiagnostic::unpack(OptimizationAnalysisAliasing, di)) } - Dk::OptimizationFailure => { Optimization(OptimizationDiagnostic::unpack(OptimizationFailure, di)) } diff --git a/src/librustc_llvm/ffi.rs b/src/librustc_llvm/ffi.rs index 50c68d5e75eef..78a9d67ed7700 100644 --- a/src/librustc_llvm/ffi.rs +++ b/src/librustc_llvm/ffi.rs @@ -8,11 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use debuginfo::{DIBuilderRef, DIDescriptor, - DIFile, DILexicalBlock, DISubprogram, DIType, - DIBasicType, DIDerivedType, DICompositeType, DIScope, - DIVariable, DIGlobalVariable, DIArray, DISubrange, - DITemplateTypeParameter, DIEnumerator, DINameSpace}; +use debuginfo::{DIBuilderRef, DIDescriptor, DIFile, DILexicalBlock, DISubprogram, DIType, + DIBasicType, DIDerivedType, DICompositeType, DIScope, DIVariable, + DIGlobalVariable, DIArray, DISubrange, DITemplateTypeParameter, DIEnumerator, + DINameSpace}; use libc::{c_uint, c_int, size_t, c_char}; use libc::{c_longlong, c_ulonglong, c_void}; @@ -44,7 +43,7 @@ pub enum CallConv { X86FastcallCallConv = 65, X86_64_SysV = 78, X86_64_Win64 = 79, - X86_VectorCall = 80 + X86_VectorCall = 80, } /// LLVMRustLinkage @@ -78,9 +77,9 @@ pub enum DiagnosticSeverity { #[derive(Copy, Clone)] #[repr(C)] pub enum DLLStorageClass { - Default = 0, - DllImport = 1, /* Function to be imported from DLL. */ - DllExport = 2, /* Function to be accessible from DLL. */ + Default = 0, + DllImport = 1, // Function to be imported from DLL. + DllExport = 2, // Function to be accessible from DLL. } bitflags! 
{ @@ -180,23 +179,23 @@ pub enum RealPredicate { #[derive(Copy, Clone, PartialEq, Debug)] #[repr(C)] pub enum TypeKind { - Void = 0, - Half = 1, - Float = 2, - Double = 3, - X86_FP80 = 4, - FP128 = 5, + Void = 0, + Half = 1, + Float = 2, + Double = 3, + X86_FP80 = 4, + FP128 = 5, PPC_FP128 = 6, - Label = 7, - Integer = 8, - Function = 9, - Struct = 10, - Array = 11, - Pointer = 12, - Vector = 13, - Metadata = 14, - X86_MMX = 15, - Token = 16, + Label = 7, + Integer = 8, + Function = 9, + Struct = 10, + Array = 11, + Pointer = 12, + Vector = 13, + Metadata = 14, + X86_MMX = 15, + Token = 16, } /// LLVMAtomicRmwBinOp @@ -204,14 +203,14 @@ pub enum TypeKind { #[repr(C)] pub enum AtomicRmwBinOp { AtomicXchg = 0, - AtomicAdd = 1, - AtomicSub = 2, - AtomicAnd = 3, + AtomicAdd = 1, + AtomicSub = 2, + AtomicAnd = 3, AtomicNand = 4, - AtomicOr = 5, - AtomicXor = 6, - AtomicMax = 7, - AtomicMin = 8, + AtomicOr = 5, + AtomicXor = 6, + AtomicMax = 7, + AtomicMin = 8, AtomicUMax = 9, AtomicUMin = 10, } @@ -227,7 +226,7 @@ pub enum AtomicOrdering { Acquire = 4, Release = 5, AcquireRelease = 6, - SequentiallyConsistent = 7 + SequentiallyConsistent = 7, } /// LLVMRustSynchronizationScope @@ -429,7 +428,7 @@ pub type InlineAsmDiagHandler = unsafe extern "C" fn(SMDiagnosticRef, *const c_v pub mod debuginfo { pub use self::DIDescriptorFlags::*; - use super::{MetadataRef}; + use super::MetadataRef; #[allow(missing_copy_implementations)] pub enum DIBuilder_opaque {} @@ -455,22 +454,22 @@ pub mod debuginfo { #[derive(Copy, Clone)] pub enum DIDescriptorFlags { - FlagPrivate = 1 << 0, - FlagProtected = 1 << 1, - FlagFwdDecl = 1 << 2, - FlagAppleBlock = 1 << 3, - FlagBlockByrefStruct = 1 << 4, - FlagVirtual = 1 << 5, - FlagArtificial = 1 << 6, - FlagExplicit = 1 << 7, - FlagPrototyped = 1 << 8, - FlagObjcClassComplete = 1 << 9, - FlagObjectPointer = 1 << 10, - FlagVector = 1 << 11, - FlagStaticMember = 1 << 12, - FlagIndirectVariable = 1 << 13, - FlagLValueReference = 1 << 14, - FlagRValueReference = 1 << 15 + FlagPrivate = 1 << 0, + FlagProtected = 1 << 1, + FlagFwdDecl = 1 << 2, + FlagAppleBlock = 1 << 3, + FlagBlockByrefStruct = 1 << 4, + FlagVirtual = 1 << 5, + FlagArtificial = 1 << 6, + FlagExplicit = 1 << 7, + FlagPrototyped = 1 << 8, + FlagObjcClassComplete = 1 << 9, + FlagObjectPointer = 1 << 10, + FlagVector = 1 << 11, + FlagStaticMember = 1 << 12, + FlagIndirectVariable = 1 << 13, + FlagLValueReference = 1 << 14, + FlagRValueReference = 1 << 15, } } @@ -487,22 +486,17 @@ pub mod debuginfo { // set of the libraries we need to link to LLVM for. #[link(name = "rustllvm", kind = "static")] #[cfg(not(cargobuild))] -extern {} +extern "C" {} #[linked_from = "rustllvm"] // not quite true but good enough -extern { - /* Create and destroy contexts. */ +extern "C" { + // Create and destroy contexts. pub fn LLVMContextCreate() -> ContextRef; pub fn LLVMContextDispose(C: ContextRef); - pub fn LLVMGetMDKindIDInContext(C: ContextRef, - Name: *const c_char, - SLen: c_uint) - -> c_uint; + pub fn LLVMGetMDKindIDInContext(C: ContextRef, Name: *const c_char, SLen: c_uint) -> c_uint; - /* Create and destroy modules. */ - pub fn LLVMModuleCreateWithNameInContext(ModuleID: *const c_char, - C: ContextRef) - -> ModuleRef; + // Create and destroy modules. 
+ pub fn LLVMModuleCreateWithNameInContext(ModuleID: *const c_char, C: ContextRef) -> ModuleRef; pub fn LLVMGetModuleContext(M: ModuleRef) -> ContextRef; pub fn LLVMCloneModule(M: ModuleRef) -> ModuleRef; pub fn LLVMDisposeModule(M: ModuleRef); @@ -527,25 +521,24 @@ extern { /// See llvm::LLVMType::getContext. pub fn LLVMGetTypeContext(Ty: TypeRef) -> ContextRef; - /* Operations on integer types */ + // Operations on integer types pub fn LLVMInt1TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMInt8TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMInt16TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMInt32TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMInt64TypeInContext(C: ContextRef) -> TypeRef; - pub fn LLVMIntTypeInContext(C: ContextRef, NumBits: c_uint) - -> TypeRef; + pub fn LLVMIntTypeInContext(C: ContextRef, NumBits: c_uint) -> TypeRef; pub fn LLVMGetIntTypeWidth(IntegerTy: TypeRef) -> c_uint; - /* Operations on real types */ + // Operations on real types pub fn LLVMFloatTypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMDoubleTypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMX86FP80TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMFP128TypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMPPCFP128TypeInContext(C: ContextRef) -> TypeRef; - /* Operations on function types */ + // Operations on function types pub fn LLVMFunctionType(ReturnType: TypeRef, ParamTypes: *const TypeRef, ParamCount: c_uint, @@ -556,37 +549,33 @@ extern { pub fn LLVMCountParamTypes(FunctionTy: TypeRef) -> c_uint; pub fn LLVMGetParamTypes(FunctionTy: TypeRef, Dest: *mut TypeRef); - /* Operations on struct types */ + // Operations on struct types pub fn LLVMStructTypeInContext(C: ContextRef, ElementTypes: *const TypeRef, ElementCount: c_uint, Packed: Bool) -> TypeRef; pub fn LLVMCountStructElementTypes(StructTy: TypeRef) -> c_uint; - pub fn LLVMGetStructElementTypes(StructTy: TypeRef, - Dest: *mut TypeRef); + pub fn LLVMGetStructElementTypes(StructTy: TypeRef, Dest: *mut TypeRef); pub fn LLVMIsPackedStruct(StructTy: TypeRef) -> Bool; - /* Operations on array, pointer, and vector types (sequence types) */ + // Operations on array, pointer, and vector types (sequence types) pub fn LLVMRustArrayType(ElementType: TypeRef, ElementCount: u64) -> TypeRef; - pub fn LLVMPointerType(ElementType: TypeRef, AddressSpace: c_uint) - -> TypeRef; - pub fn LLVMVectorType(ElementType: TypeRef, ElementCount: c_uint) - -> TypeRef; + pub fn LLVMPointerType(ElementType: TypeRef, AddressSpace: c_uint) -> TypeRef; + pub fn LLVMVectorType(ElementType: TypeRef, ElementCount: c_uint) -> TypeRef; pub fn LLVMGetElementType(Ty: TypeRef) -> TypeRef; pub fn LLVMGetArrayLength(ArrayTy: TypeRef) -> c_uint; pub fn LLVMGetPointerAddressSpace(PointerTy: TypeRef) -> c_uint; - pub fn LLVMGetPointerToGlobal(EE: ExecutionEngineRef, V: ValueRef) - -> *const c_void; + pub fn LLVMGetPointerToGlobal(EE: ExecutionEngineRef, V: ValueRef) -> *const c_void; pub fn LLVMGetVectorSize(VectorTy: TypeRef) -> c_uint; - /* Operations on other types */ + // Operations on other types pub fn LLVMVoidTypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMLabelTypeInContext(C: ContextRef) -> TypeRef; pub fn LLVMRustMetadataTypeInContext(C: ContextRef) -> TypeRef; - /* Operations on all values */ + // Operations on all values pub fn LLVMTypeOf(Val: ValueRef) -> TypeRef; pub fn LLVMGetValueName(Val: ValueRef) -> *const c_char; pub fn LLVMSetValueName(Val: ValueRef, Name: *const c_char); @@ -594,58 +583,45 @@ extern { pub fn LLVMReplaceAllUsesWith(OldVal: 
ValueRef, NewVal: ValueRef); pub fn LLVMSetMetadata(Val: ValueRef, KindID: c_uint, Node: ValueRef); - /* Operations on Uses */ + // Operations on Uses pub fn LLVMGetFirstUse(Val: ValueRef) -> UseRef; pub fn LLVMGetNextUse(U: UseRef) -> UseRef; pub fn LLVMGetUser(U: UseRef) -> ValueRef; pub fn LLVMGetUsedValue(U: UseRef) -> ValueRef; - /* Operations on Users */ + // Operations on Users pub fn LLVMGetNumOperands(Val: ValueRef) -> c_int; pub fn LLVMGetOperand(Val: ValueRef, Index: c_uint) -> ValueRef; pub fn LLVMSetOperand(Val: ValueRef, Index: c_uint, Op: ValueRef); - /* Operations on constants of any type */ + // Operations on constants of any type pub fn LLVMConstNull(Ty: TypeRef) -> ValueRef; - /* all zeroes */ + // all zeroes pub fn LLVMConstAllOnes(Ty: TypeRef) -> ValueRef; - pub fn LLVMConstICmp(Pred: IntPredicate, V1: ValueRef, V2: ValueRef) - -> ValueRef; - pub fn LLVMConstFCmp(Pred: RealPredicate, V1: ValueRef, V2: ValueRef) - -> ValueRef; - /* only for isize/vector */ + pub fn LLVMConstICmp(Pred: IntPredicate, V1: ValueRef, V2: ValueRef) -> ValueRef; + pub fn LLVMConstFCmp(Pred: RealPredicate, V1: ValueRef, V2: ValueRef) -> ValueRef; + // only for isize/vector pub fn LLVMGetUndef(Ty: TypeRef) -> ValueRef; pub fn LLVMIsConstant(Val: ValueRef) -> Bool; pub fn LLVMIsNull(Val: ValueRef) -> Bool; pub fn LLVMIsUndef(Val: ValueRef) -> Bool; pub fn LLVMConstPointerNull(Ty: TypeRef) -> ValueRef; - /* Operations on metadata */ - pub fn LLVMMDStringInContext(C: ContextRef, - Str: *const c_char, - SLen: c_uint) - -> ValueRef; - pub fn LLVMMDNodeInContext(C: ContextRef, - Vals: *const ValueRef, - Count: c_uint) - -> ValueRef; - pub fn LLVMAddNamedMetadataOperand(M: ModuleRef, - Str: *const c_char, - Val: ValueRef); + // Operations on metadata + pub fn LLVMMDStringInContext(C: ContextRef, Str: *const c_char, SLen: c_uint) -> ValueRef; + pub fn LLVMMDNodeInContext(C: ContextRef, Vals: *const ValueRef, Count: c_uint) -> ValueRef; + pub fn LLVMAddNamedMetadataOperand(M: ModuleRef, Str: *const c_char, Val: ValueRef); - /* Operations on scalar constants */ - pub fn LLVMConstInt(IntTy: TypeRef, N: c_ulonglong, SignExtend: Bool) - -> ValueRef; - pub fn LLVMConstIntOfString(IntTy: TypeRef, Text: *const c_char, Radix: u8) - -> ValueRef; + // Operations on scalar constants + pub fn LLVMConstInt(IntTy: TypeRef, N: c_ulonglong, SignExtend: Bool) -> ValueRef; + pub fn LLVMConstIntOfString(IntTy: TypeRef, Text: *const c_char, Radix: u8) -> ValueRef; pub fn LLVMConstIntOfStringAndSize(IntTy: TypeRef, Text: *const c_char, SLen: c_uint, Radix: u8) -> ValueRef; pub fn LLVMConstReal(RealTy: TypeRef, N: f64) -> ValueRef; - pub fn LLVMConstRealOfString(RealTy: TypeRef, Text: *const c_char) - -> ValueRef; + pub fn LLVMConstRealOfString(RealTy: TypeRef, Text: *const c_char) -> ValueRef; pub fn LLVMConstRealOfStringAndSize(RealTy: TypeRef, Text: *const c_char, SLen: c_uint) @@ -654,7 +630,7 @@ extern { pub fn LLVMConstIntGetSExtValue(ConstantVal: ValueRef) -> c_longlong; - /* Operations on composite constants */ + // Operations on composite constants pub fn LLVMConstStringInContext(C: ContextRef, Str: *const c_char, Length: c_uint, @@ -670,10 +646,9 @@ extern { ConstantVals: *const ValueRef, Length: c_uint) -> ValueRef; - pub fn LLVMConstVector(ScalarConstantVals: *const ValueRef, Size: c_uint) - -> ValueRef; + pub fn LLVMConstVector(ScalarConstantVals: *const ValueRef, Size: c_uint) -> ValueRef; - /* Constant expressions */ + // Constant expressions pub fn LLVMAlignOf(Ty: TypeRef) -> ValueRef; pub fn LLVMSizeOf(Ty: 
TypeRef) -> ValueRef; pub fn LLVMConstNeg(ConstantVal: ValueRef) -> ValueRef; @@ -681,57 +656,31 @@ extern { pub fn LLVMConstNUWNeg(ConstantVal: ValueRef) -> ValueRef; pub fn LLVMConstFNeg(ConstantVal: ValueRef) -> ValueRef; pub fn LLVMConstNot(ConstantVal: ValueRef) -> ValueRef; - pub fn LLVMConstAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstNSWAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstNUWAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstFAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstSub(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstNSWSub(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstNUWSub(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstFSub(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstMul(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstNSWMul(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstNUWMul(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstFMul(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstUDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstSDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstExactSDiv(LHSConstant: ValueRef, - RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstFDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstURem(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstSRem(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstFRem(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstAnd(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstOr(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstXor(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstShl(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstLShr(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; - pub fn LLVMConstAShr(LHSConstant: ValueRef, RHSConstant: ValueRef) - -> ValueRef; + pub fn LLVMConstAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstNSWAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstNUWAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstFAdd(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstSub(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstNSWSub(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstNUWSub(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstFSub(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstMul(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstNSWMul(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstNUWMul(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstFMul(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstUDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstSDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub 
fn LLVMConstExactSDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstFDiv(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstURem(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstSRem(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstFRem(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstAnd(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstOr(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstXor(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstShl(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstLShr(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; + pub fn LLVMConstAShr(LHSConstant: ValueRef, RHSConstant: ValueRef) -> ValueRef; pub fn LLVMConstGEP(ConstantVal: ValueRef, ConstantIndices: *const ValueRef, NumIndices: c_uint) @@ -740,51 +689,29 @@ extern { ConstantIndices: *const ValueRef, NumIndices: c_uint) -> ValueRef; - pub fn LLVMConstTrunc(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstSExt(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstZExt(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstFPTrunc(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstFPExt(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstUIToFP(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstSIToFP(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstFPToUI(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstFPToSI(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstPtrToInt(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstIntToPtr(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstBitCast(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstZExtOrBitCast(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstSExtOrBitCast(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstTruncOrBitCast(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstPointerCast(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; - pub fn LLVMConstIntCast(ConstantVal: ValueRef, - ToType: TypeRef, - isSigned: Bool) - -> ValueRef; - pub fn LLVMConstFPCast(ConstantVal: ValueRef, ToType: TypeRef) - -> ValueRef; + pub fn LLVMConstTrunc(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstSExt(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstZExt(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstFPTrunc(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstFPExt(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstUIToFP(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstSIToFP(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstFPToUI(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstFPToSI(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstPtrToInt(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstIntToPtr(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstBitCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn 
LLVMConstZExtOrBitCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstSExtOrBitCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstTruncOrBitCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstPointerCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; + pub fn LLVMConstIntCast(ConstantVal: ValueRef, ToType: TypeRef, isSigned: Bool) -> ValueRef; + pub fn LLVMConstFPCast(ConstantVal: ValueRef, ToType: TypeRef) -> ValueRef; pub fn LLVMConstSelect(ConstantCondition: ValueRef, ConstantIfTrue: ValueRef, ConstantIfFalse: ValueRef) -> ValueRef; - pub fn LLVMConstExtractElement(VectorConstant: ValueRef, - IndexConstant: ValueRef) - -> ValueRef; + pub fn LLVMConstExtractElement(VectorConstant: ValueRef, IndexConstant: ValueRef) -> ValueRef; pub fn LLVMConstInsertElement(VectorConstant: ValueRef, ElementValueConstant: ValueRef, IndexConstant: ValueRef) @@ -812,7 +739,7 @@ extern { - /* Operations on global variables, functions, and aliases (globals) */ + // Operations on global variables, functions, and aliases (globals) pub fn LLVMGetGlobalParent(Global: ValueRef) -> ModuleRef; pub fn LLVMIsDeclaration(Global: ValueRef) -> Bool; pub fn LLVMRustGetLinkage(Global: ValueRef) -> Linkage; @@ -823,52 +750,41 @@ extern { pub fn LLVMSetVisibility(Global: ValueRef, Viz: c_uint); pub fn LLVMGetAlignment(Global: ValueRef) -> c_uint; pub fn LLVMSetAlignment(Global: ValueRef, Bytes: c_uint); - pub fn LLVMSetDLLStorageClass(V: ValueRef, - C: DLLStorageClass); + pub fn LLVMSetDLLStorageClass(V: ValueRef, C: DLLStorageClass); - /* Operations on global variables */ + // Operations on global variables pub fn LLVMIsAGlobalVariable(GlobalVar: ValueRef) -> ValueRef; - pub fn LLVMAddGlobal(M: ModuleRef, Ty: TypeRef, Name: *const c_char) - -> ValueRef; + pub fn LLVMAddGlobal(M: ModuleRef, Ty: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMAddGlobalInAddressSpace(M: ModuleRef, Ty: TypeRef, Name: *const c_char, AddressSpace: c_uint) -> ValueRef; - pub fn LLVMGetNamedGlobal(M: ModuleRef, - Name: *const c_char) - -> ValueRef; - pub fn LLVMRustGetOrInsertGlobal(M: ModuleRef, - Name: *const c_char, - T: TypeRef) - -> ValueRef; + pub fn LLVMGetNamedGlobal(M: ModuleRef, Name: *const c_char) -> ValueRef; + pub fn LLVMRustGetOrInsertGlobal(M: ModuleRef, Name: *const c_char, T: TypeRef) -> ValueRef; pub fn LLVMGetFirstGlobal(M: ModuleRef) -> ValueRef; pub fn LLVMGetLastGlobal(M: ModuleRef) -> ValueRef; pub fn LLVMGetNextGlobal(GlobalVar: ValueRef) -> ValueRef; pub fn LLVMGetPreviousGlobal(GlobalVar: ValueRef) -> ValueRef; pub fn LLVMDeleteGlobal(GlobalVar: ValueRef); pub fn LLVMGetInitializer(GlobalVar: ValueRef) -> ValueRef; - pub fn LLVMSetInitializer(GlobalVar: ValueRef, - ConstantVal: ValueRef); + pub fn LLVMSetInitializer(GlobalVar: ValueRef, ConstantVal: ValueRef); pub fn LLVMIsThreadLocal(GlobalVar: ValueRef) -> Bool; pub fn LLVMSetThreadLocal(GlobalVar: ValueRef, IsThreadLocal: Bool); pub fn LLVMIsGlobalConstant(GlobalVar: ValueRef) -> Bool; pub fn LLVMSetGlobalConstant(GlobalVar: ValueRef, IsConstant: Bool); pub fn LLVMRustGetNamedValue(M: ModuleRef, Name: *const c_char) -> ValueRef; - /* Operations on aliases */ + // Operations on aliases pub fn LLVMAddAlias(M: ModuleRef, Ty: TypeRef, Aliasee: ValueRef, Name: *const c_char) -> ValueRef; - /* Operations on functions */ - pub fn LLVMAddFunction(M: ModuleRef, - Name: *const c_char, - FunctionTy: TypeRef) - -> ValueRef; + // Operations on functions + pub fn LLVMAddFunction(M: ModuleRef, Name: 
*const c_char, FunctionTy: TypeRef) -> ValueRef; pub fn LLVMGetNamedFunction(M: ModuleRef, Name: *const c_char) -> ValueRef; pub fn LLVMGetFirstFunction(M: ModuleRef) -> ValueRef; pub fn LLVMGetLastFunction(M: ModuleRef) -> ValueRef; @@ -887,19 +803,16 @@ extern { pub fn LLVMRustAddDereferenceableAttr(Fn: ValueRef, index: c_uint, bytes: u64); pub fn LLVMRustAddFunctionAttribute(Fn: ValueRef, index: c_uint, PA: u64); pub fn LLVMRustAddFunctionAttrString(Fn: ValueRef, index: c_uint, Name: *const c_char); - pub fn LLVMRustAddFunctionAttrStringValue(Fn: ValueRef, index: c_uint, + pub fn LLVMRustAddFunctionAttrStringValue(Fn: ValueRef, + index: c_uint, Name: *const c_char, Value: *const c_char); - pub fn LLVMRustRemoveFunctionAttributes(Fn: ValueRef, - index: c_uint, - attr: u64); - pub fn LLVMRustRemoveFunctionAttrString(Fn: ValueRef, - index: c_uint, - Name: *const c_char); + pub fn LLVMRustRemoveFunctionAttributes(Fn: ValueRef, index: c_uint, attr: u64); + pub fn LLVMRustRemoveFunctionAttrString(Fn: ValueRef, index: c_uint, Name: *const c_char); pub fn LLVMGetFunctionAttr(Fn: ValueRef) -> c_uint; pub fn LLVMRemoveFunctionAttr(Fn: ValueRef, val: c_uint); - /* Operations on parameters */ + // Operations on parameters pub fn LLVMCountParams(Fn: ValueRef) -> c_uint; pub fn LLVMGetParams(Fn: ValueRef, Params: *const ValueRef); pub fn LLVMGetParam(Fn: ValueRef, Index: c_uint) -> ValueRef; @@ -913,7 +826,7 @@ extern { pub fn LLVMGetAttribute(Arg: ValueRef) -> c_uint; pub fn LLVMSetParamAlignment(Arg: ValueRef, align: c_uint); - /* Operations on basic blocks */ + // Operations on basic blocks pub fn LLVMBasicBlockAsValue(BB: BasicBlockRef) -> ValueRef; pub fn LLVMValueIsBasicBlock(Val: ValueRef) -> Bool; pub fn LLVMValueAsBasicBlock(Val: ValueRef) -> BasicBlockRef; @@ -936,13 +849,11 @@ extern { -> BasicBlockRef; pub fn LLVMDeleteBasicBlock(BB: BasicBlockRef); - pub fn LLVMMoveBasicBlockAfter(BB: BasicBlockRef, - MoveAfter: BasicBlockRef); + pub fn LLVMMoveBasicBlockAfter(BB: BasicBlockRef, MoveAfter: BasicBlockRef); - pub fn LLVMMoveBasicBlockBefore(BB: BasicBlockRef, - MoveBefore: BasicBlockRef); + pub fn LLVMMoveBasicBlockBefore(BB: BasicBlockRef, MoveBefore: BasicBlockRef); - /* Operations on instructions */ + // Operations on instructions pub fn LLVMGetInstructionParent(Inst: ValueRef) -> BasicBlockRef; pub fn LLVMGetFirstInstruction(BB: BasicBlockRef) -> ValueRef; pub fn LLVMGetLastInstruction(BB: BasicBlockRef) -> ValueRef; @@ -950,53 +861,37 @@ extern { pub fn LLVMGetPreviousInstruction(Inst: ValueRef) -> ValueRef; pub fn LLVMInstructionEraseFromParent(Inst: ValueRef); - /* Operations on call sites */ + // Operations on call sites pub fn LLVMSetInstructionCallConv(Instr: ValueRef, CC: c_uint); pub fn LLVMGetInstructionCallConv(Instr: ValueRef) -> c_uint; - pub fn LLVMAddInstrAttribute(Instr: ValueRef, - index: c_uint, - IA: c_uint); - pub fn LLVMRemoveInstrAttribute(Instr: ValueRef, - index: c_uint, - IA: c_uint); - pub fn LLVMSetInstrParamAlignment(Instr: ValueRef, - index: c_uint, - align: c_uint); - pub fn LLVMRustAddCallSiteAttribute(Instr: ValueRef, - index: c_uint, - Val: u64); - pub fn LLVMRustAddDereferenceableCallSiteAttr(Instr: ValueRef, - index: c_uint, - bytes: u64); - - /* Operations on call instructions (only) */ + pub fn LLVMAddInstrAttribute(Instr: ValueRef, index: c_uint, IA: c_uint); + pub fn LLVMRemoveInstrAttribute(Instr: ValueRef, index: c_uint, IA: c_uint); + pub fn LLVMSetInstrParamAlignment(Instr: ValueRef, index: c_uint, align: c_uint); + pub fn 
LLVMRustAddCallSiteAttribute(Instr: ValueRef, index: c_uint, Val: u64); + pub fn LLVMRustAddDereferenceableCallSiteAttr(Instr: ValueRef, index: c_uint, bytes: u64); + + // Operations on call instructions (only) pub fn LLVMIsTailCall(CallInst: ValueRef) -> Bool; pub fn LLVMSetTailCall(CallInst: ValueRef, IsTailCall: Bool); - /* Operations on load/store instructions (only) */ + // Operations on load/store instructions (only) pub fn LLVMGetVolatile(MemoryAccessInst: ValueRef) -> Bool; pub fn LLVMSetVolatile(MemoryAccessInst: ValueRef, volatile: Bool); - /* Operations on phi nodes */ + // Operations on phi nodes pub fn LLVMAddIncoming(PhiNode: ValueRef, IncomingValues: *const ValueRef, IncomingBlocks: *const BasicBlockRef, Count: c_uint); pub fn LLVMCountIncoming(PhiNode: ValueRef) -> c_uint; - pub fn LLVMGetIncomingValue(PhiNode: ValueRef, Index: c_uint) - -> ValueRef; - pub fn LLVMGetIncomingBlock(PhiNode: ValueRef, Index: c_uint) - -> BasicBlockRef; + pub fn LLVMGetIncomingValue(PhiNode: ValueRef, Index: c_uint) -> ValueRef; + pub fn LLVMGetIncomingBlock(PhiNode: ValueRef, Index: c_uint) -> BasicBlockRef; - /* Instruction builders */ + // Instruction builders pub fn LLVMCreateBuilderInContext(C: ContextRef) -> BuilderRef; - pub fn LLVMPositionBuilder(Builder: BuilderRef, - Block: BasicBlockRef, - Instr: ValueRef); - pub fn LLVMPositionBuilderBefore(Builder: BuilderRef, - Instr: ValueRef); - pub fn LLVMPositionBuilderAtEnd(Builder: BuilderRef, - Block: BasicBlockRef); + pub fn LLVMPositionBuilder(Builder: BuilderRef, Block: BasicBlockRef, Instr: ValueRef); + pub fn LLVMPositionBuilderBefore(Builder: BuilderRef, Instr: ValueRef); + pub fn LLVMPositionBuilderAtEnd(Builder: BuilderRef, Block: BasicBlockRef); pub fn LLVMGetInsertBlock(Builder: BuilderRef) -> BasicBlockRef; pub fn LLVMClearInsertionPosition(Builder: BuilderRef); pub fn LLVMInsertIntoBuilder(Builder: BuilderRef, Instr: ValueRef); @@ -1005,18 +900,15 @@ extern { Name: *const c_char); pub fn LLVMDisposeBuilder(Builder: BuilderRef); - /* Metadata */ + // Metadata pub fn LLVMSetCurrentDebugLocation(Builder: BuilderRef, L: ValueRef); pub fn LLVMGetCurrentDebugLocation(Builder: BuilderRef) -> ValueRef; pub fn LLVMSetInstDebugLocation(Builder: BuilderRef, Inst: ValueRef); - /* Terminators */ + // Terminators pub fn LLVMBuildRetVoid(B: BuilderRef) -> ValueRef; pub fn LLVMBuildRet(B: BuilderRef, V: ValueRef) -> ValueRef; - pub fn LLVMBuildAggregateRet(B: BuilderRef, - RetVals: *const ValueRef, - N: c_uint) - -> ValueRef; + pub fn LLVMBuildAggregateRet(B: BuilderRef, RetVals: *const ValueRef, N: c_uint) -> ValueRef; pub fn LLVMBuildBr(B: BuilderRef, Dest: BasicBlockRef) -> ValueRef; pub fn LLVMBuildCondBr(B: BuilderRef, If: ValueRef, @@ -1028,10 +920,7 @@ extern { Else: BasicBlockRef, NumCases: c_uint) -> ValueRef; - pub fn LLVMBuildIndirectBr(B: BuilderRef, - Addr: ValueRef, - NumDests: c_uint) - -> ValueRef; + pub fn LLVMBuildIndirectBr(B: BuilderRef, Addr: ValueRef, NumDests: c_uint) -> ValueRef; pub fn LLVMRustBuildInvoke(B: BuilderRef, Fn: ValueRef, Args: *const ValueRef, @@ -1055,42 +944,41 @@ extern { ParentPad: ValueRef, ArgCnt: c_uint, Args: *const ValueRef, - Name: *const c_char) -> ValueRef; + Name: *const c_char) + -> ValueRef; pub fn LLVMRustBuildCleanupRet(B: BuilderRef, CleanupPad: ValueRef, - UnwindBB: BasicBlockRef) -> ValueRef; + UnwindBB: BasicBlockRef) + -> ValueRef; pub fn LLVMRustBuildCatchPad(B: BuilderRef, ParentPad: ValueRef, ArgCnt: c_uint, Args: *const ValueRef, - Name: *const c_char) -> ValueRef; - pub fn 
LLVMRustBuildCatchRet(B: BuilderRef, - Pad: ValueRef, - BB: BasicBlockRef) -> ValueRef; + Name: *const c_char) + -> ValueRef; + pub fn LLVMRustBuildCatchRet(B: BuilderRef, Pad: ValueRef, BB: BasicBlockRef) -> ValueRef; pub fn LLVMRustBuildCatchSwitch(Builder: BuilderRef, ParentPad: ValueRef, BB: BasicBlockRef, NumHandlers: c_uint, - Name: *const c_char) -> ValueRef; - pub fn LLVMRustAddHandler(CatchSwitch: ValueRef, - Handler: BasicBlockRef); + Name: *const c_char) + -> ValueRef; + pub fn LLVMRustAddHandler(CatchSwitch: ValueRef, Handler: BasicBlockRef); pub fn LLVMRustSetPersonalityFn(B: BuilderRef, Pers: ValueRef); - /* Add a case to the switch instruction */ - pub fn LLVMAddCase(Switch: ValueRef, - OnVal: ValueRef, - Dest: BasicBlockRef); + // Add a case to the switch instruction + pub fn LLVMAddCase(Switch: ValueRef, OnVal: ValueRef, Dest: BasicBlockRef); - /* Add a destination to the indirectbr instruction */ + // Add a destination to the indirectbr instruction pub fn LLVMAddDestination(IndirectBr: ValueRef, Dest: BasicBlockRef); - /* Add a clause to the landing pad instruction */ + // Add a clause to the landing pad instruction pub fn LLVMAddClause(LandingPad: ValueRef, ClauseVal: ValueRef); - /* Set the cleanup on a landing pad instruction */ + // Set the cleanup on a landing pad instruction pub fn LLVMSetCleanup(LandingPad: ValueRef, Val: Bool); - /* Arithmetic */ + // Arithmetic pub fn LLVMBuildAdd(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, @@ -1210,7 +1098,7 @@ extern { LHS: ValueRef, RHS: ValueRef, Name: *const c_char) - -> ValueRef; + -> ValueRef; pub fn LLVMBuildXor(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, @@ -1222,29 +1110,19 @@ extern { RHS: ValueRef, Name: *const c_char) -> ValueRef; - pub fn LLVMBuildNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) - -> ValueRef; - pub fn LLVMBuildNSWNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) - -> ValueRef; - pub fn LLVMBuildNUWNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) - -> ValueRef; - pub fn LLVMBuildFNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) - -> ValueRef; - pub fn LLVMBuildNot(B: BuilderRef, V: ValueRef, Name: *const c_char) - -> ValueRef; + pub fn LLVMBuildNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) -> ValueRef; + pub fn LLVMBuildNSWNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) -> ValueRef; + pub fn LLVMBuildNUWNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) -> ValueRef; + pub fn LLVMBuildFNeg(B: BuilderRef, V: ValueRef, Name: *const c_char) -> ValueRef; + pub fn LLVMBuildNot(B: BuilderRef, V: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMRustSetHasUnsafeAlgebra(Instr: ValueRef); - /* Memory */ - pub fn LLVMBuildAlloca(B: BuilderRef, Ty: TypeRef, Name: *const c_char) - -> ValueRef; + // Memory + pub fn LLVMBuildAlloca(B: BuilderRef, Ty: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildFree(B: BuilderRef, PointerVal: ValueRef) -> ValueRef; - pub fn LLVMBuildLoad(B: BuilderRef, - PointerVal: ValueRef, - Name: *const c_char) - -> ValueRef; + pub fn LLVMBuildLoad(B: BuilderRef, PointerVal: ValueRef, Name: *const c_char) -> ValueRef; - pub fn LLVMBuildStore(B: BuilderRef, Val: ValueRef, Ptr: ValueRef) - -> ValueRef; + pub fn LLVMBuildStore(B: BuilderRef, Val: ValueRef, Ptr: ValueRef) -> ValueRef; pub fn LLVMBuildGEP(B: BuilderRef, Pointer: ValueRef, @@ -1272,7 +1150,7 @@ extern { Name: *const c_char) -> ValueRef; - /* Casts */ + // Casts pub fn LLVMBuildTrunc(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, @@ -1352,7 +1230,8 @@ extern { Op: Opcode, Val: 
ValueRef, DestTy: TypeRef, - Name: *const c_char) -> ValueRef; + Name: *const c_char) + -> ValueRef; pub fn LLVMBuildPointerCast(B: BuilderRef, Val: ValueRef, DestTy: TypeRef, @@ -1369,7 +1248,7 @@ extern { Name: *const c_char) -> ValueRef; - /* Comparisons */ + // Comparisons pub fn LLVMBuildICmp(B: BuilderRef, Op: c_uint, LHS: ValueRef, @@ -1383,9 +1262,8 @@ extern { Name: *const c_char) -> ValueRef; - /* Miscellaneous instructions */ - pub fn LLVMBuildPhi(B: BuilderRef, Ty: TypeRef, Name: *const c_char) - -> ValueRef; + // Miscellaneous instructions + pub fn LLVMBuildPhi(B: BuilderRef, Ty: TypeRef, Name: *const c_char) -> ValueRef; pub fn LLVMRustBuildCall(B: BuilderRef, Fn: ValueRef, Args: *const ValueRef, @@ -1433,17 +1311,15 @@ extern { Name: *const c_char) -> ValueRef; - pub fn LLVMBuildIsNull(B: BuilderRef, Val: ValueRef, Name: *const c_char) - -> ValueRef; - pub fn LLVMBuildIsNotNull(B: BuilderRef, Val: ValueRef, Name: *const c_char) - -> ValueRef; + pub fn LLVMBuildIsNull(B: BuilderRef, Val: ValueRef, Name: *const c_char) -> ValueRef; + pub fn LLVMBuildIsNotNull(B: BuilderRef, Val: ValueRef, Name: *const c_char) -> ValueRef; pub fn LLVMBuildPtrDiff(B: BuilderRef, LHS: ValueRef, RHS: ValueRef, Name: *const c_char) -> ValueRef; - /* Atomic Operations */ + // Atomic Operations pub fn LLVMRustBuildAtomicLoad(B: BuilderRef, PointerVal: ValueRef, Name: *const c_char, @@ -1480,7 +1356,7 @@ extern { Scope: SynchronizationScope); - /* Selected entries from the downcasts. */ + // Selected entries from the downcasts. pub fn LLVMIsATerminatorInst(Inst: ValueRef) -> ValueRef; pub fn LLVMIsAStoreInst(Inst: ValueRef) -> ValueRef; @@ -1490,22 +1366,18 @@ extern { /// Creates target data from a target layout string. pub fn LLVMCreateTargetData(StringRep: *const c_char) -> TargetDataRef; /// Number of bytes clobbered when doing a Store to *T. - pub fn LLVMStoreSizeOfType(TD: TargetDataRef, Ty: TypeRef) - -> c_ulonglong; + pub fn LLVMStoreSizeOfType(TD: TargetDataRef, Ty: TypeRef) -> c_ulonglong; /// Number of bytes clobbered when doing a Store to *T. - pub fn LLVMSizeOfTypeInBits(TD: TargetDataRef, Ty: TypeRef) - -> c_ulonglong; + pub fn LLVMSizeOfTypeInBits(TD: TargetDataRef, Ty: TypeRef) -> c_ulonglong; /// Distance between successive elements in an array of T. Includes ABI padding. pub fn LLVMABISizeOfType(TD: TargetDataRef, Ty: TypeRef) -> c_ulonglong; /// Returns the preferred alignment of a type. - pub fn LLVMPreferredAlignmentOfType(TD: TargetDataRef, Ty: TypeRef) - -> c_uint; + pub fn LLVMPreferredAlignmentOfType(TD: TargetDataRef, Ty: TypeRef) -> c_uint; /// Returns the minimum alignment of a type. - pub fn LLVMABIAlignmentOfType(TD: TargetDataRef, Ty: TypeRef) - -> c_uint; + pub fn LLVMABIAlignmentOfType(TD: TargetDataRef, Ty: TypeRef) -> c_uint; /// Computes the byte offset of the indexed struct element for a /// target. @@ -1515,8 +1387,7 @@ extern { -> c_ulonglong; /// Returns the minimum alignment of a type when part of a call frame. - pub fn LLVMCallFrameAlignmentOfType(TD: TargetDataRef, Ty: TypeRef) - -> c_uint; + pub fn LLVMCallFrameAlignmentOfType(TD: TargetDataRef, Ty: TypeRef) -> c_uint; /// Disposes target data. pub fn LLVMDisposeTargetData(TD: TargetDataRef); @@ -1525,8 +1396,7 @@ extern { pub fn LLVMCreatePassManager() -> PassManagerRef; /// Creates a function-by-function pass manager - pub fn LLVMCreateFunctionPassManagerForModule(M: ModuleRef) - -> PassManagerRef; + pub fn LLVMCreateFunctionPassManagerForModule(M: ModuleRef) -> PassManagerRef; /// Disposes a pass manager. 
pub fn LLVMDisposePassManager(PM: PassManagerRef); @@ -1535,8 +1405,7 @@ extern { pub fn LLVMRunPassManager(PM: PassManagerRef, M: ModuleRef) -> Bool; /// Runs the function passes on the provided function. - pub fn LLVMRunFunctionPassManager(FPM: PassManagerRef, F: ValueRef) - -> Bool; + pub fn LLVMRunFunctionPassManager(FPM: PassManagerRef, F: ValueRef) -> Bool; /// Initializes all the function passes scheduled in the manager pub fn LLVMInitializeFunctionPassManager(FPM: PassManagerRef) -> Bool; @@ -1589,38 +1458,28 @@ extern { pub fn LLVMPassManagerBuilderDispose(PMB: PassManagerBuilderRef); pub fn LLVMPassManagerBuilderSetOptLevel(PMB: PassManagerBuilderRef, OptimizationLevel: c_uint); - pub fn LLVMPassManagerBuilderSetSizeLevel(PMB: PassManagerBuilderRef, - Value: Bool); - pub fn LLVMPassManagerBuilderSetDisableUnitAtATime( - PMB: PassManagerBuilderRef, - Value: Bool); - pub fn LLVMPassManagerBuilderSetDisableUnrollLoops( - PMB: PassManagerBuilderRef, - Value: Bool); - pub fn LLVMPassManagerBuilderSetDisableSimplifyLibCalls( - PMB: PassManagerBuilderRef, - Value: Bool); - pub fn LLVMPassManagerBuilderUseInlinerWithThreshold( - PMB: PassManagerBuilderRef, - threshold: c_uint); - pub fn LLVMPassManagerBuilderPopulateModulePassManager( - PMB: PassManagerBuilderRef, - PM: PassManagerRef); - - pub fn LLVMPassManagerBuilderPopulateFunctionPassManager( - PMB: PassManagerBuilderRef, - PM: PassManagerRef); - pub fn LLVMPassManagerBuilderPopulateLTOPassManager( - PMB: PassManagerBuilderRef, - PM: PassManagerRef, - Internalize: Bool, - RunInliner: Bool); + pub fn LLVMPassManagerBuilderSetSizeLevel(PMB: PassManagerBuilderRef, Value: Bool); + pub fn LLVMPassManagerBuilderSetDisableUnitAtATime(PMB: PassManagerBuilderRef, Value: Bool); + pub fn LLVMPassManagerBuilderSetDisableUnrollLoops(PMB: PassManagerBuilderRef, Value: Bool); + pub fn LLVMPassManagerBuilderSetDisableSimplifyLibCalls(PMB: PassManagerBuilderRef, + Value: Bool); + pub fn LLVMPassManagerBuilderUseInlinerWithThreshold(PMB: PassManagerBuilderRef, + threshold: c_uint); + pub fn LLVMPassManagerBuilderPopulateModulePassManager(PMB: PassManagerBuilderRef, + PM: PassManagerRef); + + pub fn LLVMPassManagerBuilderPopulateFunctionPassManager(PMB: PassManagerBuilderRef, + PM: PassManagerRef); + pub fn LLVMPassManagerBuilderPopulateLTOPassManager(PMB: PassManagerBuilderRef, + PM: PassManagerRef, + Internalize: Bool, + RunInliner: Bool); /// Destroys a memory buffer. pub fn LLVMDisposeMemoryBuffer(MemBuf: MemoryBufferRef); - /* Stuff that's in rustllvm/ because it's not upstream yet. */ + // Stuff that's in rustllvm/ because it's not upstream yet. /// Opens an object file. pub fn LLVMCreateObjectFile(MemBuf: MemoryBufferRef) -> ObjectFileRef; @@ -1633,9 +1492,7 @@ extern { pub fn LLVMDisposeSectionIterator(SI: SectionIteratorRef); /// Returns true if the section iterator is at the end of the section /// list: - pub fn LLVMIsSectionIteratorAtEnd(ObjFile: ObjectFileRef, - SI: SectionIteratorRef) - -> Bool; + pub fn LLVMIsSectionIteratorAtEnd(ObjFile: ObjectFileRef, SI: SectionIteratorRef) -> Bool; /// Moves the section iterator to point to the next section. pub fn LLVMMoveToNextSection(SI: SectionIteratorRef); /// Returns the current section size. @@ -1645,8 +1502,7 @@ extern { /// Reads the given file and returns it as a memory buffer. Use /// LLVMDisposeMemoryBuffer() to get rid of it. 
- pub fn LLVMRustCreateMemoryBufferWithContentsOfFile(Path: *const c_char) - -> MemoryBufferRef; + pub fn LLVMRustCreateMemoryBufferWithContentsOfFile(Path: *const c_char) -> MemoryBufferRef; /// Borrows the contents of the memory buffer (doesn't copy it) pub fn LLVMCreateMemoryBufferWithMemoryRange(InputData: *const c_char, InputDataLength: size_t, @@ -1695,9 +1551,7 @@ extern { pub fn LLVMRustVersionMajor() -> u32; pub fn LLVMRustVersionMinor() -> u32; - pub fn LLVMRustAddModuleFlag(M: ModuleRef, - name: *const c_char, - value: u32); + pub fn LLVMRustAddModuleFlag(M: ModuleRef, name: *const c_char, value: u32); pub fn LLVMRustDIBuilderCreate(M: ModuleRef) -> DIBuilderRef; @@ -1751,11 +1605,11 @@ extern { -> DIBasicType; pub fn LLVMRustDIBuilderCreatePointerType(Builder: DIBuilderRef, - PointeeTy: DIType, - SizeInBits: u64, - AlignInBits: u64, - Name: *const c_char) - -> DIDerivedType; + PointeeTy: DIType, + SizeInBits: u64, + AlignInBits: u64, + Name: *const c_char) + -> DIDerivedType; pub fn LLVMRustDIBuilderCreateStructType(Builder: DIBuilderRef, Scope: DIDescriptor, @@ -1817,8 +1671,6 @@ extern { Ty: DIType, AlwaysPreserve: bool, Flags: c_uint, - AddrOps: *const i64, - AddrOpsCount: c_uint, ArgNo: c_uint) -> DIVariable; @@ -1855,15 +1707,6 @@ extern { InsertAtEnd: BasicBlockRef) -> ValueRef; - pub fn LLVMRustDIBuilderInsertDeclareBefore(Builder: DIBuilderRef, - Val: ValueRef, - VarInfo: DIVariable, - AddrOps: *const i64, - AddrOpsCount: c_uint, - DL: ValueRef, - InsertBefore: ValueRef) - -> ValueRef; - pub fn LLVMRustDIBuilderCreateEnumerator(Builder: DIBuilderRef, Name: *const c_char, Val: u64) @@ -1937,8 +1780,7 @@ extern { pub fn LLVMRustFindAndCreatePass(Pass: *const c_char) -> PassRef; pub fn LLVMRustAddPass(PM: PassManagerRef, Pass: PassRef); - pub fn LLVMRustHasFeature(T: TargetMachineRef, - s: *const c_char) -> bool; + pub fn LLVMRustHasFeature(T: TargetMachineRef, s: *const c_char) -> bool; pub fn LLVMRustPrintTargetCPUs(T: TargetMachineRef); pub fn LLVMRustPrintTargetFeatures(T: TargetMachineRef); @@ -1952,11 +1794,10 @@ extern { UseSoftFP: bool, PositionIndependentExecutable: bool, FunctionSections: bool, - DataSections: bool) -> TargetMachineRef; + DataSections: bool) + -> TargetMachineRef; pub fn LLVMRustDisposeTargetMachine(T: TargetMachineRef); - pub fn LLVMRustAddAnalysisPasses(T: TargetMachineRef, - PM: PassManagerRef, - M: ModuleRef); + pub fn LLVMRustAddAnalysisPasses(T: TargetMachineRef, PM: PassManagerRef, M: ModuleRef); pub fn LLVMRustAddBuilderLibraryInfo(PMB: PassManagerBuilderRef, M: ModuleRef, DisableSimplifyLibCalls: bool); @@ -1965,7 +1806,8 @@ extern { MergeFunctions: bool, SLPVectorize: bool, LoopVectorize: bool); - pub fn LLVMRustAddLibraryInfo(PM: PassManagerRef, M: ModuleRef, + pub fn LLVMRustAddLibraryInfo(PM: PassManagerRef, + M: ModuleRef, DisableSimplifyLibCalls: bool); pub fn LLVMRustRunFunctionPassManager(PM: PassManagerRef, M: ModuleRef); pub fn LLVMRustWriteOutputFile(T: TargetMachineRef, @@ -1974,35 +1816,25 @@ extern { Output: *const c_char, FileType: FileType) -> LLVMRustResult; - pub fn LLVMRustPrintModule(PM: PassManagerRef, - M: ModuleRef, - Output: *const c_char); + pub fn LLVMRustPrintModule(PM: PassManagerRef, M: ModuleRef, Output: *const c_char); pub fn LLVMRustSetLLVMOptions(Argc: c_int, Argv: *const *const c_char); pub fn LLVMRustPrintPasses(); pub fn LLVMRustSetNormalizedTarget(M: ModuleRef, triple: *const c_char); - pub fn LLVMRustAddAlwaysInlinePass(P: PassManagerBuilderRef, - AddLifetimes: bool); - pub fn 
LLVMRustLinkInExternalBitcode(M: ModuleRef, - bc: *const c_char, - len: size_t) -> bool; - pub fn LLVMRustRunRestrictionPass(M: ModuleRef, - syms: *const *const c_char, - len: size_t); + pub fn LLVMRustAddAlwaysInlinePass(P: PassManagerBuilderRef, AddLifetimes: bool); + pub fn LLVMRustLinkInExternalBitcode(M: ModuleRef, bc: *const c_char, len: size_t) -> bool; + pub fn LLVMRustRunRestrictionPass(M: ModuleRef, syms: *const *const c_char, len: size_t); pub fn LLVMRustMarkAllFunctionsNounwind(M: ModuleRef); pub fn LLVMRustOpenArchive(path: *const c_char) -> ArchiveRef; pub fn LLVMRustArchiveIteratorNew(AR: ArchiveRef) -> ArchiveIteratorRef; pub fn LLVMRustArchiveIteratorNext(AIR: ArchiveIteratorRef) -> ArchiveChildRef; - pub fn LLVMRustArchiveChildName(ACR: ArchiveChildRef, - size: *mut size_t) -> *const c_char; - pub fn LLVMRustArchiveChildData(ACR: ArchiveChildRef, - size: *mut size_t) -> *const c_char; + pub fn LLVMRustArchiveChildName(ACR: ArchiveChildRef, size: *mut size_t) -> *const c_char; + pub fn LLVMRustArchiveChildData(ACR: ArchiveChildRef, size: *mut size_t) -> *const c_char; pub fn LLVMRustArchiveChildFree(ACR: ArchiveChildRef); pub fn LLVMRustArchiveIteratorFree(AIR: ArchiveIteratorRef); pub fn LLVMRustDestroyArchive(AR: ArchiveRef); - pub fn LLVMRustGetSectionName(SI: SectionIteratorRef, - data: *mut *const c_char) -> size_t; + pub fn LLVMRustGetSectionName(SI: SectionIteratorRef, data: *mut *const c_char) -> size_t; pub fn LLVMRustWriteTwineToString(T: TwineRef, s: RustStringRef); @@ -2020,14 +1852,11 @@ extern { message_out: *mut TwineRef, instruction_out: *mut ValueRef); - pub fn LLVMRustWriteDiagnosticInfoToString(DI: DiagnosticInfoRef, - s: RustStringRef); + pub fn LLVMRustWriteDiagnosticInfoToString(DI: DiagnosticInfoRef, s: RustStringRef); pub fn LLVMGetDiagInfoSeverity(DI: DiagnosticInfoRef) -> DiagnosticSeverity; pub fn LLVMRustGetDiagInfoKind(DI: DiagnosticInfoRef) -> DiagnosticKind; - pub fn LLVMRustWriteDebugLocToString(C: ContextRef, - DL: DebugLocRef, - s: RustStringRef); + pub fn LLVMRustWriteDebugLocToString(C: ContextRef, DL: DebugLocRef, s: RustStringRef); pub fn LLVMRustSetInlineAsmDiagnosticHandler(C: ContextRef, H: InlineAsmDiagHandler, @@ -2039,15 +1868,15 @@ extern { NumMembers: size_t, Members: *const RustArchiveMemberRef, WriteSymbtab: bool, - Kind: ArchiveKind) -> - LLVMRustResult; + Kind: ArchiveKind) + -> LLVMRustResult; pub fn LLVMRustArchiveMemberNew(Filename: *const c_char, Name: *const c_char, - Child: ArchiveChildRef) -> RustArchiveMemberRef; + Child: ArchiveChildRef) + -> RustArchiveMemberRef; pub fn LLVMRustArchiveMemberFree(Member: RustArchiveMemberRef); - pub fn LLVMRustSetDataLayoutFromTargetMachine(M: ModuleRef, - TM: TargetMachineRef); + pub fn LLVMRustSetDataLayoutFromTargetMachine(M: ModuleRef, TM: TargetMachineRef); pub fn LLVMRustGetModuleDataLayout(M: ModuleRef) -> TargetDataRef; pub fn LLVMRustBuildOperandBundleDef(Name: *const c_char, @@ -2068,4 +1897,4 @@ extern { // during llvm-config? 
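// A rough sketch of how the pass-manager declarations reformatted above are
// driven in practice. Illustrative only: it assumes `LLVMPassManagerBuilderCreate`
// and `LLVMCreatePassManager` are declared elsewhere in this extern block (they
// are not part of this hunk) and that the caller already owns a valid `ModuleRef`.
unsafe fn run_module_passes_sketch(llmod: ModuleRef) {
    // Configure a builder roughly equivalent to -O2.
    let pmb = LLVMPassManagerBuilderCreate();
    LLVMPassManagerBuilderSetOptLevel(pmb, 2);

    // Populate a module-level pass manager from the builder and run it once.
    let mpm = LLVMCreatePassManager();
    LLVMPassManagerBuilderPopulateModulePassManager(pmb, mpm);
    LLVMRunPassManager(mpm, llmod);

    // Both objects must be disposed explicitly; nothing here is RAII.
    LLVMDisposePassManager(mpm);
    LLVMPassManagerBuilderDispose(pmb);
}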
#[cfg(windows)] #[link(name = "ole32")] -extern {} +extern "C" {} diff --git a/src/librustc_llvm/lib.rs b/src/librustc_llvm/lib.rs index eb45d3d25c5c0..da09bfa66d28f 100644 --- a/src/librustc_llvm/lib.rs +++ b/src/librustc_llvm/lib.rs @@ -31,7 +31,9 @@ #![feature(concat_idents)] extern crate libc; -#[macro_use] #[no_link] extern crate rustc_bitflags; +#[macro_use] +#[no_link] +extern crate rustc_bitflags; pub use self::IntPredicate::*; pub use self::RealPredicate::*; @@ -68,7 +70,7 @@ impl LLVMRustResult { #[derive(Copy, Clone, Default, Debug)] pub struct Attributes { regular: Attribute, - dereferenceable_bytes: u64 + dereferenceable_bytes: u64, } impl Attributes { @@ -96,10 +98,7 @@ impl Attributes { unsafe { self.regular.apply_llfn(idx, llfn); if self.dereferenceable_bytes != 0 { - LLVMRustAddDereferenceableAttr( - llfn, - idx.as_uint(), - self.dereferenceable_bytes); + LLVMRustAddDereferenceableAttr(llfn, idx.as_uint(), self.dereferenceable_bytes); } } } @@ -108,27 +107,23 @@ impl Attributes { unsafe { self.regular.apply_callsite(idx, callsite); if self.dereferenceable_bytes != 0 { - LLVMRustAddDereferenceableCallSiteAttr( - callsite, - idx.as_uint(), - self.dereferenceable_bytes); + LLVMRustAddDereferenceableCallSiteAttr(callsite, + idx.as_uint(), + self.dereferenceable_bytes); } } } } -pub fn AddFunctionAttrStringValue( - llfn: ValueRef, - idx: AttributePlace, - attr: &'static str, - value: &'static str -) { +pub fn AddFunctionAttrStringValue(llfn: ValueRef, + idx: AttributePlace, + attr: &'static str, + value: &'static str) { unsafe { - LLVMRustAddFunctionAttrStringValue( - llfn, - idx.as_uint(), - attr.as_ptr() as *const _, - value.as_ptr() as *const _) + LLVMRustAddFunctionAttrStringValue(llfn, + idx.as_uint(), + attr.as_ptr() as *const _, + value.as_ptr() as *const _) } } @@ -233,44 +228,30 @@ pub fn set_thread_local(global: ValueRef, is_thread_local: bool) { impl Attribute { pub fn apply_llfn(&self, idx: AttributePlace, llfn: ValueRef) { - unsafe { - LLVMRustAddFunctionAttribute( - llfn, idx.as_uint(), self.bits()) - } + unsafe { LLVMRustAddFunctionAttribute(llfn, idx.as_uint(), self.bits()) } } pub fn apply_callsite(&self, idx: AttributePlace, callsite: ValueRef) { - unsafe { - LLVMRustAddCallSiteAttribute( - callsite, idx.as_uint(), self.bits()) - } + unsafe { LLVMRustAddCallSiteAttribute(callsite, idx.as_uint(), self.bits()) } } pub fn unapply_llfn(&self, idx: AttributePlace, llfn: ValueRef) { - unsafe { - LLVMRustRemoveFunctionAttributes( - llfn, idx.as_uint(), self.bits()) - } + unsafe { LLVMRustRemoveFunctionAttributes(llfn, idx.as_uint(), self.bits()) } } - pub fn toggle_llfn(&self, - idx: AttributePlace, - llfn: ValueRef, - set: bool) - { + pub fn toggle_llfn(&self, idx: AttributePlace, llfn: ValueRef, set: bool) { if set { self.apply_llfn(idx, llfn); } else { self.unapply_llfn(idx, llfn); } } - } -/* Memory-managed interface to target data. */ +// Memory-managed interface to target data. pub struct TargetData { - pub lltd: TargetDataRef + pub lltd: TargetDataRef, } impl Drop for TargetData { @@ -283,12 +264,10 @@ impl Drop for TargetData { pub fn mk_target_data(string_rep: &str) -> TargetData { let string_rep = CString::new(string_rep).unwrap(); - TargetData { - lltd: unsafe { LLVMCreateTargetData(string_rep.as_ptr()) } - } + TargetData { lltd: unsafe { LLVMCreateTargetData(string_rep.as_ptr()) } } } -/* Memory-managed interface to object files. */ +// Memory-managed interface to object files. 
pub struct ObjectFile { pub llof: ObjectFileRef, @@ -301,12 +280,10 @@ impl ObjectFile { let llof = LLVMCreateObjectFile(llmb); if llof as isize == 0 { // LLVMCreateObjectFile took ownership of llmb - return None + return None; } - Some(ObjectFile { - llof: llof, - }) + Some(ObjectFile { llof: llof }) } } } @@ -319,10 +296,10 @@ impl Drop for ObjectFile { } } -/* Memory-managed interface to section iterators. */ +// Memory-managed interface to section iterators. pub struct SectionIter { - pub llsi: SectionIteratorRef + pub llsi: SectionIteratorRef, } impl Drop for SectionIter { @@ -334,11 +311,7 @@ impl Drop for SectionIter { } pub fn mk_section_iter(llof: ObjectFileRef) -> SectionIter { - unsafe { - SectionIter { - llsi: LLVMGetSections(llof) - } - } + unsafe { SectionIter { llsi: LLVMGetSections(llof) } } } /// Safe wrapper around `LLVMGetParam`, because segfaults are no fun. @@ -361,15 +334,16 @@ pub fn get_params(llfn: ValueRef) -> Vec { } } -pub fn build_string(f: F) -> Option where F: FnOnce(RustStringRef){ +pub fn build_string(f: F) -> Option + where F: FnOnce(RustStringRef) +{ let mut buf = RefCell::new(Vec::new()); f(&mut buf as RustStringRepr as RustStringRef); String::from_utf8(buf.into_inner()).ok() } pub unsafe fn twine_to_string(tr: TwineRef) -> String { - build_string(|s| LLVMRustWriteTwineToString(tr, s)) - .expect("got a non-UTF8 Twine from LLVM") + build_string(|s| LLVMRustWriteTwineToString(tr, s)).expect("got a non-UTF8 Twine from LLVM") } pub unsafe fn debug_loc_to_string(c: ContextRef, tr: DebugLocRef) -> String { @@ -434,6 +408,10 @@ pub fn initialize_available_targets() { LLVMInitializeSystemZTargetMC, LLVMInitializeSystemZAsmPrinter, LLVMInitializeSystemZAsmParser); + init_target!(llvm_component = "jsbackend", + LLVMInitializeJSBackendTargetInfo, + LLVMInitializeJSBackendTarget, + LLVMInitializeJSBackendTargetMC); } pub fn last_error() -> Option { @@ -458,9 +436,7 @@ impl OperandBundleDef { pub fn new(name: &str, vals: &[ValueRef]) -> OperandBundleDef { let name = CString::new(name).unwrap(); let def = unsafe { - LLVMRustBuildOperandBundleDef(name.as_ptr(), - vals.as_ptr(), - vals.len() as c_uint) + LLVMRustBuildOperandBundleDef(name.as_ptr(), vals.as_ptr(), vals.len() as c_uint) }; OperandBundleDef { inner: def } } diff --git a/src/librustc_macro/lib.rs b/src/librustc_macro/lib.rs deleted file mode 100644 index c2a2cc2ecd64d..0000000000000 --- a/src/librustc_macro/lib.rs +++ /dev/null @@ -1,169 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! A support library for macro authors when defining new macros. -//! -//! This library, provided by the standard distribution, provides the types -//! consumed in the interfaces of procedurally defined macro definitions. -//! Currently the primary use of this crate is to provide the ability to define -//! new custom derive modes through `#[rustc_macro_derive]`. -//! -//! Added recently as part of [RFC 1681] this crate is currently *unstable* and -//! requires the `#![feature(rustc_macro_lib)]` directive to use. Eventually, -//! though, it is intended for this crate to become stable to use (perhaps under -//! a different name). -//! -//! 
[RFC 1681]: https://github.com/rust-lang/rfcs/blob/master/text/1681-macros-1.1.md -//! -//! Note that this crate is intentionally very bare-bones currently. The main -//! type, `TokenStream`, only supports `fmt::Display` and `FromStr` -//! implementations, indicating that it can only go to and come from a string. -//! This functionality is intended to be expanded over time as more surface -//! area for macro authors is stabilized. - -#![crate_name = "rustc_macro"] -#![unstable(feature = "rustc_macro_lib", issue = "27812")] -#![crate_type = "rlib"] -#![crate_type = "dylib"] -#![cfg_attr(not(stage0), deny(warnings))] -#![deny(missing_docs)] - -#![feature(rustc_private)] -#![feature(staged_api)] -#![feature(lang_items)] - -extern crate syntax; - -use std::fmt; -use std::str::FromStr; - -use syntax::ast; -use syntax::parse; -use syntax::ptr::P; - -/// The main type provided by this crate, representing an abstract stream of -/// tokens. -/// -/// This is both the input and output of `#[rustc_macro_derive]` definitions. -/// Currently it's required to be a list of valid Rust items, but this -/// restriction may be lifted in the future. -/// -/// The API of this type is intentionally bare-bones, but it'll be expanded over -/// time! -pub struct TokenStream { - inner: Vec>, -} - -/// Error returned from `TokenStream::from_str`. -#[derive(Debug)] -pub struct LexError { - _inner: (), -} - -/// Permanently unstable internal implementation details of this crate. This -/// should not be used. -/// -/// These methods are used by the rest of the compiler to generate instances of -/// `TokenStream` to hand to macro definitions, as well as consume the output. -/// -/// Note that this module is also intentionally separate from the rest of the -/// crate. This allows the `#[unstable]` directive below to naturally apply to -/// all of the contents. -#[unstable(feature = "rustc_macro_internals", issue = "27812")] -#[doc(hidden)] -pub mod __internal { - use std::cell::Cell; - - use syntax::ast; - use syntax::ptr::P; - use syntax::parse::ParseSess; - use super::TokenStream; - - pub fn new_token_stream(item: P) -> TokenStream { - TokenStream { inner: vec![item] } - } - - pub fn token_stream_items(stream: TokenStream) -> Vec> { - stream.inner - } - - pub trait Registry { - fn register_custom_derive(&mut self, - trait_name: &str, - expand: fn(TokenStream) -> TokenStream); - } - - // Emulate scoped_thread_local!() here essentially - thread_local! 
{ - static CURRENT_SESS: Cell<*const ParseSess> = Cell::new(0 as *const _); - } - - pub fn set_parse_sess(sess: &ParseSess, f: F) -> R - where F: FnOnce() -> R - { - struct Reset { prev: *const ParseSess } - - impl Drop for Reset { - fn drop(&mut self) { - CURRENT_SESS.with(|p| p.set(self.prev)); - } - } - - CURRENT_SESS.with(|p| { - let _reset = Reset { prev: p.get() }; - p.set(sess); - f() - }) - } - - pub fn with_parse_sess(f: F) -> R - where F: FnOnce(&ParseSess) -> R - { - let p = CURRENT_SESS.with(|p| p.get()); - assert!(!p.is_null()); - f(unsafe { &*p }) - } -} - -impl FromStr for TokenStream { - type Err = LexError; - - fn from_str(src: &str) -> Result { - __internal::with_parse_sess(|sess| { - let src = src.to_string(); - let cfg = Vec::new(); - let name = "rustc-macro source code".to_string(); - let mut parser = parse::new_parser_from_source_str(sess, cfg, name, - src); - let mut ret = TokenStream { inner: Vec::new() }; - loop { - match parser.parse_item() { - Ok(Some(item)) => ret.inner.push(item), - Ok(None) => return Ok(ret), - Err(mut err) => { - err.cancel(); - return Err(LexError { _inner: () }) - } - } - } - }) - } -} - -impl fmt::Display for TokenStream { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - for item in self.inner.iter() { - let item = syntax::print::pprust::item_to_string(item); - try!(f.write_str(&item)); - try!(f.write_str("\n")); - } - Ok(()) - } -} diff --git a/src/librustc_metadata/Cargo.toml b/src/librustc_metadata/Cargo.toml index d70510896b96e..6f7f03ca216b9 100644 --- a/src/librustc_metadata/Cargo.toml +++ b/src/librustc_metadata/Cargo.toml @@ -11,15 +11,13 @@ crate-type = ["dylib"] [dependencies] flate = { path = "../libflate" } log = { path = "../liblog" } -rbml = { path = "../librbml" } +proc_macro = { path = "../libproc_macro" } rustc = { path = "../librustc" } rustc_back = { path = "../librustc_back" } -rustc_bitflags = { path = "../librustc_bitflags" } rustc_const_math = { path = "../librustc_const_math" } rustc_data_structures = { path = "../librustc_data_structures" } rustc_errors = { path = "../librustc_errors" } rustc_llvm = { path = "../librustc_llvm" } -rustc_macro = { path = "../librustc_macro" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } syntax_ext = { path = "../libsyntax_ext" } diff --git a/src/librustc_metadata/astencode.rs b/src/librustc_metadata/astencode.rs index fb7e1c0f7895e..f001f85131978 100644 --- a/src/librustc_metadata/astencode.rs +++ b/src/librustc_metadata/astencode.rs @@ -8,1369 +8,152 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
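// The rewritten astencode below drops the tagged RBML documents in favour of
// plain byte offsets into the metadata blob. A simplified model of the Lazy /
// LazySeq handles it uses (the real definitions live in schema.rs and carry
// additional machinery) might look like this; it is a sketch, not the actual
// types from this patch.
use std::marker::PhantomData;

/// A value of type `T` already written into the metadata blob; only its byte
/// position is kept, and the value is decoded on demand.
struct Lazy<T> {
    position: usize,
    _marker: PhantomData<T>,
}

impl<T> Lazy<T> {
    fn with_position(position: usize) -> Lazy<T> {
        Lazy { position: position, _marker: PhantomData }
    }
}

/// A sequence of `T` values written back to back; the recorded length tells
/// the decoder how many entries to read starting at `position`.
struct LazySeq<T> {
    position: usize,
    len: usize,
    _marker: PhantomData<T>,
}

impl<T> LazySeq<T> {
    fn with_position_and_length(position: usize, len: usize) -> LazySeq<T> {
        LazySeq { position: position, len: len, _marker: PhantomData }
    }
}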
-#![allow(non_camel_case_types)] -// FIXME: remove this after snapshot, and Results are handled -#![allow(unused_must_use)] - use rustc::hir::map as ast_map; -use rustc::session::Session; -use rustc::hir; -use rustc::hir::fold; -use rustc::hir::fold::Folder; use rustc::hir::intravisit::{Visitor, IdRangeComputingVisitor, IdRange}; -use common as c; -use cstore; -use decoder; -use encoder as e; -use tydecode; -use tyencode; +use cstore::CrateMetadata; +use encoder::EncodeContext; +use schema::*; -use middle::cstore::{InlinedItem, InlinedItemRef}; -use rustc::ty::adjustment; -use rustc::ty::cast; -use middle::const_qualif::ConstQualif; +use rustc::middle::cstore::{InlinedItem, InlinedItemRef}; +use rustc::middle::const_qualif::ConstQualif; use rustc::hir::def::{self, Def}; use rustc::hir::def_id::DefId; -use middle::region; -use rustc::ty::subst::Substs; -use rustc::ty::{self, Ty, TyCtxt}; +use rustc::ty::{self, TyCtxt, Ty}; use syntax::ast; -use syntax::ptr::P; -use syntax_pos; - -use std::cell::Cell; -use std::io::SeekFrom; -use std::io::prelude::*; -use std::fmt::Debug; - -use rbml::reader; -use rbml::writer::Encoder; -use rbml; -use rustc_serialize as serialize; -use rustc_serialize::{Decodable, Decoder, DecoderHelpers}; -use rustc_serialize::{Encodable, EncoderHelpers}; - -#[cfg(test)] use std::io::Cursor; -#[cfg(test)] use syntax::parse; -#[cfg(test)] use rustc::hir::print as pprust; -#[cfg(test)] use rustc::hir::lowering::{LoweringContext, DummyResolver}; - -struct DecodeContext<'a, 'b, 'tcx: 'a> { - tcx: TyCtxt<'a, 'tcx, 'tcx>, - cdata: &'b cstore::CrateMetadata, - from_id_range: IdRange, - to_id_range: IdRange, - // Cache the last used filemap for translating spans as an optimization. - last_filemap_index: Cell, -} - -trait tr { - fn tr(&self, dcx: &DecodeContext) -> Self; -} - -// ______________________________________________________________________ -// Top-level methods. - -pub fn encode_inlined_item(ecx: &e::EncodeContext, - rbml_w: &mut Encoder, - ii: InlinedItemRef) { - let id = match ii { - InlinedItemRef::Item(_, i) => i.id, - InlinedItemRef::TraitItem(_, ti) => ti.id, - InlinedItemRef::ImplItem(_, ii) => ii.id, - }; - debug!("> Encoding inlined item: {} ({:?})", - ecx.tcx.node_path_str(id), - rbml_w.writer.seek(SeekFrom::Current(0))); - - // Folding could be avoided with a smarter encoder. - let (ii, expected_id_range) = simplify_ast(ii); - let id_range = inlined_item_id_range(&ii); - assert_eq!(expected_id_range, id_range); - - rbml_w.start_tag(c::tag_ast as usize); - id_range.encode(rbml_w); - encode_ast(rbml_w, &ii); - encode_side_tables_for_ii(ecx, rbml_w, &ii); - rbml_w.end_tag(); - - debug!("< Encoded inlined fn: {} ({:?})", - ecx.tcx.node_path_str(id), - rbml_w.writer.seek(SeekFrom::Current(0))); -} - -impl<'a, 'b, 'c, 'tcx> ast_map::FoldOps for &'a DecodeContext<'b, 'c, 'tcx> { - fn new_id(&self, id: ast::NodeId) -> ast::NodeId { - if id == ast::DUMMY_NODE_ID { - // Used by ast_map to map the NodeInlinedParent. - self.tcx.sess.next_node_id() - } else { - self.tr_id(id) - } - } - fn new_def_id(&self, def_id: DefId) -> DefId { - self.tr_def_id(def_id) - } - fn new_span(&self, span: syntax_pos::Span) -> syntax_pos::Span { - self.tr_span(span) - } -} - -/// Decodes an item from its AST in the cdata's metadata and adds it to the -/// ast-map. 
-pub fn decode_inlined_item<'a, 'tcx>(cdata: &cstore::CrateMetadata, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - parent_def_path: ast_map::DefPath, - parent_did: DefId, - ast_doc: rbml::Doc, - orig_did: DefId) - -> &'tcx InlinedItem { - debug!("> Decoding inlined fn: {:?}", tcx.item_path_str(orig_did)); - let mut ast_dsr = reader::Decoder::new(ast_doc); - let from_id_range = Decodable::decode(&mut ast_dsr).unwrap(); - let to_id_range = reserve_id_range(&tcx.sess, from_id_range); - let dcx = &DecodeContext { - cdata: cdata, - tcx: tcx, - from_id_range: from_id_range, - to_id_range: to_id_range, - last_filemap_index: Cell::new(0) - }; - let ii = ast_map::map_decoded_item(&dcx.tcx.map, - parent_def_path, - parent_did, - decode_ast(ast_doc), - dcx); - let name = match *ii { - InlinedItem::Item(_, ref i) => i.name, - InlinedItem::TraitItem(_, ref ti) => ti.name, - InlinedItem::ImplItem(_, ref ii) => ii.name - }; - debug!("Fn named: {}", name); - debug!("< Decoded inlined fn: {}::{}", - tcx.item_path_str(parent_did), - name); - region::resolve_inlined_item(&tcx.sess, &tcx.region_maps, ii); - decode_side_tables(dcx, ast_doc); - copy_item_types(dcx, ii, orig_did); - if let InlinedItem::Item(_, ref i) = *ii { - debug!(">>> DECODED ITEM >>>\n{}\n<<< DECODED ITEM <<<", - ::rustc::hir::print::item_to_string(&i)); - } - - ii -} - -// ______________________________________________________________________ -// Enumerating the IDs which appear in an AST - -fn reserve_id_range(sess: &Session, - from_id_range: IdRange) -> IdRange { - // Handle the case of an empty range: - if from_id_range.empty() { return from_id_range; } - let cnt = from_id_range.max - from_id_range.min; - let to_id_min = sess.reserve_node_ids(cnt); - let to_id_max = to_id_min + cnt; - IdRange { min: to_id_min, max: to_id_max } -} - -impl<'a, 'b, 'tcx> DecodeContext<'a, 'b, 'tcx> { - /// Translates an internal id, meaning a node id that is known to refer to some part of the - /// item currently being inlined, such as a local variable or argument. All naked node-ids - /// that appear in types have this property, since if something might refer to an external item - /// we would use a def-id to allow for the possibility that the item resides in another crate. - pub fn tr_id(&self, id: ast::NodeId) -> ast::NodeId { - // from_id_range should be non-empty - assert!(!self.from_id_range.empty()); - // Make sure that translating the NodeId will actually yield a - // meaningful result - assert!(self.from_id_range.contains(id)); - - // Use wrapping arithmetic because otherwise it introduces control flow. - // Maybe we should just have the control flow? -- aatch - (id.wrapping_sub(self.from_id_range.min).wrapping_add(self.to_id_range.min)) - } - - /// Translates an EXTERNAL def-id, converting the crate number from the one used in the encoded - /// data to the current crate numbers.. By external, I mean that it be translated to a - /// reference to the item in its original crate, as opposed to being translated to a reference - /// to the inlined version of the item. This is typically, but not always, what you want, - /// because most def-ids refer to external things like types or other fns that may or may not - /// be inlined. Note that even when the inlined function is referencing itself recursively, we - /// would want `tr_def_id` for that reference--- conceptually the function calls the original, - /// non-inlined version, and trans deals with linking that recursive call to the inlined copy. 
- pub fn tr_def_id(&self, did: DefId) -> DefId { - decoder::translate_def_id(self.cdata, did) - } - - /// Translates a `Span` from an extern crate to the corresponding `Span` - /// within the local crate's codemap. - pub fn tr_span(&self, span: syntax_pos::Span) -> syntax_pos::Span { - decoder::translate_span(self.cdata, - self.tcx.sess.codemap(), - &self.last_filemap_index, - span) - } -} - -impl tr for DefId { - fn tr(&self, dcx: &DecodeContext) -> DefId { - dcx.tr_def_id(*self) - } -} - -impl tr for Option { - fn tr(&self, dcx: &DecodeContext) -> Option { - self.map(|d| dcx.tr_def_id(d)) - } -} - -impl tr for syntax_pos::Span { - fn tr(&self, dcx: &DecodeContext) -> syntax_pos::Span { - dcx.tr_span(*self) - } -} - -trait def_id_encoder_helpers { - fn emit_def_id(&mut self, did: DefId); -} - -impl def_id_encoder_helpers for S - where ::Error: Debug -{ - fn emit_def_id(&mut self, did: DefId) { - did.encode(self).unwrap() - } -} - -trait def_id_decoder_helpers { - fn read_def_id(&mut self, dcx: &DecodeContext) -> DefId; - fn read_def_id_nodcx(&mut self, - cdata: &cstore::CrateMetadata) -> DefId; -} - -impl def_id_decoder_helpers for D - where ::Error: Debug -{ - fn read_def_id(&mut self, dcx: &DecodeContext) -> DefId { - let did: DefId = Decodable::decode(self).unwrap(); - did.tr(dcx) - } - - fn read_def_id_nodcx(&mut self, - cdata: &cstore::CrateMetadata) - -> DefId { - let did: DefId = Decodable::decode(self).unwrap(); - decoder::translate_def_id(cdata, did) - } -} - -// ______________________________________________________________________ -// Encoding and decoding the AST itself -// -// When decoding, we have to renumber the AST so that the node ids that -// appear within are disjoint from the node ids in our existing ASTs. -// We also have to adjust the spans: for now we just insert a dummy span, -// but eventually we should add entries to the local codemap as required. - -fn encode_ast(rbml_w: &mut Encoder, item: &InlinedItem) { - rbml_w.start_tag(c::tag_tree as usize); - rbml_w.emit_opaque(|this| item.encode(this)); - rbml_w.end_tag(); -} - -struct NestedItemsDropper { - id_range: IdRange -} - -impl Folder for NestedItemsDropper { - - // The unit tests below run on HIR with NodeIds not properly assigned. That - // causes an integer overflow. So we just don't track the id_range when - // building the unit tests. - #[cfg(not(test))] - fn new_id(&mut self, id: ast::NodeId) -> ast::NodeId { - // Record the range of NodeIds we are visiting, so we can do a sanity - // check later - self.id_range.add(id); - id - } - - fn fold_block(&mut self, blk: P) -> P { - blk.and_then(|hir::Block {id, stmts, expr, rules, span, ..}| { - let stmts_sans_items = stmts.into_iter().filter_map(|stmt| { - let use_stmt = match stmt.node { - hir::StmtExpr(..) | hir::StmtSemi(..) => true, - hir::StmtDecl(ref decl, _) => { - match decl.node { - hir::DeclLocal(_) => true, - hir::DeclItem(_) => false, - } - } - }; - if use_stmt { - Some(stmt) - } else { - None - } - }).collect(); - let blk_sans_items = P(hir::Block { - stmts: stmts_sans_items, - expr: expr, - id: id, - rules: rules, - span: span, - }); - fold::noop_fold_block(blk_sans_items, self) - }) - } -} - -// Produces a simplified copy of the AST which does not include things -// that we do not need to or do not want to export. 
For example, we -// do not include any nested items: if these nested items are to be -// inlined, their AST will be exported separately (this only makes -// sense because, in Rust, nested items are independent except for -// their visibility). -// -// As it happens, trans relies on the fact that we do not export -// nested items, as otherwise it would get confused when translating -// inlined items. -fn simplify_ast(ii: InlinedItemRef) -> (InlinedItem, IdRange) { - let mut fld = NestedItemsDropper { - id_range: IdRange::max() - }; - - let ii = match ii { - // HACK we're not dropping items. - InlinedItemRef::Item(d, i) => { - InlinedItem::Item(d, P(fold::noop_fold_item(i.clone(), &mut fld))) - } - InlinedItemRef::TraitItem(d, ti) => { - InlinedItem::TraitItem(d, P(fold::noop_fold_trait_item(ti.clone(), &mut fld))) - } - InlinedItemRef::ImplItem(d, ii) => { - InlinedItem::ImplItem(d, P(fold::noop_fold_impl_item(ii.clone(), &mut fld))) - } - }; - - (ii, fld.id_range) -} - -fn decode_ast(item_doc: rbml::Doc) -> InlinedItem { - let chi_doc = item_doc.get(c::tag_tree as usize); - let mut rbml_r = reader::Decoder::new(chi_doc); - rbml_r.read_opaque(|decoder, _| Decodable::decode(decoder)).unwrap() -} - -// ______________________________________________________________________ -// Encoding and decoding of ast::def - -fn decode_def(dcx: &DecodeContext, dsr: &mut reader::Decoder) -> Def { - let def: Def = Decodable::decode(dsr).unwrap(); - def.tr(dcx) -} - -impl tr for Def { - fn tr(&self, dcx: &DecodeContext) -> Def { - match *self { - Def::Fn(did) => Def::Fn(did.tr(dcx)), - Def::Method(did) => Def::Method(did.tr(dcx)), - Def::SelfTy(opt_did, impl_id) => { - // Since the impl_id will never lie within the reserved range of - // imported NodeIds, it does not make sense to translate it. - // The result would not make any sense within the importing crate. - // We also don't allow for impl items to be inlined (just their - // members), so even if we had a DefId here, we wouldn't be able - // to do much with it. - // So, we set the id to DUMMY_NODE_ID. That way we make it - // explicit that this is no usable NodeId. 
- Def::SelfTy(opt_did.map(|did| did.tr(dcx)), - impl_id.map(|_| ast::DUMMY_NODE_ID)) - } - Def::Mod(did) => { Def::Mod(did.tr(dcx)) } - Def::ForeignMod(did) => { Def::ForeignMod(did.tr(dcx)) } - Def::Static(did, m) => { Def::Static(did.tr(dcx), m) } - Def::Const(did) => { Def::Const(did.tr(dcx)) } - Def::AssociatedConst(did) => Def::AssociatedConst(did.tr(dcx)), - Def::Local(_, nid) => { - let nid = dcx.tr_id(nid); - let did = dcx.tcx.map.local_def_id(nid); - Def::Local(did, nid) - } - Def::Variant(e_did, v_did) => Def::Variant(e_did.tr(dcx), v_did.tr(dcx)), - Def::Trait(did) => Def::Trait(did.tr(dcx)), - Def::Enum(did) => Def::Enum(did.tr(dcx)), - Def::TyAlias(did) => Def::TyAlias(did.tr(dcx)), - Def::AssociatedTy(trait_did, did) => - Def::AssociatedTy(trait_did.tr(dcx), did.tr(dcx)), - Def::PrimTy(p) => Def::PrimTy(p), - Def::TyParam(did) => Def::TyParam(did.tr(dcx)), - Def::Upvar(_, nid1, index, nid2) => { - let nid1 = dcx.tr_id(nid1); - let nid2 = dcx.tr_id(nid2); - let did1 = dcx.tcx.map.local_def_id(nid1); - Def::Upvar(did1, nid1, index, nid2) - } - Def::Struct(did) => Def::Struct(did.tr(dcx)), - Def::Union(did) => Def::Union(did.tr(dcx)), - Def::Label(nid) => Def::Label(dcx.tr_id(nid)), - Def::Err => Def::Err, - } - } -} - -// ______________________________________________________________________ -// Encoding and decoding of freevar information - -fn encode_freevar_entry(rbml_w: &mut Encoder, fv: &hir::Freevar) { - (*fv).encode(rbml_w).unwrap(); -} - -trait rbml_decoder_helper { - fn read_freevar_entry(&mut self, dcx: &DecodeContext) - -> hir::Freevar; - fn read_capture_mode(&mut self) -> hir::CaptureClause; -} - -impl<'a> rbml_decoder_helper for reader::Decoder<'a> { - fn read_freevar_entry(&mut self, dcx: &DecodeContext) - -> hir::Freevar { - let fv: hir::Freevar = Decodable::decode(self).unwrap(); - fv.tr(dcx) - } - - fn read_capture_mode(&mut self) -> hir::CaptureClause { - let cm: hir::CaptureClause = Decodable::decode(self).unwrap(); - cm - } -} - -impl tr for hir::Freevar { - fn tr(&self, dcx: &DecodeContext) -> hir::Freevar { - hir::Freevar { - def: self.def.tr(dcx), - span: self.span.tr(dcx), - } - } -} - -// ______________________________________________________________________ -// Encoding and decoding of MethodCallee - -trait read_method_callee_helper<'tcx> { - fn read_method_callee<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) - -> (u32, ty::MethodCallee<'tcx>); -} - -fn encode_method_callee<'a, 'tcx>(ecx: &e::EncodeContext<'a, 'tcx>, - rbml_w: &mut Encoder, - autoderef: u32, - method: &ty::MethodCallee<'tcx>) { - use rustc_serialize::Encoder; - - rbml_w.emit_struct("MethodCallee", 4, |rbml_w| { - rbml_w.emit_struct_field("autoderef", 0, |rbml_w| { - autoderef.encode(rbml_w) - }); - rbml_w.emit_struct_field("def_id", 1, |rbml_w| { - Ok(rbml_w.emit_def_id(method.def_id)) - }); - rbml_w.emit_struct_field("ty", 2, |rbml_w| { - Ok(rbml_w.emit_ty(ecx, method.ty)) - }); - rbml_w.emit_struct_field("substs", 3, |rbml_w| { - Ok(rbml_w.emit_substs(ecx, &method.substs)) - }) - }).unwrap(); -} - -impl<'a, 'tcx> read_method_callee_helper<'tcx> for reader::Decoder<'a> { - fn read_method_callee<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) - -> (u32, ty::MethodCallee<'tcx>) { - - self.read_struct("MethodCallee", 4, |this| { - let autoderef = this.read_struct_field("autoderef", 0, - Decodable::decode).unwrap(); - Ok((autoderef, ty::MethodCallee { - def_id: this.read_struct_field("def_id", 1, |this| { - Ok(this.read_def_id(dcx)) - }).unwrap(), - ty: 
this.read_struct_field("ty", 2, |this| { - Ok(this.read_ty(dcx)) - }).unwrap(), - substs: this.read_struct_field("substs", 3, |this| { - Ok(this.read_substs(dcx)) - }).unwrap() - })) - }).unwrap() - } -} - -pub fn encode_cast_kind(ebml_w: &mut Encoder, kind: cast::CastKind) { - kind.encode(ebml_w).unwrap(); -} - -// ______________________________________________________________________ -// Encoding and decoding the side tables - -trait rbml_writer_helpers<'tcx> { - fn emit_region(&mut self, ecx: &e::EncodeContext, r: &'tcx ty::Region); - fn emit_ty<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, ty: Ty<'tcx>); - fn emit_substs<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, - substs: &Substs<'tcx>); - fn emit_upvar_capture(&mut self, ecx: &e::EncodeContext, capture: &ty::UpvarCapture); - fn emit_auto_adjustment<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, - adj: &adjustment::AutoAdjustment<'tcx>); - fn emit_autoref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, - autoref: &adjustment::AutoRef<'tcx>); - fn emit_auto_deref_ref<'a>(&mut self, ecx: &e::EncodeContext<'a, 'tcx>, - auto_deref_ref: &adjustment::AutoDerefRef<'tcx>); -} - -impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { - fn emit_region(&mut self, ecx: &e::EncodeContext, r: &'tcx ty::Region) { - self.emit_opaque(|this| Ok(tyencode::enc_region(&mut this.cursor, - &ecx.ty_str_ctxt(), - r))); - } - - fn emit_ty<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, ty: Ty<'tcx>) { - self.emit_opaque(|this| Ok(tyencode::enc_ty(&mut this.cursor, - &ecx.ty_str_ctxt(), - ty))); - } - - fn emit_upvar_capture(&mut self, ecx: &e::EncodeContext, capture: &ty::UpvarCapture) { - use rustc_serialize::Encoder; - - self.emit_enum("UpvarCapture", |this| { - match *capture { - ty::UpvarCapture::ByValue => { - this.emit_enum_variant("ByValue", 1, 0, |_| Ok(())) - } - ty::UpvarCapture::ByRef(ty::UpvarBorrow { kind, region }) => { - this.emit_enum_variant("ByRef", 2, 0, |this| { - this.emit_enum_variant_arg(0, - |this| kind.encode(this)); - this.emit_enum_variant_arg(1, - |this| Ok(this.emit_region(ecx, region))) - }) - } - } - }).unwrap() - } - - fn emit_substs<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, - substs: &Substs<'tcx>) { - self.emit_opaque(|this| Ok(tyencode::enc_substs(&mut this.cursor, - &ecx.ty_str_ctxt(), - substs))); - } - - fn emit_auto_adjustment<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, - adj: &adjustment::AutoAdjustment<'tcx>) { - use rustc_serialize::Encoder; - - self.emit_enum("AutoAdjustment", |this| { - match *adj { - adjustment::AdjustReifyFnPointer => { - this.emit_enum_variant("AdjustReifyFnPointer", 1, 0, |_| Ok(())) - } - - adjustment::AdjustUnsafeFnPointer => { - this.emit_enum_variant("AdjustUnsafeFnPointer", 2, 0, |_| { - Ok(()) - }) - } - - adjustment::AdjustMutToConstPointer => { - this.emit_enum_variant("AdjustMutToConstPointer", 3, 0, |_| { - Ok(()) - }) - } - - adjustment::AdjustDerefRef(ref auto_deref_ref) => { - this.emit_enum_variant("AdjustDerefRef", 4, 2, |this| { - this.emit_enum_variant_arg(0, - |this| Ok(this.emit_auto_deref_ref(ecx, auto_deref_ref))) - }) - } - adjustment::AdjustNeverToAny(ref ty) => { - this.emit_enum_variant("AdjustNeverToAny", 5, 1, |this| { - this.emit_enum_variant_arg(0, |this| Ok(this.emit_ty(ecx, ty))) - }) - } - } - }); - } - - fn emit_autoref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, - autoref: &adjustment::AutoRef<'tcx>) { - use rustc_serialize::Encoder; - - self.emit_enum("AutoRef", |this| { - match autoref { - &adjustment::AutoPtr(r, m) => { - 
this.emit_enum_variant("AutoPtr", 0, 2, |this| { - this.emit_enum_variant_arg(0, - |this| Ok(this.emit_region(ecx, r))); - this.emit_enum_variant_arg(1, |this| m.encode(this)) - }) - } - &adjustment::AutoUnsafe(m) => { - this.emit_enum_variant("AutoUnsafe", 1, 1, |this| { - this.emit_enum_variant_arg(0, |this| m.encode(this)) - }) - } - } - }); - } - - fn emit_auto_deref_ref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, - auto_deref_ref: &adjustment::AutoDerefRef<'tcx>) { - use rustc_serialize::Encoder; - - self.emit_struct("AutoDerefRef", 2, |this| { - this.emit_struct_field("autoderefs", 0, |this| auto_deref_ref.autoderefs.encode(this)); +use rustc_serialize::Encodable; - this.emit_struct_field("autoref", 1, |this| { - this.emit_option(|this| { - match auto_deref_ref.autoref { - None => this.emit_option_none(), - Some(ref a) => this.emit_option_some(|this| Ok(this.emit_autoref(ecx, a))), - } - }) - }); - - this.emit_struct_field("unsize", 2, |this| { - this.emit_option(|this| { - match auto_deref_ref.unsize { - None => this.emit_option_none(), - Some(target) => this.emit_option_some(|this| { - Ok(this.emit_ty(ecx, target)) - }) - } - }) - }) - }); - } +#[derive(RustcEncodable, RustcDecodable)] +pub struct Ast<'tcx> { + id_range: IdRange, + item: Lazy, + side_tables: LazySeq<(ast::NodeId, TableEntry<'tcx>)>, } -trait write_tag_and_id { - fn tag(&mut self, tag_id: c::astencode_tag, f: F) where F: FnOnce(&mut Self); - fn id(&mut self, id: ast::NodeId); +#[derive(RustcEncodable, RustcDecodable)] +enum TableEntry<'tcx> { + Def(Def), + NodeType(Ty<'tcx>), + ItemSubsts(ty::ItemSubsts<'tcx>), + Adjustment(ty::adjustment::AutoAdjustment<'tcx>), + ConstQualif(ConstQualif), } -impl<'a> write_tag_and_id for Encoder<'a> { - fn tag(&mut self, - tag_id: c::astencode_tag, - f: F) where - F: FnOnce(&mut Encoder<'a>), - { - self.start_tag(tag_id as usize); - f(self); - self.end_tag(); - } - - fn id(&mut self, id: ast::NodeId) { - id.encode(self).unwrap(); - } -} - -struct SideTableEncodingIdVisitor<'a, 'b:'a, 'c:'a, 'tcx:'c> { - ecx: &'a e::EncodeContext<'c, 'tcx>, - rbml_w: &'a mut Encoder<'b>, -} - -impl<'a, 'b, 'c, 'tcx, 'v> Visitor<'v> for - SideTableEncodingIdVisitor<'a, 'b, 'c, 'tcx> { - fn visit_id(&mut self, id: ast::NodeId) { - encode_side_tables_for_id(self.ecx, self.rbml_w, id) - } -} - -fn encode_side_tables_for_ii(ecx: &e::EncodeContext, - rbml_w: &mut Encoder, - ii: &InlinedItem) { - rbml_w.start_tag(c::tag_table as usize); - ii.visit(&mut SideTableEncodingIdVisitor { - ecx: ecx, - rbml_w: rbml_w - }); - rbml_w.end_tag(); -} - -fn encode_side_tables_for_id(ecx: &e::EncodeContext, - rbml_w: &mut Encoder, - id: ast::NodeId) { - let tcx = ecx.tcx; - - debug!("Encoding side tables for id {}", id); - - if let Some(def) = tcx.expect_def_or_none(id) { - rbml_w.tag(c::tag_table_def, |rbml_w| { - rbml_w.id(id); - def.encode(rbml_w).unwrap(); - }) - } - - if let Some(ty) = tcx.node_types().get(&id) { - rbml_w.tag(c::tag_table_node_type, |rbml_w| { - rbml_w.id(id); - rbml_w.emit_ty(ecx, *ty); - }) - } - - if let Some(item_substs) = tcx.tables.borrow().item_substs.get(&id) { - rbml_w.tag(c::tag_table_item_subst, |rbml_w| { - rbml_w.id(id); - rbml_w.emit_substs(ecx, &item_substs.substs); - }) - } - - if let Some(fv) = tcx.freevars.borrow().get(&id) { - rbml_w.tag(c::tag_table_freevars, |rbml_w| { - rbml_w.id(id); - rbml_w.emit_from_vec(fv, |rbml_w, fv_entry| { - Ok(encode_freevar_entry(rbml_w, fv_entry)) - }); - }); - - for freevar in fv { - rbml_w.tag(c::tag_table_upvar_capture_map, |rbml_w| { - 
rbml_w.id(id); - - let var_id = freevar.def.var_id(); - let upvar_id = ty::UpvarId { - var_id: var_id, - closure_expr_id: id - }; - let upvar_capture = tcx.tables - .borrow() - .upvar_capture_map - .get(&upvar_id) - .unwrap() - .clone(); - var_id.encode(rbml_w); - rbml_w.emit_upvar_capture(ecx, &upvar_capture); - }) +impl<'a, 'tcx> EncodeContext<'a, 'tcx> { + pub fn encode_inlined_item(&mut self, ii: InlinedItemRef) -> Lazy> { + let mut id_visitor = IdRangeComputingVisitor::new(); + match ii { + InlinedItemRef::Item(_, i) => id_visitor.visit_item(i), + InlinedItemRef::TraitItem(_, ti) => id_visitor.visit_trait_item(ti), + InlinedItemRef::ImplItem(_, ii) => id_visitor.visit_impl_item(ii), } - } - let method_call = ty::MethodCall::expr(id); - if let Some(method) = tcx.tables.borrow().method_map.get(&method_call) { - rbml_w.tag(c::tag_table_method_map, |rbml_w| { - rbml_w.id(id); - encode_method_callee(ecx, rbml_w, method_call.autoderef, method) - }) - } - - if let Some(adjustment) = tcx.tables.borrow().adjustments.get(&id) { - match *adjustment { - adjustment::AdjustDerefRef(ref adj) => { - for autoderef in 0..adj.autoderefs { - let method_call = ty::MethodCall::autoderef(id, autoderef as u32); - if let Some(method) = tcx.tables.borrow().method_map.get(&method_call) { - rbml_w.tag(c::tag_table_method_map, |rbml_w| { - rbml_w.id(id); - encode_method_callee(ecx, rbml_w, - method_call.autoderef, method) - }) - } - } + let ii_pos = self.position(); + ii.encode(self).unwrap(); + + let tables_pos = self.position(); + let tables_count = { + let mut visitor = SideTableEncodingIdVisitor { + ecx: self, + count: 0, + }; + match ii { + InlinedItemRef::Item(_, i) => visitor.visit_item(i), + InlinedItemRef::TraitItem(_, ti) => visitor.visit_trait_item(ti), + InlinedItemRef::ImplItem(_, ii) => visitor.visit_impl_item(ii), } - _ => {} - } - - rbml_w.tag(c::tag_table_adjustments, |rbml_w| { - rbml_w.id(id); - rbml_w.emit_auto_adjustment(ecx, adjustment); - }) - } - - if let Some(cast_kind) = tcx.cast_kinds.borrow().get(&id) { - rbml_w.tag(c::tag_table_cast_kinds, |rbml_w| { - rbml_w.id(id); - encode_cast_kind(rbml_w, *cast_kind) - }) - } + visitor.count + }; - if let Some(qualif) = tcx.const_qualif_map.borrow().get(&id) { - rbml_w.tag(c::tag_table_const_qualif, |rbml_w| { - rbml_w.id(id); - qualif.encode(rbml_w).unwrap() + self.lazy(&Ast { + id_range: id_visitor.result(), + item: Lazy::with_position(ii_pos), + side_tables: LazySeq::with_position_and_length(tables_pos, tables_count), }) } } -trait doc_decoder_helpers: Sized { - fn as_int(&self) -> isize; - fn opt_child(&self, tag: c::astencode_tag) -> Option; +struct SideTableEncodingIdVisitor<'a, 'b: 'a, 'tcx: 'b> { + ecx: &'a mut EncodeContext<'b, 'tcx>, + count: usize, } -impl<'a> doc_decoder_helpers for rbml::Doc<'a> { - fn as_int(&self) -> isize { reader::doc_as_u64(*self) as isize } - fn opt_child(&self, tag: c::astencode_tag) -> Option> { - reader::maybe_get_doc(*self, tag as usize) - } -} - -trait rbml_decoder_decoder_helpers<'tcx> { - fn read_ty_encoded<'a, 'b, F, R>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>, - f: F) -> R - where F: for<'x> FnOnce(&mut tydecode::TyDecoder<'x, 'tcx>) -> R; - - fn read_region<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> &'tcx ty::Region; - fn read_ty<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Ty<'tcx>; - fn read_tys<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) -> Vec>; - fn read_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) - -> ty::TraitRef<'tcx>; - fn 
read_poly_trait_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) - -> ty::PolyTraitRef<'tcx>; - fn read_predicate<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) - -> ty::Predicate<'tcx>; - fn read_substs<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) - -> &'tcx Substs<'tcx>; - fn read_upvar_capture<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) - -> ty::UpvarCapture<'tcx>; - fn read_auto_adjustment<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) - -> adjustment::AutoAdjustment<'tcx>; - fn read_cast_kind<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) - -> cast::CastKind; - fn read_auto_deref_ref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) - -> adjustment::AutoDerefRef<'tcx>; - fn read_autoref<'a, 'b>(&mut self, dcx: &DecodeContext<'a, 'b, 'tcx>) - -> adjustment::AutoRef<'tcx>; - - // Versions of the type reading functions that don't need the full - // DecodeContext. - fn read_ty_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, - cdata: &cstore::CrateMetadata) -> Ty<'tcx>; - fn read_tys_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, - cdata: &cstore::CrateMetadata) -> Vec>; - fn read_substs_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, - cdata: &cstore::CrateMetadata) - -> &'tcx Substs<'tcx>; -} - -impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> { - fn read_ty_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>, - cdata: &cstore::CrateMetadata) - -> Ty<'tcx> { - self.read_opaque(|_, doc| { - Ok( - tydecode::TyDecoder::with_doc(tcx, cdata.cnum, doc, - &mut |id| decoder::translate_def_id(cdata, id)) - .parse_ty()) - }).unwrap() - } - - fn read_tys_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>, - cdata: &cstore::CrateMetadata) -> Vec> { - self.read_to_vec(|this| Ok(this.read_ty_nodcx(tcx, cdata)) ) - .unwrap() - .into_iter() - .collect() - } - - fn read_substs_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>, - cdata: &cstore::CrateMetadata) - -> &'tcx Substs<'tcx> - { - self.read_opaque(|_, doc| { - Ok( - tydecode::TyDecoder::with_doc(tcx, cdata.cnum, doc, - &mut |id| decoder::translate_def_id(cdata, id)) - .parse_substs()) - }).unwrap() - } - - fn read_ty_encoded<'b, 'c, F, R>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>, op: F) -> R - where F: for<'x> FnOnce(&mut tydecode::TyDecoder<'x,'tcx>) -> R - { - return self.read_opaque(|_, doc| { - debug!("read_ty_encoded({})", type_string(doc)); - Ok(op( - &mut tydecode::TyDecoder::with_doc( - dcx.tcx, dcx.cdata.cnum, doc, - &mut |d| convert_def_id(dcx, d)))) - }).unwrap(); +impl<'a, 'b, 'tcx, 'v> Visitor<'v> for SideTableEncodingIdVisitor<'a, 'b, 'tcx> { + fn visit_id(&mut self, id: ast::NodeId) { + debug!("Encoding side tables for id {}", id); - fn type_string(doc: rbml::Doc) -> String { - let mut str = String::new(); - for i in doc.start..doc.end { - str.push(doc.data[i] as char); + let tcx = self.ecx.tcx; + let mut encode = |entry: Option| { + if let Some(entry) = entry { + (id, entry).encode(self.ecx).unwrap(); + self.count += 1; } - str - } - } - fn read_region<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) -> &'tcx ty::Region { - // Note: regions types embed local node ids. In principle, we - // should translate these node ids into the new decode - // context. However, we do not bother, because region types - // are not used during trans. This also applies to read_ty. 
- return self.read_ty_encoded(dcx, |decoder| decoder.parse_region()); - } - fn read_ty<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) -> Ty<'tcx> { - return self.read_ty_encoded(dcx, |decoder| decoder.parse_ty()); - } + }; - fn read_tys<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) - -> Vec> { - self.read_to_vec(|this| Ok(this.read_ty(dcx))).unwrap().into_iter().collect() - } - - fn read_trait_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) - -> ty::TraitRef<'tcx> { - self.read_ty_encoded(dcx, |decoder| decoder.parse_trait_ref()) - } - - fn read_poly_trait_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) - -> ty::PolyTraitRef<'tcx> { - ty::Binder(self.read_ty_encoded(dcx, |decoder| decoder.parse_trait_ref())) - } - - fn read_predicate<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) - -> ty::Predicate<'tcx> - { - self.read_ty_encoded(dcx, |decoder| decoder.parse_predicate()) - } - - fn read_substs<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) - -> &'tcx Substs<'tcx> { - self.read_opaque(|_, doc| { - Ok(tydecode::TyDecoder::with_doc(dcx.tcx, dcx.cdata.cnum, doc, - &mut |d| convert_def_id(dcx, d)) - .parse_substs()) - }).unwrap() - } - fn read_upvar_capture<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) - -> ty::UpvarCapture<'tcx> { - self.read_enum("UpvarCapture", |this| { - let variants = ["ByValue", "ByRef"]; - this.read_enum_variant(&variants, |this, i| { - Ok(match i { - 1 => ty::UpvarCapture::ByValue, - 2 => ty::UpvarCapture::ByRef(ty::UpvarBorrow { - kind: this.read_enum_variant_arg(0, - |this| Decodable::decode(this)).unwrap(), - region: this.read_enum_variant_arg(1, - |this| Ok(this.read_region(dcx))).unwrap() - }), - _ => bug!("bad enum variant for ty::UpvarCapture") - }) - }) - }).unwrap() - } - fn read_auto_adjustment<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) - -> adjustment::AutoAdjustment<'tcx> { - self.read_enum("AutoAdjustment", |this| { - let variants = ["AdjustReifyFnPointer", "AdjustUnsafeFnPointer", - "AdjustMutToConstPointer", "AdjustDerefRef", - "AdjustNeverToAny"]; - this.read_enum_variant(&variants, |this, i| { - Ok(match i { - 1 => adjustment::AdjustReifyFnPointer, - 2 => adjustment::AdjustUnsafeFnPointer, - 3 => adjustment::AdjustMutToConstPointer, - 4 => { - let auto_deref_ref: adjustment::AutoDerefRef = - this.read_enum_variant_arg(0, - |this| Ok(this.read_auto_deref_ref(dcx))).unwrap(); - - adjustment::AdjustDerefRef(auto_deref_ref) - } - 5 => { - let ty: Ty<'tcx> = this.read_enum_variant_arg(0, |this| { - Ok(this.read_ty(dcx)) - }).unwrap(); - - adjustment::AdjustNeverToAny(ty) - } - _ => bug!("bad enum variant for adjustment::AutoAdjustment") - }) - }) - }).unwrap() - } - - fn read_auto_deref_ref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) - -> adjustment::AutoDerefRef<'tcx> { - self.read_struct("AutoDerefRef", 2, |this| { - Ok(adjustment::AutoDerefRef { - autoderefs: this.read_struct_field("autoderefs", 0, |this| { - Decodable::decode(this) - }).unwrap(), - autoref: this.read_struct_field("autoref", 1, |this| { - this.read_option(|this, b| { - if b { - Ok(Some(this.read_autoref(dcx))) - } else { - Ok(None) - } - }) - }).unwrap(), - unsize: this.read_struct_field("unsize", 2, |this| { - this.read_option(|this, b| { - if b { - Ok(Some(this.read_ty(dcx))) - } else { - Ok(None) - } - }) - }).unwrap(), - }) - }).unwrap() - } - - fn read_autoref<'b, 'c>(&mut self, dcx: &DecodeContext<'b, 'c, 'tcx>) - -> adjustment::AutoRef<'tcx> { - self.read_enum("AutoRef", |this| { - let variants = 
["AutoPtr", "AutoUnsafe"]; - this.read_enum_variant(&variants, |this, i| { - Ok(match i { - 0 => { - let r: &'tcx ty::Region = - this.read_enum_variant_arg(0, |this| { - Ok(this.read_region(dcx)) - }).unwrap(); - let m: hir::Mutability = - this.read_enum_variant_arg(1, |this| { - Decodable::decode(this) - }).unwrap(); - - adjustment::AutoPtr(r, m) - } - 1 => { - let m: hir::Mutability = - this.read_enum_variant_arg(0, |this| Decodable::decode(this)).unwrap(); - - adjustment::AutoUnsafe(m) - } - _ => bug!("bad enum variant for adjustment::AutoRef") - }) - }) - }).unwrap() - } - - fn read_cast_kind<'b, 'c>(&mut self, _dcx: &DecodeContext<'b, 'c, 'tcx>) - -> cast::CastKind - { - Decodable::decode(self).unwrap() + encode(tcx.expect_def_or_none(id).map(TableEntry::Def)); + encode(tcx.node_types().get(&id).cloned().map(TableEntry::NodeType)); + encode(tcx.tables.borrow().item_substs.get(&id).cloned().map(TableEntry::ItemSubsts)); + encode(tcx.tables.borrow().adjustments.get(&id).cloned().map(TableEntry::Adjustment)); + encode(tcx.const_qualif_map.borrow().get(&id).cloned().map(TableEntry::ConstQualif)); } } -// Converts a def-id that appears in a type. The correct -// translation will depend on what kind of def-id this is. -// This is a subtle point: type definitions are not -// inlined into the current crate, so if the def-id names -// a nominal type or type alias, then it should be -// translated to refer to the source crate. -// -// However, *type parameters* are cloned along with the function -// they are attached to. So we should translate those def-ids -// to refer to the new, cloned copy of the type parameter. -// We only see references to free type parameters in the body of -// an inlined function. In such cases, we need the def-id to -// be a local id so that the TypeContents code is able to lookup -// the relevant info in the ty_param_defs table. -// -// *Region parameters*, unfortunately, are another kettle of fish. -// In such cases, def_id's can appear in types to distinguish -// shadowed bound regions and so forth. It doesn't actually -// matter so much what we do to these, since regions are erased -// at trans time, but it's good to keep them consistent just in -// case. We translate them with `tr_def_id()` which will map -// the crate numbers back to the original source crate. -// -// Scopes will end up as being totally bogus. This can actually -// be fixed though. -// -// Unboxed closures are cloned along with the function being -// inlined, and all side tables use interned node IDs, so we -// translate their def IDs accordingly. -// -// It'd be really nice to refactor the type repr to not include -// def-ids so that all these distinctions were unnecessary. 
-fn convert_def_id(dcx: &DecodeContext, - did: DefId) - -> DefId { - let r = dcx.tr_def_id(did); - debug!("convert_def_id(did={:?})={:?}", did, r); - return r; -} - -fn decode_side_tables(dcx: &DecodeContext, - ast_doc: rbml::Doc) { - let tbl_doc = ast_doc.get(c::tag_table as usize); - for (tag, entry_doc) in reader::docs(tbl_doc) { - let mut entry_dsr = reader::Decoder::new(entry_doc); - let id0: ast::NodeId = Decodable::decode(&mut entry_dsr).unwrap(); - let id = dcx.tr_id(id0); - - debug!(">> Side table document with tag 0x{:x} \ - found for id {} (orig {})", - tag, id, id0); - let tag = tag as u32; - let decoded_tag: Option = c::astencode_tag::from_u32(tag); - match decoded_tag { - None => { - bug!("unknown tag found in side tables: {:x}", tag); - } - Some(value) => { - let val_dsr = &mut entry_dsr; - - match value { - c::tag_table_def => { - let def = decode_def(dcx, val_dsr); - dcx.tcx.def_map.borrow_mut().insert(id, def::PathResolution::new(def)); - } - c::tag_table_node_type => { - let ty = val_dsr.read_ty(dcx); - debug!("inserting ty for node {}: {:?}", - id, ty); - dcx.tcx.node_type_insert(id, ty); - } - c::tag_table_item_subst => { - let item_substs = ty::ItemSubsts { - substs: val_dsr.read_substs(dcx) - }; - dcx.tcx.tables.borrow_mut().item_substs.insert( - id, item_substs); - } - c::tag_table_freevars => { - let fv_info = val_dsr.read_to_vec(|val_dsr| { - Ok(val_dsr.read_freevar_entry(dcx)) - }).unwrap().into_iter().collect(); - dcx.tcx.freevars.borrow_mut().insert(id, fv_info); - } - c::tag_table_upvar_capture_map => { - let var_id: ast::NodeId = Decodable::decode(val_dsr).unwrap(); - let upvar_id = ty::UpvarId { - var_id: dcx.tr_id(var_id), - closure_expr_id: id - }; - let ub = val_dsr.read_upvar_capture(dcx); - dcx.tcx.tables.borrow_mut().upvar_capture_map.insert(upvar_id, ub); - } - c::tag_table_method_map => { - let (autoderef, method) = val_dsr.read_method_callee(dcx); - let method_call = ty::MethodCall { - expr_id: id, - autoderef: autoderef - }; - dcx.tcx.tables.borrow_mut().method_map.insert(method_call, method); - } - c::tag_table_adjustments => { - let adj = - val_dsr.read_auto_adjustment(dcx); - dcx.tcx.tables.borrow_mut().adjustments.insert(id, adj); - } - c::tag_table_cast_kinds => { - let cast_kind = - val_dsr.read_cast_kind(dcx); - dcx.tcx.cast_kinds.borrow_mut().insert(id, cast_kind); - } - c::tag_table_const_qualif => { - let qualif: ConstQualif = Decodable::decode(val_dsr).unwrap(); - dcx.tcx.const_qualif_map.borrow_mut().insert(id, qualif); - } - _ => { - bug!("unknown tag found in side tables: {:x}", tag); - } - } - } - } +/// Decodes an item from its AST in the cdata's metadata and adds it to the +/// ast-map. 
+pub fn decode_inlined_item<'a, 'tcx>(cdata: &CrateMetadata, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + parent_def_path: ast_map::DefPath, + parent_did: DefId, + ast: Ast<'tcx>, + orig_did: DefId) + -> &'tcx InlinedItem { + debug!("> Decoding inlined fn: {:?}", tcx.item_path_str(orig_did)); - debug!(">< Side table doc loaded"); - } -} + let cnt = ast.id_range.max.as_usize() - ast.id_range.min.as_usize(); + let start = tcx.sess.reserve_node_ids(cnt); + let id_ranges = [ast.id_range, + IdRange { + min: start, + max: ast::NodeId::new(start.as_usize() + cnt), + }]; -// copy the tcache entries from the original item to the new -// inlined item -fn copy_item_types(dcx: &DecodeContext, ii: &InlinedItem, orig_did: DefId) { - fn copy_item_type(dcx: &DecodeContext, - inlined_id: ast::NodeId, - remote_did: DefId) { - let inlined_did = dcx.tcx.map.local_def_id(inlined_id); - dcx.tcx.register_item_type(inlined_did, - dcx.tcx.lookup_item_type(remote_did)); + let ii = ast.item.decode((cdata, tcx, id_ranges)); + let ii = ast_map::map_decoded_item(&tcx.map, + parent_def_path, + parent_did, + ii, + tcx.sess.next_node_id()); - } - // copy the entry for the item itself let item_node_id = match ii { &InlinedItem::Item(_, ref i) => i.id, &InlinedItem::TraitItem(_, ref ti) => ti.id, - &InlinedItem::ImplItem(_, ref ii) => ii.id + &InlinedItem::ImplItem(_, ref ii) => ii.id, }; - copy_item_type(dcx, item_node_id, orig_did); + let inlined_did = tcx.map.local_def_id(item_node_id); + tcx.register_item_type(inlined_did, tcx.lookup_item_type(orig_did)); - // copy the entries of inner items - if let &InlinedItem::Item(_, ref item) = ii { - match item.node { - hir::ItemEnum(ref def, _) => { - let orig_def = dcx.tcx.lookup_adt_def(orig_did); - for (i_variant, orig_variant) in - def.variants.iter().zip(orig_def.variants.iter()) - { - debug!("astencode: copying variant {:?} => {:?}", - orig_variant.did, i_variant.node.data.id()); - copy_item_type(dcx, i_variant.node.data.id(), orig_variant.did); - } + for (id, entry) in ast.side_tables.decode((cdata, tcx, id_ranges)) { + match entry { + TableEntry::Def(def) => { + tcx.def_map.borrow_mut().insert(id, def::PathResolution::new(def)); } - hir::ItemStruct(ref def, _) => { - if !def.is_struct() { - let ctor_did = dcx.tcx.lookup_adt_def(orig_did) - .struct_variant().did; - debug!("astencode: copying ctor {:?} => {:?}", ctor_did, - def.id()); - copy_item_type(dcx, def.id(), ctor_did); - } + TableEntry::NodeType(ty) => { + tcx.node_type_insert(id, ty); } - _ => {} - } - } -} - -fn inlined_item_id_range(ii: &InlinedItem) -> IdRange { - let mut visitor = IdRangeComputingVisitor::new(); - ii.visit(&mut visitor); - visitor.result() -} - -// ______________________________________________________________________ -// Testing of astencode_gen - -#[cfg(test)] -fn encode_item_ast(rbml_w: &mut Encoder, item: &hir::Item) { - rbml_w.start_tag(c::tag_tree as usize); - (*item).encode(rbml_w); - rbml_w.end_tag(); -} - -#[cfg(test)] -fn decode_item_ast(item_doc: rbml::Doc) -> hir::Item { - let chi_doc = item_doc.get(c::tag_tree as usize); - let mut d = reader::Decoder::new(chi_doc); - Decodable::decode(&mut d).unwrap() -} - -#[cfg(test)] -trait FakeExtCtxt { - fn call_site(&self) -> syntax_pos::Span; - fn cfg(&self) -> ast::CrateConfig; - fn ident_of(&self, st: &str) -> ast::Ident; - fn name_of(&self, st: &str) -> ast::Name; - fn parse_sess(&self) -> &parse::ParseSess; -} - -#[cfg(test)] -impl FakeExtCtxt for parse::ParseSess { - fn call_site(&self) -> syntax_pos::Span { - syntax_pos::Span { - lo: 
syntax_pos::BytePos(0), - hi: syntax_pos::BytePos(0), - expn_id: syntax_pos::NO_EXPANSION, - } - } - fn cfg(&self) -> ast::CrateConfig { Vec::new() } - fn ident_of(&self, st: &str) -> ast::Ident { - parse::token::str_to_ident(st) - } - fn name_of(&self, st: &str) -> ast::Name { - parse::token::intern(st) - } - fn parse_sess(&self) -> &parse::ParseSess { self } -} - -#[cfg(test)] -fn mk_ctxt() -> parse::ParseSess { - parse::ParseSess::new() -} - -#[cfg(test)] -fn with_testing_context T>(f: F) -> T { - let mut resolver = DummyResolver; - let mut lcx = LoweringContext::testing_context(&mut resolver); - f(&mut lcx) -} - -#[cfg(test)] -fn roundtrip(in_item: hir::Item) { - let mut wr = Cursor::new(Vec::new()); - encode_item_ast(&mut Encoder::new(&mut wr), &in_item); - let rbml_doc = rbml::Doc::new(wr.get_ref()); - let out_item = decode_item_ast(rbml_doc); - - assert!(in_item == out_item); -} - -#[test] -fn test_basic() { - let cx = mk_ctxt(); - with_testing_context(|lcx| { - roundtrip(lcx.lower_item("e_item!(&cx, - fn foo() {} - ).unwrap())); - }); -} - -#[test] -fn test_smalltalk() { - let cx = mk_ctxt(); - with_testing_context(|lcx| { - roundtrip(lcx.lower_item("e_item!(&cx, - fn foo() -> isize { 3 + 4 } // first smalltalk program ever executed. - ).unwrap())); - }); -} - -#[test] -fn test_more() { - let cx = mk_ctxt(); - with_testing_context(|lcx| { - roundtrip(lcx.lower_item("e_item!(&cx, - fn foo(x: usize, y: usize) -> usize { - let z = x + y; - return z; + TableEntry::ItemSubsts(item_substs) => { + tcx.tables.borrow_mut().item_substs.insert(id, item_substs); } - ).unwrap())); - }); -} - -#[test] -fn test_simplification() { - use middle::cstore::LOCAL_CRATE; - use rustc::hir::def_id::CRATE_DEF_INDEX; - - let cx = mk_ctxt(); - let item = quote_item!(&cx, - fn new_int_alist() -> alist { - fn eq_int(a: isize, b: isize) -> bool { a == b } - return alist {eq_fn: eq_int, data: Vec::new()}; - } - ).unwrap(); - let cx = mk_ctxt(); - with_testing_context(|lcx| { - let hir_item = lcx.lower_item(&item); - let def_id = DefId { krate: LOCAL_CRATE, index: CRATE_DEF_INDEX }; // dummy - let item_in = InlinedItemRef::Item(def_id, &hir_item); - let (item_out, _) = simplify_ast(item_in); - let item_exp = InlinedItem::Item(def_id, P(lcx.lower_item("e_item!(&cx, - fn new_int_alist() -> alist { - return alist {eq_fn: eq_int, data: Vec::new()}; + TableEntry::Adjustment(adj) => { + tcx.tables.borrow_mut().adjustments.insert(id, adj); } - ).unwrap()))); - match (item_out, item_exp) { - (InlinedItem::Item(_, item_out), InlinedItem::Item(_, item_exp)) => { - assert!(pprust::item_to_string(&item_out) == - pprust::item_to_string(&item_exp)); + TableEntry::ConstQualif(qualif) => { + tcx.const_qualif_map.borrow_mut().insert(id, qualif); } - _ => bug!() } - }); + } + + ii } diff --git a/src/librustc_metadata/common.rs b/src/librustc_metadata/common.rs deleted file mode 100644 index 29b9cc0d1d923..0000000000000 --- a/src/librustc_metadata/common.rs +++ /dev/null @@ -1,241 +0,0 @@ -// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- -#![allow(non_camel_case_types, non_upper_case_globals)] - -pub use self::astencode_tag::*; - -// RBML enum definitions and utils shared by the encoder and decoder -// -// 0x00..0x1f: reserved for RBML generic type tags -// 0x20..0xef: free for use, preferred for frequent tags -// 0xf0..0xff: internally used by RBML to encode 0x100..0xfff in two bytes -// 0x100..0xfff: free for use, preferred for infrequent tags - -pub const tag_items: usize = 0x100; // top-level only - -pub const tag_paths_data_name: usize = 0x20; - -pub const tag_def_id: usize = 0x21; - -pub const tag_items_data: usize = 0x22; - -pub const tag_items_data_item: usize = 0x23; - -pub const tag_items_data_item_family: usize = 0x24; - -pub const tag_items_data_item_type: usize = 0x25; - -// GAP 0x26 - -pub const tag_items_data_item_variant: usize = 0x27; - -pub const tag_items_data_parent_item: usize = 0x28; - -pub const tag_items_data_item_is_tuple_struct_ctor: usize = 0x29; - -pub const tag_items_closure_kind: usize = 0x2a; -pub const tag_items_closure_ty: usize = 0x2b; -pub const tag_def_key: usize = 0x2c; - -// GAP 0x2d 0x34 - -pub const tag_index: usize = 0x110; // top-level only -pub const tag_xref_index: usize = 0x111; // top-level only -pub const tag_xref_data: usize = 0x112; // top-level only -pub const tag_attributes: usize = 0x101; // top-level only - -// The list of crates that this crate depends on -pub const tag_crate_deps: usize = 0x102; // top-level only - -// A single crate dependency -pub const tag_crate_dep: usize = 0x35; - -pub const tag_crate_hash: usize = 0x103; // top-level only -pub const tag_crate_crate_name: usize = 0x104; // top-level only -pub const tag_crate_disambiguator: usize = 0x113; // top-level only - -pub const tag_crate_dep_crate_name: usize = 0x36; -pub const tag_crate_dep_hash: usize = 0x37; -pub const tag_crate_dep_explicitly_linked: usize = 0x38; // top-level only - -pub const tag_item_trait_item: usize = 0x3a; - -pub const tag_item_trait_ref: usize = 0x3b; - -// discriminator value for variants -pub const tag_disr_val: usize = 0x3c; - -// GAP 0x3d, 0x3e, 0x3f, 0x40 - -pub const tag_item_field: usize = 0x41; -// GAP 0x42 -pub const tag_item_variances: usize = 0x43; -/* - trait items contain tag_item_trait_item elements, - impl items contain tag_item_impl_item elements, and classes - have both. That's because some code treats classes like traits, - and other code treats them like impls. Because classes can contain - both, tag_item_trait_item and tag_item_impl_item have to be two - different tags. - */ -pub const tag_item_impl_item: usize = 0x44; -pub const tag_item_trait_method_explicit_self: usize = 0x45; - - -// Reexports are found within module tags. Each reexport contains def_ids -// and names. -pub const tag_items_data_item_reexport: usize = 0x46; -pub const tag_items_data_item_reexport_def_id: usize = 0x47; -pub const tag_items_data_item_reexport_name: usize = 0x48; - -// used to encode crate_ctxt side tables -enum_from_u32! 
{ - #[derive(Copy, Clone, PartialEq)] - #[repr(usize)] - pub enum astencode_tag { // Reserves 0x50 -- 0x6f - tag_ast = 0x50, - - tag_tree = 0x51, - - tag_mir = 0x52, - - tag_table = 0x53, - // GAP 0x54, 0x55 - tag_table_def = 0x56, - tag_table_node_type = 0x57, - tag_table_item_subst = 0x58, - tag_table_freevars = 0x59, - // GAP 0x5a, 0x5b, 0x5c, 0x5d, 0x5e - tag_table_method_map = 0x5f, - // GAP 0x60 - tag_table_adjustments = 0x61, - // GAP 0x62, 0x63, 0x64, 0x65 - tag_table_upvar_capture_map = 0x66, - // GAP 0x67, 0x68 - tag_table_const_qualif = 0x69, - tag_table_cast_kinds = 0x6a, - } -} - -pub const tag_item_trait_item_sort: usize = 0x70; - -pub const tag_crate_triple: usize = 0x105; // top-level only - -pub const tag_dylib_dependency_formats: usize = 0x106; // top-level only - -// Language items are a top-level directory (for speed). Hierarchy: -// -// tag_lang_items -// - tag_lang_items_item -// - tag_lang_items_item_id: u32 -// - tag_lang_items_item_index: u32 - -pub const tag_lang_items: usize = 0x107; // top-level only -pub const tag_lang_items_item: usize = 0x73; -pub const tag_lang_items_item_id: usize = 0x74; -pub const tag_lang_items_item_index: usize = 0x75; -pub const tag_lang_items_missing: usize = 0x76; - -pub const tag_item_unnamed_field: usize = 0x77; -pub const tag_items_data_item_visibility: usize = 0x78; -pub const tag_items_data_item_inherent_impl: usize = 0x79; -// GAP 0x7a -pub const tag_mod_child: usize = 0x7b; -// GAP 0x7c - -// GAP 0x108 -pub const tag_impls: usize = 0x109; // top-level only -pub const tag_impls_trait: usize = 0x7d; -pub const tag_impls_trait_impl: usize = 0x7e; - -// GAP 0x7f, 0x80, 0x81 - -pub const tag_native_libraries: usize = 0x10a; // top-level only -pub const tag_native_libraries_lib: usize = 0x82; -pub const tag_native_libraries_name: usize = 0x83; -pub const tag_native_libraries_kind: usize = 0x84; - -pub const tag_plugin_registrar_fn: usize = 0x10b; // top-level only - -pub const tag_method_argument_names: usize = 0x85; -pub const tag_method_argument_name: usize = 0x86; - -pub const tag_reachable_ids: usize = 0x10c; // top-level only -pub const tag_reachable_id: usize = 0x87; - -pub const tag_items_data_item_stability: usize = 0x88; - -pub const tag_items_data_item_repr: usize = 0x89; - -pub const tag_struct_fields: usize = 0x10d; // top-level only -pub const tag_struct_field: usize = 0x8a; - -pub const tag_items_data_item_struct_ctor: usize = 0x8b; -pub const tag_attribute_is_sugared_doc: usize = 0x8c; -// GAP 0x8d -pub const tag_items_data_region: usize = 0x8e; - -pub const tag_item_generics: usize = 0x8f; -// GAP 0x90, 0x91, 0x92, 0x93, 0x94 - -pub const tag_item_predicates: usize = 0x95; -// GAP 0x96 - -pub const tag_predicate: usize = 0x97; -// GAP 0x98, 0x99 - -pub const tag_unsafety: usize = 0x9a; - -pub const tag_associated_type_names: usize = 0x9b; -pub const tag_associated_type_name: usize = 0x9c; - -pub const tag_polarity: usize = 0x9d; - -pub const tag_macro_defs: usize = 0x10e; // top-level only -pub const tag_macro_def: usize = 0x9e; -pub const tag_macro_def_body: usize = 0x9f; -pub const tag_macro_def_span_lo: usize = 0xa8; -pub const tag_macro_def_span_hi: usize = 0xa9; - -pub const tag_paren_sugar: usize = 0xa0; - -pub const tag_codemap: usize = 0xa1; -pub const tag_codemap_filemap: usize = 0xa2; - -pub const tag_item_super_predicates: usize = 0xa3; - -pub const tag_defaulted_trait: usize = 0xa4; - -pub const tag_impl_coerce_unsized_kind: usize = 0xa5; - -pub const tag_items_data_item_constness: usize = 0xa6; - -pub 
const tag_items_data_item_deprecation: usize = 0xa7; - -pub const tag_items_data_item_defaultness: usize = 0xa8; - -pub const tag_items_data_parent_impl: usize = 0xa9; - -pub const tag_rustc_version: usize = 0x10f; -pub fn rustc_version() -> String { - format!( - "rustc {}", - option_env!("CFG_VERSION").unwrap_or("unknown version") - ) -} - -pub const tag_panic_strategy: usize = 0x114; - -pub const tag_macro_derive_registrar: usize = 0x115; - -// NB: increment this if you change the format of metadata such that -// rustc_version can't be found. -pub const metadata_encoding_version : &'static [u8] = &[b'r', b'u', b's', b't', 0, 0, 0, 2]; diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index a1ade43f64750..e72ac8419941c 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -8,61 +8,50 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![allow(non_camel_case_types)] - //! Validates all used crates and extern libraries and loads their metadata use cstore::{self, CStore, CrateSource, MetadataBlob}; -use decoder; -use loader::{self, CratePaths}; +use locator::{self, CratePaths}; +use schema::CrateRoot; -use rustc::hir::def_id::DefIndex; +use rustc::hir::def_id::{CrateNum, DefIndex}; use rustc::hir::svh::Svh; -use rustc::dep_graph::{DepGraph, DepNode}; +use rustc::middle::cstore::LoadedMacros; use rustc::session::{config, Session}; -use rustc::session::config::PanicStrategy; +use rustc_back::PanicStrategy; use rustc::session::search_paths::PathKind; +use rustc::middle; use rustc::middle::cstore::{CrateStore, validate_crate_name, ExternCrate}; use rustc::util::nodemap::{FnvHashMap, FnvHashSet}; -use rustc::hir::map as hir_map; +use rustc::hir::map::Definitions; use std::cell::{RefCell, Cell}; +use std::ops::Deref; use std::path::PathBuf; use std::rc::Rc; use std::fs; use syntax::ast; use syntax::abi::Abi; -use syntax::codemap; use syntax::parse; use syntax::attr; -use syntax::parse::token::InternedString; -use syntax::visit; -use syntax_pos::{self, Span, mk_sp, Pos}; +use syntax::ext::base::SyntaxExtension; +use syntax::parse::token::{InternedString, intern}; +use syntax_pos::{self, Span, mk_sp}; use log; -struct LocalCrateReader<'a> { - sess: &'a Session, - cstore: &'a CStore, - creader: CrateReader<'a>, - krate: &'a ast::Crate, - definitions: &'a hir_map::Definitions, +pub struct Library { + pub dylib: Option<(PathBuf, PathKind)>, + pub rlib: Option<(PathBuf, PathKind)>, + pub metadata: MetadataBlob, } -pub struct CrateReader<'a> { - sess: &'a Session, +pub struct CrateLoader<'a> { + pub sess: &'a Session, cstore: &'a CStore, - next_crate_num: ast::CrateNum, + next_crate_num: CrateNum, foreign_item_map: FnvHashMap>, local_crate_name: String, - local_crate_config: ast::CrateConfig, -} - -impl<'a> visit::Visitor for LocalCrateReader<'a> { - fn visit_item(&mut self, a: &ast::Item) { - self.process_item(a); - visit::walk_item(self, a); - } } fn dump_crates(cstore: &CStore) { @@ -85,7 +74,7 @@ fn should_link(i: &ast::Item) -> bool { } #[derive(Debug)] -struct CrateInfo { +struct ExternCrateInfo { ident: String, name: String, id: ast::NodeId, @@ -130,60 +119,41 @@ struct ExtensionCrate { metadata: PMDSource, dylib: Option, target_only: bool, - - ident: String, - name: String, - span: Span, - should_link: bool, } enum PMDSource { Registered(Rc), - Owned(loader::Library), + Owned(Library), } -impl PMDSource { - pub fn as_slice<'a>(&'a self) -> &'a [u8] { +impl Deref for PMDSource { + 
type Target = MetadataBlob; + + fn deref(&self) -> &MetadataBlob { match *self { - PMDSource::Registered(ref cmd) => cmd.data(), - PMDSource::Owned(ref lib) => lib.metadata.as_slice(), + PMDSource::Registered(ref cmd) => &cmd.blob, + PMDSource::Owned(ref lib) => &lib.metadata } } } enum LoadResult { - Previous(ast::CrateNum), - Loaded(loader::Library), + Previous(CrateNum), + Loaded(Library), } -pub struct Macros { - pub macro_rules: Vec, - - /// An array of pairs where the first element is the name of the custom - /// derive (e.g. the trait being derived) and the second element is the - /// index of the definition. - pub custom_derive_registrar: Option, - pub svh: Svh, - pub dylib: Option, -} - -impl<'a> CrateReader<'a> { - pub fn new(sess: &'a Session, - cstore: &'a CStore, - local_crate_name: &str, - local_crate_config: ast::CrateConfig) - -> CrateReader<'a> { - CrateReader { +impl<'a> CrateLoader<'a> { + pub fn new(sess: &'a Session, cstore: &'a CStore, local_crate_name: &str) -> Self { + CrateLoader { sess: sess, cstore: cstore, next_crate_num: cstore.next_crate_num(), foreign_item_map: FnvHashMap(), local_crate_name: local_crate_name.to_owned(), - local_crate_config: local_crate_config, } } - fn extract_crate_info(&self, i: &ast::Item) -> Option { + fn extract_crate_info(&self, i: &ast::Item) -> Option { match i.node { ast::ItemKind::ExternCrate(ref path_opt) => { debug!("resolving extern crate stmt. ident: {} path_opt: {:?}", @@ -196,7 +166,7 @@ impl<'a> CrateReader<'a> { } None => i.ident.to_string(), }; - Some(CrateInfo { + Some(ExternCrateInfo { ident: i.ident.to_string(), name: name, id: i.id, @@ -208,7 +178,7 @@ impl<'a> CrateReader<'a> { } fn existing_match(&self, name: &str, hash: Option<&Svh>, kind: PathKind) - -> Option { + -> Option { let mut ret = None; self.cstore.iter_crate_data(|cnum, data| { if data.name != name { return } @@ -258,32 +228,28 @@ impl<'a> CrateReader<'a> { fn verify_no_symbol_conflicts(&self, span: Span, - metadata: &MetadataBlob) { - let disambiguator = decoder::get_crate_disambiguator(metadata.as_slice()); - let crate_name = decoder::get_crate_name(metadata.as_slice()); - + root: &CrateRoot) { // Check for (potential) conflicts with the local crate - if self.local_crate_name == crate_name && - self.sess.local_crate_disambiguator() == disambiguator { + if self.local_crate_name == root.name && + self.sess.local_crate_disambiguator() == &root.disambiguator[..] { span_fatal!(self.sess, span, E0519, "the current crate is indistinguishable from one of its \ dependencies: it has the same crate-name `{}` and was \ compiled with the same `-C metadata` arguments. This \ will result in symbol conflicts between the two.", - crate_name) + root.name) } - let svh = decoder::get_crate_hash(metadata.as_slice()); // Check for conflicts with any crate loaded so far self.cstore.iter_crate_data(|_, other| { - if other.name() == crate_name && // same crate-name - other.disambiguator() == disambiguator && // same crate-disambiguator - other.hash() != svh { // but different SVH + if other.name() == root.name && // same crate-name + other.disambiguator() == root.disambiguator && // same crate-disambiguator + other.hash() != root.hash { // but different SVH span_fatal!(self.sess, span, E0523, "found two different crates with name `{}` that are \ not distinguished by differing `-C metadata`. 
This \ will result in symbol conflicts between the two.", - crate_name) + root.name) } }); } @@ -293,16 +259,17 @@ impl<'a> CrateReader<'a> { ident: &str, name: &str, span: Span, - lib: loader::Library, + lib: Library, explicitly_linked: bool) - -> (ast::CrateNum, Rc, + -> (CrateNum, Rc, cstore::CrateSource) { info!("register crate `extern crate {} as {}`", name, ident); - self.verify_no_symbol_conflicts(span, &lib.metadata); + let crate_root = lib.metadata.get_root(); + self.verify_no_symbol_conflicts(span, &crate_root); // Claim this crate number and cache it let cnum = self.next_crate_num; - self.next_crate_num += 1; + self.next_crate_num = CrateNum::from_u32(cnum.as_u32() + 1); // Stash paths for top-most crate locally if necessary. let crate_paths = if root.is_none() { @@ -317,30 +284,27 @@ impl<'a> CrateReader<'a> { // Maintain a reference to the top most crate. let root = if root.is_some() { root } else { &crate_paths }; - let loader::Library { dylib, rlib, metadata } = lib; + let Library { dylib, rlib, metadata } = lib; - let cnum_map = self.resolve_crate_deps(root, metadata.as_slice(), cnum, span); - let staged_api = self.is_staged_api(metadata.as_slice()); + let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span); + + if crate_root.macro_derive_registrar.is_some() { + self.sess.span_err(span, "crates of the `proc-macro` crate type \ + cannot be linked at runtime"); + } let cmeta = Rc::new(cstore::CrateMetadata { name: name.to_string(), extern_crate: Cell::new(None), - index: decoder::load_index(metadata.as_slice()), - xref_index: decoder::load_xrefs(metadata.as_slice()), - key_map: decoder::load_key_map(metadata.as_slice()), - data: metadata, + key_map: metadata.load_key_map(crate_root.index), + root: crate_root, + blob: metadata, cnum_map: RefCell::new(cnum_map), cnum: cnum, codemap_import_info: RefCell::new(vec![]), - staged_api: staged_api, explicitly_linked: Cell::new(explicitly_linked), }); - if decoder::get_derive_registrar_fn(cmeta.data.as_slice()).is_some() { - self.sess.span_err(span, "crates of the `rustc-macro` crate type \ - cannot be linked at runtime"); - } - let source = cstore::CrateSource { dylib: dylib, rlib: rlib, @@ -352,16 +316,6 @@ impl<'a> CrateReader<'a> { (cnum, cmeta, source) } - fn is_staged_api(&self, data: &[u8]) -> bool { - let attrs = decoder::get_crate_attributes(data); - for attr in &attrs { - if attr.name() == "stable" || attr.name() == "unstable" { - return true - } - } - false - } - fn resolve_crate(&mut self, root: &Option, ident: &str, @@ -370,13 +324,13 @@ impl<'a> CrateReader<'a> { span: Span, kind: PathKind, explicitly_linked: bool) - -> (ast::CrateNum, Rc, cstore::CrateSource) { + -> (CrateNum, Rc, cstore::CrateSource) { info!("resolving crate `extern crate {} as {}`", name, ident); let result = match self.existing_match(name, hash, kind) { Some(cnum) => LoadResult::Previous(cnum), None => { info!("falling back to a load"); - let mut load_ctxt = loader::Context { + let mut locate_ctxt = locator::Context { sess: self.sess, span: span, ident: ident, @@ -386,15 +340,15 @@ impl<'a> CrateReader<'a> { target: &self.sess.target.target, triple: &self.sess.opts.target_triple, root: root, - rejected_via_hash: vec!(), - rejected_via_triple: vec!(), - rejected_via_kind: vec!(), - rejected_via_version: vec!(), + rejected_via_hash: vec![], + rejected_via_triple: vec![], + rejected_via_kind: vec![], + rejected_via_version: vec![], should_match_name: true, }; - match self.load(&mut load_ctxt) { + match self.load(&mut locate_ctxt) 
{ Some(result) => result, - None => load_ctxt.report_load_errs(), + None => locate_ctxt.report_errs(), } } }; @@ -414,8 +368,8 @@ impl<'a> CrateReader<'a> { } } - fn load(&mut self, loader: &mut loader::Context) -> Option { - let library = match loader.maybe_load_library_crate() { + fn load(&mut self, locate_ctxt: &mut locator::Context) -> Option { + let library = match locate_ctxt.maybe_load_library_crate() { Some(lib) => lib, None => return None, }; @@ -428,14 +382,12 @@ impl<'a> CrateReader<'a> { // Note that we only do this for target triple crates, though, as we // don't want to match a host crate against an equivalent target one // already loaded. - if loader.triple == self.sess.opts.target_triple { - let meta_hash = decoder::get_crate_hash(library.metadata.as_slice()); - let meta_name = decoder::get_crate_name(library.metadata.as_slice()) - .to_string(); + let root = library.metadata.get_root(); + if locate_ctxt.triple == self.sess.opts.target_triple { let mut result = LoadResult::Loaded(library); self.cstore.iter_crate_data(|cnum, data| { - if data.name() == meta_name && meta_hash == data.hash() { - assert!(loader.hash.is_none()); + if data.name() == root.name && root.hash == data.hash() { + assert!(locate_ctxt.hash.is_none()); info!("load success, going to previous cnum: {}", cnum); result = LoadResult::Previous(cnum); } @@ -447,9 +399,9 @@ impl<'a> CrateReader<'a> { } fn update_extern_crate(&mut self, - cnum: ast::CrateNum, + cnum: CrateNum, mut extern_crate: ExternCrate, - visited: &mut FnvHashSet<(ast::CrateNum, bool)>) + visited: &mut FnvHashSet<(CrateNum, bool)>) { if !visited.insert((cnum, extern_crate.direct)) { return } @@ -481,42 +433,45 @@ impl<'a> CrateReader<'a> { // Go through the crate metadata and load any crates that it references fn resolve_crate_deps(&mut self, root: &Option, - cdata: &[u8], - krate: ast::CrateNum, + crate_root: &CrateRoot, + metadata: &MetadataBlob, + krate: CrateNum, span: Span) -> cstore::CrateNumMap { debug!("resolving deps of external crate"); // The map from crate numbers in the crate we're resolving to local crate // numbers - let map: FnvHashMap<_, _> = decoder::get_crate_deps(cdata).iter().map(|dep| { + let deps = crate_root.crate_deps.decode(metadata); + let map: FnvHashMap<_, _> = deps.enumerate().map(|(crate_num, dep)| { debug!("resolving dep crate {} hash: `{}`", dep.name, dep.hash); let (local_cnum, ..) = self.resolve_crate(root, - &dep.name, - &dep.name, + &dep.name.as_str(), + &dep.name.as_str(), Some(&dep.hash), span, PathKind::Dependency, dep.explicitly_linked); - (dep.cnum, local_cnum) + (CrateNum::new(crate_num + 1), local_cnum) }).collect(); - let max_cnum = map.values().cloned().max().unwrap_or(0); + let max_cnum = map.values().cloned().max().map(|cnum| cnum.as_u32()).unwrap_or(0); // we map 0 and all other holes in the map to our parent crate. The "additional" // self-dependencies should be harmless. 
- (0..max_cnum+1).map(|cnum| map.get(&cnum).cloned().unwrap_or(krate)).collect() + (0..max_cnum+1).map(|cnum| { + map.get(&CrateNum::from_u32(cnum)).cloned().unwrap_or(krate) + }).collect() } - fn read_extension_crate(&mut self, span: Span, info: &CrateInfo) -> ExtensionCrate { + fn read_extension_crate(&mut self, span: Span, info: &ExternCrateInfo) -> ExtensionCrate { info!("read extension crate {} `extern crate {} as {}` linked={}", info.id, info.name, info.ident, info.should_link); let target_triple = &self.sess.opts.target_triple[..]; let is_cross = target_triple != config::host_triple(); - let mut should_link = info.should_link && !is_cross; let mut target_only = false; let ident = info.ident.clone(); let name = info.name.clone(); - let mut load_ctxt = loader::Context { + let mut locate_ctxt = locator::Context { sess: self.sess, span: span, ident: &ident[..], @@ -526,30 +481,29 @@ impl<'a> CrateReader<'a> { target: &self.sess.host, triple: config::host_triple(), root: &None, - rejected_via_hash: vec!(), - rejected_via_triple: vec!(), - rejected_via_kind: vec!(), - rejected_via_version: vec!(), + rejected_via_hash: vec![], + rejected_via_triple: vec![], + rejected_via_kind: vec![], + rejected_via_version: vec![], should_match_name: true, }; - let library = self.load(&mut load_ctxt).or_else(|| { + let library = self.load(&mut locate_ctxt).or_else(|| { if !is_cross { return None } // Try loading from target crates. This will abort later if we // try to load a plugin registrar function, target_only = true; - should_link = info.should_link; - load_ctxt.target = &self.sess.target.target; - load_ctxt.triple = target_triple; - load_ctxt.filesearch = self.sess.target_filesearch(PathKind::Crate); + locate_ctxt.target = &self.sess.target.target; + locate_ctxt.triple = target_triple; + locate_ctxt.filesearch = self.sess.target_filesearch(PathKind::Crate); - self.load(&mut load_ctxt) + self.load(&mut locate_ctxt) }); let library = match library { Some(l) => l, - None => load_ctxt.report_load_errs(), + None => locate_ctxt.report_errs(), }; let (dylib, metadata) = match library { @@ -569,32 +523,20 @@ impl<'a> CrateReader<'a> { metadata: metadata, dylib: dylib.map(|p| p.0), target_only: target_only, - name: info.name.to_string(), - ident: info.ident.to_string(), - span: span, - should_link: should_link, } } - pub fn read_macros(&mut self, item: &ast::Item) -> Macros { - let ci = self.extract_crate_info(item).unwrap(); - let ekrate = self.read_extension_crate(item.span, &ci); - + fn read_macros(&mut self, item: &ast::Item, ekrate: &ExtensionCrate) -> LoadedMacros { + let root = ekrate.metadata.get_root(); let source_name = format!("<{} macros>", item.ident); - let mut ret = Macros { - macro_rules: Vec::new(), - custom_derive_registrar: None, - svh: decoder::get_crate_hash(ekrate.metadata.as_slice()), - dylib: None, - }; - decoder::each_exported_macro(ekrate.metadata.as_slice(), - |name, attrs, span, body| { + let mut macro_rules = Vec::new(); + + for def in root.macro_defs.decode(&*ekrate.metadata) { // NB: Don't use parse::parse_tts_from_source_str because it parses with // quote_depth > 0. 
let mut p = parse::new_parser_from_source_str(&self.sess.parse_sess, - self.local_crate_config.clone(), source_name.clone(), - body); + def.body); let lo = p.span.lo; let body = match p.parse_all_token_trees() { Ok(body) => body, @@ -604,69 +546,104 @@ impl<'a> CrateReader<'a> { unreachable!(); } }; - let local_span = mk_sp(lo, p.last_span.hi); + let local_span = mk_sp(lo, p.prev_span.hi); // Mark the attrs as used - for attr in &attrs { + for attr in &def.attrs { attr::mark_used(attr); } - ret.macro_rules.push(ast::MacroDef { - ident: ast::Ident::with_empty_ctxt(name), - attrs: attrs, + macro_rules.push(ast::MacroDef { + ident: ast::Ident::with_empty_ctxt(def.name), id: ast::DUMMY_NODE_ID, span: local_span, imported_from: Some(item.ident), - // overridden in plugin/load.rs - export: false, - use_locally: false, - allow_internal_unstable: false, - + allow_internal_unstable: attr::contains_name(&def.attrs, "allow_internal_unstable"), + attrs: def.attrs, body: body, }); self.sess.imported_macro_spans.borrow_mut() - .insert(local_span, (name.as_str().to_string(), span)); - true - }); - - match decoder::get_derive_registrar_fn(ekrate.metadata.as_slice()) { - Some(id) => ret.custom_derive_registrar = Some(id), + .insert(local_span, (def.name.as_str().to_string(), def.span)); + } - // If this crate is not a rustc-macro crate then we might be able to - // register it with the local crate store to prevent loading the - // metadata twice. - // - // If it's a rustc-macro crate, though, then we definitely don't - // want to register it with the local crate store as we're just - // going to use it as we would a plugin. - None => { - ekrate.register(self); - return ret + if let Some(id) = root.macro_derive_registrar { + let dylib = match ekrate.dylib.clone() { + Some(dylib) => dylib, + None => span_bug!(item.span, "proc-macro crate not dylib"), + }; + if ekrate.target_only { + let message = format!("proc-macro crate is not available for \ + triple `{}` (only found {})", + config::host_triple(), + self.sess.opts.target_triple); + self.sess.span_fatal(item.span, &message); } - } - self.cstore.add_used_for_derive_macros(item); - ret.dylib = ekrate.dylib.clone(); - if ret.dylib.is_none() { - span_bug!(item.span, "rustc-macro crate not dylib"); + // custom derive crates currently should not have any macro_rules! + // exported macros, enforced elsewhere + assert_eq!(macro_rules.len(), 0); + LoadedMacros::ProcMacros(self.load_derive_macros(item, id, root.hash, dylib)) + } else { + LoadedMacros::MacroRules(macro_rules) } + } - if ekrate.target_only { - let message = format!("rustc-macro crate is not available for \ - triple `{}` (only found {})", - config::host_triple(), - self.sess.opts.target_triple); - self.sess.span_fatal(item.span, &message); + /// Load custom derive macros. + /// + /// Note that this is intentionally similar to how we load plugins today, + /// but also intentionally separate. Plugins are likely always going to be + /// implemented as dynamic libraries, but we have a possible future where + /// custom derive (and other macro-1.1 style features) are implemented via + /// executables and custom IPC. 
+ fn load_derive_macros(&mut self, item: &ast::Item, index: DefIndex, svh: Svh, path: PathBuf) + -> Vec<(ast::Name, SyntaxExtension)> { + use std::{env, mem}; + use proc_macro::TokenStream; + use proc_macro::__internal::Registry; + use rustc_back::dynamic_lib::DynamicLibrary; + use syntax_ext::deriving::custom::CustomDerive; + + // Make sure the path contains a / or the linker will search for it. + let path = env::current_dir().unwrap().join(path); + let lib = match DynamicLibrary::open(Some(&path)) { + Ok(lib) => lib, + Err(err) => self.sess.span_fatal(item.span, &err), + }; + + let sym = self.sess.generate_derive_registrar_symbol(&svh, index); + let registrar = unsafe { + let sym = match lib.symbol(&sym) { + Ok(f) => f, + Err(err) => self.sess.span_fatal(item.span, &err), + }; + mem::transmute::<*mut u8, fn(&mut Registry)>(sym) + }; + + struct MyRegistrar(Vec<(ast::Name, SyntaxExtension)>); + + impl Registry for MyRegistrar { + fn register_custom_derive(&mut self, + trait_name: &str, + expand: fn(TokenStream) -> TokenStream) { + let derive = SyntaxExtension::CustomDerive(Box::new(CustomDerive::new(expand))); + self.0.push((intern(trait_name), derive)); + } } - return ret + let mut my_registrar = MyRegistrar(Vec::new()); + registrar(&mut my_registrar); + + // Intentionally leak the dynamic library. We can't ever unload it + // since the library can make things that will live arbitrarily long. + mem::forget(lib); + my_registrar.0 } /// Look for a plugin registrar. Returns library path, crate /// SVH and DefIndex of the registrar function. pub fn find_plugin_registrar(&mut self, span: Span, name: &str) -> Option<(PathBuf, Svh, DefIndex)> { - let ekrate = self.read_extension_crate(span, &CrateInfo { + let ekrate = self.read_extension_crate(span, &ExternCrateInfo { name: name.to_string(), ident: name.to_string(), id: ast::DUMMY_NODE_ID, @@ -683,13 +660,10 @@ impl<'a> CrateReader<'a> { span_fatal!(self.sess, span, E0456, "{}", &message[..]); } - let svh = decoder::get_crate_hash(ekrate.metadata.as_slice()); - let registrar = - decoder::get_plugin_registrar_fn(ekrate.metadata.as_slice()); - - match (ekrate.dylib.as_ref(), registrar) { + let root = ekrate.metadata.get_root(); + match (ekrate.dylib.as_ref(), root.plugin_registrar_fn) { (Some(dylib), Some(reg)) => { - Some((dylib.to_path_buf(), svh, reg)) + Some((dylib.to_path_buf(), root.hash, reg)) } (None, Some(_)) => { span_err!(self.sess, span, E0457, @@ -736,7 +710,7 @@ impl<'a> CrateReader<'a> { // The logic for finding the panic runtime here is pretty much the same // as the allocator case with the only addition that the panic strategy // compilation mode also comes into play. 
- let desired_strategy = self.sess.opts.cg.panic.clone(); + let desired_strategy = self.sess.panic_strategy(); let mut runtime_found = false; let mut needs_panic_runtime = attr::contains_name(&krate.attrs, "needs_panic_runtime"); @@ -830,7 +804,7 @@ impl<'a> CrateReader<'a> { match *ct { config::CrateTypeExecutable => need_exe_alloc = true, config::CrateTypeDylib | - config::CrateTypeRustcMacro | + config::CrateTypeProcMacro | config::CrateTypeCdylib | config::CrateTypeStaticlib => need_lib_alloc = true, config::CrateTypeRlib => {} @@ -875,7 +849,7 @@ impl<'a> CrateReader<'a> { } fn inject_dependency_if(&self, - krate: ast::CrateNum, + krate: CrateNum, what: &str, needs_dep: &Fn(&cstore::CrateMetadata) -> bool) { // don't perform this validation if the session has errors, as one of @@ -915,121 +889,15 @@ impl<'a> CrateReader<'a> { } } -impl ExtensionCrate { - fn register(self, creader: &mut CrateReader) { - if !self.should_link { - return - } - - let library = match self.metadata { - PMDSource::Owned(lib) => lib, - PMDSource::Registered(_) => return, - }; - - // Register crate now to avoid double-reading metadata - creader.register_crate(&None, - &self.ident, - &self.name, - self.span, - library, - true); - } -} - -impl<'a> LocalCrateReader<'a> { - fn new(sess: &'a Session, - cstore: &'a CStore, - defs: &'a hir_map::Definitions, - krate: &'a ast::Crate, - local_crate_name: &str) - -> LocalCrateReader<'a> { - LocalCrateReader { - sess: sess, - cstore: cstore, - creader: CrateReader::new(sess, cstore, local_crate_name, krate.config.clone()), - krate: krate, - definitions: defs, - } - } - - // Traverses an AST, reading all the information about use'd crates and - // extern libraries necessary for later resolving, typechecking, linking, - // etc. - fn read_crates(&mut self, dep_graph: &DepGraph) { - let _task = dep_graph.in_task(DepNode::CrateReader); - - self.process_crate(self.krate); - visit::walk_crate(self, self.krate); - self.creader.inject_allocator_crate(); - self.creader.inject_panic_runtime(self.krate); - - if log_enabled!(log::INFO) { - dump_crates(&self.cstore); - } - - for &(ref name, kind) in &self.sess.opts.libs { - register_native_lib(self.sess, self.cstore, None, name.clone(), kind); - } - self.creader.register_statically_included_foreign_items(); - } - - fn process_crate(&self, c: &ast::Crate) { - for a in c.attrs.iter().filter(|m| m.name() == "link_args") { - if let Some(ref linkarg) = a.value_str() { +impl<'a> CrateLoader<'a> { + pub fn preprocess(&mut self, krate: &ast::Crate) { + for attr in krate.attrs.iter().filter(|m| m.name() == "link_args") { + if let Some(ref linkarg) = attr.value_str() { self.cstore.add_used_link_args(&linkarg); } } } - fn process_item(&mut self, i: &ast::Item) { - match i.node { - ast::ItemKind::ExternCrate(_) => { - // If this `extern crate` item has `#[macro_use]` then we can - // safely skip it. These annotations were processed during macro - // expansion and are already loaded (if necessary) into our - // crate store. - // - // Note that it's important we *don't* fall through below as - // some `#[macro_use]` crate are explicitly not linked (e.g. - // macro crates) so we want to ensure we avoid `resolve_crate` - // with those. - if attr::contains_name(&i.attrs, "macro_use") { - if self.cstore.was_used_for_derive_macros(i) { - return - } - } - - if let Some(info) = self.creader.extract_crate_info(i) { - if !info.should_link { - return; - } - let (cnum, ..) 
= self.creader.resolve_crate(&None, - &info.ident, - &info.name, - None, - i.span, - PathKind::Crate, - true); - - let def_id = self.definitions.opt_local_def_id(i.id).unwrap(); - let len = self.definitions.def_path(def_id.index).data.len(); - - self.creader.update_extern_crate(cnum, - ExternCrate { - def_id: def_id, - span: i.span, - direct: true, - path_len: len, - }, - &mut FnvHashSet()); - self.cstore.add_extern_mod_stmt_cnum(info.id, cnum); - } - } - ast::ItemKind::ForeignMod(ref fm) => self.process_foreign_mod(i, fm), - _ => { } - } - } - fn process_foreign_mod(&mut self, i: &ast::Item, fm: &ast::ForeignMod) { if fm.abi == Abi::Rust || fm.abi == Abi::RustIntrinsic || fm.abi == Abi::PlatformIntrinsic { return; @@ -1084,150 +952,78 @@ impl<'a> LocalCrateReader<'a> { Some(name) => name, None => continue, }; - let list = self.creader.foreign_item_map.entry(lib_name.to_string()) + let list = self.foreign_item_map.entry(lib_name.to_string()) .or_insert(Vec::new()); list.extend(fm.items.iter().map(|it| it.id)); } } } -/// Traverses an AST, reading all the information about use'd crates and extern -/// libraries necessary for later resolving, typechecking, linking, etc. -pub fn read_local_crates(sess: & Session, - cstore: & CStore, - defs: & hir_map::Definitions, - krate: & ast::Crate, - local_crate_name: &str, - dep_graph: &DepGraph) { - LocalCrateReader::new(sess, cstore, defs, krate, local_crate_name).read_crates(dep_graph) -} +impl<'a> middle::cstore::CrateLoader for CrateLoader<'a> { + fn postprocess(&mut self, krate: &ast::Crate) { + self.inject_allocator_crate(); + self.inject_panic_runtime(krate); -/// Imports the codemap from an external crate into the codemap of the crate -/// currently being compiled (the "local crate"). -/// -/// The import algorithm works analogous to how AST items are inlined from an -/// external crate's metadata: -/// For every FileMap in the external codemap an 'inline' copy is created in the -/// local codemap. The correspondence relation between external and local -/// FileMaps is recorded in the `ImportedFileMap` objects returned from this -/// function. When an item from an external crate is later inlined into this -/// crate, this correspondence information is used to translate the span -/// information of the inlined item so that it refers the correct positions in -/// the local codemap (see `astencode::DecodeContext::tr_span()`). -/// -/// The import algorithm in the function below will reuse FileMaps already -/// existing in the local codemap. For example, even if the FileMap of some -/// source file of libstd gets imported many times, there will only ever be -/// one FileMap object for the corresponding file in the local codemap. -/// -/// Note that imported FileMaps do not actually contain the source code of the -/// file they represent, just information about length, line breaks, and -/// multibyte characters. This information is enough to generate valid debuginfo -/// for items inlined from other crates. -pub fn import_codemap(local_codemap: &codemap::CodeMap, - metadata: &MetadataBlob) - -> Vec { - let external_codemap = decoder::get_imported_filemaps(metadata.as_slice()); - - let imported_filemaps = external_codemap.into_iter().map(|filemap_to_import| { - // Try to find an existing FileMap that can be reused for the filemap to - // be imported. A FileMap is reusable if it is exactly the same, just - // positioned at a different offset within the codemap. 
- let reusable_filemap = { - local_codemap.files - .borrow() - .iter() - .find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import)) - .map(|rc| rc.clone()) - }; + if log_enabled!(log::INFO) { + dump_crates(&self.cstore); + } - match reusable_filemap { - Some(fm) => { - cstore::ImportedFileMap { - original_start_pos: filemap_to_import.start_pos, - original_end_pos: filemap_to_import.end_pos, - translated_filemap: fm - } - } - None => { - // We can't reuse an existing FileMap, so allocate a new one - // containing the information we need. - let syntax_pos::FileMap { - name, - abs_path, - start_pos, - end_pos, - lines, - multibyte_chars, - .. - } = filemap_to_import; - - let source_length = (end_pos - start_pos).to_usize(); - - // Translate line-start positions and multibyte character - // position into frame of reference local to file. - // `CodeMap::new_imported_filemap()` will then translate those - // coordinates to their new global frame of reference when the - // offset of the FileMap is known. - let mut lines = lines.into_inner(); - for pos in &mut lines { - *pos = *pos - start_pos; - } - let mut multibyte_chars = multibyte_chars.into_inner(); - for mbc in &mut multibyte_chars { - mbc.pos = mbc.pos - start_pos; - } + for &(ref name, kind) in &self.sess.opts.libs { + register_native_lib(self.sess, self.cstore, None, name.clone(), kind); + } + self.register_statically_included_foreign_items(); + } - let local_version = local_codemap.new_imported_filemap(name, - abs_path, - source_length, - lines, - multibyte_chars); - cstore::ImportedFileMap { - original_start_pos: start_pos, - original_end_pos: end_pos, - translated_filemap: local_version - } + fn process_item(&mut self, item: &ast::Item, definitions: &Definitions, load_macros: bool) + -> Option { + match item.node { + ast::ItemKind::ExternCrate(_) => {} + ast::ItemKind::ForeignMod(ref fm) => { + self.process_foreign_mod(item, fm); + return None; } + _ => return None, } - }).collect(); - - return imported_filemaps; - fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap, - fm2: &syntax_pos::FileMap) - -> bool { - if fm1.name != fm2.name { - return false; - } + let info = self.extract_crate_info(item).unwrap(); + let loaded_macros = if load_macros { + let ekrate = self.read_extension_crate(item.span, &info); + let loaded_macros = self.read_macros(item, &ekrate); - let lines1 = fm1.lines.borrow(); - let lines2 = fm2.lines.borrow(); + // If this is a proc-macro crate or `#[no_link]` crate, it is only used at compile time, + // so we return here to avoid registering the crate. + if loaded_macros.is_proc_macros() || !info.should_link { + return Some(loaded_macros); + } - if lines1.len() != lines2.len() { - return false; - } + // Register crate now to avoid double-reading metadata + if let PMDSource::Owned(lib) = ekrate.metadata { + if ekrate.target_only || config::host_triple() == self.sess.opts.target_triple { + let ExternCrateInfo { ref ident, ref name, .. } = info; + self.register_crate(&None, ident, name, item.span, lib, true); + } + } - for (&line1, &line2) in lines1.iter().zip(lines2.iter()) { - if (line1 - fm1.start_pos) != (line2 - fm2.start_pos) { - return false; + Some(loaded_macros) + } else { + if !info.should_link { + return None; } - } + None + }; - let multibytes1 = fm1.multibyte_chars.borrow(); - let multibytes2 = fm2.multibyte_chars.borrow(); + let (cnum, ..) 
= self.resolve_crate( + &None, &info.ident, &info.name, None, item.span, PathKind::Crate, true, + ); - if multibytes1.len() != multibytes2.len() { - return false; - } + let def_id = definitions.opt_local_def_id(item.id).unwrap(); + let len = definitions.def_path(def_id.index).data.len(); - for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) { - if (mb1.bytes != mb2.bytes) || - ((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) { - return false; - } - } + let extern_crate = + ExternCrate { def_id: def_id, span: item.span, direct: true, path_len: len }; + self.update_extern_crate(cnum, extern_crate, &mut FnvHashSet()); + self.cstore.add_extern_mod_stmt_cnum(info.id, cnum); - true + loaded_macros } } diff --git a/src/librustc_metadata/csearch.rs b/src/librustc_metadata/csearch.rs deleted file mode 100644 index 21cf3240321bf..0000000000000 --- a/src/librustc_metadata/csearch.rs +++ /dev/null @@ -1,784 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use cstore; -use common; -use decoder; -use encoder; -use loader; - -use middle::cstore::{InlinedItem, CrateStore, CrateSource, ChildItem, ExternCrate, DefLike}; -use middle::cstore::{NativeLibraryKind, LinkMeta, LinkagePreference}; -use rustc::hir::def; -use middle::lang_items; -use rustc::ty::{self, Ty, TyCtxt, VariantKind}; -use rustc::hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; - -use rustc::dep_graph::DepNode; -use rustc::hir::map as hir_map; -use rustc::hir::map::DefKey; -use rustc::mir::repr::Mir; -use rustc::mir::mir_map::MirMap; -use rustc::util::nodemap::{FnvHashMap, NodeSet, DefIdMap}; -use rustc::session::config::PanicStrategy; - -use std::cell::RefCell; -use std::rc::Rc; -use std::path::PathBuf; -use syntax::ast; -use syntax::attr; -use syntax::parse::token; -use rustc::hir::svh::Svh; -use rustc_back::target::Target; -use rustc::hir; - -impl<'tcx> CrateStore<'tcx> for cstore::CStore { - fn stability(&self, def: DefId) -> Option { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_stability(&cdata, def.index) - } - - fn deprecation(&self, def: DefId) -> Option { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_deprecation(&cdata, def.index) - } - - fn visibility(&self, def: DefId) -> ty::Visibility { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_visibility(&cdata, def.index) - } - - fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind - { - assert!(!def_id.is_local()); - self.dep_graph.read(DepNode::MetaData(def_id)); - let cdata = self.get_crate_data(def_id.krate); - decoder::closure_kind(&cdata, def_id.index) - } - - fn closure_ty<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> ty::ClosureTy<'tcx> { - assert!(!def_id.is_local()); - self.dep_graph.read(DepNode::MetaData(def_id)); - let cdata = self.get_crate_data(def_id.krate); - decoder::closure_ty(&cdata, def_id.index, tcx) - } - - fn item_variances(&self, def: DefId) -> Vec { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_item_variances(&cdata, def.index) - } - - fn repr_attrs(&self, def: DefId) -> Vec 
{ - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_repr_attrs(&cdata, def.index) - } - - fn item_type<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Ty<'tcx> - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_type(&cdata, def.index, tcx) - } - - fn item_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> ty::GenericPredicates<'tcx> - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_predicates(&cdata, def.index, tcx) - } - - fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> ty::GenericPredicates<'tcx> - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_super_predicates(&cdata, def.index, tcx) - } - - fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> &'tcx ty::Generics<'tcx> - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_generics(&cdata, def.index, tcx) - } - - fn item_attrs(&self, def_id: DefId) -> Vec - { - self.dep_graph.read(DepNode::MetaData(def_id)); - let cdata = self.get_crate_data(def_id.krate); - decoder::get_item_attrs(&cdata, def_id.index) - } - - fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::TraitDef<'tcx> - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_trait_def(&cdata, def.index, tcx) - } - - fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx> - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_adt_def(&cdata, def.index, tcx) - } - - fn method_arg_names(&self, did: DefId) -> Vec - { - self.dep_graph.read(DepNode::MetaData(did)); - let cdata = self.get_crate_data(did.krate); - decoder::get_method_arg_names(&cdata, did.index) - } - - fn item_name(&self, def: DefId) -> ast::Name { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_item_name(&cdata, def.index) - } - - fn opt_item_name(&self, def: DefId) -> Option { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::maybe_get_item_name(&cdata, def.index) - } - - fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec - { - self.dep_graph.read(DepNode::MetaData(def_id)); - let mut result = vec![]; - let cdata = self.get_crate_data(def_id.krate); - decoder::each_inherent_implementation_for_type(&cdata, def_id.index, - |iid| result.push(iid)); - result - } - - fn implementations_of_trait(&self, def_id: DefId) -> Vec - { - self.dep_graph.read(DepNode::MetaData(def_id)); - let mut result = vec![]; - self.iter_crate_data(|_, cdata| { - decoder::each_implementation_for_trait(cdata, def_id, &mut |iid| { - result.push(iid) - }) - }); - result - } - - fn provided_trait_methods<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Vec>> - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_provided_trait_methods(&cdata, def.index, tcx) - } - - fn trait_item_def_ids(&self, def: DefId) - -> Vec - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_trait_item_def_ids(&cdata, def.index) - } - - fn impl_items(&self, impl_def_id: DefId) -> Vec - { - 
self.dep_graph.read(DepNode::MetaData(impl_def_id)); - let cdata = self.get_crate_data(impl_def_id.krate); - decoder::get_impl_items(&cdata, impl_def_id.index) - } - - fn impl_polarity(&self, def: DefId) -> Option - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_impl_polarity(&cdata, def.index) - } - - fn impl_trait_ref<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Option> - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_impl_trait(&cdata, def.index, tcx) - } - - fn custom_coerce_unsized_kind(&self, def: DefId) - -> Option - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_custom_coerce_unsized_kind(&cdata, def.index) - } - - // FIXME: killme - fn associated_consts<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Vec>> { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_associated_consts(&cdata, def.index, tcx) - } - - fn impl_parent(&self, impl_def: DefId) -> Option { - self.dep_graph.read(DepNode::MetaData(impl_def)); - let cdata = self.get_crate_data(impl_def.krate); - decoder::get_parent_impl(&*cdata, impl_def.index) - } - - fn trait_of_item(&self, def_id: DefId) -> Option { - self.dep_graph.read(DepNode::MetaData(def_id)); - let cdata = self.get_crate_data(def_id.krate); - decoder::get_trait_of_item(&cdata, def_id.index) - } - - fn impl_or_trait_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Option> - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_impl_or_trait_item(&cdata, def.index, tcx) - } - - fn is_const_fn(&self, did: DefId) -> bool - { - self.dep_graph.read(DepNode::MetaData(did)); - let cdata = self.get_crate_data(did.krate); - decoder::is_const_fn(&cdata, did.index) - } - - fn is_defaulted_trait(&self, trait_def_id: DefId) -> bool - { - self.dep_graph.read(DepNode::MetaData(trait_def_id)); - let cdata = self.get_crate_data(trait_def_id.krate); - decoder::is_defaulted_trait(&cdata, trait_def_id.index) - } - - fn is_impl(&self, did: DefId) -> bool - { - self.dep_graph.read(DepNode::MetaData(did)); - let cdata = self.get_crate_data(did.krate); - decoder::is_impl(&cdata, did.index) - } - - fn is_default_impl(&self, impl_did: DefId) -> bool { - self.dep_graph.read(DepNode::MetaData(impl_did)); - let cdata = self.get_crate_data(impl_did.krate); - decoder::is_default_impl(&cdata, impl_did.index) - } - - fn is_extern_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId) -> bool { - self.dep_graph.read(DepNode::MetaData(did)); - let cdata = self.get_crate_data(did.krate); - decoder::is_extern_item(&cdata, did.index, tcx) - } - - fn is_foreign_item(&self, did: DefId) -> bool { - let cdata = self.get_crate_data(did.krate); - decoder::is_foreign_item(&cdata, did.index) - } - - fn is_statically_included_foreign_item(&self, id: ast::NodeId) -> bool - { - self.do_is_statically_included_foreign_item(id) - } - - fn is_typedef(&self, did: DefId) -> bool { - self.dep_graph.read(DepNode::MetaData(did)); - let cdata = self.get_crate_data(did.krate); - decoder::is_typedef(&cdata, did.index) - } - - fn dylib_dependency_formats(&self, cnum: ast::CrateNum) - -> Vec<(ast::CrateNum, LinkagePreference)> - { - let cdata = self.get_crate_data(cnum); - decoder::get_dylib_dependency_formats(&cdata) - } - - fn lang_items(&self, cnum: ast::CrateNum) -> Vec<(DefIndex, 
usize)> - { - let mut result = vec![]; - let crate_data = self.get_crate_data(cnum); - decoder::each_lang_item(&crate_data, |did, lid| { - result.push((did, lid)); true - }); - result - } - - fn missing_lang_items(&self, cnum: ast::CrateNum) - -> Vec - { - let cdata = self.get_crate_data(cnum); - decoder::get_missing_lang_items(&cdata) - } - - fn is_staged_api(&self, cnum: ast::CrateNum) -> bool - { - self.get_crate_data(cnum).staged_api - } - - fn is_explicitly_linked(&self, cnum: ast::CrateNum) -> bool - { - self.get_crate_data(cnum).explicitly_linked.get() - } - - fn is_allocator(&self, cnum: ast::CrateNum) -> bool - { - self.get_crate_data(cnum).is_allocator() - } - - fn is_panic_runtime(&self, cnum: ast::CrateNum) -> bool - { - self.get_crate_data(cnum).is_panic_runtime() - } - - fn is_compiler_builtins(&self, cnum: ast::CrateNum) -> bool { - self.get_crate_data(cnum).is_compiler_builtins() - } - - fn panic_strategy(&self, cnum: ast::CrateNum) -> PanicStrategy { - self.get_crate_data(cnum).panic_strategy() - } - - fn crate_attrs(&self, cnum: ast::CrateNum) -> Vec - { - decoder::get_crate_attributes(self.get_crate_data(cnum).data()) - } - - fn crate_name(&self, cnum: ast::CrateNum) -> token::InternedString - { - token::intern_and_get_ident(&self.get_crate_data(cnum).name[..]) - } - - fn original_crate_name(&self, cnum: ast::CrateNum) -> token::InternedString - { - token::intern_and_get_ident(&self.get_crate_data(cnum).name()) - } - - fn extern_crate(&self, cnum: ast::CrateNum) -> Option - { - self.get_crate_data(cnum).extern_crate.get() - } - - fn crate_hash(&self, cnum: ast::CrateNum) -> Svh - { - let cdata = self.get_crate_data(cnum); - decoder::get_crate_hash(cdata.data()) - } - - fn crate_disambiguator(&self, cnum: ast::CrateNum) -> token::InternedString - { - let cdata = self.get_crate_data(cnum); - token::intern_and_get_ident(decoder::get_crate_disambiguator(cdata.data())) - } - - fn crate_struct_field_attrs(&self, cnum: ast::CrateNum) - -> FnvHashMap> - { - decoder::get_struct_field_attrs(&self.get_crate_data(cnum)) - } - - fn plugin_registrar_fn(&self, cnum: ast::CrateNum) -> Option - { - let cdata = self.get_crate_data(cnum); - decoder::get_plugin_registrar_fn(cdata.data()).map(|index| DefId { - krate: cnum, - index: index - }) - } - - fn native_libraries(&self, cnum: ast::CrateNum) -> Vec<(NativeLibraryKind, String)> - { - let cdata = self.get_crate_data(cnum); - decoder::get_native_libraries(&cdata) - } - - fn reachable_ids(&self, cnum: ast::CrateNum) -> Vec - { - let cdata = self.get_crate_data(cnum); - decoder::get_reachable_ids(&cdata) - } - - fn is_no_builtins(&self, cnum: ast::CrateNum) -> bool { - attr::contains_name(&self.crate_attrs(cnum), "no_builtins") - } - - fn def_index_for_def_key(&self, - cnum: ast::CrateNum, - def: DefKey) - -> Option { - let cdata = self.get_crate_data(cnum); - cdata.key_map.get(&def).cloned() - } - - /// Returns the `DefKey` for a given `DefId`. This indicates the - /// parent `DefId` as well as some idea of what kind of data the - /// `DefId` refers to. - fn def_key(&self, def: DefId) -> hir_map::DefKey { - // Note: loading the def-key (or def-path) for a def-id is not - // a *read* of its metadata. This is because the def-id is - // really just an interned shorthand for a def-path, which is the - // canonical name for an item. 
- // - // self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::def_key(&cdata, def.index) - } - - fn relative_def_path(&self, def: DefId) -> Option { - // See `Note` above in `def_key()` for why this read is - // commented out: - // - // self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::def_path(&cdata, def.index) - } - - fn variant_kind(&self, def_id: DefId) -> Option { - self.dep_graph.read(DepNode::MetaData(def_id)); - let cdata = self.get_crate_data(def_id.krate); - decoder::get_variant_kind(&cdata, def_id.index) - } - - fn struct_ctor_def_id(&self, struct_def_id: DefId) -> Option - { - self.dep_graph.read(DepNode::MetaData(struct_def_id)); - let cdata = self.get_crate_data(struct_def_id.krate); - decoder::get_struct_ctor_def_id(&cdata, struct_def_id.index) - } - - fn tuple_struct_definition_if_ctor(&self, did: DefId) -> Option - { - self.dep_graph.read(DepNode::MetaData(did)); - let cdata = self.get_crate_data(did.krate); - decoder::get_tuple_struct_definition_if_ctor(&cdata, did.index) - } - - fn struct_field_names(&self, def: DefId) -> Vec - { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::get_struct_field_names(&cdata, def.index) - } - - fn item_children(&self, def_id: DefId) -> Vec - { - self.dep_graph.read(DepNode::MetaData(def_id)); - let mut result = vec![]; - let crate_data = self.get_crate_data(def_id.krate); - let get_crate_data = |cnum| self.get_crate_data(cnum); - decoder::each_child_of_item(&crate_data, def_id.index, get_crate_data, |def, name, vis| { - result.push(ChildItem { def: def, name: name, vis: vis }); - }); - result - } - - fn crate_top_level_items(&self, cnum: ast::CrateNum) -> Vec - { - let mut result = vec![]; - let crate_data = self.get_crate_data(cnum); - let get_crate_data = |cnum| self.get_crate_data(cnum); - decoder::each_top_level_item_of_crate(&crate_data, get_crate_data, |def, name, vis| { - result.push(ChildItem { def: def, name: name, vis: vis }); - }); - result - } - - fn maybe_get_item_ast<'a>(&'tcx self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - def_id: DefId) - -> Option<(&'tcx InlinedItem, ast::NodeId)> - { - self.dep_graph.read(DepNode::MetaData(def_id)); - - match self.inlined_item_cache.borrow().get(&def_id) { - Some(&None) => { - return None; // Not inlinable - } - Some(&Some(ref cached_inlined_item)) => { - // Already inline - debug!("maybe_get_item_ast({}): already inline as node id {}", - tcx.item_path_str(def_id), cached_inlined_item.item_id); - return Some((tcx.map.expect_inlined_item(cached_inlined_item.inlined_root), - cached_inlined_item.item_id)); - } - None => { - // Not seen yet - } - } - - debug!("maybe_get_item_ast({}): inlining item", tcx.item_path_str(def_id)); - - let cdata = self.get_crate_data(def_id.krate); - let inlined = decoder::maybe_get_item_ast(&cdata, tcx, def_id.index); - - let cache_inlined_item = |original_def_id, inlined_item_id, inlined_root_node_id| { - let cache_entry = cstore::CachedInlinedItem { - inlined_root: inlined_root_node_id, - item_id: inlined_item_id, - }; - self.inlined_item_cache - .borrow_mut() - .insert(original_def_id, Some(cache_entry)); - self.defid_for_inlined_node - .borrow_mut() - .insert(inlined_item_id, original_def_id); - }; - - let find_inlined_item_root = |inlined_item_id| { - let mut node = inlined_item_id; - let mut path = Vec::with_capacity(10); - - // If we can't find the inline root after a thousand hops, we can - // be pretty sure 
there's something wrong with the HIR map. - for _ in 0 .. 1000 { - path.push(node); - let parent_node = tcx.map.get_parent_node(node); - if parent_node == node { - return node; - } - node = parent_node; - } - bug!("cycle in HIR map parent chain") - }; - - match inlined { - decoder::FoundAst::NotFound => { - self.inlined_item_cache - .borrow_mut() - .insert(def_id, None); - } - decoder::FoundAst::Found(&InlinedItem::Item(d, ref item)) => { - assert_eq!(d, def_id); - let inlined_root_node_id = find_inlined_item_root(item.id); - cache_inlined_item(def_id, item.id, inlined_root_node_id); - } - decoder::FoundAst::FoundParent(parent_did, item) => { - let inlined_root_node_id = find_inlined_item_root(item.id); - cache_inlined_item(parent_did, item.id, inlined_root_node_id); - - match item.node { - hir::ItemEnum(ref ast_def, _) => { - let ast_vs = &ast_def.variants; - let ty_vs = &tcx.lookup_adt_def(parent_did).variants; - assert_eq!(ast_vs.len(), ty_vs.len()); - for (ast_v, ty_v) in ast_vs.iter().zip(ty_vs.iter()) { - cache_inlined_item(ty_v.did, - ast_v.node.data.id(), - inlined_root_node_id); - } - } - hir::ItemStruct(ref struct_def, _) => { - if struct_def.is_struct() { - bug!("instantiate_inline: called on a non-tuple struct") - } else { - cache_inlined_item(def_id, - struct_def.id(), - inlined_root_node_id); - } - } - _ => bug!("instantiate_inline: item has a \ - non-enum, non-struct parent") - } - } - decoder::FoundAst::Found(&InlinedItem::TraitItem(_, ref trait_item)) => { - let inlined_root_node_id = find_inlined_item_root(trait_item.id); - cache_inlined_item(def_id, trait_item.id, inlined_root_node_id); - - // Associated consts already have to be evaluated in `typeck`, so - // the logic to do that already exists in `middle`. In order to - // reuse that code, it needs to be able to look up the traits for - // inlined items. 
- let ty_trait_item = tcx.impl_or_trait_item(def_id).clone(); - let trait_item_def_id = tcx.map.local_def_id(trait_item.id); - tcx.impl_or_trait_items.borrow_mut() - .insert(trait_item_def_id, ty_trait_item); - } - decoder::FoundAst::Found(&InlinedItem::ImplItem(_, ref impl_item)) => { - let inlined_root_node_id = find_inlined_item_root(impl_item.id); - cache_inlined_item(def_id, impl_item.id, inlined_root_node_id); - } - } - - // We can be sure to hit the cache now - return self.maybe_get_item_ast(tcx, def_id); - } - - fn local_node_for_inlined_defid(&'tcx self, def_id: DefId) -> Option { - assert!(!def_id.is_local()); - match self.inlined_item_cache.borrow().get(&def_id) { - Some(&Some(ref cached_inlined_item)) => { - Some(cached_inlined_item.item_id) - } - Some(&None) => { - None - } - _ => { - bug!("Trying to lookup inlined NodeId for unexpected item"); - } - } - } - - fn defid_for_inlined_node(&'tcx self, node_id: ast::NodeId) -> Option { - self.defid_for_inlined_node.borrow().get(&node_id).map(|x| *x) - } - - fn maybe_get_item_mir<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) - -> Option> { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::maybe_get_item_mir(&cdata, tcx, def.index) - } - - fn is_item_mir_available(&self, def: DefId) -> bool { - self.dep_graph.read(DepNode::MetaData(def)); - let cdata = self.get_crate_data(def.krate); - decoder::is_item_mir_available(&cdata, def.index) - } - - fn crates(&self) -> Vec - { - let mut result = vec![]; - self.iter_crate_data(|cnum, _| result.push(cnum)); - result - } - - fn used_libraries(&self) -> Vec<(String, NativeLibraryKind)> - { - self.get_used_libraries().borrow().clone() - } - - fn used_link_args(&self) -> Vec - { - self.get_used_link_args().borrow().clone() - } - - fn metadata_filename(&self) -> &str - { - loader::METADATA_FILENAME - } - - fn metadata_section_name(&self, target: &Target) -> &str - { - loader::meta_section_name(target) - } - fn encode_type<'a>(&self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - ty: Ty<'tcx>, - def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String) - -> Vec - { - encoder::encoded_ty(tcx, ty, def_id_to_string) - } - - fn used_crates(&self, prefer: LinkagePreference) -> Vec<(ast::CrateNum, Option)> - { - self.do_get_used_crates(prefer) - } - - fn used_crate_source(&self, cnum: ast::CrateNum) -> CrateSource - { - self.opt_used_crate_source(cnum).unwrap() - } - - fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option - { - self.do_extern_mod_stmt_cnum(emod_id) - } - - fn encode_metadata<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, - reexports: &def::ExportMap, - link_meta: &LinkMeta, - reachable: &NodeSet, - mir_map: &MirMap<'tcx>, - krate: &hir::Crate) -> Vec - { - let ecx = encoder::EncodeContext { - diag: tcx.sess.diagnostic(), - tcx: tcx, - reexports: reexports, - link_meta: link_meta, - cstore: self, - reachable: reachable, - mir_map: mir_map, - type_abbrevs: RefCell::new(FnvHashMap()), - }; - encoder::encode_metadata(ecx, krate) - - } - - fn metadata_encoding_version(&self) -> &[u8] - { - common::metadata_encoding_version - } - - /// Returns a map from a sufficiently visible external item (i.e. an external item that is - /// visible from at least one local module) to a sufficiently visible parent (considering - /// modules that re-export the external item to be parents). 
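// [Editor's sketch] The doc comment above describes a breadth-first walk that
// maps every publicly reachable external item to one visible parent. A
// minimal, self-contained illustration of that construction follows; `u32`
// stands in for `DefId` and a closure stands in for `item_children`, so none
// of these names are part of the rustc API. The real code additionally
// prefers a parent from the child's own crate when one is found later.
use std::collections::{HashMap, VecDeque};

fn visible_parents<F>(root: u32, children_of: F) -> HashMap<u32, u32>
    where F: Fn(u32) -> Vec<u32>
{
    let mut parent_of = HashMap::new();
    let mut queue = VecDeque::new();
    queue.push_back(root);
    while let Some(item) = queue.pop_front() {
        for child in children_of(item) {
            // First discovery wins, i.e. the shortest chain of re-exports.
            parent_of.entry(child).or_insert_with(|| {
                queue.push_back(child);
                item
            });
        }
    }
    parent_of
}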
- fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap> { - let mut visible_parent_map = self.visible_parent_map.borrow_mut(); - if !visible_parent_map.is_empty() { return visible_parent_map; } - - use rustc::middle::cstore::ChildItem; - use std::collections::vec_deque::VecDeque; - use std::collections::hash_map::Entry; - for cnum in 1 .. self.next_crate_num() { - let cdata = self.get_crate_data(cnum); - - match cdata.extern_crate.get() { - // Ignore crates without a corresponding local `extern crate` item. - Some(extern_crate) if !extern_crate.direct => continue, - _ => {}, - } - - let mut bfs_queue = &mut VecDeque::new(); - let mut add_child = |bfs_queue: &mut VecDeque<_>, child: ChildItem, parent: DefId| { - let child = match child.def { - DefLike::DlDef(def) if child.vis == ty::Visibility::Public => def.def_id(), - _ => return, - }; - - match visible_parent_map.entry(child) { - Entry::Occupied(mut entry) => { - // If `child` is defined in crate `cnum`, ensure - // that it is mapped to a parent in `cnum`. - if child.krate == cnum && entry.get().krate != cnum { - entry.insert(parent); - } - } - Entry::Vacant(entry) => { - entry.insert(parent); - bfs_queue.push_back(child); - } - } - }; - - let croot = DefId { krate: cnum, index: CRATE_DEF_INDEX }; - for child in self.crate_top_level_items(cnum) { - add_child(bfs_queue, child, croot); - } - while let Some(def) = bfs_queue.pop_front() { - for child in self.item_children(def) { - add_child(bfs_queue, child, def); - } - } - } - - visible_parent_map - } -} - diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index bc3d92c11a1ea..58c70f959b7cc 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -8,61 +8,52 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![allow(non_camel_case_types)] - // The crate store - a central repo for information collected about external // crates and libraries -pub use self::MetadataBlob::*; - -use common; -use creader; -use decoder; -use index; -use loader; +use locator; +use schema; use rustc::dep_graph::DepGraph; -use rustc::hir::def_id::{DefIndex, DefId}; +use rustc::hir::def_id::{CRATE_DEF_INDEX, CrateNum, DefIndex, DefId}; use rustc::hir::map::DefKey; use rustc::hir::svh::Svh; use rustc::middle::cstore::ExternCrate; -use rustc::session::config::PanicStrategy; +use rustc_back::PanicStrategy; use rustc_data_structures::indexed_vec::IndexVec; -use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap, FnvHashSet}; +use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap}; -use std::cell::{RefCell, Ref, Cell}; +use std::cell::{RefCell, Cell}; use std::rc::Rc; use std::path::PathBuf; use flate::Bytes; -use syntax::ast::{self, Ident}; -use syntax::attr; -use syntax::codemap; +use syntax::{ast, attr}; use syntax_pos; -pub use middle::cstore::{NativeLibraryKind, LinkagePreference}; -pub use middle::cstore::{NativeStatic, NativeFramework, NativeUnknown}; -pub use middle::cstore::{CrateSource, LinkMeta}; +pub use rustc::middle::cstore::{NativeLibraryKind, LinkagePreference}; +pub use rustc::middle::cstore::{NativeStatic, NativeFramework, NativeUnknown}; +pub use rustc::middle::cstore::{CrateSource, LinkMeta}; // A map from external crate numbers (as decoded from some crate file) to // local crate numbers (as generated during this session). Each external // crate may refer to types in other external crates, and each has their // own crate numbers. 
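// [Editor's sketch] The comment above is the key to `CrateNumMap`: crate
// numbers stored inside an external crate's metadata use *that* crate's
// numbering and must be translated into the numbering of the current session.
// A simplified illustration, with `u32` standing in for `CrateNum` and
// invented names throughout:
struct CrateNumMapSketch(Vec<u32>);

impl CrateNumMapSketch {
    /// `foreign` is a crate number as the external metadata wrote it;
    /// the result is the number that crate has in this session.
    fn translate(&self, foreign: u32) -> u32 {
        self.0[foreign as usize]
    }
}

fn crate_num_map_demo() {
    // In this toy map, the crate that the external metadata calls #2 is
    // crate #7 in the current session.
    let map = CrateNumMapSketch(vec![0, 1, 7, 5]);
    assert_eq!(map.translate(2), 7);
}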
-pub type CrateNumMap = IndexVec; +pub type CrateNumMap = IndexVec; pub enum MetadataBlob { - MetadataVec(Bytes), - MetadataArchive(loader::ArchiveMetadata), + Inflated(Bytes), + Archive(locator::ArchiveMetadata), } /// Holds information about a syntax_pos::FileMap imported from another crate. -/// See creader::import_codemap() for more information. +/// See `imported_filemaps()` for more information. pub struct ImportedFileMap { /// This FileMap's byte-offset within the codemap of its original crate pub original_start_pos: syntax_pos::BytePos, /// The end of this FileMap within the codemap of its original crate pub original_end_pos: syntax_pos::BytePos, /// The imported FileMap's representation within the local codemap - pub translated_filemap: Rc + pub translated_filemap: Rc, } pub struct CrateMetadata { @@ -73,14 +64,12 @@ pub struct CrateMetadata { /// (e.g., by the allocator) pub extern_crate: Cell>, - pub data: MetadataBlob, + pub blob: MetadataBlob, pub cnum_map: RefCell, - pub cnum: ast::CrateNum, + pub cnum: CrateNum, pub codemap_import_info: RefCell>, - pub staged_api: bool, - pub index: index::Index, - pub xref_index: index::DenseIndex, + pub root: schema::CrateRoot, /// For each public item in this crate, we encode a key. When the /// crate is loaded, we read all the keys and put them in this @@ -105,9 +94,9 @@ pub struct CachedInlinedItem { pub struct CStore { pub dep_graph: DepGraph, - metas: RefCell>>, + metas: RefCell>>, /// Map from NodeId's of local extern crate statements to crate numbers - extern_mod_crate_map: RefCell>, + extern_mod_crate_map: RefCell>, used_crate_sources: RefCell>, used_libraries: RefCell>, used_link_args: RefCell>, @@ -115,7 +104,6 @@ pub struct CStore { pub inlined_item_cache: RefCell>>, pub defid_for_inlined_node: RefCell>, pub visible_parent_map: RefCell>, - pub used_for_derive_macro: RefCell>, } impl CStore { @@ -131,29 +119,27 @@ impl CStore { visible_parent_map: RefCell::new(FnvHashMap()), inlined_item_cache: RefCell::new(FnvHashMap()), defid_for_inlined_node: RefCell::new(FnvHashMap()), - used_for_derive_macro: RefCell::new(FnvHashSet()), } } - pub fn next_crate_num(&self) -> ast::CrateNum { - self.metas.borrow().len() as ast::CrateNum + 1 + pub fn next_crate_num(&self) -> CrateNum { + CrateNum::new(self.metas.borrow().len() + 1) } - pub fn get_crate_data(&self, cnum: ast::CrateNum) -> Rc { + pub fn get_crate_data(&self, cnum: CrateNum) -> Rc { self.metas.borrow().get(&cnum).unwrap().clone() } - pub fn get_crate_hash(&self, cnum: ast::CrateNum) -> Svh { - let cdata = self.get_crate_data(cnum); - decoder::get_crate_hash(cdata.data()) + pub fn get_crate_hash(&self, cnum: CrateNum) -> Svh { + self.get_crate_data(cnum).hash() } - pub fn set_crate_data(&self, cnum: ast::CrateNum, data: Rc) { + pub fn set_crate_data(&self, cnum: CrateNum, data: Rc) { self.metas.borrow_mut().insert(cnum, data); } - pub fn iter_crate_data(&self, mut i: I) where - I: FnMut(ast::CrateNum, &Rc), + pub fn iter_crate_data(&self, mut i: I) + where I: FnMut(CrateNum, &Rc) { for (&k, v) in self.metas.borrow().iter() { i(k, v); @@ -161,12 +147,14 @@ impl CStore { } /// Like `iter_crate_data`, but passes source paths (if available) as well. 
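// [Editor's sketch] `CStore` above is essentially an interior-mutability
// registry keyed by crate number, with `next_crate_num` returning `len + 1`
// because number 0 belongs to the crate currently being compiled. A
// stripped-down version of that pattern, with `String` standing in for
// `CrateMetadata` and all names invented:
use std::cell::RefCell;
use std::collections::HashMap;
use std::rc::Rc;

struct RegistrySketch {
    metas: RefCell<HashMap<u32, Rc<String>>>,
}

impl RegistrySketch {
    fn next_crate_num(&self) -> u32 {
        // 0 is the local crate, so the first external crate gets number 1.
        self.metas.borrow().len() as u32 + 1
    }
    fn set_crate_data(&self, cnum: u32, data: Rc<String>) {
        self.metas.borrow_mut().insert(cnum, data);
    }
    fn get_crate_data(&self, cnum: u32) -> Rc<String> {
        self.metas.borrow().get(&cnum).unwrap().clone()
    }
}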
- pub fn iter_crate_data_origins(&self, mut i: I) where - I: FnMut(ast::CrateNum, &CrateMetadata, Option), + pub fn iter_crate_data_origins(&self, mut i: I) + where I: FnMut(CrateNum, &CrateMetadata, Option) { for (&k, v) in self.metas.borrow().iter() { let origin = self.opt_used_crate_source(k); - origin.as_ref().map(|cs| { assert!(k == cs.cnum); }); + origin.as_ref().map(|cs| { + assert!(k == cs.cnum); + }); i(k, &v, origin); } } @@ -178,10 +166,12 @@ impl CStore { } } - pub fn opt_used_crate_source(&self, cnum: ast::CrateNum) - -> Option { - self.used_crate_sources.borrow_mut() - .iter().find(|source| source.cnum == cnum).cloned() + pub fn opt_used_crate_source(&self, cnum: CrateNum) -> Option { + self.used_crate_sources + .borrow_mut() + .iter() + .find(|source| source.cnum == cnum) + .cloned() } pub fn reset(&self) { @@ -193,19 +183,17 @@ impl CStore { self.statically_included_foreign_items.borrow_mut().clear(); } - pub fn crate_dependencies_in_rpo(&self, krate: ast::CrateNum) -> Vec - { + pub fn crate_dependencies_in_rpo(&self, krate: CrateNum) -> Vec { let mut ordering = Vec::new(); self.push_dependencies_in_postorder(&mut ordering, krate); ordering.reverse(); ordering } - pub fn push_dependencies_in_postorder(&self, - ordering: &mut Vec, - krate: ast::CrateNum) - { - if ordering.contains(&krate) { return } + pub fn push_dependencies_in_postorder(&self, ordering: &mut Vec, krate: CrateNum) { + if ordering.contains(&krate) { + return; + } let data = self.get_crate_data(krate); for &dep in data.cnum_map.borrow().iter() { @@ -226,20 +214,25 @@ impl CStore { // In order to get this left-to-right dependency ordering, we perform a // topological sort of all crates putting the leaves at the right-most // positions. - pub fn do_get_used_crates(&self, prefer: LinkagePreference) - -> Vec<(ast::CrateNum, Option)> { + pub fn do_get_used_crates(&self, + prefer: LinkagePreference) + -> Vec<(CrateNum, Option)> { let mut ordering = Vec::new(); for (&num, _) in self.metas.borrow().iter() { self.push_dependencies_in_postorder(&mut ordering, num); } info!("topological ordering: {:?}", ordering); ordering.reverse(); - let mut libs = self.used_crate_sources.borrow() + let mut libs = self.used_crate_sources + .borrow() .iter() - .map(|src| (src.cnum, match prefer { - LinkagePreference::RequireDynamic => src.dylib.clone().map(|p| p.0), - LinkagePreference::RequireStatic => src.rlib.clone().map(|p| p.0), - })) + .map(|src| { + (src.cnum, + match prefer { + LinkagePreference::RequireDynamic => src.dylib.clone().map(|p| p.0), + LinkagePreference::RequireStatic => src.rlib.clone().map(|p| p.0), + }) + }) .collect::>(); libs.sort_by(|&(a, _), &(b, _)| { let a = ordering.iter().position(|x| *x == a); @@ -254,9 +247,7 @@ impl CStore { self.used_libraries.borrow_mut().push((lib, kind)); } - pub fn get_used_libraries<'a>(&'a self) - -> &'a RefCell> { + pub fn get_used_libraries<'a>(&'a self) -> &'a RefCell> { &self.used_libraries } @@ -266,13 +257,11 @@ impl CStore { } } - pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell > { + pub fn get_used_link_args<'a>(&'a self) -> &'a RefCell> { &self.used_link_args } - pub fn add_extern_mod_stmt_cnum(&self, - emod_id: ast::NodeId, - cnum: ast::CrateNum) { + pub fn add_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId, cnum: CrateNum) { self.extern_mod_crate_map.borrow_mut().insert(emod_id, cnum); } @@ -284,95 +273,59 @@ impl CStore { self.statically_included_foreign_items.borrow().contains(&id) } - pub fn do_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option - 
{ + pub fn do_extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option { self.extern_mod_crate_map.borrow().get(&emod_id).cloned() } - - pub fn was_used_for_derive_macros(&self, i: &ast::Item) -> bool { - self.used_for_derive_macro.borrow().contains(&i.ident) - } - - pub fn add_used_for_derive_macros(&self, i: &ast::Item) { - self.used_for_derive_macro.borrow_mut().insert(i.ident); - } } impl CrateMetadata { - pub fn data<'a>(&'a self) -> &'a [u8] { self.data.as_slice() } - pub fn name(&self) -> &str { decoder::get_crate_name(self.data()) } - pub fn hash(&self) -> Svh { decoder::get_crate_hash(self.data()) } + pub fn name(&self) -> &str { + &self.root.name + } + pub fn hash(&self) -> Svh { + self.root.hash + } pub fn disambiguator(&self) -> &str { - decoder::get_crate_disambiguator(self.data()) - } - pub fn imported_filemaps<'a>(&'a self, codemap: &codemap::CodeMap) - -> Ref<'a, Vec> { - let filemaps = self.codemap_import_info.borrow(); - if filemaps.is_empty() { - drop(filemaps); - let filemaps = creader::import_codemap(codemap, &self.data); - - // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref. - *self.codemap_import_info.borrow_mut() = filemaps; - self.codemap_import_info.borrow() - } else { - filemaps - } + &self.root.disambiguator + } + + pub fn is_staged_api(&self) -> bool { + self.get_item_attrs(CRATE_DEF_INDEX) + .iter() + .any(|attr| attr.name() == "stable" || attr.name() == "unstable") } pub fn is_allocator(&self) -> bool { - let attrs = decoder::get_crate_attributes(self.data()); + let attrs = self.get_item_attrs(CRATE_DEF_INDEX); attr::contains_name(&attrs, "allocator") } pub fn needs_allocator(&self) -> bool { - let attrs = decoder::get_crate_attributes(self.data()); + let attrs = self.get_item_attrs(CRATE_DEF_INDEX); attr::contains_name(&attrs, "needs_allocator") } pub fn is_panic_runtime(&self) -> bool { - let attrs = decoder::get_crate_attributes(self.data()); + let attrs = self.get_item_attrs(CRATE_DEF_INDEX); attr::contains_name(&attrs, "panic_runtime") } pub fn needs_panic_runtime(&self) -> bool { - let attrs = decoder::get_crate_attributes(self.data()); + let attrs = self.get_item_attrs(CRATE_DEF_INDEX); attr::contains_name(&attrs, "needs_panic_runtime") } pub fn is_compiler_builtins(&self) -> bool { - let attrs = decoder::get_crate_attributes(self.data()); + let attrs = self.get_item_attrs(CRATE_DEF_INDEX); attr::contains_name(&attrs, "compiler_builtins") } - pub fn panic_strategy(&self) -> PanicStrategy { - decoder::get_panic_strategy(self.data()) - } -} - -impl MetadataBlob { - pub fn as_slice_raw<'a>(&'a self) -> &'a [u8] { - match *self { - MetadataVec(ref vec) => &vec[..], - MetadataArchive(ref ar) => ar.as_slice(), - } + pub fn is_no_builtins(&self) -> bool { + let attrs = self.get_item_attrs(CRATE_DEF_INDEX); + attr::contains_name(&attrs, "no_builtins") } - pub fn as_slice<'a>(&'a self) -> &'a [u8] { - let slice = self.as_slice_raw(); - let len_offset = 4 + common::metadata_encoding_version.len(); - if slice.len() < len_offset+4 { - &[] // corrupt metadata - } else { - let len = (((slice[len_offset+0] as u32) << 24) | - ((slice[len_offset+1] as u32) << 16) | - ((slice[len_offset+2] as u32) << 8) | - ((slice[len_offset+3] as u32) << 0)) as usize; - if len <= slice.len() - 4 - len_offset { - &slice[len_offset + 4..len_offset + len + 4] - } else { - &[] // corrupt or old metadata - } - } + pub fn panic_strategy(&self) -> PanicStrategy { + self.root.panic_strategy.clone() } } diff --git a/src/librustc_metadata/cstore_impl.rs 
b/src/librustc_metadata/cstore_impl.rs new file mode 100644 index 0000000000000..a618c98ff774c --- /dev/null +++ b/src/librustc_metadata/cstore_impl.rs @@ -0,0 +1,585 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use cstore; +use encoder; +use locator; +use schema; + +use rustc::middle::cstore::{InlinedItem, CrateStore, CrateSource, ExternCrate}; +use rustc::middle::cstore::{NativeLibraryKind, LinkMeta, LinkagePreference}; +use rustc::hir::def::{self, Def}; +use rustc::middle::lang_items; +use rustc::ty::{self, Ty, TyCtxt}; +use rustc::hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX}; + +use rustc::dep_graph::DepNode; +use rustc::hir::map as hir_map; +use rustc::hir::map::DefKey; +use rustc::mir::Mir; +use rustc::util::nodemap::{NodeSet, DefIdMap}; +use rustc_back::PanicStrategy; + +use std::path::PathBuf; +use syntax::ast; +use syntax::attr; +use syntax::parse::token; +use rustc::hir::svh::Svh; +use rustc_back::target::Target; +use rustc::hir; + +impl<'tcx> CrateStore<'tcx> for cstore::CStore { + fn describe_def(&self, def: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_def(def.index) + } + + fn stability(&self, def: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_stability(def.index) + } + + fn deprecation(&self, def: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_deprecation(def.index) + } + + fn visibility(&self, def: DefId) -> ty::Visibility { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_visibility(def.index) + } + + fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind + { + assert!(!def_id.is_local()); + self.dep_graph.read(DepNode::MetaData(def_id)); + self.get_crate_data(def_id.krate).closure_kind(def_id.index) + } + + fn closure_ty<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> ty::ClosureTy<'tcx> { + assert!(!def_id.is_local()); + self.dep_graph.read(DepNode::MetaData(def_id)); + self.get_crate_data(def_id.krate).closure_ty(def_id.index, tcx) + } + + fn item_variances(&self, def: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_item_variances(def.index) + } + + fn item_type<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Ty<'tcx> + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_type(def.index, tcx) + } + + fn item_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::GenericPredicates<'tcx> + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_predicates(def.index, tcx) + } + + fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::GenericPredicates<'tcx> + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_super_predicates(def.index, tcx) + } + + fn item_generics<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::Generics<'tcx> + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_generics(def.index, tcx) + } + + fn item_attrs(&self, def_id: DefId) -> Vec + { + 
self.dep_graph.read(DepNode::MetaData(def_id)); + self.get_crate_data(def_id.krate).get_item_attrs(def_id.index) + } + + fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::TraitDef<'tcx> + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_trait_def(def.index, tcx) + } + + fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx> + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_adt_def(def.index, tcx) + } + + fn fn_arg_names(&self, did: DefId) -> Vec + { + self.dep_graph.read(DepNode::MetaData(did)); + self.get_crate_data(did.krate).get_fn_arg_names(did.index) + } + + fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec + { + self.dep_graph.read(DepNode::MetaData(def_id)); + self.get_crate_data(def_id.krate).get_inherent_implementations_for_type(def_id.index) + } + + fn implementations_of_trait(&self, filter: Option) -> Vec + { + if let Some(def_id) = filter { + self.dep_graph.read(DepNode::MetaData(def_id)); + } + let mut result = vec![]; + self.iter_crate_data(|_, cdata| { + cdata.get_implementations_for_trait(filter, &mut result) + }); + result + } + + fn impl_or_trait_items(&self, def_id: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(def_id)); + let mut result = vec![]; + self.get_crate_data(def_id.krate) + .each_child_of_item(def_id.index, |child| result.push(child.def.def_id())); + result + } + + fn impl_polarity(&self, def: DefId) -> hir::ImplPolarity + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_impl_polarity(def.index) + } + + fn impl_trait_ref<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option> + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_impl_trait(def.index, tcx) + } + + fn custom_coerce_unsized_kind(&self, def: DefId) + -> Option + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_custom_coerce_unsized_kind(def.index) + } + + fn impl_parent(&self, impl_def: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(impl_def)); + self.get_crate_data(impl_def.krate).get_parent_impl(impl_def.index) + } + + fn trait_of_item(&self, def_id: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(def_id)); + self.get_crate_data(def_id.krate).get_trait_of_item(def_id.index) + } + + fn impl_or_trait_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option> + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_impl_or_trait_item(def.index, tcx) + } + + fn is_const_fn(&self, did: DefId) -> bool + { + self.dep_graph.read(DepNode::MetaData(did)); + self.get_crate_data(did.krate).is_const_fn(did.index) + } + + fn is_defaulted_trait(&self, trait_def_id: DefId) -> bool + { + self.dep_graph.read(DepNode::MetaData(trait_def_id)); + self.get_crate_data(trait_def_id.krate).is_defaulted_trait(trait_def_id.index) + } + + fn is_default_impl(&self, impl_did: DefId) -> bool { + self.dep_graph.read(DepNode::MetaData(impl_did)); + self.get_crate_data(impl_did.krate).is_default_impl(impl_did.index) + } + + fn is_foreign_item(&self, did: DefId) -> bool { + self.get_crate_data(did.krate).is_foreign_item(did.index) + } + + fn is_statically_included_foreign_item(&self, id: ast::NodeId) -> bool + { + self.do_is_statically_included_foreign_item(id) + } + + fn dylib_dependency_formats(&self, cnum: CrateNum) + -> Vec<(CrateNum, LinkagePreference)> + { + 
self.get_crate_data(cnum).get_dylib_dependency_formats() + } + + fn lang_items(&self, cnum: CrateNum) -> Vec<(DefIndex, usize)> + { + self.get_crate_data(cnum).get_lang_items() + } + + fn missing_lang_items(&self, cnum: CrateNum) + -> Vec + { + self.get_crate_data(cnum).get_missing_lang_items() + } + + fn is_staged_api(&self, cnum: CrateNum) -> bool + { + self.get_crate_data(cnum).is_staged_api() + } + + fn is_explicitly_linked(&self, cnum: CrateNum) -> bool + { + self.get_crate_data(cnum).explicitly_linked.get() + } + + fn is_allocator(&self, cnum: CrateNum) -> bool + { + self.get_crate_data(cnum).is_allocator() + } + + fn is_panic_runtime(&self, cnum: CrateNum) -> bool + { + self.get_crate_data(cnum).is_panic_runtime() + } + + fn is_compiler_builtins(&self, cnum: CrateNum) -> bool { + self.get_crate_data(cnum).is_compiler_builtins() + } + + fn panic_strategy(&self, cnum: CrateNum) -> PanicStrategy { + self.get_crate_data(cnum).panic_strategy() + } + + fn crate_name(&self, cnum: CrateNum) -> token::InternedString + { + token::intern_and_get_ident(&self.get_crate_data(cnum).name[..]) + } + + fn original_crate_name(&self, cnum: CrateNum) -> token::InternedString + { + token::intern_and_get_ident(&self.get_crate_data(cnum).name()) + } + + fn extern_crate(&self, cnum: CrateNum) -> Option + { + self.get_crate_data(cnum).extern_crate.get() + } + + fn crate_hash(&self, cnum: CrateNum) -> Svh + { + self.get_crate_hash(cnum) + } + + fn crate_disambiguator(&self, cnum: CrateNum) -> token::InternedString + { + token::intern_and_get_ident(&self.get_crate_data(cnum).disambiguator()) + } + + fn plugin_registrar_fn(&self, cnum: CrateNum) -> Option + { + self.get_crate_data(cnum).root.plugin_registrar_fn.map(|index| DefId { + krate: cnum, + index: index + }) + } + + fn native_libraries(&self, cnum: CrateNum) -> Vec<(NativeLibraryKind, String)> + { + self.get_crate_data(cnum).get_native_libraries() + } + + fn reachable_ids(&self, cnum: CrateNum) -> Vec + { + self.get_crate_data(cnum).get_reachable_ids() + } + + fn is_no_builtins(&self, cnum: CrateNum) -> bool { + self.get_crate_data(cnum).is_no_builtins() + } + + fn def_index_for_def_key(&self, + cnum: CrateNum, + def: DefKey) + -> Option { + let cdata = self.get_crate_data(cnum); + cdata.key_map.get(&def).cloned() + } + + /// Returns the `DefKey` for a given `DefId`. This indicates the + /// parent `DefId` as well as some idea of what kind of data the + /// `DefId` refers to. + fn def_key(&self, def: DefId) -> hir_map::DefKey { + // Note: loading the def-key (or def-path) for a def-id is not + // a *read* of its metadata. This is because the def-id is + // really just an interned shorthand for a def-path, which is the + // canonical name for an item. 
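// [Editor's sketch] The note above treats a def-id as an interned shorthand
// for a def-path: the chain of keys leading from an item up to its crate
// root. A rough, self-contained illustration of that idea (simplified
// stand-in types, not the rustc `DefKey`/`DefPath` definitions):
#[derive(Clone)]
struct KeySketch {
    parent: Option<usize>, // index of the parent's key; None at the crate root
    name: String,
}

/// Rebuild an item's full path by following parent links from its key.
fn def_path_sketch(keys: &[KeySketch], mut id: usize) -> Vec<String> {
    let mut path = vec![keys[id].name.clone()];
    while let Some(parent) = keys[id].parent {
        id = parent;
        path.push(keys[id].name.clone());
    }
    path.reverse();
    path
}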
+ // + // self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).def_key(def.index) + } + + fn relative_def_path(&self, def: DefId) -> Option { + // See `Note` above in `def_key()` for why this read is + // commented out: + // + // self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).def_path(def.index) + } + + fn struct_field_names(&self, def: DefId) -> Vec + { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).get_struct_field_names(def.index) + } + + fn item_children(&self, def_id: DefId) -> Vec + { + self.dep_graph.read(DepNode::MetaData(def_id)); + let mut result = vec![]; + self.get_crate_data(def_id.krate) + .each_child_of_item(def_id.index, |child| result.push(child)); + result + } + + fn maybe_get_item_ast<'a>(&'tcx self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + def_id: DefId) + -> Option<(&'tcx InlinedItem, ast::NodeId)> + { + self.dep_graph.read(DepNode::MetaData(def_id)); + + match self.inlined_item_cache.borrow().get(&def_id) { + Some(&None) => { + return None; // Not inlinable + } + Some(&Some(ref cached_inlined_item)) => { + // Already inline + debug!("maybe_get_item_ast({}): already inline as node id {}", + tcx.item_path_str(def_id), cached_inlined_item.item_id); + return Some((tcx.map.expect_inlined_item(cached_inlined_item.inlined_root), + cached_inlined_item.item_id)); + } + None => { + // Not seen yet + } + } + + debug!("maybe_get_item_ast({}): inlining item", tcx.item_path_str(def_id)); + + let inlined = self.get_crate_data(def_id.krate).maybe_get_item_ast(tcx, def_id.index); + + let cache_inlined_item = |original_def_id, inlined_item_id, inlined_root_node_id| { + let cache_entry = cstore::CachedInlinedItem { + inlined_root: inlined_root_node_id, + item_id: inlined_item_id, + }; + self.inlined_item_cache + .borrow_mut() + .insert(original_def_id, Some(cache_entry)); + self.defid_for_inlined_node + .borrow_mut() + .insert(inlined_item_id, original_def_id); + }; + + let find_inlined_item_root = |inlined_item_id| { + let mut node = inlined_item_id; + let mut path = Vec::with_capacity(10); + + // If we can't find the inline root after a thousand hops, we can + // be pretty sure there's something wrong with the HIR map. + for _ in 0 .. 1000 { + path.push(node); + let parent_node = tcx.map.get_parent_node(node); + if parent_node == node { + return node; + } + node = parent_node; + } + bug!("cycle in HIR map parent chain") + }; + + match inlined { + None => { + self.inlined_item_cache + .borrow_mut() + .insert(def_id, None); + } + Some(&InlinedItem::Item(d, ref item)) => { + assert_eq!(d, def_id); + let inlined_root_node_id = find_inlined_item_root(item.id); + cache_inlined_item(def_id, item.id, inlined_root_node_id); + } + Some(&InlinedItem::TraitItem(_, ref trait_item)) => { + let inlined_root_node_id = find_inlined_item_root(trait_item.id); + cache_inlined_item(def_id, trait_item.id, inlined_root_node_id); + + // Associated consts already have to be evaluated in `typeck`, so + // the logic to do that already exists in `middle`. In order to + // reuse that code, it needs to be able to look up the traits for + // inlined items. 
+ let ty_trait_item = tcx.impl_or_trait_item(def_id).clone(); + let trait_item_def_id = tcx.map.local_def_id(trait_item.id); + tcx.impl_or_trait_items.borrow_mut() + .insert(trait_item_def_id, ty_trait_item); + } + Some(&InlinedItem::ImplItem(_, ref impl_item)) => { + let inlined_root_node_id = find_inlined_item_root(impl_item.id); + cache_inlined_item(def_id, impl_item.id, inlined_root_node_id); + } + } + + // We can be sure to hit the cache now + return self.maybe_get_item_ast(tcx, def_id); + } + + fn local_node_for_inlined_defid(&'tcx self, def_id: DefId) -> Option { + assert!(!def_id.is_local()); + match self.inlined_item_cache.borrow().get(&def_id) { + Some(&Some(ref cached_inlined_item)) => { + Some(cached_inlined_item.item_id) + } + Some(&None) => { + None + } + _ => { + bug!("Trying to lookup inlined NodeId for unexpected item"); + } + } + } + + fn defid_for_inlined_node(&'tcx self, node_id: ast::NodeId) -> Option { + self.defid_for_inlined_node.borrow().get(&node_id).map(|x| *x) + } + + fn get_item_mir<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> Mir<'tcx> { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).maybe_get_item_mir(tcx, def.index).unwrap_or_else(|| { + bug!("get_item_mir: missing MIR for {}", tcx.item_path_str(def)) + }) + } + + fn is_item_mir_available(&self, def: DefId) -> bool { + self.dep_graph.read(DepNode::MetaData(def)); + self.get_crate_data(def.krate).is_item_mir_available(def.index) + } + + fn crates(&self) -> Vec + { + let mut result = vec![]; + self.iter_crate_data(|cnum, _| result.push(cnum)); + result + } + + fn used_libraries(&self) -> Vec<(String, NativeLibraryKind)> + { + self.get_used_libraries().borrow().clone() + } + + fn used_link_args(&self) -> Vec + { + self.get_used_link_args().borrow().clone() + } + + fn metadata_filename(&self) -> &str + { + locator::METADATA_FILENAME + } + + fn metadata_section_name(&self, target: &Target) -> &str + { + locator::meta_section_name(target) + } + + fn used_crates(&self, prefer: LinkagePreference) -> Vec<(CrateNum, Option)> + { + self.do_get_used_crates(prefer) + } + + fn used_crate_source(&self, cnum: CrateNum) -> CrateSource + { + self.opt_used_crate_source(cnum).unwrap() + } + + fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option + { + self.do_extern_mod_stmt_cnum(emod_id) + } + + fn encode_metadata<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + reexports: &def::ExportMap, + link_meta: &LinkMeta, + reachable: &NodeSet) -> Vec + { + encoder::encode_metadata(tcx, self, reexports, link_meta, reachable) + } + + fn metadata_encoding_version(&self) -> &[u8] + { + schema::METADATA_HEADER + } + + /// Returns a map from a sufficiently visible external item (i.e. an external item that is + /// visible from at least one local module) to a sufficiently visible parent (considering + /// modules that re-export the external item to be parents). + fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap> { + let mut visible_parent_map = self.visible_parent_map.borrow_mut(); + if !visible_parent_map.is_empty() { return visible_parent_map; } + + use std::collections::vec_deque::VecDeque; + use std::collections::hash_map::Entry; + for cnum in (1 .. self.next_crate_num().as_usize()).map(CrateNum::new) { + let cdata = self.get_crate_data(cnum); + + match cdata.extern_crate.get() { + // Ignore crates without a corresponding local `extern crate` item. 
+ Some(extern_crate) if !extern_crate.direct => continue, + _ => {}, + } + + let mut bfs_queue = &mut VecDeque::new(); + let mut add_child = |bfs_queue: &mut VecDeque<_>, child: def::Export, parent: DefId| { + let child = child.def.def_id(); + + if self.visibility(child) != ty::Visibility::Public { + return; + } + + match visible_parent_map.entry(child) { + Entry::Occupied(mut entry) => { + // If `child` is defined in crate `cnum`, ensure + // that it is mapped to a parent in `cnum`. + if child.krate == cnum && entry.get().krate != cnum { + entry.insert(parent); + } + } + Entry::Vacant(entry) => { + entry.insert(parent); + bfs_queue.push_back(child); + } + } + }; + + bfs_queue.push_back(DefId { + krate: cnum, + index: CRATE_DEF_INDEX + }); + while let Some(def) = bfs_queue.pop_front() { + for child in self.item_children(def) { + add_child(bfs_queue, child, def); + } + } + } + + visible_parent_map + } +} diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 624bffb7e0369..ccd497860de8a 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -10,1637 +10,1212 @@ // Decoding metadata from a single crate's metadata -#![allow(non_camel_case_types)] - -use self::Family::*; - use astencode::decode_inlined_item; -use cstore::{self, CrateMetadata}; -use common::*; -use def_key; -use encoder::def_to_u64; -use index; -use tls_context; -use tydecode::TyDecoder; - -use rustc::hir::def_id::CRATE_DEF_INDEX; -use rustc::hir::svh::Svh; +use cstore::{self, CrateMetadata, MetadataBlob, NativeLibraryKind}; +use index::Index; +use schema::*; + use rustc::hir::map as hir_map; -use rustc::hir::map::DefKey; +use rustc::hir::map::{DefKey, DefPathData}; use rustc::util::nodemap::FnvHashMap; use rustc::hir; -use rustc::session::config::PanicStrategy; +use rustc::hir::intravisit::IdRange; -use middle::cstore::{InlinedItem, LinkagePreference}; -use middle::cstore::{DefLike, DlDef, DlField, DlImpl, tls}; -use rustc::hir::def::Def; -use rustc::hir::def_id::{DefId, DefIndex}; -use middle::lang_items; -use rustc::ty::{ImplContainer, TraitContainer}; -use rustc::ty::{self, AdtKind, Ty, TyCtxt, TypeFoldable, VariantKind}; +use rustc::middle::cstore::{InlinedItem, LinkagePreference}; +use rustc::hir::def::{self, Def, CtorKind}; +use rustc::hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; +use rustc::middle::lang_items; +use rustc::ty::{self, Ty, TyCtxt}; +use rustc::ty::subst::Substs; use rustc_const_math::ConstInt; -use rustc::mir; -use rustc::mir::visit::MutVisitor; -use rustc::mir::repr::Location; +use rustc::mir::Mir; -use std::cell::Cell; +use std::borrow::Cow; +use std::cell::Ref; use std::io; +use std::mem; use std::rc::Rc; use std::str; +use std::u32; -use rbml::reader; -use rbml; -use rustc_serialize::Decodable; +use rustc_serialize::{Decodable, Decoder, SpecializedDecoder, opaque}; use syntax::attr; -use syntax::parse::token; -use syntax::ast; +use syntax::ast::{self, NodeId}; use syntax::codemap; -use syntax::print::pprust; -use syntax_pos::{self, Span, BytePos, NO_EXPANSION}; +use syntax_pos::{self, Span, BytePos, Pos}; -pub type Cmd<'a> = &'a CrateMetadata; +pub struct DecodeContext<'a, 'tcx: 'a> { + opaque: opaque::Decoder<'a>, + tcx: Option>, + cdata: Option<&'a CrateMetadata>, + from_id_range: IdRange, + to_id_range: IdRange, -impl CrateMetadata { - fn get_item(&self, item_id: DefIndex) -> Option { - self.index.lookup_item(self.data(), item_id).map(|pos| { - reader::doc_at(self.data(), pos as usize).unwrap().doc - }) + // Cache the last 
used filemap for translating spans as an optimization. + last_filemap_index: usize, + + lazy_state: LazyState, +} + +/// Abstract over the various ways one can create metadata decoders. +pub trait Metadata<'a, 'tcx>: Copy { + fn raw_bytes(self) -> &'a [u8]; + fn cdata(self) -> Option<&'a CrateMetadata> { + None + } + fn tcx(self) -> Option> { + None } - fn lookup_item(&self, item_id: DefIndex) -> rbml::Doc { - match self.get_item(item_id) { - None => bug!("lookup_item: id not found: {:?} in crate {:?} with number {}", - item_id, - self.name, - self.cnum), - Some(d) => d + fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> { + let id_range = IdRange { + min: NodeId::from_u32(u32::MIN), + max: NodeId::from_u32(u32::MAX), + }; + DecodeContext { + opaque: opaque::Decoder::new(self.raw_bytes(), pos), + cdata: self.cdata(), + tcx: self.tcx(), + from_id_range: id_range, + to_id_range: id_range, + last_filemap_index: 0, + lazy_state: LazyState::NoNode, } } } -pub fn load_index(data: &[u8]) -> index::Index { - let index = reader::get_doc(rbml::Doc::new(data), tag_index); - index::Index::from_rbml(index) -} - -pub fn crate_rustc_version(data: &[u8]) -> Option { - let doc = rbml::Doc::new(data); - reader::maybe_get_doc(doc, tag_rustc_version).map(|s| s.to_string()) +impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a MetadataBlob { + fn raw_bytes(self) -> &'a [u8] { + match *self { + MetadataBlob::Inflated(ref vec) => &vec[..], + MetadataBlob::Archive(ref ar) => ar.as_slice(), + } + } } -pub fn load_xrefs(data: &[u8]) -> index::DenseIndex { - let index = reader::get_doc(rbml::Doc::new(data), tag_xref_index); - index::DenseIndex::from_buf(index.data, index.start, index.end) +impl<'a, 'tcx> Metadata<'a, 'tcx> for &'a CrateMetadata { + fn raw_bytes(self) -> &'a [u8] { + self.blob.raw_bytes() + } + fn cdata(self) -> Option<&'a CrateMetadata> { + Some(self) + } } -// Go through each item in the metadata and create a map from that -// item's def-key to the item's DefIndex. -pub fn load_key_map(data: &[u8]) -> FnvHashMap { - let root_doc = rbml::Doc::new(data); - let items_doc = reader::get_doc(root_doc, tag_items); - let items_data_doc = reader::get_doc(items_doc, tag_items_data); - reader::docs(items_data_doc) - .filter(|&(tag, _)| tag == tag_items_data_item) - .map(|(_, item_doc)| { - // load def-key from item - let key = item_def_key(item_doc); - - // load def-index from item; we only encode the full def-id, - // so just pull out the index - let def_id_doc = reader::get_doc(item_doc, tag_def_id); - let def_id = untranslated_def_id(def_id_doc); - assert!(def_id.is_local()); // local to the crate we are decoding, that is - - (key, def_id.index) - }) - .collect() +impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'a, 'tcx, 'tcx>) { + fn raw_bytes(self) -> &'a [u8] { + self.0.raw_bytes() + } + fn cdata(self) -> Option<&'a CrateMetadata> { + Some(self.0) + } + fn tcx(self) -> Option> { + Some(self.1) + } } -#[derive(Clone, Copy, Debug, PartialEq)] -enum Family { - ImmStatic, // c - MutStatic, // b - Fn, // f - StaticMethod, // F - Method, // h - Type, // y - Mod, // m - ForeignMod, // n - Enum, // t - Variant(VariantKind), // V, v, w - Impl, // i - DefaultImpl, // d - Trait, // I - Struct(VariantKind), // S, s, u - Union, // U - PublicField, // g - InheritedField, // N - Constant, // C -} +// HACK(eddyb) Only used by astencode to customize the from/to IdRange's. 
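// [Editor's sketch] The `Metadata` trait above lets several different
// "sources" (a raw blob, a loaded crate, or a crate paired with a `TyCtxt`)
// all produce a decoder through one entry point, with optional capabilities
// defaulting to "not available". The same shape in miniature, with invented
// names that are not part of the rustc API:
struct SketchDecoder<'a> {
    data: &'a [u8],
    pos: usize,
    has_context: bool,
}

trait DecodeSource<'a>: Copy {
    fn raw_bytes(self) -> &'a [u8];
    fn has_context(self) -> bool {
        false
    }
    fn decoder(self, pos: usize) -> SketchDecoder<'a> {
        SketchDecoder {
            data: self.raw_bytes(),
            pos: pos,
            has_context: self.has_context(),
        }
    }
}

// A bare byte slice can decode but carries no extra context...
impl<'a> DecodeSource<'a> for &'a [u8] {
    fn raw_bytes(self) -> &'a [u8] {
        self
    }
}

// ...while a (bytes, context) pair reports that context is available.
impl<'a, C: Copy> DecodeSource<'a> for (&'a [u8], C) {
    fn raw_bytes(self) -> &'a [u8] {
        self.0
    }
    fn has_context(self) -> bool {
        true
    }
}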
+impl<'a, 'tcx> Metadata<'a, 'tcx> for (&'a CrateMetadata, TyCtxt<'a, 'tcx, 'tcx>, [IdRange; 2]) { + fn raw_bytes(self) -> &'a [u8] { + self.0.raw_bytes() + } + fn cdata(self) -> Option<&'a CrateMetadata> { + Some(self.0) + } + fn tcx(self) -> Option> { + Some(self.1) + } -fn item_family(item: rbml::Doc) -> Family { - let fam = reader::get_doc(item, tag_items_data_item_family); - match reader::doc_as_u8(fam) as char { - 'C' => Constant, - 'c' => ImmStatic, - 'b' => MutStatic, - 'f' => Fn, - 'F' => StaticMethod, - 'h' => Method, - 'y' => Type, - 'm' => Mod, - 'n' => ForeignMod, - 't' => Enum, - 'V' => Variant(VariantKind::Struct), - 'v' => Variant(VariantKind::Tuple), - 'w' => Variant(VariantKind::Unit), - 'i' => Impl, - 'd' => DefaultImpl, - 'I' => Trait, - 'S' => Struct(VariantKind::Struct), - 's' => Struct(VariantKind::Tuple), - 'u' => Struct(VariantKind::Unit), - 'U' => Union, - 'g' => PublicField, - 'N' => InheritedField, - c => bug!("unexpected family char: {}", c) + fn decoder(self, pos: usize) -> DecodeContext<'a, 'tcx> { + let mut dcx = (self.0, self.1).decoder(pos); + dcx.from_id_range = self.2[0]; + dcx.to_id_range = self.2[1]; + dcx } } -fn item_visibility(item: rbml::Doc) -> ty::Visibility { - match reader::maybe_get_doc(item, tag_items_data_item_visibility) { - None => ty::Visibility::Public, - Some(visibility_doc) => { - match reader::doc_as_u8(visibility_doc) as char { - 'y' => ty::Visibility::Public, - 'i' => ty::Visibility::PrivateExternal, - _ => bug!("unknown visibility character") - } - } +impl<'a, 'tcx: 'a, T: Decodable> Lazy { + pub fn decode>(self, meta: M) -> T { + let mut dcx = meta.decoder(self.position); + dcx.lazy_state = LazyState::NodeStart(self.position); + T::decode(&mut dcx).unwrap() } } -fn fn_constness(item: rbml::Doc) -> hir::Constness { - match reader::maybe_get_doc(item, tag_items_data_item_constness) { - None => hir::Constness::NotConst, - Some(constness_doc) => { - match reader::doc_as_u8(constness_doc) as char { - 'c' => hir::Constness::Const, - 'n' => hir::Constness::NotConst, - _ => bug!("unknown constness character") - } - } +impl<'a, 'tcx: 'a, T: Decodable> LazySeq { + pub fn decode>(self, meta: M) -> impl Iterator + 'a { + let mut dcx = meta.decoder(self.position); + dcx.lazy_state = LazyState::NodeStart(self.position); + (0..self.len).map(move |_| T::decode(&mut dcx).unwrap()) } } -fn item_defaultness(item: rbml::Doc) -> hir::Defaultness { - match reader::maybe_get_doc(item, tag_items_data_item_defaultness) { - None => hir::Defaultness::Default, // should occur only for default impls on traits - Some(defaultness_doc) => { - match reader::doc_as_u8(defaultness_doc) as char { - 'd' => hir::Defaultness::Default, - 'f' => hir::Defaultness::Final, - _ => bug!("unknown defaultness character") - } - } +impl<'a, 'tcx> DecodeContext<'a, 'tcx> { + pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { + self.tcx.expect("missing TyCtxt in DecodeContext") } -} -fn item_sort(item: rbml::Doc) -> Option { - reader::tagged_docs(item, tag_item_trait_item_sort).nth(0).map(|doc| { - doc.as_str().as_bytes()[0] as char - }) -} + pub fn cdata(&self) -> &'a CrateMetadata { + self.cdata.expect("missing CrateMetadata in DecodeContext") + } -fn untranslated_def_id(d: rbml::Doc) -> DefId { - let id = reader::doc_as_u64(d); - let index = DefIndex::new((id & 0xFFFF_FFFF) as usize); - DefId { krate: (id >> 32) as u32, index: index } -} + fn with_position R, R>(&mut self, pos: usize, f: F) -> R { + let new_opaque = opaque::Decoder::new(self.opaque.data, pos); + let old_opaque = 
mem::replace(&mut self.opaque, new_opaque); + let old_state = mem::replace(&mut self.lazy_state, LazyState::NoNode); + let r = f(self); + self.opaque = old_opaque; + self.lazy_state = old_state; + r + } -fn translated_def_id(cdata: Cmd, d: rbml::Doc) -> DefId { - let def_id = untranslated_def_id(d); - translate_def_id(cdata, def_id) + fn read_lazy_distance(&mut self, min_size: usize) -> Result::Error> { + let distance = self.read_usize()?; + let position = match self.lazy_state { + LazyState::NoNode => bug!("read_lazy_distance: outside of a metadata node"), + LazyState::NodeStart(start) => { + assert!(distance + min_size <= start); + start - distance - min_size + } + LazyState::Previous(last_min_end) => last_min_end + distance, + }; + self.lazy_state = LazyState::Previous(position + min_size); + Ok(position) + } } -fn item_parent_item(cdata: Cmd, d: rbml::Doc) -> Option { - reader::tagged_docs(d, tag_items_data_parent_item).nth(0).map(|did| { - translated_def_id(cdata, did) - }) +macro_rules! decoder_methods { + ($($name:ident -> $ty:ty;)*) => { + $(fn $name(&mut self) -> Result<$ty, Self::Error> { + self.opaque.$name() + })* + } } -fn item_require_parent_item(cdata: Cmd, d: rbml::Doc) -> DefId { - translated_def_id(cdata, reader::get_doc(d, tag_items_data_parent_item)) -} +impl<'doc, 'tcx> Decoder for DecodeContext<'doc, 'tcx> { + type Error = as Decoder>::Error; -fn item_def_id(d: rbml::Doc, cdata: Cmd) -> DefId { - translated_def_id(cdata, reader::get_doc(d, tag_def_id)) -} + decoder_methods! { + read_nil -> (); -fn reexports<'a>(d: rbml::Doc<'a>) -> reader::TaggedDocsIterator<'a> { - reader::tagged_docs(d, tag_items_data_item_reexport) -} + read_u64 -> u64; + read_u32 -> u32; + read_u16 -> u16; + read_u8 -> u8; + read_usize -> usize; -fn variant_disr_val(d: rbml::Doc) -> u64 { - let val_doc = reader::get_doc(d, tag_disr_val); - reader::with_doc_data(val_doc, |data| { - str::from_utf8(data).unwrap().parse().unwrap() - }) -} + read_i64 -> i64; + read_i32 -> i32; + read_i16 -> i16; + read_i8 -> i8; + read_isize -> isize; -fn doc_type<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd) -> Ty<'tcx> { - let tp = reader::get_doc(doc, tag_items_data_item_type); - TyDecoder::with_doc(tcx, cdata.cnum, tp, - &mut |did| translate_def_id(cdata, did)) - .parse_ty() -} + read_bool -> bool; + read_f64 -> f64; + read_f32 -> f32; + read_char -> char; + read_str -> Cow; + } -fn maybe_doc_type<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd) - -> Option> { - reader::maybe_get_doc(doc, tag_items_data_item_type).map(|tp| { - TyDecoder::with_doc(tcx, cdata.cnum, tp, - &mut |did| translate_def_id(cdata, did)) - .parse_ty() - }) + fn error(&mut self, err: &str) -> Self::Error { + self.opaque.error(err) + } } -fn doc_trait_ref<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd) - -> ty::TraitRef<'tcx> { - TyDecoder::with_doc(tcx, cdata.cnum, doc, - &mut |did| translate_def_id(cdata, did)) - .parse_trait_ref() +impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + Ok(Lazy::with_position(self.read_lazy_distance(Lazy::::min_size())?)) + } } -fn item_trait_ref<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd) - -> ty::TraitRef<'tcx> { - let tp = reader::get_doc(doc, tag_item_trait_ref); - doc_trait_ref(tp, tcx, cdata) +impl<'a, 'tcx, T> SpecializedDecoder> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + let len = 
self.read_usize()?; + let position = if len == 0 { + 0 + } else { + self.read_lazy_distance(LazySeq::::min_size(len))? + }; + Ok(LazySeq::with_position_and_length(position, len)) + } } -fn item_name(item: rbml::Doc) -> ast::Name { - maybe_item_name(item).expect("no item in item_name") -} +impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result { + let id = u32::decode(self)?; -fn maybe_item_name(item: rbml::Doc) -> Option { - reader::maybe_get_doc(item, tag_paths_data_name).map(|name| { - let string = name.as_str(); - token::intern(string) - }) -} + // from_id_range should be non-empty + assert!(!self.from_id_range.empty()); + // Make sure that translating the NodeId will actually yield a + // meaningful result + if !self.from_id_range.contains(NodeId::from_u32(id)) { + bug!("NodeId::decode: {} out of DecodeContext range ({:?} -> {:?})", + id, + self.from_id_range, + self.to_id_range); + } -fn family_to_variant_kind<'tcx>(family: Family) -> Option { - match family { - Struct(VariantKind::Struct) | Variant(VariantKind::Struct) | Union => - Some(ty::VariantKind::Struct), - Struct(VariantKind::Tuple) | Variant(VariantKind::Tuple) => - Some(ty::VariantKind::Tuple), - Struct(VariantKind::Unit) | Variant(VariantKind::Unit) => - Some(ty::VariantKind::Unit), - _ => None, + // Use wrapping arithmetic because otherwise it introduces control flow. + // Maybe we should just have the control flow? -- aatch + Ok(NodeId::from_u32(id.wrapping_sub(self.from_id_range.min.as_u32()) + .wrapping_add(self.to_id_range.min.as_u32()))) } } -fn item_to_def_like(cdata: Cmd, item: rbml::Doc, did: DefId) -> DefLike { - let fam = item_family(item); - match fam { - Constant => { - // Check whether we have an associated const item. - match item_sort(item) { - Some('C') | Some('c') => { - DlDef(Def::AssociatedConst(did)) - } - _ => { - // Regular const item. - DlDef(Def::Const(did)) - } - } - } - ImmStatic => DlDef(Def::Static(did, false)), - MutStatic => DlDef(Def::Static(did, true)), - Struct(..) => DlDef(Def::Struct(did)), - Union => DlDef(Def::Union(did)), - Fn => DlDef(Def::Fn(did)), - Method | StaticMethod => { - DlDef(Def::Method(did)) - } - Type => { - if item_sort(item) == Some('t') { - let trait_did = item_require_parent_item(cdata, item); - DlDef(Def::AssociatedTy(trait_did, did)) - } else { - DlDef(Def::TyAlias(did)) - } - } - Mod => DlDef(Def::Mod(did)), - ForeignMod => DlDef(Def::ForeignMod(did)), - Variant(..) 
=> { - let enum_did = item_require_parent_item(cdata, item); - DlDef(Def::Variant(enum_did, did)) +impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result { + let cnum = CrateNum::from_u32(u32::decode(self)?); + if cnum == LOCAL_CRATE { + Ok(self.cdata().cnum) + } else { + Ok(self.cdata().cnum_map.borrow()[cnum]) } - Trait => DlDef(Def::Trait(did)), - Enum => DlDef(Def::Enum(did)), - Impl | DefaultImpl => DlImpl(did), - PublicField | InheritedField => DlField, } } -fn parse_unsafety(item_doc: rbml::Doc) -> hir::Unsafety { - let unsafety_doc = reader::get_doc(item_doc, tag_unsafety); - if reader::doc_as_u8(unsafety_doc) != 0 { - hir::Unsafety::Unsafe - } else { - hir::Unsafety::Normal - } -} +impl<'a, 'tcx> SpecializedDecoder for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result { + let lo = BytePos::decode(self)?; + let hi = BytePos::decode(self)?; -fn parse_paren_sugar(item_doc: rbml::Doc) -> bool { - let paren_sugar_doc = reader::get_doc(item_doc, tag_paren_sugar); - reader::doc_as_u8(paren_sugar_doc) != 0 -} + let tcx = if let Some(tcx) = self.tcx { + tcx + } else { + return Ok(syntax_pos::mk_sp(lo, hi)); + }; -fn parse_polarity(item_doc: rbml::Doc) -> hir::ImplPolarity { - let polarity_doc = reader::get_doc(item_doc, tag_polarity); - if reader::doc_as_u8(polarity_doc) != 0 { - hir::ImplPolarity::Negative - } else { - hir::ImplPolarity::Positive - } -} + let (lo, hi) = if lo > hi { + // Currently macro expansion sometimes produces invalid Span values + // where lo > hi. In order not to crash the compiler when trying to + // translate these values, let's transform them into something we + // can handle (and which will produce useful debug locations at + // least some of the time). + // This workaround is only necessary as long as macro expansion is + // not fixed. FIXME(#23480) + (lo, lo) + } else { + (lo, hi) + }; -fn parse_associated_type_names(item_doc: rbml::Doc) -> Vec { - let names_doc = reader::get_doc(item_doc, tag_associated_type_names); - reader::tagged_docs(names_doc, tag_associated_type_name) - .map(|name_doc| token::intern(name_doc.as_str())) - .collect() -} + let imported_filemaps = self.cdata().imported_filemaps(&tcx.sess.codemap()); + let filemap = { + // Optimize for the case that most spans within a translated item + // originate from the same filemap. 
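// [Editor's sketch] The comment above describes the span-translation lookup:
// try the most recently used filemap first, otherwise binary-search the
// sorted list of imported filemaps by start position. The same logic on
// plain `(start, end)` tuples, assuming `ranges` is sorted and contains
// `pos` (invented helper, not the rustc code):
fn find_range(ranges: &[(u32, u32)], pos: u32, last: &mut usize) -> usize {
    let (start, end) = ranges[*last];
    if pos >= start && pos <= end {
        return *last; // fast path: same filemap as the previous span
    }
    // Find the last range whose start position is <= pos.
    let mut a = 0;
    let mut b = ranges.len();
    while b - a > 1 {
        let m = (a + b) / 2;
        if ranges[m].0 > pos {
            b = m;
        } else {
            a = m;
        }
    }
    *last = a;
    a
}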
+ let last_filemap = &imported_filemaps[self.last_filemap_index]; -pub fn get_trait_def<'a, 'tcx>(cdata: Cmd, - item_id: DefIndex, - tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::TraitDef<'tcx> -{ - let item_doc = cdata.lookup_item(item_id); - let generics = doc_generics(item_doc, tcx, cdata); - let unsafety = parse_unsafety(item_doc); - let associated_type_names = parse_associated_type_names(item_doc); - let paren_sugar = parse_paren_sugar(item_doc); - let def_path = def_path(cdata, item_id).unwrap(); - - ty::TraitDef::new(unsafety, - paren_sugar, - generics, - item_trait_ref(item_doc, tcx, cdata), - associated_type_names, - def_path.deterministic_hash(tcx)) -} + if lo >= last_filemap.original_start_pos && lo <= last_filemap.original_end_pos && + hi >= last_filemap.original_start_pos && + hi <= last_filemap.original_end_pos { + last_filemap + } else { + let mut a = 0; + let mut b = imported_filemaps.len(); -pub fn get_adt_def<'a, 'tcx>(cdata: Cmd, - item_id: DefIndex, - tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> ty::AdtDefMaster<'tcx> -{ - fn expect_variant_kind(family: Family) -> ty::VariantKind { - match family_to_variant_kind(family) { - Some(kind) => kind, - _ => bug!("unexpected family: {:?}", family), - } - } - fn get_enum_variants<'tcx>(cdata: Cmd, doc: rbml::Doc) -> Vec> { - reader::tagged_docs(doc, tag_items_data_item_variant).map(|p| { - let did = translated_def_id(cdata, p); - let item = cdata.lookup_item(did.index); - let disr = variant_disr_val(item); - ty::VariantDefData { - did: did, - name: item_name(item), - fields: get_variant_fields(cdata, item), - disr_val: ConstInt::Infer(disr), - kind: expect_variant_kind(item_family(item)), + while b - a > 1 { + let m = (a + b) / 2; + if imported_filemaps[m].original_start_pos > lo { + b = m; + } else { + a = m; + } + } + + self.last_filemap_index = a; + &imported_filemaps[a] } - }).collect() - } - fn get_variant_fields<'tcx>(cdata: Cmd, doc: rbml::Doc) -> Vec> { - let mut index = 0; - reader::tagged_docs(doc, tag_item_field).map(|f| { - let ff = item_family(f); - match ff { - PublicField | InheritedField => {}, - _ => bug!("expected field, found {:?}", ff) - }; - ty::FieldDefData::new(item_def_id(f, cdata), - item_name(f), - struct_field_family_to_visibility(ff)) - }).chain(reader::tagged_docs(doc, tag_item_unnamed_field).map(|f| { - let ff = item_family(f); - let name = token::with_ident_interner(|interner| interner.intern(index.to_string())); - index += 1; - ty::FieldDefData::new(item_def_id(f, cdata), name, - struct_field_family_to_visibility(ff)) - })).collect() - } - fn get_struct_variant<'tcx>(cdata: Cmd, - doc: rbml::Doc, - did: DefId) -> ty::VariantDefData<'tcx, 'tcx> { - ty::VariantDefData { - did: did, - name: item_name(doc), - fields: get_variant_fields(cdata, doc), - disr_val: ConstInt::Infer(0), - kind: expect_variant_kind(item_family(doc)), - } + }; + + let lo = (lo - filemap.original_start_pos) + filemap.translated_filemap.start_pos; + let hi = (hi - filemap.original_start_pos) + filemap.translated_filemap.start_pos; + + Ok(syntax_pos::mk_sp(lo, hi)) } +} - let doc = cdata.lookup_item(item_id); - let did = DefId { krate: cdata.cnum, index: item_id }; - let mut ctor_did = None; - let (kind, variants) = match item_family(doc) { - Enum => { - (AdtKind::Enum, get_enum_variants(cdata, doc)) - } - Struct(..) => { - // Use separate constructor id for unit/tuple structs and reuse did for braced structs. 
- ctor_did = reader::maybe_get_doc(doc, tag_items_data_item_struct_ctor).map(|ctor_doc| { - translated_def_id(cdata, ctor_doc) - }); - (AdtKind::Struct, vec![get_struct_variant(cdata, doc, ctor_did.unwrap_or(did))]) - } - Union => { - (AdtKind::Union, vec![get_struct_variant(cdata, doc, did)]) - } - _ => bug!("get_adt_def called on a non-ADT {:?} - {:?}", item_family(doc), did) - }; - - let adt = tcx.intern_adt_def(did, kind, variants); - if let Some(ctor_did) = ctor_did { - // Make adt definition available through constructor id as well. - tcx.insert_adt_def(ctor_did, adt); - } - - // this needs to be done *after* the variant is interned, - // to support recursive structures - for variant in &adt.variants { - if variant.kind == ty::VariantKind::Tuple && adt.is_enum() { - // tuple-like enum variant fields aren't real items - get the types - // from the ctor. - debug!("evaluating the ctor-type of {:?}", - variant.name); - let ctor_ty = get_type(cdata, variant.did.index, tcx); - debug!("evaluating the ctor-type of {:?}.. {:?}", - variant.name, - ctor_ty); - let field_tys = match ctor_ty.sty { - ty::TyFnDef(.., &ty::BareFnTy { sig: ty::Binder(ty::FnSig { - ref inputs, .. - }), ..}) => { - // tuple-struct constructors don't have escaping regions - assert!(!inputs.has_escaping_regions()); - inputs - }, - _ => bug!("tuple-variant ctor is not an ADT") +// FIXME(#36588) These impls are horribly unsound as they allow +// the caller to pick any lifetime for 'tcx, including 'static, +// by using the unspecialized proxies to them. + +impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + let tcx = self.tcx(); + + // Handle shorthands first, if we have an usize > 0x80. + if self.opaque.data[self.opaque.position()] & 0x80 != 0 { + let pos = self.read_usize()?; + assert!(pos >= SHORTHAND_OFFSET); + let key = ty::CReaderCacheKey { + cnum: self.cdata().cnum, + pos: pos - SHORTHAND_OFFSET, }; - for (field, &ty) in variant.fields.iter().zip(field_tys.iter()) { - field.fulfill_ty(ty); + if let Some(ty) = tcx.rcache.borrow().get(&key).cloned() { + return Ok(ty); } + + let ty = self.with_position(key.pos, Ty::decode)?; + tcx.rcache.borrow_mut().insert(key, ty); + Ok(ty) } else { - for field in &variant.fields { - debug!("evaluating the type of {:?}::{:?}", variant.name, field.name); - let ty = get_type(cdata, field.did.index, tcx); - field.fulfill_ty(ty); - debug!("evaluating the type of {:?}::{:?}: {:?}", - variant.name, field.name, ty); - } + Ok(tcx.mk_ty(ty::TypeVariants::decode(self)?)) } } - - adt } -pub fn get_predicates<'a, 'tcx>(cdata: Cmd, - item_id: DefIndex, - tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> ty::GenericPredicates<'tcx> -{ - let item_doc = cdata.lookup_item(item_id); - doc_predicates(item_doc, tcx, cdata, tag_item_predicates) -} -pub fn get_super_predicates<'a, 'tcx>(cdata: Cmd, - item_id: DefIndex, - tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> ty::GenericPredicates<'tcx> -{ - let item_doc = cdata.lookup_item(item_id); - doc_predicates(item_doc, tcx, cdata, tag_item_super_predicates) -} +impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + Ok(ty::GenericPredicates { + parent: Decodable::decode(self)?, + predicates: (0..self.read_usize()?).map(|_| { + // Handle shorthands first, if we have an usize > 0x80. 
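The `Ty` and predicate decoders above treat a leading byte with the high bit set as a "shorthand": a back-reference to an earlier position in the metadata, decoded once and then memoized (in `tcx.rcache` for types). Below is a toy model of that shape only: single-byte payloads, a 7-bit absolute back-reference, and a `HashMap` standing in for the cache. The real encoding differs (varint-encoded positions, the `SHORTHAND_OFFSET` bias, interned results), so treat this as an illustration, not the actual format:

```rust
use std::collections::HashMap;

/// Decode the entry at `pos`: a byte with the high bit clear is an inline
/// value, a byte with the high bit set is a back-reference whose low seven
/// bits give the position of an earlier entry. Results are memoized so any
/// shared entry is only decoded once.
fn decode_at(data: &[u8], pos: usize, cache: &mut HashMap<usize, u8>) -> u8 {
    if let Some(&v) = cache.get(&pos) {
        return v;
    }
    let b = data[pos];
    let v = if b & 0x80 != 0 {
        decode_at(data, (b & 0x7f) as usize, cache) // follow the back-reference
    } else {
        b // inline value
    };
    cache.insert(pos, v);
    v
}

fn main() {
    // Positions 0 and 1 hold inline values; position 2 is a shorthand for 0.
    let data = [42u8, 7, 0x80];
    let mut cache = HashMap::new();
    assert_eq!(decode_at(&data, 2, &mut cache), 42);
    assert_eq!(decode_at(&data, 1, &mut cache), 7);
}
```

Memoizing by position means a type that many items reference is decoded (and interned) only once per crate.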
+ if self.opaque.data[self.opaque.position()] & 0x80 != 0 { + let pos = self.read_usize()?; + assert!(pos >= SHORTHAND_OFFSET); + let pos = pos - SHORTHAND_OFFSET; -pub fn get_generics<'a, 'tcx>(cdata: Cmd, - item_id: DefIndex, - tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> &'tcx ty::Generics<'tcx> -{ - let item_doc = cdata.lookup_item(item_id); - doc_generics(item_doc, tcx, cdata) + self.with_position(pos, ty::Predicate::decode) + } else { + ty::Predicate::decode(self) + } + }) + .collect::, _>>()?, + }) + } } -pub fn get_type<'a, 'tcx>(cdata: Cmd, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> Ty<'tcx> -{ - let item_doc = cdata.lookup_item(id); - doc_type(item_doc, tcx, cdata) +impl<'a, 'tcx> SpecializedDecoder<&'tcx Substs<'tcx>> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result<&'tcx Substs<'tcx>, Self::Error> { + Ok(self.tcx().mk_substs((0..self.read_usize()?).map(|_| Decodable::decode(self)))?) + } } -pub fn get_stability(cdata: Cmd, id: DefIndex) -> Option { - let item = cdata.lookup_item(id); - reader::maybe_get_doc(item, tag_items_data_item_stability).map(|doc| { - let mut decoder = reader::Decoder::new(doc); - Decodable::decode(&mut decoder).unwrap() - }) +impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Region> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::Region, Self::Error> { + Ok(self.tcx().mk_region(Decodable::decode(self)?)) + } } -pub fn get_deprecation(cdata: Cmd, id: DefIndex) -> Option { - let item = cdata.lookup_item(id); - reader::maybe_get_doc(item, tag_items_data_item_deprecation).map(|doc| { - let mut decoder = reader::Decoder::new(doc); - Decodable::decode(&mut decoder).unwrap() - }) +impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::Slice>> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::Slice>, Self::Error> { + Ok(self.tcx().mk_type_list((0..self.read_usize()?).map(|_| Decodable::decode(self)))?) 
+ } } -pub fn get_visibility(cdata: Cmd, id: DefIndex) -> ty::Visibility { - item_visibility(cdata.lookup_item(id)) +impl<'a, 'tcx> SpecializedDecoder<&'tcx ty::BareFnTy<'tcx>> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result<&'tcx ty::BareFnTy<'tcx>, Self::Error> { + Ok(self.tcx().mk_bare_fn(Decodable::decode(self)?)) + } } -pub fn get_parent_impl(cdata: Cmd, id: DefIndex) -> Option { - let item = cdata.lookup_item(id); - reader::maybe_get_doc(item, tag_items_data_parent_impl).map(|doc| { - translated_def_id(cdata, doc) - }) +impl<'a, 'tcx> SpecializedDecoder> for DecodeContext<'a, 'tcx> { + fn specialized_decode(&mut self) -> Result, Self::Error> { + let def_id = DefId::decode(self)?; + Ok(self.tcx().lookup_adt_def(def_id)) + } } -pub fn get_repr_attrs(cdata: Cmd, id: DefIndex) -> Vec { - let item = cdata.lookup_item(id); - match reader::maybe_get_doc(item, tag_items_data_item_repr).map(|doc| { - let mut decoder = reader::Decoder::new(doc); - Decodable::decode(&mut decoder).unwrap() - }) { - Some(attrs) => attrs, - None => Vec::new(), +impl<'a, 'tcx> MetadataBlob { + pub fn is_compatible(&self) -> bool { + self.raw_bytes().starts_with(METADATA_HEADER) } -} -pub fn get_impl_polarity<'tcx>(cdata: Cmd, - id: DefIndex) - -> Option -{ - let item_doc = cdata.lookup_item(id); - let fam = item_family(item_doc); - match fam { - Family::Impl => { - Some(parse_polarity(item_doc)) - } - _ => None + pub fn get_root(&self) -> CrateRoot { + let slice = self.raw_bytes(); + let offset = METADATA_HEADER.len(); + let pos = (((slice[offset + 0] as u32) << 24) | ((slice[offset + 1] as u32) << 16) | + ((slice[offset + 2] as u32) << 8) | + ((slice[offset + 3] as u32) << 0)) as usize; + Lazy::with_position(pos).decode(self) } -} -pub fn get_custom_coerce_unsized_kind<'tcx>( - cdata: Cmd, - id: DefIndex) - -> Option -{ - let item_doc = cdata.lookup_item(id); - reader::maybe_get_doc(item_doc, tag_impl_coerce_unsized_kind).map(|kind_doc| { - let mut decoder = reader::Decoder::new(kind_doc); - Decodable::decode(&mut decoder).unwrap() - }) -} + /// Go through each item in the metadata and create a map from that + /// item's def-key to the item's DefIndex. + pub fn load_key_map(&self, index: LazySeq) -> FnvHashMap { + index.iter_enumerated(self.raw_bytes()) + .map(|(index, item)| (item.decode(self).def_key.decode(self), index)) + .collect() + } -pub fn get_impl_trait<'a, 'tcx>(cdata: Cmd, - id: DefIndex, - tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> Option> -{ - let item_doc = cdata.lookup_item(id); - let fam = item_family(item_doc); - match fam { - Family::Impl | Family::DefaultImpl => { - reader::maybe_get_doc(item_doc, tag_item_trait_ref).map(|tp| { - doc_trait_ref(tp, tcx, cdata) - }) + pub fn list_crate_metadata(&self, out: &mut io::Write) -> io::Result<()> { + write!(out, "=External Dependencies=\n")?; + let root = self.get_root(); + for (i, dep) in root.crate_deps.decode(self).enumerate() { + write!(out, "{} {}-{}\n", i + 1, dep.name, dep.hash)?; } - _ => None + write!(out, "\n")?; + Ok(()) } } -/// Iterates over the language items in the given crate. 
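`get_root` above finds the crate root by reading a big-endian `u32` offset stored in the four bytes immediately following `METADATA_HEADER`, then lazily decoding the root structure at that position. A standalone version of just the offset read, with a hypothetical four-byte header for the test:

```rust
/// Read the big-endian u32 stored right after the metadata header and
/// return it as the absolute position of the crate root.
fn root_position(metadata: &[u8], header_len: usize) -> usize {
    let s = &metadata[header_len..header_len + 4];
    (((s[0] as u32) << 24) | ((s[1] as u32) << 16) |
     ((s[2] as u32) << 8) | (s[3] as u32)) as usize
}

fn main() {
    // Hypothetical 4-byte header followed by the offset 0x0001_0203.
    let blob = [b'r', b'u', b's', b't', 0x00, 0x01, 0x02, 0x03];
    assert_eq!(root_position(&blob, 4), 0x0001_0203);
}
```

Keeping the root offset at a fixed place right after the header lets the reader jump straight to the root table without scanning the blob.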
-pub fn each_lang_item(cdata: Cmd, mut f: F) -> bool where - F: FnMut(DefIndex, usize) -> bool, -{ - let root = rbml::Doc::new(cdata.data()); - let lang_items = reader::get_doc(root, tag_lang_items); - reader::tagged_docs(lang_items, tag_lang_items_item).all(|item_doc| { - let id_doc = reader::get_doc(item_doc, tag_lang_items_item_id); - let id = reader::doc_as_u32(id_doc) as usize; - let index_doc = reader::get_doc(item_doc, tag_lang_items_item_index); - let index = DefIndex::from_u32(reader::doc_as_u32(index_doc)); - - f(index, id) - }) +impl<'tcx> EntryKind<'tcx> { + fn to_def(&self, did: DefId) -> Option { + Some(match *self { + EntryKind::Const => Def::Const(did), + EntryKind::AssociatedConst(_) => Def::AssociatedConst(did), + EntryKind::ImmStatic | + EntryKind::ForeignImmStatic => Def::Static(did, false), + EntryKind::MutStatic | + EntryKind::ForeignMutStatic => Def::Static(did, true), + EntryKind::Struct(_) => Def::Struct(did), + EntryKind::Union(_) => Def::Union(did), + EntryKind::Fn(_) | + EntryKind::ForeignFn(_) => Def::Fn(did), + EntryKind::Method(_) => Def::Method(did), + EntryKind::Type => Def::TyAlias(did), + EntryKind::AssociatedType(_) => Def::AssociatedTy(did), + EntryKind::Mod(_) => Def::Mod(did), + EntryKind::Variant(_) => Def::Variant(did), + EntryKind::Trait(_) => Def::Trait(did), + EntryKind::Enum => Def::Enum(did), + + EntryKind::ForeignMod | + EntryKind::Impl(_) | + EntryKind::DefaultImpl(_) | + EntryKind::Field | + EntryKind::Closure(_) => return None, + }) + } } -fn each_child_of_item_or_crate(cdata: Cmd, - item_doc: rbml::Doc, - mut get_crate_data: G, - mut callback: F) where - F: FnMut(DefLike, ast::Name, ty::Visibility), - G: FnMut(ast::CrateNum) -> Rc, -{ - // Iterate over all children. - for child_info_doc in reader::tagged_docs(item_doc, tag_mod_child) { - let child_def_id = translated_def_id(cdata, child_info_doc); - - // This item may be in yet another crate if it was the child of a - // reexport. - let crate_data = if child_def_id.krate == cdata.cnum { - None - } else { - Some(get_crate_data(child_def_id.krate)) - }; - let crate_data = match crate_data { - Some(ref cdata) => &**cdata, - None => cdata - }; +impl<'a, 'tcx> CrateMetadata { + fn maybe_entry(&self, item_id: DefIndex) -> Option>> { + self.root.index.lookup(self.blob.raw_bytes(), item_id) + } - // Get the item. - if let Some(child_item_doc) = crate_data.get_item(child_def_id.index) { - // Hand off the item to the callback. - let child_name = item_name(child_item_doc); - let def_like = item_to_def_like(crate_data, child_item_doc, child_def_id); - let visibility = item_visibility(child_item_doc); - callback(def_like, child_name, visibility); + fn entry(&self, item_id: DefIndex) -> Entry<'tcx> { + match self.maybe_entry(item_id) { + None => { + bug!("entry: id not found: {:?} in crate {:?} with number {}", + item_id, + self.name, + self.cnum) + } + Some(d) => d.decode(self), } } - for reexport_doc in reexports(item_doc) { - let def_id_doc = reader::get_doc(reexport_doc, - tag_items_data_item_reexport_def_id); - let child_def_id = translated_def_id(cdata, def_id_doc); - - let name_doc = reader::get_doc(reexport_doc, - tag_items_data_item_reexport_name); - let name = name_doc.as_str(); - - // This reexport may be in yet another crate. - let crate_data = if child_def_id.krate == cdata.cnum { - None - } else { - Some(get_crate_data(child_def_id.krate)) - }; - let crate_data = match crate_data { - Some(ref cdata) => &**cdata, - None => cdata - }; - - // Get the item. 
- if let Some(child_item_doc) = crate_data.get_item(child_def_id.index) { - // Hand off the item to the callback. - let def_like = item_to_def_like(crate_data, child_item_doc, child_def_id); - // These items have a public visibility because they're part of - // a public re-export. - callback(def_like, token::intern(name), ty::Visibility::Public); + fn local_def_id(&self, index: DefIndex) -> DefId { + DefId { + krate: self.cnum, + index: index, } } -} - -/// Iterates over each child of the given item. -pub fn each_child_of_item(cdata: Cmd, id: DefIndex, get_crate_data: G, callback: F) - where F: FnMut(DefLike, ast::Name, ty::Visibility), - G: FnMut(ast::CrateNum) -> Rc, -{ - // Find the item. - let item_doc = match cdata.get_item(id) { - None => return, - Some(item_doc) => item_doc, - }; - - each_child_of_item_or_crate(cdata, item_doc, get_crate_data, callback) -} - -/// Iterates over all the top-level crate items. -pub fn each_top_level_item_of_crate(cdata: Cmd, get_crate_data: G, callback: F) - where F: FnMut(DefLike, ast::Name, ty::Visibility), - G: FnMut(ast::CrateNum) -> Rc, -{ - each_child_of_item(cdata, CRATE_DEF_INDEX, get_crate_data, callback) -} -pub fn get_item_name(cdata: Cmd, id: DefIndex) -> ast::Name { - item_name(cdata.lookup_item(id)) -} - -pub fn maybe_get_item_name(cdata: Cmd, id: DefIndex) -> Option { - maybe_item_name(cdata.lookup_item(id)) -} - -pub enum FoundAst<'ast> { - Found(&'ast InlinedItem), - FoundParent(DefId, &'ast hir::Item), - NotFound, -} - -pub fn maybe_get_item_ast<'a, 'tcx>(cdata: Cmd, tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefIndex) - -> FoundAst<'tcx> { - debug!("Looking up item: {:?}", id); - let item_doc = cdata.lookup_item(id); - let item_did = item_def_id(item_doc, cdata); - let parent_def_id = DefId { - krate: cdata.cnum, - index: def_key(cdata, id).parent.unwrap() - }; - let mut parent_def_path = def_path(cdata, id).unwrap(); - parent_def_path.data.pop(); - if let Some(ast_doc) = reader::maybe_get_doc(item_doc, tag_ast as usize) { - let ii = decode_inlined_item(cdata, - tcx, - parent_def_path, - parent_def_id, - ast_doc, - item_did); - return FoundAst::Found(ii); - } else if let Some(parent_did) = item_parent_item(cdata, item_doc) { - // Remove the last element from the paths, since we are now - // trying to inline the parent. 
- let grandparent_def_id = DefId { - krate: cdata.cnum, - index: def_key(cdata, parent_def_id.index).parent.unwrap() - }; - let mut grandparent_def_path = parent_def_path; - grandparent_def_path.data.pop(); - let parent_doc = cdata.lookup_item(parent_did.index); - if let Some(ast_doc) = reader::maybe_get_doc(parent_doc, tag_ast as usize) { - let ii = decode_inlined_item(cdata, - tcx, - grandparent_def_path, - grandparent_def_id, - ast_doc, - parent_did); - if let &InlinedItem::Item(_, ref i) = ii { - return FoundAst::FoundParent(parent_did, i); - } - } + fn item_name(&self, item: &Entry<'tcx>) -> ast::Name { + item.def_key + .decode(self) + .disambiguated_data + .data + .get_opt_name() + .expect("no name in item_name") } - FoundAst::NotFound -} -pub fn is_item_mir_available<'tcx>(cdata: Cmd, id: DefIndex) -> bool { - if let Some(item_doc) = cdata.get_item(id) { - return reader::maybe_get_doc(item_doc, tag_mir as usize).is_some(); + pub fn get_def(&self, index: DefIndex) -> Option { + self.entry(index).kind.to_def(self.local_def_id(index)) } - false -} + pub fn get_trait_def(&self, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::TraitDef<'tcx> { + let data = match self.entry(item_id).kind { + EntryKind::Trait(data) => data.decode(self), + _ => bug!(), + }; -pub fn maybe_get_item_mir<'a, 'tcx>(cdata: Cmd, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - id: DefIndex) - -> Option> { - let item_doc = cdata.lookup_item(id); + ty::TraitDef::new(data.unsafety, + data.paren_sugar, + tcx.lookup_generics(self.local_def_id(item_id)), + data.trait_ref.decode((self, tcx)), + self.def_path(item_id).unwrap().deterministic_hash(tcx)) + } - return reader::maybe_get_doc(item_doc, tag_mir as usize).map(|mir_doc| { - let dcx = tls_context::DecodingContext { - crate_metadata: cdata, - tcx: tcx, + fn get_variant(&self, + item: &Entry<'tcx>, + index: DefIndex) + -> (ty::VariantDefData<'tcx, 'tcx>, Option) { + let data = match item.kind { + EntryKind::Variant(data) | + EntryKind::Struct(data) | + EntryKind::Union(data) => data.decode(self), + _ => bug!(), }; - let mut decoder = reader::Decoder::new(mir_doc); - let mut mir = decoder.read_opaque(|opaque_decoder, _| { - tls::enter_decoding_context(&dcx, opaque_decoder, |_, opaque_decoder| { - Decodable::decode(opaque_decoder) + let fields = item.children + .decode(self) + .map(|index| { + let f = self.entry(index); + ty::FieldDefData::new(self.local_def_id(index), self.item_name(&f), f.visibility) }) - }).unwrap(); - - assert!(decoder.position() == mir_doc.end); + .collect(); + + (ty::VariantDefData { + did: self.local_def_id(data.struct_ctor.unwrap_or(index)), + name: self.item_name(item), + fields: fields, + disr_val: ConstInt::Infer(data.disr), + ctor_kind: data.ctor_kind, + }, + data.struct_ctor) + } - let mut def_id_and_span_translator = MirDefIdAndSpanTranslator { - crate_metadata: cdata, - codemap: tcx.sess.codemap(), - last_filemap_index_hint: Cell::new(0), + pub fn get_adt_def(&self, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::AdtDefMaster<'tcx> { + let item = self.entry(item_id); + let did = self.local_def_id(item_id); + let mut ctor_index = None; + let variants = if let EntryKind::Enum = item.kind { + item.children + .decode(self) + .map(|index| { + let (variant, struct_ctor) = self.get_variant(&self.entry(index), index); + assert_eq!(struct_ctor, None); + variant + }) + .collect() + } else { + let (variant, struct_ctor) = self.get_variant(&item, item_id); + ctor_index = struct_ctor; + vec![variant] + }; + let kind = match item.kind { + 
EntryKind::Enum => ty::AdtKind::Enum, + EntryKind::Struct(_) => ty::AdtKind::Struct, + EntryKind::Union(_) => ty::AdtKind::Union, + _ => bug!("get_adt_def called on a non-ADT {:?}", did), }; - def_id_and_span_translator.visit_mir(&mut mir); - for promoted in &mut mir.promoted { - def_id_and_span_translator.visit_mir(promoted); + let adt = tcx.intern_adt_def(did, kind, variants); + if let Some(ctor_index) = ctor_index { + // Make adt definition available through constructor id as well. + tcx.insert_adt_def(self.local_def_id(ctor_index), adt); } - mir - }); + // this needs to be done *after* the variant is interned, + // to support recursive structures + for variant in &adt.variants { + for field in &variant.fields { + debug!("evaluating the type of {:?}::{:?}", + variant.name, + field.name); + let ty = self.get_type(field.did.index, tcx); + field.fulfill_ty(ty); + debug!("evaluating the type of {:?}::{:?}: {:?}", + variant.name, + field.name, + ty); + } + } - struct MirDefIdAndSpanTranslator<'cdata, 'codemap> { - crate_metadata: Cmd<'cdata>, - codemap: &'codemap codemap::CodeMap, - last_filemap_index_hint: Cell + adt } - impl<'v, 'cdata, 'codemap> mir::visit::MutVisitor<'v> - for MirDefIdAndSpanTranslator<'cdata, 'codemap> - { - fn visit_def_id(&mut self, def_id: &mut DefId, _: Location) { - *def_id = translate_def_id(self.crate_metadata, *def_id); - } + pub fn get_predicates(&self, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::GenericPredicates<'tcx> { + self.entry(item_id).predicates.unwrap().decode((self, tcx)) + } - fn visit_span(&mut self, span: &mut Span) { - *span = translate_span(self.crate_metadata, - self.codemap, - &self.last_filemap_index_hint, - *span); + pub fn get_super_predicates(&self, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::GenericPredicates<'tcx> { + match self.entry(item_id).kind { + EntryKind::Trait(data) => data.decode(self).super_predicates.decode((self, tcx)), + _ => bug!(), } } -} -fn get_explicit_self<'a, 'tcx>(item: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> ty::ExplicitSelfCategory<'tcx> { - fn get_mutability(ch: u8) -> hir::Mutability { - match ch as char { - 'i' => hir::MutImmutable, - 'm' => hir::MutMutable, - _ => bug!("unknown mutability character: `{}`", ch as char), - } + pub fn get_generics(&self, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::Generics<'tcx> { + self.entry(item_id).generics.unwrap().decode((self, tcx)) } - let explicit_self_doc = reader::get_doc(item, tag_item_trait_method_explicit_self); - let string = explicit_self_doc.as_str(); + pub fn get_type(&self, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> { + self.entry(id).ty.unwrap().decode((self, tcx)) + } - let explicit_self_kind = string.as_bytes()[0]; - match explicit_self_kind as char { - 's' => ty::ExplicitSelfCategory::Static, - 'v' => ty::ExplicitSelfCategory::ByValue, - '~' => ty::ExplicitSelfCategory::ByBox, - // FIXME(#4846) expl. region - '&' => { - ty::ExplicitSelfCategory::ByReference( - tcx.mk_region(ty::ReEmpty), - get_mutability(string.as_bytes()[1])) - } - _ => bug!("unknown self type code: `{}`", explicit_self_kind as char) + pub fn get_stability(&self, id: DefIndex) -> Option { + self.entry(id).stability.map(|stab| stab.decode(self)) } -} -/// Returns the def IDs of all the items in the given implementation. 
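`get_adt_def` above interns the ADT definition (and registers it under the struct constructor's id) before it fills in the field types, precisely so that recursive types can look up a definition that already exists while their fields are being resolved. A toy illustration of that two-phase construction, with `RefCell` for the deferred fields and strings standing in for real types (every name here is invented for the example):

```rust
use std::cell::RefCell;
use std::rc::Rc;

/// A stand-in for an interned ADT definition whose field types are filled
/// in by a second pass, after the definition itself is already reachable.
#[derive(Debug)]
struct ToyAdtDef {
    name: &'static str,
    field_tys: RefCell<Vec<String>>, // deferred, hence the RefCell
}

fn main() {
    // Phase 1: create ("intern") the definition with no field types yet.
    let list = Rc::new(ToyAdtDef { name: "List", field_tys: RefCell::new(Vec::new()) });

    // Phase 2: resolve field types; a recursive field can now mention the
    // definition that already exists.
    list.field_tys.borrow_mut().push(format!("Option<Box<{}>>", list.name));

    println!("{:?}", list);
}
```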
-pub fn get_impl_items(cdata: Cmd, impl_id: DefIndex) - -> Vec { - reader::tagged_docs(cdata.lookup_item(impl_id), tag_item_impl_item).map(|doc| { - let def_id = item_def_id(doc, cdata); - match item_sort(doc) { - Some('C') | Some('c') => ty::ConstTraitItemId(def_id), - Some('r') | Some('p') => ty::MethodTraitItemId(def_id), - Some('t') => ty::TypeTraitItemId(def_id), - _ => bug!("unknown impl item sort"), - } - }).collect() -} + pub fn get_deprecation(&self, id: DefIndex) -> Option { + self.entry(id).deprecation.map(|depr| depr.decode(self)) + } -pub fn get_trait_name(cdata: Cmd, id: DefIndex) -> ast::Name { - let doc = cdata.lookup_item(id); - item_name(doc) -} + pub fn get_visibility(&self, id: DefIndex) -> ty::Visibility { + self.entry(id).visibility + } -pub fn get_impl_or_trait_item<'a, 'tcx>(cdata: Cmd, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> Option> { - let item_doc = cdata.lookup_item(id); - - let def_id = item_def_id(item_doc, cdata); - - let container_id = if let Some(id) = item_parent_item(cdata, item_doc) { - id - } else { - return None; - }; - let container_doc = cdata.lookup_item(container_id.index); - let container = match item_family(container_doc) { - Trait => TraitContainer(container_id), - _ => ImplContainer(container_id), - }; - - let name = item_name(item_doc); - let vis = item_visibility(item_doc); - let defaultness = item_defaultness(item_doc); - - Some(match item_sort(item_doc) { - sort @ Some('C') | sort @ Some('c') => { - let ty = doc_type(item_doc, tcx, cdata); - ty::ConstTraitItem(Rc::new(ty::AssociatedConst { - name: name, - ty: ty, - vis: vis, - defaultness: defaultness, - def_id: def_id, - container: container, - has_value: sort == Some('C') - })) + fn get_impl_data(&self, id: DefIndex) -> ImplData<'tcx> { + match self.entry(id).kind { + EntryKind::Impl(data) => data.decode(self), + _ => bug!(), } - Some('r') | Some('p') => { - let generics = doc_generics(item_doc, tcx, cdata); - let predicates = doc_predicates(item_doc, tcx, cdata, tag_item_predicates); - let ity = tcx.lookup_item_type(def_id).ty; - let fty = match ity.sty { - ty::TyFnDef(.., fty) => fty, - _ => bug!( - "the type {:?} of the method {:?} is not a function?", - ity, name) - }; - let explicit_self = get_explicit_self(item_doc, tcx); - - ty::MethodTraitItem(Rc::new(ty::Method::new(name, - generics, - predicates, - fty, - explicit_self, - vis, - defaultness, - def_id, - container))) - } - Some('t') => { - let ty = maybe_doc_type(item_doc, tcx, cdata); - ty::TypeTraitItem(Rc::new(ty::AssociatedType { - name: name, - ty: ty, - vis: vis, - defaultness: defaultness, - def_id: def_id, - container: container, - })) - } - _ => return None - }) -} + } -pub fn get_trait_item_def_ids(cdata: Cmd, id: DefIndex) - -> Vec { - let item = cdata.lookup_item(id); - reader::tagged_docs(item, tag_item_trait_item).map(|mth| { - let def_id = item_def_id(mth, cdata); - match item_sort(mth) { - Some('C') | Some('c') => ty::ConstTraitItemId(def_id), - Some('r') | Some('p') => ty::MethodTraitItemId(def_id), - Some('t') => ty::TypeTraitItemId(def_id), - _ => bug!("unknown trait item sort"), - } - }).collect() -} + pub fn get_parent_impl(&self, id: DefIndex) -> Option { + self.get_impl_data(id).parent_impl + } -pub fn get_item_variances(cdata: Cmd, id: DefIndex) -> Vec { - let item_doc = cdata.lookup_item(id); - let variance_doc = reader::get_doc(item_doc, tag_item_variances); - let mut decoder = reader::Decoder::new(variance_doc); - Decodable::decode(&mut decoder).unwrap() -} + pub fn get_impl_polarity(&self, id: 
DefIndex) -> hir::ImplPolarity { + self.get_impl_data(id).polarity + } -pub fn get_provided_trait_methods<'a, 'tcx>(cdata: Cmd, - id: DefIndex, - tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> Vec>> { - let item = cdata.lookup_item(id); + pub fn get_custom_coerce_unsized_kind(&self, + id: DefIndex) + -> Option { + self.get_impl_data(id).coerce_unsized_kind + } - reader::tagged_docs(item, tag_item_trait_item).filter_map(|mth_id| { - let did = item_def_id(mth_id, cdata); - let mth = cdata.lookup_item(did.index); + pub fn get_impl_trait(&self, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Option> { + self.get_impl_data(id).trait_ref.map(|tr| tr.decode((self, tcx))) + } - if item_sort(mth) == Some('p') { - let trait_item = get_impl_or_trait_item(cdata, did.index, tcx); - if let Some(ty::MethodTraitItem(ref method)) = trait_item { - Some((*method).clone()) - } else { - None - } - } else { - None - } - }).collect() -} + /// Iterates over the language items in the given crate. + pub fn get_lang_items(&self) -> Vec<(DefIndex, usize)> { + self.root.lang_items.decode(self).collect() + } -pub fn get_associated_consts<'a, 'tcx>(cdata: Cmd, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> Vec>> { - let item = cdata.lookup_item(id); + /// Iterates over each child of the given item. + pub fn each_child_of_item(&self, id: DefIndex, mut callback: F) + where F: FnMut(def::Export) + { + // Find the item. + let item = match self.maybe_entry(id) { + None => return, + Some(item) => item.decode(self), + }; - [tag_item_trait_item, tag_item_impl_item].iter().flat_map(|&tag| { - reader::tagged_docs(item, tag).filter_map(|ac_id| { - let did = item_def_id(ac_id, cdata); - let ac_doc = cdata.lookup_item(did.index); + // Iterate over all children. + for child_index in item.children.decode(self) { + // Get the item. + if let Some(child) = self.maybe_entry(child_index) { + let child = child.decode(self); + // Hand off the item to the callback. + match child.kind { + // FIXME(eddyb) Don't encode these in children. + EntryKind::ForeignMod => { + for child_index in child.children.decode(self) { + if let Some(def) = self.get_def(child_index) { + callback(def::Export { + def: def, + name: self.item_name(&self.entry(child_index)), + }); + } + } + continue; + } + EntryKind::Impl(_) | + EntryKind::DefaultImpl(_) => continue, - match item_sort(ac_doc) { - Some('C') | Some('c') => { - let trait_item = get_impl_or_trait_item(cdata, did.index, tcx); - if let Some(ty::ConstTraitItem(ref ac)) = trait_item { - Some((*ac).clone()) - } else { - None + _ => {} + } + + let def_key = child.def_key.decode(self); + if let (Some(def), Some(name)) = + (self.get_def(child_index), def_key.disambiguated_data.data.get_opt_name()) { + callback(def::Export { + def: def, + name: name, + }); + // For non-reexport structs and variants add their constructors to children. + // Reexport lists automatically contain constructors when necessary. + match def { + Def::Struct(..) => { + if let Some(ctor_def_id) = self.get_struct_ctor_def_id(child_index) { + let ctor_kind = self.get_ctor_kind(child_index); + let ctor_def = Def::StructCtor(ctor_def_id, ctor_kind); + callback(def::Export { + def: ctor_def, + name: name, + }); + } + } + Def::Variant(def_id) => { + // Braced variants, unlike structs, generate unusable names in + // value namespace, they are reserved for possible future use. 
+ let ctor_kind = self.get_ctor_kind(child_index); + let ctor_def = Def::VariantCtor(def_id, ctor_kind); + callback(def::Export { + def: ctor_def, + name: name, + }); + } + _ => {} } } - _ => None } - }) - }).collect() -} + } -pub fn get_variant_kind(cdata: Cmd, node_id: DefIndex) -> Option -{ - let item = cdata.lookup_item(node_id); - family_to_variant_kind(item_family(item)) -} + if let EntryKind::Mod(data) = item.kind { + for exp in data.decode(self).reexports.decode(self) { + callback(exp); + } + } + } -pub fn get_struct_ctor_def_id(cdata: Cmd, node_id: DefIndex) -> Option -{ - let item = cdata.lookup_item(node_id); - reader::maybe_get_doc(item, tag_items_data_item_struct_ctor). - map(|ctor_doc| translated_def_id(cdata, ctor_doc)) -} + pub fn maybe_get_item_ast(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: DefIndex) + -> Option<&'tcx InlinedItem> { + debug!("Looking up item: {:?}", id); + let item_doc = self.entry(id); + let item_did = self.local_def_id(id); + let parent_def_id = self.local_def_id(self.def_key(id).parent.unwrap()); + let mut parent_def_path = self.def_path(id).unwrap(); + parent_def_path.data.pop(); + item_doc.ast.map(|ast| { + let ast = ast.decode(self); + decode_inlined_item(self, tcx, parent_def_path, parent_def_id, ast, item_did) + }) + } -/// If node_id is the constructor of a tuple struct, retrieve the NodeId of -/// the actual type definition, otherwise, return None -pub fn get_tuple_struct_definition_if_ctor(cdata: Cmd, - node_id: DefIndex) - -> Option -{ - let item = cdata.lookup_item(node_id); - reader::tagged_docs(item, tag_items_data_item_is_tuple_struct_ctor).next().map(|_| { - item_require_parent_item(cdata, item) - }) -} + pub fn is_item_mir_available(&self, id: DefIndex) -> bool { + self.maybe_entry(id).and_then(|item| item.decode(self).mir).is_some() + } -pub fn get_item_attrs(cdata: Cmd, - orig_node_id: DefIndex) - -> Vec { - // The attributes for a tuple struct are attached to the definition, not the ctor; - // we assume that someone passing in a tuple struct ctor is actually wanting to - // look at the definition - let node_id = get_tuple_struct_definition_if_ctor(cdata, orig_node_id); - let node_id = node_id.map(|x| x.index).unwrap_or(orig_node_id); - let item = cdata.lookup_item(node_id); - get_attributes(item) -} + pub fn maybe_get_item_mir(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: DefIndex) + -> Option> { + self.entry(id).mir.map(|mir| mir.decode((self, tcx))) + } -pub fn get_struct_field_attrs(cdata: Cmd) -> FnvHashMap> { - let data = rbml::Doc::new(cdata.data()); - let fields = reader::get_doc(data, tag_struct_fields); - reader::tagged_docs(fields, tag_struct_field).map(|field| { - let def_id = translated_def_id(cdata, reader::get_doc(field, tag_def_id)); - let attrs = get_attributes(field); - (def_id, attrs) - }).collect() -} + pub fn get_impl_or_trait_item(&self, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Option> { + let item = self.entry(id); + let parent_and_name = || { + let def_key = item.def_key.decode(self); + (self.local_def_id(def_key.parent.unwrap()), + def_key.disambiguated_data.data.get_opt_name().unwrap()) + }; -fn struct_field_family_to_visibility(family: Family) -> ty::Visibility { - match family { - PublicField => ty::Visibility::Public, - InheritedField => ty::Visibility::PrivateExternal, - _ => bug!() + Some(match item.kind { + EntryKind::AssociatedConst(container) => { + let (parent, name) = parent_and_name(); + ty::ConstTraitItem(Rc::new(ty::AssociatedConst { + name: name, + ty: item.ty.unwrap().decode((self, 
tcx)), + vis: item.visibility, + defaultness: container.defaultness(), + def_id: self.local_def_id(id), + container: container.with_def_id(parent), + has_value: container.has_body(), + })) + } + EntryKind::Method(data) => { + let (parent, name) = parent_and_name(); + let ity = item.ty.unwrap().decode((self, tcx)); + let fty = match ity.sty { + ty::TyFnDef(.., fty) => fty, + _ => { + bug!("the type {:?} of the method {:?} is not a function?", + ity, + name) + } + }; + + let data = data.decode(self); + ty::MethodTraitItem(Rc::new(ty::Method { + name: name, + generics: tcx.lookup_generics(self.local_def_id(id)), + predicates: item.predicates.unwrap().decode((self, tcx)), + fty: fty, + explicit_self: data.explicit_self.decode((self, tcx)), + vis: item.visibility, + defaultness: data.container.defaultness(), + has_body: data.container.has_body(), + def_id: self.local_def_id(id), + container: data.container.with_def_id(parent), + })) + } + EntryKind::AssociatedType(container) => { + let (parent, name) = parent_and_name(); + ty::TypeTraitItem(Rc::new(ty::AssociatedType { + name: name, + ty: item.ty.map(|ty| ty.decode((self, tcx))), + vis: item.visibility, + defaultness: container.defaultness(), + def_id: self.local_def_id(id), + container: container.with_def_id(parent), + })) + } + _ => return None, + }) } -} -pub fn get_struct_field_names(cdata: Cmd, id: DefIndex) -> Vec { - let item = cdata.lookup_item(id); - let mut index = 0; - reader::tagged_docs(item, tag_item_field).map(|an_item| { - item_name(an_item) - }).chain(reader::tagged_docs(item, tag_item_unnamed_field).map(|_| { - let name = token::with_ident_interner(|interner| interner.intern(index.to_string())); - index += 1; - name - })).collect() -} - -fn get_attributes(md: rbml::Doc) -> Vec { - reader::maybe_get_doc(md, tag_attributes).map_or(vec![], |attrs_doc| { - let mut decoder = reader::Decoder::new(attrs_doc); - let mut attrs: Vec = decoder.read_opaque(|opaque_decoder, _| { - Decodable::decode(opaque_decoder) - }).unwrap(); + pub fn get_item_variances(&self, id: DefIndex) -> Vec { + self.entry(id).variances.decode(self).collect() + } - // Need new unique IDs: old thread-local IDs won't map to new threads. 
- for attr in attrs.iter_mut() { - attr.node.id = attr::mk_attr_id(); + pub fn get_ctor_kind(&self, node_id: DefIndex) -> CtorKind { + match self.entry(node_id).kind { + EntryKind::Struct(data) | + EntryKind::Union(data) | + EntryKind::Variant(data) => data.decode(self).ctor_kind, + _ => CtorKind::Fictive, } - - attrs - }) -} - -fn list_crate_attributes(md: rbml::Doc, hash: &Svh, - out: &mut io::Write) -> io::Result<()> { - write!(out, "=Crate Attributes ({})=\n", *hash)?; - - let r = get_attributes(md); - for attr in &r { - write!(out, "{}\n", pprust::attribute_to_string(attr))?; } - write!(out, "\n\n") -} - -pub fn get_crate_attributes(data: &[u8]) -> Vec { - get_attributes(rbml::Doc::new(data)) -} - -#[derive(Clone)] -pub struct CrateDep { - pub cnum: ast::CrateNum, - pub name: String, - pub hash: Svh, - pub explicitly_linked: bool, -} - -pub fn get_crate_deps(data: &[u8]) -> Vec { - let cratedoc = rbml::Doc::new(data); - let depsdoc = reader::get_doc(cratedoc, tag_crate_deps); - - fn docstr(doc: rbml::Doc, tag_: usize) -> String { - let d = reader::get_doc(doc, tag_); - d.as_str().to_string() - } - - reader::tagged_docs(depsdoc, tag_crate_dep).enumerate().map(|(crate_num, depdoc)| { - let name = docstr(depdoc, tag_crate_dep_crate_name); - let hash = Svh::new(reader::doc_as_u64(reader::get_doc(depdoc, tag_crate_dep_hash))); - let doc = reader::get_doc(depdoc, tag_crate_dep_explicitly_linked); - let explicitly_linked = reader::doc_as_u8(doc) != 0; - CrateDep { - cnum: crate_num as u32 + 1, - name: name, - hash: hash, - explicitly_linked: explicitly_linked, + pub fn get_struct_ctor_def_id(&self, node_id: DefIndex) -> Option { + match self.entry(node_id).kind { + EntryKind::Struct(data) => { + data.decode(self).struct_ctor.map(|index| self.local_def_id(index)) + } + _ => None, } - }).collect() -} - -fn list_crate_deps(data: &[u8], out: &mut io::Write) -> io::Result<()> { - write!(out, "=External Dependencies=\n")?; - for dep in &get_crate_deps(data) { - write!(out, "{} {}-{}\n", dep.cnum, dep.name, dep.hash)?; } - write!(out, "\n")?; - Ok(()) -} -pub fn maybe_get_crate_hash(data: &[u8]) -> Option { - let cratedoc = rbml::Doc::new(data); - reader::maybe_get_doc(cratedoc, tag_crate_hash).map(|doc| { - Svh::new(reader::doc_as_u64(doc)) - }) -} - -pub fn get_crate_hash(data: &[u8]) -> Svh { - let cratedoc = rbml::Doc::new(data); - let hashdoc = reader::get_doc(cratedoc, tag_crate_hash); - Svh::new(reader::doc_as_u64(hashdoc)) -} - -pub fn maybe_get_crate_name(data: &[u8]) -> Option<&str> { - let cratedoc = rbml::Doc::new(data); - reader::maybe_get_doc(cratedoc, tag_crate_crate_name).map(|doc| { - doc.as_str() - }) -} - -pub fn get_crate_disambiguator<'a>(data: &'a [u8]) -> &'a str { - let crate_doc = rbml::Doc::new(data); - let disambiguator_doc = reader::get_doc(crate_doc, tag_crate_disambiguator); - let slice: &'a str = disambiguator_doc.as_str(); - slice -} - -pub fn get_crate_triple(data: &[u8]) -> Option { - let cratedoc = rbml::Doc::new(data); - let triple_doc = reader::maybe_get_doc(cratedoc, tag_crate_triple); - triple_doc.map(|s| s.as_str().to_string()) -} - -pub fn get_crate_name(data: &[u8]) -> &str { - maybe_get_crate_name(data).expect("no crate name in crate") -} - -pub fn list_crate_metadata(bytes: &[u8], out: &mut io::Write) -> io::Result<()> { - let hash = get_crate_hash(bytes); - let md = rbml::Doc::new(bytes); - list_crate_attributes(md, &hash, out)?; - list_crate_deps(bytes, out) -} - -// Translates a def_id from an external crate to a def_id for the current -// compilation 
environment. We use this when trying to load types from -// external crates - if those types further refer to types in other crates -// then we must translate the crate number from that encoded in the external -// crate to the correct local crate number. -pub fn translate_def_id(cdata: Cmd, did: DefId) -> DefId { - if did.is_local() { - return DefId { krate: cdata.cnum, index: did.index }; + pub fn get_item_attrs(&self, node_id: DefIndex) -> Vec { + // The attributes for a tuple struct are attached to the definition, not the ctor; + // we assume that someone passing in a tuple struct ctor is actually wanting to + // look at the definition + let mut item = self.entry(node_id); + let def_key = item.def_key.decode(self); + if def_key.disambiguated_data.data == DefPathData::StructCtor { + item = self.entry(def_key.parent.unwrap()); + } + self.get_attributes(&item) } - DefId { - krate: cdata.cnum_map.borrow()[did.krate], - index: did.index + pub fn get_struct_field_names(&self, id: DefIndex) -> Vec { + self.entry(id) + .children + .decode(self) + .map(|index| self.item_name(&self.entry(index))) + .collect() } -} -// Translate a DefId from the current compilation environment to a DefId -// for an external crate. -fn reverse_translate_def_id(cdata: Cmd, did: DefId) -> Option { - for (local, &global) in cdata.cnum_map.borrow().iter_enumerated() { - if global == did.krate { - return Some(DefId { krate: local, index: did.index }); - } + fn get_attributes(&self, item: &Entry<'tcx>) -> Vec { + item.attributes + .decode(self) + .map(|mut attr| { + // Need new unique IDs: old thread-local IDs won't map to new threads. + attr.node.id = attr::mk_attr_id(); + attr + }) + .collect() } - None -} - -/// Translates a `Span` from an extern crate to the corresponding `Span` -/// within the local crate's codemap. -pub fn translate_span(cdata: Cmd, - codemap: &codemap::CodeMap, - last_filemap_index_hint: &Cell, - span: syntax_pos::Span) - -> syntax_pos::Span { - let span = if span.lo > span.hi { - // Currently macro expansion sometimes produces invalid Span values - // where lo > hi. In order not to crash the compiler when trying to - // translate these values, let's transform them into something we - // can handle (and which will produce useful debug locations at - // least some of the time). - // This workaround is only necessary as long as macro expansion is - // not fixed. FIXME(#23480) - syntax_pos::mk_sp(span.lo, span.lo) - } else { - span - }; - - let imported_filemaps = cdata.imported_filemaps(&codemap); - let filemap = { - // Optimize for the case that most spans within a translated item - // originate from the same filemap. - let last_filemap_index = last_filemap_index_hint.get(); - let last_filemap = &imported_filemaps[last_filemap_index]; - - if span.lo >= last_filemap.original_start_pos && - span.lo <= last_filemap.original_end_pos && - span.hi >= last_filemap.original_start_pos && - span.hi <= last_filemap.original_end_pos { - last_filemap - } else { - let mut a = 0; - let mut b = imported_filemaps.len(); - - while b - a > 1 { - let m = (a + b) / 2; - if imported_filemaps[m].original_start_pos > span.lo { - b = m; - } else { - a = m; - } + // Translate a DefId from the current compilation environment to a DefId + // for an external crate. 
+ fn reverse_translate_def_id(&self, did: DefId) -> Option { + for (local, &global) in self.cnum_map.borrow().iter_enumerated() { + if global == did.krate { + return Some(DefId { + krate: local, + index: did.index, + }); } - - last_filemap_index_hint.set(a); - &imported_filemaps[a] } - }; - - let lo = (span.lo - filemap.original_start_pos) + - filemap.translated_filemap.start_pos; - let hi = (span.hi - filemap.original_start_pos) + - filemap.translated_filemap.start_pos; - syntax_pos::mk_sp(lo, hi) -} + None + } -pub fn each_inherent_implementation_for_type(cdata: Cmd, - id: DefIndex, - mut callback: F) - where F: FnMut(DefId), -{ - let item_doc = cdata.lookup_item(id); - for impl_doc in reader::tagged_docs(item_doc, tag_items_data_item_inherent_impl) { - if reader::maybe_get_doc(impl_doc, tag_item_trait_ref).is_none() { - callback(item_def_id(impl_doc, cdata)); - } + pub fn get_inherent_implementations_for_type(&self, id: DefIndex) -> Vec { + self.entry(id) + .inherent_impls + .decode(self) + .map(|index| self.local_def_id(index)) + .collect() } -} -pub fn each_implementation_for_trait(cdata: Cmd, - def_id: DefId, - mut callback: F) where - F: FnMut(DefId), -{ - // Do a reverse lookup beforehand to avoid touching the crate_num - // hash map in the loop below. - if let Some(crate_local_did) = reverse_translate_def_id(cdata, def_id) { - let def_id_u64 = def_to_u64(crate_local_did); - - let impls_doc = reader::get_doc(rbml::Doc::new(cdata.data()), tag_impls); - for trait_doc in reader::tagged_docs(impls_doc, tag_impls_trait) { - let trait_def_id = reader::get_doc(trait_doc, tag_def_id); - if reader::doc_as_u64(trait_def_id) != def_id_u64 { + pub fn get_implementations_for_trait(&self, filter: Option, result: &mut Vec) { + // Do a reverse lookup beforehand to avoid touching the crate_num + // hash map in the loop below. + let filter = match filter.map(|def_id| self.reverse_translate_def_id(def_id)) { + Some(Some(def_id)) => Some((def_id.krate.as_u32(), def_id.index)), + Some(None) => return, + None => None, + }; + + // FIXME(eddyb) Make this O(1) instead of O(n). 
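`reverse_translate_def_id` above maps a crate number from the current session back to the numbering the external crate used, by scanning its `cnum_map` for a matching entry. The same idea in standalone form, with plain `u32` crate numbers and a made-up map:

```rust
/// `cnum_map[local]` holds the session-global crate number that the external
/// crate's local index `local` was remapped to; the reverse direction is a
/// linear scan for a matching entry.
fn reverse_lookup(cnum_map: &[u32], global: u32) -> Option<usize> {
    cnum_map.iter().position(|&g| g == global)
}

fn main() {
    let cnum_map = [0, 7, 3, 5]; // hypothetical local -> global mapping
    assert_eq!(reverse_lookup(&cnum_map, 3), Some(2));
    assert_eq!(reverse_lookup(&cnum_map, 9), None);
}
```

Doing this lookup once up front, as `get_implementations_for_trait` does just below, avoids repeated crate-number translation inside the loop over trait impls.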
+ for trait_impls in self.root.impls.decode(self) { + if filter.is_some() && filter != Some(trait_impls.trait_id) { continue; } - for impl_doc in reader::tagged_docs(trait_doc, tag_impls_trait_impl) { - callback(translated_def_id(cdata, impl_doc)); + + result.extend(trait_impls.impls.decode(self).map(|index| self.local_def_id(index))); + + if filter.is_some() { + break; } } } -} -pub fn get_trait_of_item(cdata: Cmd, id: DefIndex) -> Option { - let item_doc = cdata.lookup_item(id); - let parent_item_id = match item_parent_item(cdata, item_doc) { - None => return None, - Some(item_id) => item_id, - }; - let parent_item_doc = cdata.lookup_item(parent_item_id.index); - match item_family(parent_item_doc) { - Trait => Some(item_def_id(parent_item_doc, cdata)), - _ => None + pub fn get_trait_of_item(&self, id: DefIndex) -> Option { + self.entry(id).def_key.decode(self).parent.and_then(|parent_index| { + match self.entry(parent_index).kind { + EntryKind::Trait(_) => Some(self.local_def_id(parent_index)), + _ => None, + } + }) } -} -pub fn get_native_libraries(cdata: Cmd) - -> Vec<(cstore::NativeLibraryKind, String)> { - let libraries = reader::get_doc(rbml::Doc::new(cdata.data()), - tag_native_libraries); - reader::tagged_docs(libraries, tag_native_libraries_lib).map(|lib_doc| { - let kind_doc = reader::get_doc(lib_doc, tag_native_libraries_kind); - let name_doc = reader::get_doc(lib_doc, tag_native_libraries_name); - let kind: cstore::NativeLibraryKind = - cstore::NativeLibraryKind::from_u32(reader::doc_as_u32(kind_doc)).unwrap(); - let name = name_doc.as_str().to_string(); - (kind, name) - }).collect() -} - -pub fn get_plugin_registrar_fn(data: &[u8]) -> Option { - reader::maybe_get_doc(rbml::Doc::new(data), tag_plugin_registrar_fn) - .map(|doc| DefIndex::from_u32(reader::doc_as_u32(doc))) -} - -pub fn each_exported_macro(data: &[u8], mut f: F) where - F: FnMut(ast::Name, Vec, Span, String) -> bool, -{ - let macros = reader::get_doc(rbml::Doc::new(data), tag_macro_defs); - for macro_doc in reader::tagged_docs(macros, tag_macro_def) { - let name = item_name(macro_doc); - let attrs = get_attributes(macro_doc); - let span = get_macro_span(macro_doc); - let body = reader::get_doc(macro_doc, tag_macro_def_body); - if !f(name, attrs, span, body.as_str().to_string()) { - break; - } + pub fn get_native_libraries(&self) -> Vec<(NativeLibraryKind, String)> { + self.root.native_libraries.decode(self).collect() } -} -pub fn get_derive_registrar_fn(data: &[u8]) -> Option { - reader::maybe_get_doc(rbml::Doc::new(data), tag_macro_derive_registrar) - .map(|doc| DefIndex::from_u32(reader::doc_as_u32(doc))) -} + pub fn get_dylib_dependency_formats(&self) -> Vec<(CrateNum, LinkagePreference)> { + self.root + .dylib_dependency_formats + .decode(self) + .enumerate() + .flat_map(|(i, link)| { + let cnum = CrateNum::new(i + 1); + link.map(|link| (self.cnum_map.borrow()[cnum], link)) + }) + .collect() + } -pub fn get_macro_span(doc: rbml::Doc) -> Span { - let lo_doc = reader::get_doc(doc, tag_macro_def_span_lo); - let lo = BytePos(reader::doc_as_u32(lo_doc)); - let hi_doc = reader::get_doc(doc, tag_macro_def_span_hi); - let hi = BytePos(reader::doc_as_u32(hi_doc)); - return Span { lo: lo, hi: hi, expn_id: NO_EXPANSION }; -} + pub fn get_missing_lang_items(&self) -> Vec { + self.root.lang_items_missing.decode(self).collect() + } -pub fn get_dylib_dependency_formats(cdata: Cmd) - -> Vec<(ast::CrateNum, LinkagePreference)> -{ - let formats = reader::get_doc(rbml::Doc::new(cdata.data()), - tag_dylib_dependency_formats); - 
let mut result = Vec::new(); - - debug!("found dylib deps: {}", formats.as_str()); - for spec in formats.as_str().split(',') { - if spec.is_empty() { continue } - let mut split = spec.split(':'); - let cnum = split.next().unwrap(); - let link = split.next().unwrap(); - let cnum: ast::CrateNum = cnum.parse().unwrap(); - let cnum = cdata.cnum_map.borrow()[cnum]; - result.push((cnum, if link == "d" { - LinkagePreference::RequireDynamic - } else { - LinkagePreference::RequireStatic - })); + pub fn get_fn_arg_names(&self, id: DefIndex) -> Vec { + let arg_names = match self.entry(id).kind { + EntryKind::Fn(data) | + EntryKind::ForeignFn(data) => data.decode(self).arg_names, + EntryKind::Method(data) => data.decode(self).fn_data.arg_names, + _ => LazySeq::empty(), + }; + arg_names.decode(self).collect() } - return result; -} -pub fn get_missing_lang_items(cdata: Cmd) - -> Vec -{ - let items = reader::get_doc(rbml::Doc::new(cdata.data()), tag_lang_items); - reader::tagged_docs(items, tag_lang_items_missing).map(|missing_docs| { - lang_items::LangItem::from_u32(reader::doc_as_u32(missing_docs)).unwrap() - }).collect() -} + pub fn get_reachable_ids(&self) -> Vec { + self.root.reachable_ids.decode(self).map(|index| self.local_def_id(index)).collect() + } -pub fn get_method_arg_names(cdata: Cmd, id: DefIndex) -> Vec { - let method_doc = cdata.lookup_item(id); - match reader::maybe_get_doc(method_doc, tag_method_argument_names) { - Some(args_doc) => { - reader::tagged_docs(args_doc, tag_method_argument_name).map(|name_doc| { - name_doc.as_str().to_string() - }).collect() - }, - None => vec![], + pub fn is_const_fn(&self, id: DefIndex) -> bool { + let constness = match self.entry(id).kind { + EntryKind::Method(data) => data.decode(self).fn_data.constness, + EntryKind::Fn(data) => data.decode(self).constness, + _ => hir::Constness::NotConst, + }; + constness == hir::Constness::Const } -} -pub fn get_reachable_ids(cdata: Cmd) -> Vec { - let items = reader::get_doc(rbml::Doc::new(cdata.data()), - tag_reachable_ids); - reader::tagged_docs(items, tag_reachable_id).map(|doc| { - DefId { - krate: cdata.cnum, - index: DefIndex::from_u32(reader::doc_as_u32(doc)), + pub fn is_foreign_item(&self, id: DefIndex) -> bool { + match self.entry(id).kind { + EntryKind::ForeignImmStatic | + EntryKind::ForeignMutStatic | + EntryKind::ForeignFn(_) => true, + _ => false, } - }).collect() -} + } -pub fn is_typedef(cdata: Cmd, id: DefIndex) -> bool { - let item_doc = cdata.lookup_item(id); - match item_family(item_doc) { - Type => true, - _ => false, + pub fn is_defaulted_trait(&self, trait_id: DefIndex) -> bool { + match self.entry(trait_id).kind { + EntryKind::Trait(data) => data.decode(self).has_default_impl, + _ => bug!(), + } } -} -pub fn is_const_fn(cdata: Cmd, id: DefIndex) -> bool { - let item_doc = cdata.lookup_item(id); - match fn_constness(item_doc) { - hir::Constness::Const => true, - hir::Constness::NotConst => false, + pub fn is_default_impl(&self, impl_id: DefIndex) -> bool { + match self.entry(impl_id).kind { + EntryKind::DefaultImpl(_) => true, + _ => false, + } } -} -pub fn is_extern_item<'a, 'tcx>(cdata: Cmd, - id: DefIndex, - tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> bool { - let item_doc = match cdata.get_item(id) { - Some(doc) => doc, - None => return false, - }; - let applicable = match item_family(item_doc) { - ImmStatic | MutStatic => true, - Fn => get_generics(cdata, id, tcx).types.is_empty(), - _ => false, - }; - - if applicable { - attr::contains_extern_indicator(tcx.sess.diagnostic(), - 
&get_attributes(item_doc)) - } else { - false + pub fn closure_kind(&self, closure_id: DefIndex) -> ty::ClosureKind { + match self.entry(closure_id).kind { + EntryKind::Closure(data) => data.decode(self).kind, + _ => bug!(), + } } -} -pub fn is_foreign_item(cdata: Cmd, id: DefIndex) -> bool { - let item_doc = cdata.lookup_item(id); - let parent_item_id = match item_parent_item(cdata, item_doc) { - None => return false, - Some(item_id) => item_id, - }; - let parent_item_doc = cdata.lookup_item(parent_item_id.index); - item_family(parent_item_doc) == ForeignMod -} + pub fn closure_ty(&self, + closure_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::ClosureTy<'tcx> { + match self.entry(closure_id).kind { + EntryKind::Closure(data) => data.decode(self).ty.decode((self, tcx)), + _ => bug!(), + } + } -pub fn is_impl(cdata: Cmd, id: DefIndex) -> bool { - let item_doc = cdata.lookup_item(id); - match item_family(item_doc) { - Impl => true, - _ => false, + pub fn def_key(&self, id: DefIndex) -> hir_map::DefKey { + debug!("def_key: id={:?}", id); + self.entry(id).def_key.decode(self) } -} -fn doc_generics<'a, 'tcx>(base_doc: rbml::Doc, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - cdata: Cmd) - -> &'tcx ty::Generics<'tcx> -{ - let doc = reader::get_doc(base_doc, tag_item_generics); - TyDecoder::with_doc(tcx, cdata.cnum, doc, - &mut |did| translate_def_id(cdata, did)) - .parse_generics() -} + // Returns the path leading to the thing with this `id`. Note that + // some def-ids don't wind up in the metadata, so `def_path` sometimes + // returns `None` + pub fn def_path(&self, id: DefIndex) -> Option { + debug!("def_path(id={:?})", id); + if self.maybe_entry(id).is_some() { + Some(hir_map::DefPath::make(self.cnum, id, |parent| self.def_key(parent))) + } else { + None + } + } -fn doc_predicate<'a, 'tcx>(cdata: Cmd, - doc: rbml::Doc, - tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> ty::Predicate<'tcx> -{ - let predicate_pos = cdata.xref_index.lookup( - cdata.data(), reader::doc_as_u32(doc)).unwrap() as usize; - TyDecoder::new( - cdata.data(), cdata.cnum, predicate_pos, tcx, - &mut |did| translate_def_id(cdata, did) - ).parse_predicate() -} + /// Imports the codemap from an external crate into the codemap of the crate + /// currently being compiled (the "local crate"). + /// + /// The import algorithm works analogous to how AST items are inlined from an + /// external crate's metadata: + /// For every FileMap in the external codemap an 'inline' copy is created in the + /// local codemap. The correspondence relation between external and local + /// FileMaps is recorded in the `ImportedFileMap` objects returned from this + /// function. When an item from an external crate is later inlined into this + /// crate, this correspondence information is used to translate the span + /// information of the inlined item so that it refers the correct positions in + /// the local codemap (see `>`). + /// + /// The import algorithm in the function below will reuse FileMaps already + /// existing in the local codemap. For example, even if the FileMap of some + /// source file of libstd gets imported many times, there will only ever be + /// one FileMap object for the corresponding file in the local codemap. + /// + /// Note that imported FileMaps do not actually contain the source code of the + /// file they represent, just information about length, line breaks, and + /// multibyte characters. This information is enough to generate valid debuginfo + /// for items inlined from other crates. 
+ pub fn imported_filemaps(&'a self, + local_codemap: &codemap::CodeMap) + -> Ref<'a, Vec> { + { + let filemaps = self.codemap_import_info.borrow(); + if !filemaps.is_empty() { + return filemaps; + } + } -fn doc_predicates<'a, 'tcx>(base_doc: rbml::Doc, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - cdata: Cmd, - tag: usize) - -> ty::GenericPredicates<'tcx> -{ - let doc = reader::get_doc(base_doc, tag); + let external_codemap = self.root.codemap.decode(self); + + let imported_filemaps = external_codemap.map(|filemap_to_import| { + // Try to find an existing FileMap that can be reused for the filemap to + // be imported. A FileMap is reusable if it is exactly the same, just + // positioned at a different offset within the codemap. + let reusable_filemap = { + local_codemap.files + .borrow() + .iter() + .find(|fm| are_equal_modulo_startpos(&fm, &filemap_to_import)) + .map(|rc| rc.clone()) + }; + + match reusable_filemap { + Some(fm) => { + + debug!("CrateMetaData::imported_filemaps reuse \ + filemap {:?} original (start_pos {:?} end_pos {:?}) \ + translated (start_pos {:?} end_pos {:?})", + filemap_to_import.name, + filemap_to_import.start_pos, filemap_to_import.end_pos, + fm.start_pos, fm.end_pos); + + cstore::ImportedFileMap { + original_start_pos: filemap_to_import.start_pos, + original_end_pos: filemap_to_import.end_pos, + translated_filemap: fm, + } + } + None => { + // We can't reuse an existing FileMap, so allocate a new one + // containing the information we need. + let syntax_pos::FileMap { name, + abs_path, + start_pos, + end_pos, + lines, + multibyte_chars, + .. } = filemap_to_import; + + let source_length = (end_pos - start_pos).to_usize(); + + // Translate line-start positions and multibyte character + // position into frame of reference local to file. + // `CodeMap::new_imported_filemap()` will then translate those + // coordinates to their new global frame of reference when the + // offset of the FileMap is known. + let mut lines = lines.into_inner(); + for pos in &mut lines { + *pos = *pos - start_pos; + } + let mut multibyte_chars = multibyte_chars.into_inner(); + for mbc in &mut multibyte_chars { + mbc.pos = mbc.pos - start_pos; + } + + let local_version = local_codemap.new_imported_filemap(name, + abs_path, + source_length, + lines, + multibyte_chars); + debug!("CrateMetaData::imported_filemaps alloc \ + filemap {:?} original (start_pos {:?} end_pos {:?}) \ + translated (start_pos {:?} end_pos {:?})", + local_version.name, start_pos, end_pos, + local_version.start_pos, local_version.end_pos); + + cstore::ImportedFileMap { + original_start_pos: start_pos, + original_end_pos: end_pos, + translated_filemap: local_version, + } + } + } + }) + .collect(); - ty::GenericPredicates { - parent: item_parent_item(cdata, doc), - predicates: reader::tagged_docs(doc, tag_predicate).map(|predicate_doc| { - doc_predicate(cdata, predicate_doc, tcx) - }).collect() + // This shouldn't borrow twice, but there is no way to downgrade RefMut to Ref. 
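`imported_filemaps` above rebases line-start and multibyte-character positions from the external codemap's global offsets to file-local offsets before handing them to `CodeMap::new_imported_filemap`, which then places the file at a fresh offset in the local codemap. A minimal sketch of just the rebasing step (the helper name and numbers are illustrative):

```rust
/// Shift positions recorded relative to the whole external codemap so they
/// become relative to the start of their own file.
fn rebase_positions(positions: &[u32], start_pos: u32) -> Vec<u32> {
    positions.iter().map(|&pos| pos - start_pos).collect()
}

fn main() {
    // A file that began at global offset 100, with lines starting at 100, 120, 157.
    assert_eq!(rebase_positions(&[100, 120, 157], 100), vec![0, 20, 57]);
}
```

`are_equal_modulo_startpos`, defined after this function, compares two filemaps the same way: both sides are rebased to their own start positions before their line and multibyte tables are checked for equality.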
+ *self.codemap_import_info.borrow_mut() = imported_filemaps; + self.codemap_import_info.borrow() } } -pub fn is_defaulted_trait(cdata: Cmd, trait_id: DefIndex) -> bool { - let trait_doc = cdata.lookup_item(trait_id); - assert!(item_family(trait_doc) == Family::Trait); - let defaulted_doc = reader::get_doc(trait_doc, tag_defaulted_trait); - reader::doc_as_u8(defaulted_doc) != 0 -} - -pub fn is_default_impl(cdata: Cmd, impl_id: DefIndex) -> bool { - let impl_doc = cdata.lookup_item(impl_id); - item_family(impl_doc) == Family::DefaultImpl -} - -pub fn get_imported_filemaps(metadata: &[u8]) -> Vec { - let crate_doc = rbml::Doc::new(metadata); - let cm_doc = reader::get_doc(crate_doc, tag_codemap); +fn are_equal_modulo_startpos(fm1: &syntax_pos::FileMap, fm2: &syntax_pos::FileMap) -> bool { + if fm1.byte_length() != fm2.byte_length() { + return false; + } - reader::tagged_docs(cm_doc, tag_codemap_filemap).map(|filemap_doc| { - let mut decoder = reader::Decoder::new(filemap_doc); - decoder.read_opaque(|opaque_decoder, _| { - Decodable::decode(opaque_decoder) - }).unwrap() - }).collect() -} + if fm1.name != fm2.name { + return false; + } -pub fn closure_kind(cdata: Cmd, closure_id: DefIndex) -> ty::ClosureKind { - let closure_doc = cdata.lookup_item(closure_id); - let closure_kind_doc = reader::get_doc(closure_doc, tag_items_closure_kind); - let mut decoder = reader::Decoder::new(closure_kind_doc); - ty::ClosureKind::decode(&mut decoder).unwrap() -} + let lines1 = fm1.lines.borrow(); + let lines2 = fm2.lines.borrow(); -pub fn closure_ty<'a, 'tcx>(cdata: Cmd, closure_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) - -> ty::ClosureTy<'tcx> { - let closure_doc = cdata.lookup_item(closure_id); - let closure_ty_doc = reader::get_doc(closure_doc, tag_items_closure_ty); - TyDecoder::with_doc(tcx, cdata.cnum, closure_ty_doc, &mut |did| translate_def_id(cdata, did)) - .parse_closure_ty() -} - -pub fn def_key(cdata: Cmd, id: DefIndex) -> hir_map::DefKey { - debug!("def_key: id={:?}", id); - let item_doc = cdata.lookup_item(id); - item_def_key(item_doc) -} + if lines1.len() != lines2.len() { + return false; + } -fn item_def_key(item_doc: rbml::Doc) -> hir_map::DefKey { - match reader::maybe_get_doc(item_doc, tag_def_key) { - Some(def_key_doc) => { - let mut decoder = reader::Decoder::new(def_key_doc); - let simple_key = def_key::DefKey::decode(&mut decoder).unwrap(); - let name = reader::maybe_get_doc(item_doc, tag_paths_data_name).map(|name| { - token::intern(name.as_str()).as_str() - }); - def_key::recover_def_key(simple_key, name) - } - None => { - bug!("failed to find block with tag {:?} for item with family {:?}", - tag_def_key, - item_family(item_doc)) + for (&line1, &line2) in lines1.iter().zip(lines2.iter()) { + if (line1 - fm1.start_pos) != (line2 - fm2.start_pos) { + return false; } } -} -// Returns the path leading to the thing with this `id`. 
Note that -// some def-ids don't wind up in the metadata, so `def_path` sometimes -// returns `None` -pub fn def_path(cdata: Cmd, id: DefIndex) -> Option { - debug!("def_path(id={:?})", id); - if cdata.get_item(id).is_some() { - Some(hir_map::DefPath::make(cdata.cnum, id, |parent| def_key(cdata, parent))) - } else { - None + let multibytes1 = fm1.multibyte_chars.borrow(); + let multibytes2 = fm2.multibyte_chars.borrow(); + + if multibytes1.len() != multibytes2.len() { + return false; } -} -pub fn get_panic_strategy(data: &[u8]) -> PanicStrategy { - let crate_doc = rbml::Doc::new(data); - let strat_doc = reader::get_doc(crate_doc, tag_panic_strategy); - match reader::doc_as_u8(strat_doc) { - b'U' => PanicStrategy::Unwind, - b'A' => PanicStrategy::Abort, - b => panic!("unknown panic strategy in metadata: {}", b), + for (mb1, mb2) in multibytes1.iter().zip(multibytes2.iter()) { + if (mb1.bytes != mb2.bytes) || ((mb1.pos - fm1.start_pos) != (mb2.pos - fm2.start_pos)) { + return false; + } } + + true } diff --git a/src/librustc_metadata/def_key.rs b/src/librustc_metadata/def_key.rs deleted file mode 100644 index 285ca2e4d4d4d..0000000000000 --- a/src/librustc_metadata/def_key.rs +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use rustc::hir::def_id::DefIndex; -use rustc::hir::map as hir_map; -use syntax::parse::token::InternedString; - -#[derive(RustcEncodable, RustcDecodable)] -pub struct DefKey { - pub parent: Option, - pub disambiguated_data: DisambiguatedDefPathData, -} - -#[derive(RustcEncodable, RustcDecodable)] -pub struct DisambiguatedDefPathData { - pub data: DefPathData, - pub disambiguator: u32, -} - -#[derive(RustcEncodable, RustcDecodable)] -pub enum DefPathData { - CrateRoot, - Misc, - Impl, - TypeNs, - ValueNs, - Module, - MacroDef, - ClosureExpr, - TypeParam, - LifetimeDef, - EnumVariant, - Field, - StructCtor, - Initializer, - Binding, - ImplTrait, -} - -pub fn simplify_def_key(key: hir_map::DefKey) -> DefKey { - let data = DisambiguatedDefPathData { - data: simplify_def_path_data(key.disambiguated_data.data), - disambiguator: key.disambiguated_data.disambiguator, - }; - DefKey { - parent: key.parent, - disambiguated_data: data, - } -} - -fn simplify_def_path_data(data: hir_map::DefPathData) -> DefPathData { - match data { - hir_map::DefPathData::CrateRoot => DefPathData::CrateRoot, - hir_map::DefPathData::InlinedRoot(_) => bug!("unexpected DefPathData"), - hir_map::DefPathData::Misc => DefPathData::Misc, - hir_map::DefPathData::Impl => DefPathData::Impl, - hir_map::DefPathData::TypeNs(_) => DefPathData::TypeNs, - hir_map::DefPathData::ValueNs(_) => DefPathData::ValueNs, - hir_map::DefPathData::Module(_) => DefPathData::Module, - hir_map::DefPathData::MacroDef(_) => DefPathData::MacroDef, - hir_map::DefPathData::ClosureExpr => DefPathData::ClosureExpr, - hir_map::DefPathData::TypeParam(_) => DefPathData::TypeParam, - hir_map::DefPathData::LifetimeDef(_) => DefPathData::LifetimeDef, - hir_map::DefPathData::EnumVariant(_) => DefPathData::EnumVariant, - hir_map::DefPathData::Field(_) => DefPathData::Field, - hir_map::DefPathData::StructCtor => DefPathData::StructCtor, - hir_map::DefPathData::Initializer => DefPathData::Initializer, - 
hir_map::DefPathData::Binding(_) => DefPathData::Binding, - hir_map::DefPathData::ImplTrait => DefPathData::ImplTrait, - } -} - -pub fn recover_def_key(key: DefKey, name: Option) -> hir_map::DefKey { - let data = hir_map::DisambiguatedDefPathData { - data: recover_def_path_data(key.disambiguated_data.data, name), - disambiguator: key.disambiguated_data.disambiguator, - }; - hir_map::DefKey { - parent: key.parent, - disambiguated_data: data, - } -} - -fn recover_def_path_data(data: DefPathData, name: Option) -> hir_map::DefPathData { - match data { - DefPathData::CrateRoot => hir_map::DefPathData::CrateRoot, - DefPathData::Misc => hir_map::DefPathData::Misc, - DefPathData::Impl => hir_map::DefPathData::Impl, - DefPathData::TypeNs => hir_map::DefPathData::TypeNs(name.unwrap()), - DefPathData::ValueNs => hir_map::DefPathData::ValueNs(name.unwrap()), - DefPathData::Module => hir_map::DefPathData::Module(name.unwrap()), - DefPathData::MacroDef => hir_map::DefPathData::MacroDef(name.unwrap()), - DefPathData::ClosureExpr => hir_map::DefPathData::ClosureExpr, - DefPathData::TypeParam => hir_map::DefPathData::TypeParam(name.unwrap()), - DefPathData::LifetimeDef => hir_map::DefPathData::LifetimeDef(name.unwrap()), - DefPathData::EnumVariant => hir_map::DefPathData::EnumVariant(name.unwrap()), - DefPathData::Field => hir_map::DefPathData::Field(name.unwrap()), - DefPathData::StructCtor => hir_map::DefPathData::StructCtor, - DefPathData::Initializer => hir_map::DefPathData::Initializer, - DefPathData::Binding => hir_map::DefPathData::Binding(name.unwrap()), - DefPathData::ImplTrait => hir_map::DefPathData::ImplTrait, - } -} diff --git a/src/librustc_metadata/diagnostics.rs b/src/librustc_metadata/diagnostics.rs index 099ec62b38de7..b2f4760727ae1 100644 --- a/src/librustc_metadata/diagnostics.rs +++ b/src/librustc_metadata/diagnostics.rs @@ -21,7 +21,7 @@ A link name was given with an empty name. Erroneous code example: The rust compiler cannot link to an external library if you don't give it its name. Example: -``` +```ignore #[link(name = "some_lib")] extern {} // ok! ``` "##, @@ -32,8 +32,8 @@ as frameworks are specific to that operating system. Erroneous code example: -```compile_fail,E0455 -#[link(name = "FooCoreServices", kind = "framework")] extern {} +```ignore +#[link(name = "FooCoreServices", kind = "framework")] extern {} // OS used to compile is Linux for example ``` @@ -72,7 +72,7 @@ A link was used without a name parameter. Erroneous code example: Please add the name parameter to allow the rust compiler to find the library you want. Example: -``` +```ignore #[link(kind = "dylib", name = "some_lib")] extern {} // ok! ``` "##, @@ -102,11 +102,6 @@ register_diagnostics! { E0462, // found staticlib `..` instead of rlib or dylib E0464, // multiple matching crates for `..` E0465, // multiple .. candidates for `..` found - E0466, // bad macro import - E0467, // bad macro reexport - E0468, // an `extern crate` loading macros must be at the crate root - E0469, // imported macro not found - E0470, // reexported macro not found E0519, // local crate and dependency have same (crate-name, disambiguator) E0523, // two dependencies have same (crate-name, disambiguator) but different SVH } diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index e228bf7430261..e8734e4275712 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -8,401 +8,368 @@ // option. 
This file may not be copied, modified, or distributed // except according to those terms. -// Metadata encoding - -#![allow(unused_must_use)] // everything is just a MemWriter, can't fail -#![allow(non_camel_case_types)] - -use astencode::encode_inlined_item; -use common::*; use cstore; -use decoder; -use def_key; -use tyencode; -use index::{self, IndexData}; +use index::Index; +use schema::*; -use middle::cstore::{InlinedItemRef, LinkMeta, tls}; +use rustc::middle::cstore::{InlinedItemRef, LinkMeta}; +use rustc::middle::cstore::{LinkagePreference, NativeLibraryKind}; use rustc::hir::def; -use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; -use middle::dependency_format::Linkage; -use rustc::dep_graph::DepNode; +use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefIndex, DefId}; +use rustc::middle::dependency_format::Linkage; +use rustc::middle::lang_items; +use rustc::mir; use rustc::traits::specialization_graph; use rustc::ty::{self, Ty, TyCtxt}; -use rustc::hir::svh::Svh; -use rustc::mir::mir_map::MirMap; -use rustc::session::config::{self, PanicStrategy, CrateTypeRustcMacro}; +use rustc::session::config::{self, CrateTypeProcMacro}; use rustc::util::nodemap::{FnvHashMap, NodeSet}; -use rustc_serialize::Encodable; -use std::cell::RefCell; +use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque}; +use std::hash::Hash; +use std::intrinsics; use std::io::prelude::*; -use std::io::{Cursor, SeekFrom}; +use std::io::Cursor; use std::rc::Rc; use std::u32; -use syntax::ast::{self, NodeId, Name, CRATE_NODE_ID, CrateNum}; +use syntax::ast::{self, CRATE_NODE_ID}; use syntax::attr; -use errors::Handler; use syntax; -use syntax_pos::BytePos; -use rbml::writer::Encoder; +use syntax_pos; use rustc::hir::{self, PatKind}; use rustc::hir::intravisit::Visitor; use rustc::hir::intravisit; -use rustc::hir::map::DefKey; -use super::index_builder::{FromId, IndexBuilder, ItemContentBuilder, Untracked, XRef}; +use super::index_builder::{FromId, IndexBuilder, Untracked}; pub struct EncodeContext<'a, 'tcx: 'a> { - pub diag: &'a Handler, + opaque: opaque::Encoder<'a>, pub tcx: TyCtxt<'a, 'tcx, 'tcx>, - pub reexports: &'a def::ExportMap, - pub link_meta: &'a LinkMeta, - pub cstore: &'a cstore::CStore, - pub type_abbrevs: tyencode::abbrev_map<'tcx>, - pub reachable: &'a NodeSet, - pub mir_map: &'a MirMap<'tcx>, -} - -impl<'a, 'tcx> EncodeContext<'a,'tcx> { - fn local_id(&self, def_id: DefId) -> NodeId { - self.tcx.map.as_local_node_id(def_id).unwrap() - } -} - -fn encode_name(rbml_w: &mut Encoder, name: Name) { - rbml_w.wr_tagged_str(tag_paths_data_name, &name.as_str()); -} - -fn encode_def_id(rbml_w: &mut Encoder, id: DefId) { - rbml_w.wr_tagged_u64(tag_def_id, def_to_u64(id)); -} - -fn encode_def_key(rbml_w: &mut Encoder, key: DefKey) { - let simple_key = def_key::simplify_def_key(key); - rbml_w.start_tag(tag_def_key); - simple_key.encode(rbml_w); - rbml_w.end_tag(); -} + reexports: &'a def::ExportMap, + link_meta: &'a LinkMeta, + cstore: &'a cstore::CStore, + reachable: &'a NodeSet, -/// For every DefId that we create a metadata item for, we include a -/// serialized copy of its DefKey, which allows us to recreate a path. 
-fn encode_def_id_and_key(ecx: &EncodeContext, - rbml_w: &mut Encoder, - def_id: DefId) -{ - encode_def_id(rbml_w, def_id); - let def_key = ecx.tcx.map.def_key(def_id); - encode_def_key(rbml_w, def_key); + lazy_state: LazyState, + type_shorthands: FnvHashMap, usize>, + predicate_shorthands: FnvHashMap, usize>, } -fn encode_trait_ref<'a, 'tcx>(rbml_w: &mut Encoder, - ecx: &EncodeContext<'a, 'tcx>, - trait_ref: ty::TraitRef<'tcx>, - tag: usize) { - rbml_w.start_tag(tag); - tyencode::enc_trait_ref(rbml_w.writer, &ecx.ty_str_ctxt(), trait_ref); - rbml_w.mark_stable_position(); - rbml_w.end_tag(); +macro_rules! encoder_methods { + ($($name:ident($ty:ty);)*) => { + $(fn $name(&mut self, value: $ty) -> Result<(), Self::Error> { + self.opaque.$name(value) + })* + } } -// Item info table encoding -fn encode_family(rbml_w: &mut Encoder, c: char) { - rbml_w.wr_tagged_u8(tag_items_data_item_family, c as u8); -} +impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> { + type Error = as Encoder>::Error; -pub fn def_to_u64(did: DefId) -> u64 { - assert!(did.index.as_u32() < u32::MAX); - (did.krate as u64) << 32 | (did.index.as_usize() as u64) -} + fn emit_nil(&mut self) -> Result<(), Self::Error> { + Ok(()) + } -pub fn def_to_string(_tcx: TyCtxt, did: DefId) -> String { - format!("{}:{}", did.krate, did.index.as_usize()) -} + encoder_methods! { + emit_usize(usize); + emit_u64(u64); + emit_u32(u32); + emit_u16(u16); + emit_u8(u8); -fn encode_item_variances(rbml_w: &mut Encoder, - ecx: &EncodeContext, - id: NodeId) { - let v = ecx.tcx.item_variances(ecx.tcx.map.local_def_id(id)); - rbml_w.start_tag(tag_item_variances); - v.encode(rbml_w); - rbml_w.end_tag(); -} + emit_isize(isize); + emit_i64(i64); + emit_i32(i32); + emit_i16(i16); + emit_i8(i8); -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { - fn encode_bounds_and_type_for_item(&mut self, - id: NodeId) { - let ecx = self.ecx(); - self.encode_bounds_and_type(&ecx.tcx.lookup_item_type(ecx.tcx.map.local_def_id(id)), - &ecx.tcx.lookup_predicates(ecx.tcx.map.local_def_id(id))); + emit_bool(bool); + emit_f64(f64); + emit_f32(f32); + emit_char(char); + emit_str(&str); } +} - fn encode_bounds_and_type(&mut self, - scheme: &ty::TypeScheme<'tcx>, - predicates: &ty::GenericPredicates<'tcx>) { - self.encode_generics(&scheme.generics, &predicates); - self.encode_type(scheme.ty); +impl<'a, 'tcx, T> SpecializedEncoder> for EncodeContext<'a, 'tcx> { + fn specialized_encode(&mut self, lazy: &Lazy) -> Result<(), Self::Error> { + self.emit_lazy_distance(lazy.position, Lazy::::min_size()) } } -fn encode_variant_id(rbml_w: &mut Encoder, vid: DefId) { - let id = def_to_u64(vid); - rbml_w.wr_tagged_u64(tag_items_data_item_variant, id); - rbml_w.wr_tagged_u64(tag_mod_child, id); +impl<'a, 'tcx, T> SpecializedEncoder> for EncodeContext<'a, 'tcx> { + fn specialized_encode(&mut self, seq: &LazySeq) -> Result<(), Self::Error> { + self.emit_usize(seq.len)?; + if seq.len == 0 { + return Ok(()); + } + self.emit_lazy_distance(seq.position, LazySeq::::min_size(seq.len)) + } } -fn write_closure_type<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, - rbml_w: &mut Encoder, - closure_type: &ty::ClosureTy<'tcx>) { - tyencode::enc_closure_ty(rbml_w.writer, &ecx.ty_str_ctxt(), closure_type); - rbml_w.mark_stable_position(); +impl<'a, 'tcx> SpecializedEncoder> for EncodeContext<'a, 'tcx> { + fn specialized_encode(&mut self, ty: &Ty<'tcx>) -> Result<(), Self::Error> { + self.encode_with_shorthand(ty, &ty.sty, |ecx| &mut ecx.type_shorthands) + } } -impl<'a, 'tcx, 'encoder> 
ItemContentBuilder<'a, 'tcx, 'encoder> { - fn encode_type(&mut self, - typ: Ty<'tcx>) { - let ecx = self.ecx; - self.rbml_w.start_tag(tag_items_data_item_type); - tyencode::enc_ty(self.rbml_w.writer, &ecx.ty_str_ctxt(), typ); - self.rbml_w.mark_stable_position(); - self.rbml_w.end_tag(); +impl<'a, 'tcx> SpecializedEncoder> for EncodeContext<'a, 'tcx> { + fn specialized_encode(&mut self, + predicates: &ty::GenericPredicates<'tcx>) + -> Result<(), Self::Error> { + predicates.parent.encode(self)?; + predicates.predicates.len().encode(self)?; + for predicate in &predicates.predicates { + self.encode_with_shorthand(predicate, predicate, |ecx| &mut ecx.predicate_shorthands)? + } + Ok(()) } +} - fn encode_disr_val(&mut self, - disr_val: ty::Disr) { - // convert to u64 so just the number is printed, without any type info - self.rbml_w.wr_tagged_str(tag_disr_val, &disr_val.to_u64_unchecked().to_string()); +impl<'a, 'tcx> EncodeContext<'a, 'tcx> { + pub fn position(&self) -> usize { + self.opaque.position() } - fn encode_parent_item(&mut self, id: DefId) { - self.rbml_w.wr_tagged_u64(tag_items_data_parent_item, def_to_u64(id)); + fn emit_node R, R>(&mut self, f: F) -> R { + assert_eq!(self.lazy_state, LazyState::NoNode); + let pos = self.position(); + self.lazy_state = LazyState::NodeStart(pos); + let r = f(self, pos); + self.lazy_state = LazyState::NoNode; + r } - fn encode_struct_fields(&mut self, - variant: ty::VariantDef) { - for f in &variant.fields { - if variant.kind == ty::VariantKind::Tuple { - self.rbml_w.start_tag(tag_item_unnamed_field); - } else { - self.rbml_w.start_tag(tag_item_field); - encode_name(self.rbml_w, f.name); + fn emit_lazy_distance(&mut self, + position: usize, + min_size: usize) + -> Result<(), ::Error> { + let min_end = position + min_size; + let distance = match self.lazy_state { + LazyState::NoNode => bug!("emit_lazy_distance: outside of a metadata node"), + LazyState::NodeStart(start) => { + assert!(min_end <= start); + start - min_end } - self.encode_struct_field_family(f.vis); - encode_def_id(self.rbml_w, f.did); - self.rbml_w.end_tag(); - } - } -} - -impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> { - fn encode_enum_variant_infos(&mut self, - enum_did: DefId) { - debug!("encode_enum_variant_info(enum_did={:?})", enum_did); - let ecx = self.ecx(); - let def = ecx.tcx.lookup_adt_def(enum_did); - self.encode_fields(enum_did); - for (i, variant) in def.variants.iter().enumerate() { - self.record(variant.did, - ItemContentBuilder::encode_enum_variant_info, - (enum_did, Untracked(i))); - } + LazyState::Previous(last_min_end) => { + assert!(last_min_end <= position); + position - last_min_end + } + }; + self.lazy_state = LazyState::Previous(min_end); + self.emit_usize(distance) } -} - -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { - /// Encode data for the given variant of the given ADT. The - /// index of the variant is untracked: this is ok because we - /// will have to lookup the adt-def by its id, and that gives us - /// the right to access any information in the adt-def (including, - /// e.g., the length of the various vectors). 
- fn encode_enum_variant_info(&mut self, - (enum_did, Untracked(index)): - (DefId, Untracked)) { - let ecx = self.ecx; - let def = ecx.tcx.lookup_adt_def(enum_did); - let variant = &def.variants[index]; - let vid = variant.did; - let variant_node_id = ecx.local_id(vid); - encode_def_id_and_key(ecx, self.rbml_w, vid); - encode_family(self.rbml_w, match variant.kind { - ty::VariantKind::Struct => 'V', - ty::VariantKind::Tuple => 'v', - ty::VariantKind::Unit => 'w', - }); - encode_name(self.rbml_w, variant.name); - self.encode_parent_item(enum_did); - - let enum_id = ecx.tcx.map.as_local_node_id(enum_did).unwrap(); - let enum_vis = &ecx.tcx.map.expect_item(enum_id).vis; - self.encode_visibility(enum_vis); - let attrs = ecx.tcx.get_attrs(vid); - encode_attributes(self.rbml_w, &attrs); - self.encode_repr_attrs(&attrs); + pub fn lazy(&mut self, value: &T) -> Lazy { + self.emit_node(|ecx, pos| { + value.encode(ecx).unwrap(); - let stab = ecx.tcx.lookup_stability(vid); - let depr = ecx.tcx.lookup_deprecation(vid); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - - self.encode_struct_fields(variant); - self.encode_disr_val(variant.disr_val); - self.encode_bounds_and_type_for_item(variant_node_id); + assert!(pos + Lazy::::min_size() <= ecx.position()); + Lazy::with_position(pos) + }) } -} -fn encode_reexports(ecx: &EncodeContext, - rbml_w: &mut Encoder, - id: NodeId) { - debug!("(encoding info for module) encoding reexports for {}", id); - match ecx.reexports.get(&id) { - Some(exports) => { - debug!("(encoding info for module) found reexports for {}", id); - for exp in exports { - debug!("(encoding info for module) reexport '{}' ({:?}) for \ - {}", - exp.name, - exp.def_id, - id); - rbml_w.start_tag(tag_items_data_item_reexport); - rbml_w.wr_tagged_u64(tag_items_data_item_reexport_def_id, - def_to_u64(exp.def_id)); - rbml_w.wr_tagged_str(tag_items_data_item_reexport_name, - &exp.name.as_str()); - rbml_w.end_tag(); - } - }, - None => debug!("(encoding info for module) found no reexports for {}", id), + fn lazy_seq(&mut self, iter: I) -> LazySeq + where I: IntoIterator, + T: Encodable + { + self.emit_node(|ecx, pos| { + let len = iter.into_iter().map(|value| value.encode(ecx).unwrap()).count(); + + assert!(pos + LazySeq::::min_size(len) <= ecx.position()); + LazySeq::with_position_and_length(pos, len) + }) } -} -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { - fn encode_info_for_mod(&mut self, - FromId(id, (md, attrs, name, vis)): - FromId<(&hir::Mod, &[ast::Attribute], Name, &hir::Visibility)>) { - let ecx = self.ecx(); - - encode_def_id_and_key(ecx, self.rbml_w, ecx.tcx.map.local_def_id(id)); - encode_family(self.rbml_w, 'm'); - encode_name(self.rbml_w, name); - debug!("(encoding info for module) encoding info for module ID {}", id); - - // Encode info about all the module children. - for item_id in &md.item_ids { - self.rbml_w.wr_tagged_u64(tag_mod_child, - def_to_u64(ecx.tcx.map.local_def_id(item_id.id))); + fn lazy_seq_ref<'b, I, T>(&mut self, iter: I) -> LazySeq + where I: IntoIterator, + T: 'b + Encodable + { + self.emit_node(|ecx, pos| { + let len = iter.into_iter().map(|value| value.encode(ecx).unwrap()).count(); + + assert!(pos + LazySeq::::min_size(len) <= ecx.position()); + LazySeq::with_position_and_length(pos, len) + }) + } + + /// Encode the given value or a previously cached shorthand. 
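// A self-contained model of the relative-distance scheme implemented by
// `emit_lazy_distance` above: within a metadata node, the first `Lazy`
// is written as the gap between its minimum end and the node's start,
// and every later one as the gap from the previous minimum end. The
// offsets used in `main` are invented purely for illustration.
#[derive(Clone, Copy)]
enum LazyState {
    NodeStart(usize),
    Previous(usize),
}

fn lazy_distance(state: &mut LazyState, position: usize, min_size: usize) -> usize {
    let min_end = position + min_size;
    let distance = match *state {
        LazyState::NodeStart(start) => {
            assert!(min_end <= start);
            start - min_end
        }
        LazyState::Previous(last_min_end) => {
            assert!(last_min_end <= position);
            position - last_min_end
        }
    };
    *state = LazyState::Previous(min_end);
    distance
}

fn main() {
    // Two lazy values written at byte offsets 10 and 30 (minimum size 1),
    // referenced from a node that starts at offset 40.
    let mut state = LazyState::NodeStart(40);
    assert_eq!(lazy_distance(&mut state, 10, 1), 29); // 40 - (10 + 1)
    assert_eq!(lazy_distance(&mut state, 30, 1), 19); // 30 - (10 + 1)
}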
+ fn encode_with_shorthand(&mut self, + value: &T, + variant: &U, + map: M) + -> Result<(), ::Error> + where M: for<'b> Fn(&'b mut Self) -> &'b mut FnvHashMap, + T: Clone + Eq + Hash, + U: Encodable + { + let existing_shorthand = map(self).get(value).cloned(); + if let Some(shorthand) = existing_shorthand { + return self.emit_usize(shorthand); } - self.encode_visibility(vis); + let start = self.position(); + variant.encode(self)?; + let len = self.position() - start; + + // The shorthand encoding uses the same usize as the + // discriminant, with an offset so they can't conflict. + let discriminant = unsafe { intrinsics::discriminant_value(variant) }; + assert!(discriminant < SHORTHAND_OFFSET as u64); + let shorthand = start + SHORTHAND_OFFSET; - let stab = ecx.tcx.lookup_stability(ecx.tcx.map.local_def_id(id)); - let depr = ecx.tcx.lookup_deprecation(ecx.tcx.map.local_def_id(id)); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); + // Get the number of bits that leb128 could fit + // in the same space as the fully encoded type. + let leb128_bits = len * 7; - // Encode the reexports of this module, if this module is public. - if *vis == hir::Public { - debug!("(encoding info for module) encoding reexports for {}", id); - encode_reexports(ecx, self.rbml_w, id); + // Check that the shorthand is a not longer than the + // full encoding itself, i.e. it's an obvious win. + if leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits) { + map(self).insert(value.clone(), shorthand); } - encode_attributes(self.rbml_w, attrs); - } - fn encode_struct_field_family(&mut self, - visibility: ty::Visibility) { - encode_family(self.rbml_w, if visibility.is_public() { 'g' } else { 'N' }); + Ok(()) } - fn encode_visibility(&mut self, visibility: T) { - let ch = if visibility.is_public() { 'y' } else { 'i' }; - self.rbml_w.wr_tagged_u8(tag_items_data_item_visibility, ch as u8); + /// For every DefId that we create a metadata item for, we include a + /// serialized copy of its DefKey, which allows us to recreate a path. + fn encode_def_key(&mut self, def_id: DefId) -> Lazy { + let tcx = self.tcx; + self.lazy(&tcx.map.def_key(def_id)) } -} -trait HasVisibility: Sized { - fn is_public(self) -> bool; -} + fn encode_item_variances(&mut self, def_id: DefId) -> LazySeq { + let tcx = self.tcx; + self.lazy_seq(tcx.item_variances(def_id).iter().cloned()) + } -impl<'a> HasVisibility for &'a hir::Visibility { - fn is_public(self) -> bool { - *self == hir::Public + fn encode_item_type(&mut self, def_id: DefId) -> Lazy> { + let tcx = self.tcx; + self.lazy(&tcx.lookup_item_type(def_id).ty) } -} -impl HasVisibility for ty::Visibility { - fn is_public(self) -> bool { - self == ty::Visibility::Public + /// Encode data for the given variant of the given ADT. The + /// index of the variant is untracked: this is ok because we + /// will have to lookup the adt-def by its id, and that gives us + /// the right to access any information in the adt-def (including, + /// e.g., the length of the various vectors). 
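// The caching decision at the end of `encode_with_shorthand` above, as a
// standalone predicate: a shorthand offset is only remembered when its
// LEB128 encoding (7 payload bits per byte) cannot be longer than the
// `len` bytes the full encoding already took. The concrete value of
// SHORTHAND_OFFSET below is an assumption made for the example.
const SHORTHAND_OFFSET: usize = 0x80;

fn shorthand_is_a_win(full_encoding_len: usize, start: usize) -> bool {
    let shorthand = start + SHORTHAND_OFFSET;
    // `full_encoding_len` LEB128 bytes can represent values below
    // 1 << (full_encoding_len * 7).
    let leb128_bits = full_encoding_len * 7;
    leb128_bits >= 64 || (shorthand as u64) < (1 << leb128_bits)
}

fn main() {
    // A type that needed 3 bytes to encode in full: any shorthand below
    // 2^21 fits in at most 3 LEB128 bytes, so caching it is a clear win.
    assert!(shorthand_is_a_win(3, 1_000));
    // A 1-byte encoding can never be beaten once the offset exceeds 127.
    assert!(!shorthand_is_a_win(1, 1_000));
}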
+ fn encode_enum_variant_info(&mut self, + (enum_did, Untracked(index)): (DefId, Untracked)) + -> Entry<'tcx> { + let tcx = self.tcx; + let def = tcx.lookup_adt_def(enum_did); + let variant = &def.variants[index]; + let def_id = variant.did; + + let data = VariantData { + ctor_kind: variant.ctor_kind, + disr: variant.disr_val.to_u64_unchecked(), + struct_ctor: None, + }; + + let enum_id = tcx.map.as_local_node_id(enum_did).unwrap(); + let enum_vis = &tcx.map.expect_item(enum_id).vis; + + Entry { + kind: EntryKind::Variant(self.lazy(&data)), + visibility: enum_vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&tcx.get_attrs(def_id)), + children: self.lazy_seq(variant.fields.iter().map(|f| { + assert!(f.did.is_local()); + f.did.index + })), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: Some(self.encode_item_type(def_id)), + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), + + ast: None, + mir: None, + } } -} -fn encode_constness(rbml_w: &mut Encoder, constness: hir::Constness) { - rbml_w.start_tag(tag_items_data_item_constness); - let ch = match constness { - hir::Constness::Const => 'c', - hir::Constness::NotConst => 'n', - }; - rbml_w.wr_str(&ch.to_string()); - rbml_w.end_tag(); + fn encode_info_for_mod(&mut self, + FromId(id, (md, attrs, vis)): FromId<(&hir::Mod, + &[ast::Attribute], + &hir::Visibility)>) + -> Entry<'tcx> { + let tcx = self.tcx; + let def_id = tcx.map.local_def_id(id); + + let data = ModData { + reexports: match self.reexports.get(&id) { + Some(exports) if *vis == hir::Public => self.lazy_seq_ref(exports), + _ => LazySeq::empty(), + }, + }; + + Entry { + kind: EntryKind::Mod(self.lazy(&data)), + visibility: vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(attrs), + children: self.lazy_seq(md.item_ids.iter().map(|item_id| { + tcx.map.local_def_id(item_id.id).index + })), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: None, + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: None, + predicates: None, + + ast: None, + mir: None + } + } } -fn encode_defaultness(rbml_w: &mut Encoder, defaultness: hir::Defaultness) { - let ch = match defaultness { - hir::Defaultness::Default => 'd', - hir::Defaultness::Final => 'f', - }; - rbml_w.wr_tagged_u8(tag_items_data_item_defaultness, ch as u8); +trait Visibility { + fn simplify(&self) -> ty::Visibility; } -fn encode_explicit_self(rbml_w: &mut Encoder, - explicit_self: &ty::ExplicitSelfCategory) { - let tag = tag_item_trait_method_explicit_self; - - // Encode the base self type. 
- match *explicit_self { - ty::ExplicitSelfCategory::Static => { - rbml_w.wr_tagged_bytes(tag, &['s' as u8]); - } - ty::ExplicitSelfCategory::ByValue => { - rbml_w.wr_tagged_bytes(tag, &['v' as u8]); - } - ty::ExplicitSelfCategory::ByBox => { - rbml_w.wr_tagged_bytes(tag, &['~' as u8]); - } - ty::ExplicitSelfCategory::ByReference(_, m) => { - // FIXME(#4846) encode custom lifetime - let ch = encode_mutability(m); - rbml_w.wr_tagged_bytes(tag, &['&' as u8, ch]); +impl Visibility for hir::Visibility { + fn simplify(&self) -> ty::Visibility { + if *self == hir::Public { + ty::Visibility::Public + } else { + ty::Visibility::PrivateExternal } } +} - fn encode_mutability(m: hir::Mutability) -> u8 { - match m { - hir::MutImmutable => 'i' as u8, - hir::MutMutable => 'm' as u8, +impl Visibility for ty::Visibility { + fn simplify(&self) -> ty::Visibility { + if *self == ty::Visibility::Public { + ty::Visibility::Public + } else { + ty::Visibility::PrivateExternal } } } -fn encode_item_sort(rbml_w: &mut Encoder, sort: char) { - rbml_w.wr_tagged_u8(tag_item_trait_item_sort, sort as u8); -} - -impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> { - fn encode_fields(&mut self, - adt_def_id: DefId) { - let def = self.ecx().tcx.lookup_adt_def(adt_def_id); +impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { + fn encode_fields(&mut self, adt_def_id: DefId) { + let def = self.tcx.lookup_adt_def(adt_def_id); for (variant_index, variant) in def.variants.iter().enumerate() { for (field_index, field) in variant.fields.iter().enumerate() { self.record(field.did, - ItemContentBuilder::encode_field, + EncodeContext::encode_field, (adt_def_id, Untracked((variant_index, field_index)))); } } } } -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { +impl<'a, 'tcx> EncodeContext<'a, 'tcx> { /// Encode data for the given field of the given variant of the /// given ADT. The indices of the variant/field are untracked: /// this is ok because we will have to lookup the adt-def by its @@ -410,756 +377,472 @@ impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { /// the adt-def (including, e.g., the length of the various /// vectors). 
fn encode_field(&mut self, - (adt_def_id, Untracked((variant_index, field_index))): - (DefId, Untracked<(usize, usize)>)) { - let ecx = self.ecx(); - let def = ecx.tcx.lookup_adt_def(adt_def_id); - let variant = &def.variants[variant_index]; + (adt_def_id, Untracked((variant_index, field_index))): (DefId, + Untracked<(usize, + usize)>)) + -> Entry<'tcx> { + let tcx = self.tcx; + let variant = &tcx.lookup_adt_def(adt_def_id).variants[variant_index]; let field = &variant.fields[field_index]; - let nm = field.name; - let id = ecx.local_id(field.did); - debug!("encode_field: encoding {} {}", nm, id); - - self.encode_struct_field_family(field.vis); - encode_name(self.rbml_w, nm); - self.encode_bounds_and_type_for_item(id); - encode_def_id_and_key(ecx, self.rbml_w, field.did); - - let stab = ecx.tcx.lookup_stability(field.did); - let depr = ecx.tcx.lookup_deprecation(field.did); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - } -} - -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { - fn encode_struct_ctor(&mut self, - (struct_def_id, struct_node_id, ctor_node_id): - (DefId, ast::NodeId, ast::NodeId)) { - let ecx = self.ecx(); - let def = ecx.tcx.lookup_adt_def(struct_def_id); - let variant = def.struct_variant(); - let item = ecx.tcx.map.expect_item(struct_node_id); - let ctor_def_id = ecx.tcx.map.local_def_id(ctor_node_id); - encode_def_id_and_key(ecx, self.rbml_w, ctor_def_id); - encode_family(self.rbml_w, match variant.kind { - ty::VariantKind::Struct => 'S', - ty::VariantKind::Tuple => 's', - ty::VariantKind::Unit => 'u', - }); - self.encode_bounds_and_type_for_item(ctor_node_id); - encode_name(self.rbml_w, item.name); - self.encode_parent_item(struct_def_id); - - let stab = ecx.tcx.lookup_stability(ctor_def_id); - let depr = ecx.tcx.lookup_deprecation(ctor_def_id); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - - // indicate that this is a tuple struct ctor, because - // downstream users will normally want the tuple struct - // definition, but without this there is no way for them - // to tell that they actually have a ctor rather than a - // normal function - self.rbml_w.wr_tagged_bytes(tag_items_data_item_is_tuple_struct_ctor, &[]); - } -} - -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { - fn encode_generics(&mut self, - generics: &ty::Generics<'tcx>, - predicates: &ty::GenericPredicates<'tcx>) - { - let ecx = self.ecx(); - self.rbml_w.start_tag(tag_item_generics); - tyencode::enc_generics(self.rbml_w.writer, &ecx.ty_str_ctxt(), generics); - self.rbml_w.mark_stable_position(); - self.rbml_w.end_tag(); - self.encode_predicates(predicates, tag_item_predicates); - } - - fn encode_predicates(&mut self, - predicates: &ty::GenericPredicates<'tcx>, - tag: usize) { - self.rbml_w.start_tag(tag); - if let Some(def_id) = predicates.parent { - self.rbml_w.wr_tagged_u64(tag_items_data_parent_item, def_to_u64(def_id)); + let def_id = field.did; + let variant_id = tcx.map.as_local_node_id(variant.did).unwrap(); + let variant_data = tcx.map.expect_variant_data(variant_id); + + Entry { + kind: EntryKind::Field, + visibility: field.vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&variant_data.fields()[field_index].attrs), + children: LazySeq::empty(), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: Some(self.encode_item_type(def_id)), + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + 
generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), + + ast: None, + mir: None, } - for predicate in &predicates.predicates { - let xref = self.add_xref(XRef::Predicate(predicate.clone())); - self.rbml_w.wr_tagged_u32(tag_predicate, xref); - } - self.rbml_w.end_tag(); } - fn encode_method_ty_fields(&mut self, - method_ty: &ty::Method<'tcx>) { - let ecx = self.ecx(); - encode_def_id_and_key(ecx, self.rbml_w, method_ty.def_id); - encode_name(self.rbml_w, method_ty.name); - self.encode_generics(&method_ty.generics, &method_ty.predicates); - self.encode_visibility(method_ty.vis); - encode_explicit_self(self.rbml_w, &method_ty.explicit_self); - match method_ty.explicit_self { - ty::ExplicitSelfCategory::Static => { - encode_family(self.rbml_w, STATIC_METHOD_FAMILY); - } - _ => encode_family(self.rbml_w, METHOD_FAMILY) + fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) -> Entry<'tcx> { + let tcx = self.tcx; + let variant = tcx.lookup_adt_def(adt_def_id).struct_variant(); + + let data = VariantData { + ctor_kind: variant.ctor_kind, + disr: variant.disr_val.to_u64_unchecked(), + struct_ctor: Some(def_id.index), + }; + + let struct_id = tcx.map.as_local_node_id(adt_def_id).unwrap(); + let struct_vis = &tcx.map.expect_item(struct_id).vis; + + Entry { + kind: EntryKind::Struct(self.lazy(&data)), + visibility: struct_vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: LazySeq::empty(), + children: LazySeq::empty(), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: Some(self.encode_item_type(def_id)), + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), + + ast: None, + mir: None, } } -} - -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { - fn encode_info_for_trait_item(&mut self, - (trait_def_id, item_def_id, trait_item): - (DefId, DefId, &hir::TraitItem)) { - let ecx = self.ecx; - let tcx = ecx.tcx; - self.encode_parent_item(trait_def_id); + fn encode_generics(&mut self, def_id: DefId) -> Lazy> { + let tcx = self.tcx; + self.lazy(tcx.lookup_generics(def_id)) + } - let stab = tcx.lookup_stability(item_def_id); - let depr = tcx.lookup_deprecation(item_def_id); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); + fn encode_predicates(&mut self, def_id: DefId) -> Lazy> { + let tcx = self.tcx; + self.lazy(&tcx.lookup_predicates(def_id)) + } - let trait_item_type = - tcx.impl_or_trait_item(item_def_id); - let is_nonstatic_method; - match trait_item_type { - ty::ConstTraitItem(associated_const) => { - encode_name(self.rbml_w, associated_const.name); - encode_def_id_and_key(ecx, self.rbml_w, associated_const.def_id); - self.encode_visibility(associated_const.vis); + fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> { + let tcx = self.tcx; - encode_family(self.rbml_w, 'C'); + let node_id = tcx.map.as_local_node_id(def_id).unwrap(); + let ast_item = tcx.map.expect_trait_item(node_id); + let trait_item = tcx.impl_or_trait_item(def_id); - self.encode_bounds_and_type_for_item( - ecx.local_id(associated_const.def_id)); + let container = |has_body| if has_body { + AssociatedContainer::TraitWithDefault + } else { + AssociatedContainer::TraitRequired + }; - is_nonstatic_method = false; + let kind = match trait_item { + ty::ConstTraitItem(ref associated_const) => { + 
EntryKind::AssociatedConst(container(associated_const.has_value)) } - ty::MethodTraitItem(method_ty) => { - let method_def_id = item_def_id; - - self.encode_method_ty_fields(&method_ty); - - match method_ty.explicit_self { - ty::ExplicitSelfCategory::Static => { - encode_family(self.rbml_w, - STATIC_METHOD_FAMILY); - } - _ => { - encode_family(self.rbml_w, - METHOD_FAMILY); + ty::MethodTraitItem(ref method_ty) => { + let fn_data = if let hir::MethodTraitItem(ref sig, _) = ast_item.node { + FnData { + constness: hir::Constness::NotConst, + arg_names: self.encode_fn_arg_names(&sig.decl), } - } - self.encode_bounds_and_type_for_item(ecx.local_id(method_def_id)); - - is_nonstatic_method = method_ty.explicit_self != - ty::ExplicitSelfCategory::Static; - } - ty::TypeTraitItem(associated_type) => { - encode_name(self.rbml_w, associated_type.name); - encode_def_id_and_key(ecx, self.rbml_w, associated_type.def_id); - encode_item_sort(self.rbml_w, 't'); - encode_family(self.rbml_w, 'y'); - - if let Some(ty) = associated_type.ty { - self.encode_type(ty); - } - - is_nonstatic_method = false; - } - } - - encode_attributes(self.rbml_w, &trait_item.attrs); - match trait_item.node { - hir::ConstTraitItem(_, ref default) => { - if default.is_some() { - encode_item_sort(self.rbml_w, 'C'); } else { - encode_item_sort(self.rbml_w, 'c'); - } - - encode_inlined_item(ecx, self.rbml_w, - InlinedItemRef::TraitItem(trait_def_id, trait_item)); - self.encode_mir(trait_item.id); + bug!() + }; + let data = MethodData { + fn_data: fn_data, + container: container(method_ty.has_body), + explicit_self: self.lazy(&method_ty.explicit_self), + }; + EntryKind::Method(self.lazy(&data)) } - hir::MethodTraitItem(ref sig, ref body) => { - // If this is a static method, we've already - // encoded self. - if is_nonstatic_method { - self.encode_bounds_and_type_for_item( - ecx.local_id(item_def_id)); - } - - if body.is_some() { - encode_item_sort(self.rbml_w, 'p'); - self.encode_mir(trait_item.id); - } else { - encode_item_sort(self.rbml_w, 'r'); + ty::TypeTraitItem(_) => EntryKind::AssociatedType(container(false)), + }; + + Entry { + kind: kind, + visibility: trait_item.vis().simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&ast_item.attrs), + children: LazySeq::empty(), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: match trait_item { + ty::ConstTraitItem(_) | + ty::MethodTraitItem(_) => Some(self.encode_item_type(def_id)), + ty::TypeTraitItem(ref associated_type) => { + associated_type.ty.map(|ty| self.lazy(&ty)) } - self.encode_method_argument_names(&sig.decl); - } - - hir::TypeTraitItem(..) 
=> {} + }, + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), + + ast: if let ty::ConstTraitItem(_) = trait_item { + let trait_def_id = trait_item.container().id(); + Some(self.encode_inlined_item(InlinedItemRef::TraitItem(trait_def_id, ast_item))) + } else { + None + }, + mir: self.encode_mir(def_id), } } - fn encode_info_for_impl_item(&mut self, - (impl_id, impl_item_def_id, ast_item): - (NodeId, DefId, Option<&hir::ImplItem>)) { - match self.ecx.tcx.impl_or_trait_item(impl_item_def_id) { - ty::ConstTraitItem(ref associated_const) => { - self.encode_info_for_associated_const(&associated_const, - impl_id, - ast_item) - } - ty::MethodTraitItem(ref method_type) => { - self.encode_info_for_method(&method_type, - false, - impl_id, - ast_item) - } - ty::TypeTraitItem(ref associated_type) => { - self.encode_info_for_associated_type(&associated_type, - impl_id, - ast_item) + fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> { + let node_id = self.tcx.map.as_local_node_id(def_id).unwrap(); + let ast_item = self.tcx.map.expect_impl_item(node_id); + let impl_item = self.tcx.impl_or_trait_item(def_id); + let impl_def_id = impl_item.container().id(); + + let container = match ast_item.defaultness { + hir::Defaultness::Default => AssociatedContainer::ImplDefault, + hir::Defaultness::Final => AssociatedContainer::ImplFinal, + }; + + let kind = match impl_item { + ty::ConstTraitItem(_) => EntryKind::AssociatedConst(container), + ty::MethodTraitItem(ref method_ty) => { + let fn_data = if let hir::ImplItemKind::Method(ref sig, _) = ast_item.node { + FnData { + constness: sig.constness, + arg_names: self.encode_fn_arg_names(&sig.decl), + } + } else { + bug!() + }; + let data = MethodData { + fn_data: fn_data, + container: container, + explicit_self: self.lazy(&method_ty.explicit_self), + }; + EntryKind::Method(self.lazy(&data)) } - } - } - - fn encode_info_for_associated_const(&mut self, - associated_const: &ty::AssociatedConst, - parent_id: NodeId, - impl_item_opt: Option<&hir::ImplItem>) { - let ecx = self.ecx(); - debug!("encode_info_for_associated_const({:?},{:?})", - associated_const.def_id, - associated_const.name); - - encode_def_id_and_key(ecx, self.rbml_w, associated_const.def_id); - encode_name(self.rbml_w, associated_const.name); - self.encode_visibility(associated_const.vis); - encode_family(self.rbml_w, 'C'); - - self.encode_parent_item(ecx.tcx.map.local_def_id(parent_id)); - encode_item_sort(self.rbml_w, 'C'); - - self.encode_bounds_and_type_for_item(ecx.local_id(associated_const.def_id)); - - let stab = ecx.tcx.lookup_stability(associated_const.def_id); - let depr = ecx.tcx.lookup_deprecation(associated_const.def_id); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - - if let Some(ii) = impl_item_opt { - encode_attributes(self.rbml_w, &ii.attrs); - encode_defaultness(self.rbml_w, ii.defaultness); - encode_inlined_item(ecx, - self.rbml_w, - InlinedItemRef::ImplItem(ecx.tcx.map.local_def_id(parent_id), - ii)); - self.encode_mir(ii.id); - } - } - - fn encode_info_for_method(&mut self, - m: &ty::Method<'tcx>, - is_default_impl: bool, - parent_id: NodeId, - impl_item_opt: Option<&hir::ImplItem>) { - let ecx = self.ecx(); - - debug!("encode_info_for_method: {:?} {:?}", m.def_id, - m.name); - self.encode_method_ty_fields(m); - self.encode_parent_item(ecx.tcx.map.local_def_id(parent_id)); - encode_item_sort(self.rbml_w, 'r'); - - let 
stab = ecx.tcx.lookup_stability(m.def_id); - let depr = ecx.tcx.lookup_deprecation(m.def_id); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - - let m_node_id = ecx.local_id(m.def_id); - self.encode_bounds_and_type_for_item(m_node_id); - - if let Some(impl_item) = impl_item_opt { - if let hir::ImplItemKind::Method(ref sig, _) = impl_item.node { - encode_attributes(self.rbml_w, &impl_item.attrs); - let generics = ecx.tcx.lookup_generics(m.def_id); - let types = generics.parent_types as usize + generics.types.len(); - let needs_inline = types > 0 || is_default_impl || - attr::requests_inline(&impl_item.attrs); - if sig.constness == hir::Constness::Const { - encode_inlined_item( - ecx, - self.rbml_w, - InlinedItemRef::ImplItem(ecx.tcx.map.local_def_id(parent_id), - impl_item)); - } - if needs_inline || sig.constness == hir::Constness::Const { - self.encode_mir(impl_item.id); + ty::TypeTraitItem(_) => EntryKind::AssociatedType(container), + }; + + let (ast, mir) = if let ty::ConstTraitItem(_) = impl_item { + (true, true) + } else if let hir::ImplItemKind::Method(ref sig, _) = ast_item.node { + let generics = self.tcx.lookup_generics(def_id); + let types = generics.parent_types as usize + generics.types.len(); + let needs_inline = types > 0 || attr::requests_inline(&ast_item.attrs); + let is_const_fn = sig.constness == hir::Constness::Const; + (is_const_fn, needs_inline || is_const_fn) + } else { + (false, false) + }; + + Entry { + kind: kind, + visibility: impl_item.vis().simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&ast_item.attrs), + children: LazySeq::empty(), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: match impl_item { + ty::ConstTraitItem(_) | + ty::MethodTraitItem(_) => Some(self.encode_item_type(def_id)), + ty::TypeTraitItem(ref associated_type) => { + associated_type.ty.map(|ty| self.lazy(&ty)) } - encode_constness(self.rbml_w, sig.constness); - encode_defaultness(self.rbml_w, impl_item.defaultness); - self.encode_method_argument_names(&sig.decl); - } - } - } - - fn encode_info_for_associated_type(&mut self, - associated_type: &ty::AssociatedType<'tcx>, - parent_id: NodeId, - impl_item_opt: Option<&hir::ImplItem>) { - let ecx = self.ecx(); - debug!("encode_info_for_associated_type({:?},{:?})", - associated_type.def_id, - associated_type.name); - - encode_def_id_and_key(ecx, self.rbml_w, associated_type.def_id); - encode_name(self.rbml_w, associated_type.name); - self.encode_visibility(associated_type.vis); - encode_family(self.rbml_w, 'y'); - self.encode_parent_item(ecx.tcx.map.local_def_id(parent_id)); - encode_item_sort(self.rbml_w, 't'); - - let stab = ecx.tcx.lookup_stability(associated_type.def_id); - let depr = ecx.tcx.lookup_deprecation(associated_type.def_id); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - - if let Some(ii) = impl_item_opt { - encode_attributes(self.rbml_w, &ii.attrs); - encode_defaultness(self.rbml_w, ii.defaultness); - } - - if let Some(ty) = associated_type.ty { - self.encode_type(ty); + }, + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), + + ast: if ast { + Some(self.encode_inlined_item(InlinedItemRef::ImplItem(impl_def_id, ast_item))) + } else { + None + }, + mir: if mir { self.encode_mir(def_id) } else { None }, } } -} -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 
'encoder> { - fn encode_method_argument_names(&mut self, - decl: &hir::FnDecl) { - self.rbml_w.start_tag(tag_method_argument_names); - for arg in &decl.inputs { - let tag = tag_method_argument_name; + fn encode_fn_arg_names(&mut self, decl: &hir::FnDecl) -> LazySeq { + self.lazy_seq(decl.inputs.iter().map(|arg| { if let PatKind::Binding(_, ref path1, _) = arg.pat.node { - let name = path1.node.as_str(); - self.rbml_w.wr_tagged_bytes(tag, name.as_bytes()); + path1.node } else { - self.rbml_w.wr_tagged_bytes(tag, &[]); + syntax::parse::token::intern("") } - } - self.rbml_w.end_tag(); - } - - fn encode_repr_attrs(&mut self, - attrs: &[ast::Attribute]) { - let ecx = self.ecx(); - let mut repr_attrs = Vec::new(); - for attr in attrs { - repr_attrs.extend(attr::find_repr_attrs(ecx.tcx.sess.diagnostic(), - attr)); - } - self.rbml_w.start_tag(tag_items_data_item_repr); - repr_attrs.encode(self.rbml_w); - self.rbml_w.end_tag(); + })) } - fn encode_mir(&mut self, node_id: NodeId) { - let ecx = self.ecx(); - let def_id = ecx.tcx.map.local_def_id(node_id); - if let Some(mir) = ecx.mir_map.map.get(&def_id) { - self.rbml_w.start_tag(tag_mir as usize); - self.rbml_w.emit_opaque(|opaque_encoder| { - tls::enter_encoding_context(ecx, opaque_encoder, |_, opaque_encoder| { - Encodable::encode(mir, opaque_encoder) - }) - }).unwrap(); - self.rbml_w.end_tag(); - } + fn encode_mir(&mut self, def_id: DefId) -> Option>> { + self.tcx.mir_map.borrow().get(&def_id).map(|mir| self.lazy(&*mir.borrow())) } -} -const FN_FAMILY: char = 'f'; -const STATIC_METHOD_FAMILY: char = 'F'; -const METHOD_FAMILY: char = 'h'; - -// Encodes the inherent implementations of a structure, enumeration, or trait. -fn encode_inherent_implementations(ecx: &EncodeContext, - rbml_w: &mut Encoder, - def_id: DefId) { - match ecx.tcx.inherent_impls.borrow().get(&def_id) { - None => {} - Some(implementations) => { - for &impl_def_id in implementations.iter() { - rbml_w.start_tag(tag_items_data_item_inherent_impl); - encode_def_id(rbml_w, impl_def_id); - rbml_w.end_tag(); + // Encodes the inherent implementations of a structure, enumeration, or trait. 
+ fn encode_inherent_implementations(&mut self, def_id: DefId) -> LazySeq { + match self.tcx.inherent_impls.borrow().get(&def_id) { + None => LazySeq::empty(), + Some(implementations) => { + self.lazy_seq(implementations.iter().map(|&def_id| { + assert!(def_id.is_local()); + def_id.index + })) } } } -} - -fn encode_stability(rbml_w: &mut Encoder, stab_opt: Option<&attr::Stability>) { - stab_opt.map(|stab| { - rbml_w.start_tag(tag_items_data_item_stability); - stab.encode(rbml_w).unwrap(); - rbml_w.end_tag(); - }); -} - -fn encode_deprecation(rbml_w: &mut Encoder, depr_opt: Option) { - depr_opt.map(|depr| { - rbml_w.start_tag(tag_items_data_item_deprecation); - depr.encode(rbml_w).unwrap(); - rbml_w.end_tag(); - }); -} - -fn encode_parent_impl(rbml_w: &mut Encoder, parent_opt: Option) { - parent_opt.map(|parent| { - rbml_w.wr_tagged_u64(tag_items_data_parent_impl, def_to_u64(parent)); - }); -} -fn encode_xrefs<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, - rbml_w: &mut Encoder, - xrefs: FnvHashMap, u32>) -{ - let mut xref_positions = vec![0; xrefs.len()]; - - // Encode XRefs sorted by their ID - let mut sorted_xrefs: Vec<_> = xrefs.into_iter().collect(); - sorted_xrefs.sort_by_key(|&(_, id)| id); - - rbml_w.start_tag(tag_xref_data); - for (xref, id) in sorted_xrefs.into_iter() { - xref_positions[id as usize] = rbml_w.mark_stable_position() as u32; - match xref { - XRef::Predicate(p) => { - tyencode::enc_predicate(rbml_w.writer, &ecx.ty_str_ctxt(), &p) - } - } + fn encode_stability(&mut self, def_id: DefId) -> Option> { + self.tcx.lookup_stability(def_id).map(|stab| self.lazy(stab)) } - rbml_w.mark_stable_position(); - rbml_w.end_tag(); - rbml_w.start_tag(tag_xref_index); - index::write_dense_index(xref_positions, rbml_w.writer); - rbml_w.end_tag(); -} + fn encode_deprecation(&mut self, def_id: DefId) -> Option> { + self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(&depr)) + } -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { - fn encode_info_for_item(&mut self, - (def_id, item): (DefId, &hir::Item)) { - let ecx = self.ecx(); - let tcx = ecx.tcx; + fn encode_info_for_item(&mut self, (def_id, item): (DefId, &hir::Item)) -> Entry<'tcx> { + let tcx = self.tcx; debug!("encoding info for item at {}", tcx.sess.codemap().span_to_string(item.span)); - let vis = &item.vis; - - let (stab, depr) = tcx.dep_graph.with_task(DepNode::MetaData(def_id), || { - (tcx.lookup_stability(ecx.tcx.map.local_def_id(item.id)), - tcx.lookup_deprecation(ecx.tcx.map.local_def_id(item.id))) - }); + let kind = match item.node { + hir::ItemStatic(_, hir::MutMutable, _) => EntryKind::MutStatic, + hir::ItemStatic(_, hir::MutImmutable, _) => EntryKind::ImmStatic, + hir::ItemConst(..) => EntryKind::Const, + hir::ItemFn(ref decl, _, constness, ..) => { + let data = FnData { + constness: constness, + arg_names: self.encode_fn_arg_names(&decl), + }; - match item.node { - hir::ItemStatic(_, m, _) => { - encode_def_id_and_key(ecx, self.rbml_w, def_id); - if m == hir::MutMutable { - encode_family(self.rbml_w, 'b'); - } else { - encode_family(self.rbml_w, 'c'); - } - self.encode_bounds_and_type_for_item(item.id); - encode_name(self.rbml_w, item.name); - self.encode_visibility(vis); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - encode_attributes(self.rbml_w, &item.attrs); - } - hir::ItemConst(..) 
=> { - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, 'C'); - self.encode_bounds_and_type_for_item(item.id); - encode_name(self.rbml_w, item.name); - encode_attributes(self.rbml_w, &item.attrs); - encode_inlined_item(ecx, self.rbml_w, InlinedItemRef::Item(def_id, item)); - self.encode_mir(item.id); - self.encode_visibility(vis); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - } - hir::ItemFn(ref decl, _, constness, _, ref generics, _) => { - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, FN_FAMILY); - let tps_len = generics.ty_params.len(); - self.encode_bounds_and_type_for_item(item.id); - encode_name(self.rbml_w, item.name); - encode_attributes(self.rbml_w, &item.attrs); - let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs); - if constness == hir::Constness::Const { - encode_inlined_item(ecx, self.rbml_w, InlinedItemRef::Item(def_id, item)); - } - if needs_inline || constness == hir::Constness::Const { - self.encode_mir(item.id); - } - encode_constness(self.rbml_w, constness); - self.encode_visibility(vis); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - self.encode_method_argument_names(&decl); + EntryKind::Fn(self.lazy(&data)) } hir::ItemMod(ref m) => { - self.encode_info_for_mod(FromId(item.id, (m, &item.attrs, item.name, &item.vis))); - } - hir::ItemForeignMod(ref fm) => { - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, 'n'); - encode_name(self.rbml_w, item.name); - - // Encode all the items in self module. - for foreign_item in &fm.items { - self.rbml_w.wr_tagged_u64( - tag_mod_child, - def_to_u64(ecx.tcx.map.local_def_id(foreign_item.id))); - } - self.encode_visibility(vis); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - } - hir::ItemTy(..) => { - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, 'y'); - self.encode_bounds_and_type_for_item(item.id); - encode_name(self.rbml_w, item.name); - self.encode_visibility(vis); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - } - hir::ItemEnum(ref enum_definition, _) => { - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, 't'); - encode_item_variances(self.rbml_w, ecx, item.id); - self.encode_bounds_and_type_for_item(item.id); - encode_name(self.rbml_w, item.name); - encode_attributes(self.rbml_w, &item.attrs); - self.encode_repr_attrs(&item.attrs); - for v in &enum_definition.variants { - encode_variant_id(self.rbml_w, ecx.tcx.map.local_def_id(v.node.data.id())); - } - - // Encode inherent implementations for self enumeration. - encode_inherent_implementations(ecx, self.rbml_w, def_id); - - self.encode_visibility(vis); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); + return self.encode_info_for_mod(FromId(item.id, (m, &item.attrs, &item.vis))); } + hir::ItemForeignMod(_) => EntryKind::ForeignMod, + hir::ItemTy(..) => EntryKind::Type, + hir::ItemEnum(..) => EntryKind::Enum, hir::ItemStruct(ref struct_def, _) => { - /* Index the class*/ - let def = ecx.tcx.lookup_adt_def(def_id); - let variant = def.struct_variant(); - - /* Now, make an item for the class itself */ - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, match *struct_def { - hir::VariantData::Struct(..) => 'S', - hir::VariantData::Tuple(..) => 's', - hir::VariantData::Unit(..) 
=> 'u', - }); - self.encode_bounds_and_type_for_item(item.id); - - encode_item_variances(self.rbml_w, ecx, item.id); - encode_name(self.rbml_w, item.name); - encode_attributes(self.rbml_w, &item.attrs); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - self.encode_visibility(vis); - self.encode_repr_attrs(&item.attrs); - - /* Encode def_ids for each field and method - for methods, write all the stuff get_trait_method - needs to know*/ - self.encode_struct_fields(variant); - - // Encode inherent implementations for self structure. - encode_inherent_implementations(ecx, self.rbml_w, def_id); + let variant = tcx.lookup_adt_def(def_id).struct_variant(); - if !struct_def.is_struct() { - let ctor_did = ecx.tcx.map.local_def_id(struct_def.id()); - self.rbml_w.wr_tagged_u64(tag_items_data_item_struct_ctor, - def_to_u64(ctor_did)); - } + // Encode def_ids for each field and method + // for methods, write all the stuff get_trait_method + // needs to know + let struct_ctor = if !struct_def.is_struct() { + Some(tcx.map.local_def_id(struct_def.id()).index) + } else { + None + }; + EntryKind::Struct(self.lazy(&VariantData { + ctor_kind: variant.ctor_kind, + disr: variant.disr_val.to_u64_unchecked(), + struct_ctor: struct_ctor, + })) } hir::ItemUnion(..) => { - let def = ecx.tcx.lookup_adt_def(def_id); - let variant = def.struct_variant(); - - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, 'U'); - self.encode_bounds_and_type_for_item(item.id); - - encode_item_variances(self.rbml_w, ecx, item.id); - encode_name(self.rbml_w, item.name); - encode_attributes(self.rbml_w, &item.attrs); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - self.encode_visibility(vis); - self.encode_repr_attrs(&item.attrs); - - /* Encode def_ids for each field and method - for methods, write all the stuff get_trait_method - needs to know*/ - self.encode_struct_fields(variant); - - encode_inlined_item(ecx, self.rbml_w, InlinedItemRef::Item(def_id, item)); - self.encode_mir(item.id); - - // Encode inherent implementations for self union. - encode_inherent_implementations(ecx, self.rbml_w, def_id); - } - hir::ItemDefaultImpl(unsafety, _) => { - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, 'd'); - encode_name(self.rbml_w, item.name); - encode_unsafety(self.rbml_w, unsafety); - - let trait_ref = tcx.impl_trait_ref(ecx.tcx.map.local_def_id(item.id)).unwrap(); - encode_trait_ref(self.rbml_w, ecx, trait_ref, tag_item_trait_ref); + let variant = tcx.lookup_adt_def(def_id).struct_variant(); + + EntryKind::Union(self.lazy(&VariantData { + ctor_kind: variant.ctor_kind, + disr: variant.disr_val.to_u64_unchecked(), + struct_ctor: None, + })) } - hir::ItemImpl(unsafety, polarity, ..) => { - // We need to encode information about the default methods we - // have inherited, so we drive self based on the impl structure. 
- let impl_items = tcx.impl_items.borrow(); - let items = &impl_items[&def_id]; - - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, 'i'); - self.encode_bounds_and_type_for_item(item.id); - encode_name(self.rbml_w, item.name); - encode_attributes(self.rbml_w, &item.attrs); - encode_unsafety(self.rbml_w, unsafety); - encode_polarity(self.rbml_w, polarity); - - match - tcx.custom_coerce_unsized_kinds - .borrow() - .get(&ecx.tcx.map.local_def_id(item.id)) - { - Some(&kind) => { - self.rbml_w.start_tag(tag_impl_coerce_unsized_kind); - kind.encode(self.rbml_w); - self.rbml_w.end_tag(); - } - None => {} - } + hir::ItemDefaultImpl(..) => { + let data = ImplData { + polarity: hir::ImplPolarity::Positive, + parent_impl: None, + coerce_unsized_kind: None, + trait_ref: tcx.impl_trait_ref(def_id).map(|trait_ref| self.lazy(&trait_ref)), + }; - for &item_def_id in items { - self.rbml_w.start_tag(tag_item_impl_item); - match item_def_id { - ty::ConstTraitItemId(item_def_id) => { - encode_def_id(self.rbml_w, item_def_id); - encode_item_sort(self.rbml_w, 'C'); - } - ty::MethodTraitItemId(item_def_id) => { - encode_def_id(self.rbml_w, item_def_id); - encode_item_sort(self.rbml_w, 'r'); - } - ty::TypeTraitItemId(item_def_id) => { - encode_def_id(self.rbml_w, item_def_id); - encode_item_sort(self.rbml_w, 't'); + EntryKind::DefaultImpl(self.lazy(&data)) + } + hir::ItemImpl(_, polarity, ..) => { + let trait_ref = tcx.impl_trait_ref(def_id); + let parent = if let Some(trait_ref) = trait_ref { + let trait_def = tcx.lookup_trait_def(trait_ref.def_id); + trait_def.ancestors(def_id).skip(1).next().and_then(|node| { + match node { + specialization_graph::Node::Impl(parent) => Some(parent), + _ => None, } - } - self.rbml_w.end_tag(); - } + }) + } else { + None + }; - let did = ecx.tcx.map.local_def_id(item.id); - if let Some(trait_ref) = tcx.impl_trait_ref(did) { - encode_trait_ref(self.rbml_w, ecx, trait_ref, tag_item_trait_ref); + let data = ImplData { + polarity: polarity, + parent_impl: parent, + coerce_unsized_kind: tcx.custom_coerce_unsized_kinds + .borrow() + .get(&def_id) + .cloned(), + trait_ref: trait_ref.map(|trait_ref| self.lazy(&trait_ref)), + }; - let trait_def = tcx.lookup_trait_def(trait_ref.def_id); - let parent = trait_def.ancestors(did) - .skip(1) - .next() - .and_then(|node| match node { - specialization_graph::Node::Impl(parent) => - Some(parent), - _ => None, - }); - encode_parent_impl(self.rbml_w, parent); - } - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); + EntryKind::Impl(self.lazy(&data)) } hir::ItemTrait(..) 
=> { - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, 'I'); - encode_item_variances(self.rbml_w, ecx, item.id); let trait_def = tcx.lookup_trait_def(def_id); - let trait_predicates = tcx.lookup_predicates(def_id); - encode_unsafety(self.rbml_w, trait_def.unsafety); - encode_paren_sugar(self.rbml_w, trait_def.paren_sugar); - encode_defaulted(self.rbml_w, tcx.trait_has_default_impl(def_id)); - encode_associated_type_names(self.rbml_w, &trait_def.associated_type_names); - self.encode_generics(&trait_def.generics, &trait_predicates); - self.encode_predicates(&tcx.lookup_super_predicates(def_id), - tag_item_super_predicates); - encode_trait_ref(self.rbml_w, ecx, trait_def.trait_ref, tag_item_trait_ref); - encode_name(self.rbml_w, item.name); - encode_attributes(self.rbml_w, &item.attrs); - self.encode_visibility(vis); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - for &method_def_id in tcx.trait_item_def_ids(def_id).iter() { - self.rbml_w.start_tag(tag_item_trait_item); - match method_def_id { - ty::ConstTraitItemId(const_def_id) => { - encode_def_id(self.rbml_w, const_def_id); - encode_item_sort(self.rbml_w, 'C'); - } - ty::MethodTraitItemId(method_def_id) => { - encode_def_id(self.rbml_w, method_def_id); - encode_item_sort(self.rbml_w, 'r'); - } - ty::TypeTraitItemId(type_def_id) => { - encode_def_id(self.rbml_w, type_def_id); - encode_item_sort(self.rbml_w, 't'); - } - } - self.rbml_w.end_tag(); - - self.rbml_w.wr_tagged_u64(tag_mod_child, - def_to_u64(method_def_id.def_id())); - } + let data = TraitData { + unsafety: trait_def.unsafety, + paren_sugar: trait_def.paren_sugar, + has_default_impl: tcx.trait_has_default_impl(def_id), + trait_ref: self.lazy(&trait_def.trait_ref), + super_predicates: self.lazy(&tcx.lookup_super_predicates(def_id)), + }; - // Encode inherent implementations for self trait. - encode_inherent_implementations(ecx, self.rbml_w, def_id); - } - hir::ItemExternCrate(_) | hir::ItemUse(_) => { - bug!("cannot encode info for item {:?}", item) + EntryKind::Trait(self.lazy(&data)) } + hir::ItemExternCrate(_) | + hir::ItemUse(_) => bug!("cannot encode info for item {:?}", item), + }; + + Entry { + kind: kind, + visibility: item.vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&item.attrs), + children: match item.node { + hir::ItemForeignMod(ref fm) => { + self.lazy_seq(fm.items + .iter() + .map(|foreign_item| tcx.map.local_def_id(foreign_item.id).index)) + } + hir::ItemEnum(..) => { + let def = self.tcx.lookup_adt_def(def_id); + self.lazy_seq(def.variants.iter().map(|v| { + assert!(v.did.is_local()); + v.did.index + })) + } + hir::ItemStruct(..) | + hir::ItemUnion(..) => { + let def = self.tcx.lookup_adt_def(def_id); + self.lazy_seq(def.struct_variant().fields.iter().map(|f| { + assert!(f.did.is_local()); + f.did.index + })) + } + hir::ItemImpl(..) | + hir::ItemTrait(..) => { + self.lazy_seq(tcx.impl_or_trait_items(def_id).iter().map(|&def_id| { + assert!(def_id.is_local()); + def_id.index + })) + } + _ => LazySeq::empty(), + }, + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: match item.node { + hir::ItemStatic(..) | + hir::ItemConst(..) | + hir::ItemFn(..) | + hir::ItemTy(..) | + hir::ItemEnum(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) | + hir::ItemImpl(..) 
=> Some(self.encode_item_type(def_id)), + _ => None, + }, + inherent_impls: self.encode_inherent_implementations(def_id), + variances: match item.node { + hir::ItemEnum(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) | + hir::ItemTrait(..) => self.encode_item_variances(def_id), + _ => LazySeq::empty(), + }, + generics: match item.node { + hir::ItemStatic(..) | + hir::ItemConst(..) | + hir::ItemFn(..) | + hir::ItemTy(..) | + hir::ItemEnum(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) | + hir::ItemImpl(..) | + hir::ItemTrait(..) => Some(self.encode_generics(def_id)), + _ => None, + }, + predicates: match item.node { + hir::ItemStatic(..) | + hir::ItemConst(..) | + hir::ItemFn(..) | + hir::ItemTy(..) | + hir::ItemEnum(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) | + hir::ItemImpl(..) | + hir::ItemTrait(..) => Some(self.encode_predicates(def_id)), + _ => None, + }, + + ast: match item.node { + hir::ItemConst(..) | + hir::ItemFn(_, _, hir::Constness::Const, ..) => { + Some(self.encode_inlined_item(InlinedItemRef::Item(def_id, item))) + } + _ => None, + }, + mir: match item.node { + hir::ItemConst(..) => self.encode_mir(def_id), + hir::ItemFn(_, _, constness, _, ref generics, _) => { + let tps_len = generics.ty_params.len(); + let needs_inline = tps_len > 0 || attr::requests_inline(&item.attrs); + if needs_inline || constness == hir::Constness::Const { + self.encode_mir(def_id) + } else { + None + } + } + _ => None, + }, } } } -impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> { +impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { /// In some cases, along with the item itself, we also /// encode some sub-items. Usually we want some info from the item /// so it's easier to do that here then to wait until we would encounter /// normally in the visitor walk. - fn encode_addl_info_for_item(&mut self, - item: &hir::Item) { - let def_id = self.ecx().tcx.map.local_def_id(item.id); + fn encode_addl_info_for_item(&mut self, item: &hir::Item) { + let def_id = self.tcx.map.local_def_id(item.id); match item.node { hir::ItemStatic(..) | hir::ItemConst(..) | @@ -1173,162 +856,112 @@ impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> { // no sub-item recording needed in these cases } hir::ItemEnum(..) => { - self.encode_enum_variant_infos(def_id); + self.encode_fields(def_id); + + let def = self.tcx.lookup_adt_def(def_id); + for (i, variant) in def.variants.iter().enumerate() { + self.record(variant.did, + EncodeContext::encode_enum_variant_info, + (def_id, Untracked(i))); + } } hir::ItemStruct(ref struct_def, _) => { - self.encode_addl_struct_info(def_id, struct_def.id(), item); + self.encode_fields(def_id); + + // If the struct has a constructor, encode it. + if !struct_def.is_struct() { + let ctor_def_id = self.tcx.map.local_def_id(struct_def.id()); + self.record(ctor_def_id, + EncodeContext::encode_struct_ctor, + (def_id, ctor_def_id)); + } } hir::ItemUnion(..) => { - self.encode_addl_union_info(def_id); - } - hir::ItemImpl(.., ref ast_items) => { - self.encode_addl_impl_info(def_id, item.id, ast_items); - } - hir::ItemTrait(.., ref trait_items) => { - self.encode_addl_trait_info(def_id, trait_items); + self.encode_fields(def_id); } - } - } - - fn encode_addl_struct_info(&mut self, - def_id: DefId, - struct_node_id: ast::NodeId, - item: &hir::Item) { - let ecx = self.ecx(); - let def = ecx.tcx.lookup_adt_def(def_id); - let variant = def.struct_variant(); - - self.encode_fields(def_id); - - // If this is a tuple-like struct, encode the type of the constructor. 
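The `mir` field above preserves the old encoder's decision rule: MIR goes into the metadata for constants, and for functions that are `const` or likely to be inlined cross-crate (generic, or requesting `#[inline]`). A small, self-contained restatement of that predicate; the helper name is mine, not rustc's:

```rust
/// Decide whether a function's MIR should be written into the metadata,
/// following the condition visible in the hunk above: const fns always,
/// otherwise only generic or #[inline]-requesting fns.
fn should_encode_fn_mir(is_const: bool, num_type_params: usize, requests_inline: bool) -> bool {
    let needs_inline = num_type_params > 0 || requests_inline;
    needs_inline || is_const
}

fn main() {
    assert!(should_encode_fn_mir(true, 0, false));   // const fn
    assert!(should_encode_fn_mir(false, 2, false));  // generic fn
    assert!(should_encode_fn_mir(false, 0, true));   // #[inline] fn
    assert!(!should_encode_fn_mir(false, 0, false)); // plain fn: no MIR in metadata
}
```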
- match variant.kind { - ty::VariantKind::Struct => { - // no value for structs like struct Foo { ... } + hir::ItemImpl(..) => { + for &trait_item_def_id in &self.tcx.impl_or_trait_items(def_id)[..] { + self.record(trait_item_def_id, + EncodeContext::encode_info_for_impl_item, + trait_item_def_id); + } } - ty::VariantKind::Tuple | ty::VariantKind::Unit => { - // there is a value for structs like `struct - // Foo()` and `struct Foo` - let ctor_def_id = ecx.tcx.map.local_def_id(struct_node_id); - self.record(ctor_def_id, - ItemContentBuilder::encode_struct_ctor, - (def_id, item.id, struct_node_id)); + hir::ItemTrait(..) => { + for &item_def_id in &self.tcx.impl_or_trait_items(def_id)[..] { + self.record(item_def_id, + EncodeContext::encode_info_for_trait_item, + item_def_id); + } } } } - - fn encode_addl_union_info(&mut self, def_id: DefId) { - self.encode_fields(def_id); - } - - fn encode_addl_impl_info(&mut self, - def_id: DefId, - impl_id: ast::NodeId, - ast_items: &[hir::ImplItem]) { - let ecx = self.ecx(); - let impl_items = ecx.tcx.impl_items.borrow(); - let items = &impl_items[&def_id]; - - // Iterate down the trait items, emitting them. We rely on the - // assumption that all of the actually implemented trait items - // appear first in the impl structure, in the same order they do - // in the ast. This is a little sketchy. - let num_implemented_methods = ast_items.len(); - for (i, &trait_item_def_id) in items.iter().enumerate() { - let ast_item = if i < num_implemented_methods { - Some(&ast_items[i]) - } else { - None - }; - - let trait_item_def_id = trait_item_def_id.def_id(); - self.record(trait_item_def_id, - ItemContentBuilder::encode_info_for_impl_item, - (impl_id, trait_item_def_id, ast_item)); - } - } - - fn encode_addl_trait_info(&mut self, - def_id: DefId, - trait_items: &[hir::TraitItem]) { - // Now output the trait item info for each trait item. 
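The removed `encode_addl_impl_info` spells out its pairing assumption: the first `ast_items.len()` entries of the impl's item list correspond, in order, to the items written in the impl block, and the remaining entries are inherited defaults with no AST item. A toy sketch of that pairing, with strings and integers standing in for the real AST and id types:

```rust
/// Pair each impl-item id with its AST item, if one was written in the impl
/// block. Inherited default items have no AST item and map to `None`,
/// matching the "first N correspond, the rest are defaults" assumption
/// described in the removed code above.
fn pair_with_ast<'a>(item_ids: &[u32], ast_items: &'a [&'a str]) -> Vec<(u32, Option<&'a str>)> {
    item_ids
        .iter()
        .enumerate()
        .map(|(i, &id)| (id, ast_items.get(i).copied()))
        .collect()
}

fn main() {
    // Two items written in the impl, one inherited default.
    let pairs = pair_with_ast(&[10, 11, 12], &["fn a", "fn b"]);
    assert_eq!(pairs, vec![(10, Some("fn a")), (11, Some("fn b")), (12, None)]);
}
```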
- let tcx = self.ecx().tcx; - let r = tcx.trait_item_def_ids(def_id); - for (item_def_id, trait_item) in r.iter().zip(trait_items) { - let item_def_id = item_def_id.def_id(); - assert!(item_def_id.is_local()); - self.record(item_def_id, - ItemContentBuilder::encode_info_for_trait_item, - (def_id, item_def_id, trait_item)); - } - } } -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { +impl<'a, 'tcx> EncodeContext<'a, 'tcx> { fn encode_info_for_foreign_item(&mut self, - (def_id, nitem): (DefId, &hir::ForeignItem)) { - let ecx = self.ecx(); + (def_id, nitem): (DefId, &hir::ForeignItem)) + -> Entry<'tcx> { + let tcx = self.tcx; - debug!("writing foreign item {}", ecx.tcx.node_path_str(nitem.id)); + debug!("writing foreign item {}", tcx.node_path_str(nitem.id)); - encode_def_id_and_key(ecx, self.rbml_w, def_id); - let parent_id = ecx.tcx.map.get_parent(nitem.id); - self.encode_parent_item(ecx.tcx.map.local_def_id(parent_id)); - self.encode_visibility(&nitem.vis); - match nitem.node { + let kind = match nitem.node { hir::ForeignItemFn(ref fndecl, _) => { - encode_family(self.rbml_w, FN_FAMILY); - self.encode_bounds_and_type_for_item(nitem.id); - encode_name(self.rbml_w, nitem.name); - encode_attributes(self.rbml_w, &nitem.attrs); - let stab = ecx.tcx.lookup_stability(ecx.tcx.map.local_def_id(nitem.id)); - let depr = ecx.tcx.lookup_deprecation(ecx.tcx.map.local_def_id(nitem.id)); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - self.encode_method_argument_names(&fndecl); - } - hir::ForeignItemStatic(_, mutbl) => { - if mutbl { - encode_family(self.rbml_w, 'b'); - } else { - encode_family(self.rbml_w, 'c'); - } - self.encode_bounds_and_type_for_item(nitem.id); - encode_attributes(self.rbml_w, &nitem.attrs); - let stab = ecx.tcx.lookup_stability(ecx.tcx.map.local_def_id(nitem.id)); - let depr = ecx.tcx.lookup_deprecation(ecx.tcx.map.local_def_id(nitem.id)); - encode_stability(self.rbml_w, stab); - encode_deprecation(self.rbml_w, depr); - encode_name(self.rbml_w, nitem.name); + let data = FnData { + constness: hir::Constness::NotConst, + arg_names: self.encode_fn_arg_names(&fndecl), + }; + EntryKind::ForeignFn(self.lazy(&data)) } + hir::ForeignItemStatic(_, true) => EntryKind::ForeignMutStatic, + hir::ForeignItemStatic(_, false) => EntryKind::ForeignImmStatic, + }; + + Entry { + kind: kind, + visibility: nitem.vis.simplify(), + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&nitem.attrs), + children: LazySeq::empty(), + stability: self.encode_stability(def_id), + deprecation: self.encode_deprecation(def_id), + + ty: Some(self.encode_item_type(def_id)), + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), + + ast: None, + mir: None, } } } -struct EncodeVisitor<'a, 'ecx: 'a, 'tcx: 'ecx, 'encoder: 'ecx> { - index: &'a mut IndexBuilder<'ecx, 'tcx, 'encoder>, +struct EncodeVisitor<'a, 'b: 'a, 'tcx: 'b> { + index: IndexBuilder<'a, 'b, 'tcx>, } -impl<'a, 'ecx, 'tcx, 'encoder> Visitor<'tcx> for EncodeVisitor<'a, 'ecx, 'tcx, 'encoder> { +impl<'a, 'b, 'tcx> Visitor<'tcx> for EncodeVisitor<'a, 'b, 'tcx> { fn visit_expr(&mut self, ex: &'tcx hir::Expr) { intravisit::walk_expr(self, ex); self.index.encode_info_for_expr(ex); } fn visit_item(&mut self, item: &'tcx hir::Item) { intravisit::walk_item(self, item); - let def_id = self.index.ecx().tcx.map.local_def_id(item.id); + let def_id = 
self.index.tcx.map.local_def_id(item.id); match item.node { - hir::ItemExternCrate(_) | hir::ItemUse(_) => (), // ignore these - _ => self.index.record(def_id, - ItemContentBuilder::encode_info_for_item, - (def_id, item)), + hir::ItemExternCrate(_) | + hir::ItemUse(_) => (), // ignore these + _ => self.index.record(def_id, EncodeContext::encode_info_for_item, (def_id, item)), } self.index.encode_addl_info_for_item(item); } fn visit_foreign_item(&mut self, ni: &'tcx hir::ForeignItem) { intravisit::walk_foreign_item(self, ni); - let def_id = self.index.ecx().tcx.map.local_def_id(ni.id); + let def_id = self.index.tcx.map.local_def_id(ni.id); self.index.record(def_id, - ItemContentBuilder::encode_info_for_foreign_item, + EncodeContext::encode_info_for_foreign_item, (def_id, ni)); } fn visit_ty(&mut self, ty: &'tcx hir::Ty) { @@ -1337,297 +970,180 @@ impl<'a, 'ecx, 'tcx, 'encoder> Visitor<'tcx> for EncodeVisitor<'a, 'ecx, 'tcx, ' } } -impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> { +impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { fn encode_info_for_ty(&mut self, ty: &hir::Ty) { - let ecx = self.ecx(); if let hir::TyImplTrait(_) = ty.node { - let def_id = ecx.tcx.map.local_def_id(ty.id); - self.record(def_id, - ItemContentBuilder::encode_info_for_anon_ty, - (def_id, ty.id)); + let def_id = self.tcx.map.local_def_id(ty.id); + self.record(def_id, EncodeContext::encode_info_for_anon_ty, def_id); } } fn encode_info_for_expr(&mut self, expr: &hir::Expr) { - let ecx = self.ecx(); - match expr.node { hir::ExprClosure(..) => { - let def_id = ecx.tcx.map.local_def_id(expr.id); - self.record(def_id, - ItemContentBuilder::encode_info_for_closure, - (def_id, expr.id)); + let def_id = self.tcx.map.local_def_id(expr.id); + self.record(def_id, EncodeContext::encode_info_for_closure, def_id); } - _ => { } + _ => {} } } } -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { - fn encode_info_for_anon_ty(&mut self, (def_id, ty_id): (DefId, NodeId)) { - let ecx = self.ecx; - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_family(self.rbml_w, 'y'); - self.encode_bounds_and_type_for_item(ty_id); - } - - fn encode_info_for_closure(&mut self, (def_id, expr_id): (DefId, NodeId)) { - let ecx = self.ecx; - encode_def_id_and_key(ecx, self.rbml_w, def_id); - encode_name(self.rbml_w, syntax::parse::token::intern("")); - - self.rbml_w.start_tag(tag_items_closure_ty); - write_closure_type(ecx, - self.rbml_w, - &ecx.tcx.tables.borrow().closure_tys[&def_id]); - self.rbml_w.end_tag(); +impl<'a, 'tcx> EncodeContext<'a, 'tcx> { + fn encode_info_for_anon_ty(&mut self, def_id: DefId) -> Entry<'tcx> { + Entry { + kind: EntryKind::Type, + visibility: ty::Visibility::Public, + def_key: self.encode_def_key(def_id), + attributes: LazySeq::empty(), + children: LazySeq::empty(), + stability: None, + deprecation: None, - self.rbml_w.start_tag(tag_items_closure_kind); - ecx.tcx.closure_kind(def_id).encode(self.rbml_w).unwrap(); - self.rbml_w.end_tag(); + ty: Some(self.encode_item_type(def_id)), + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: Some(self.encode_generics(def_id)), + predicates: Some(self.encode_predicates(def_id)), - assert!(ecx.mir_map.map.contains_key(&def_id)); - self.encode_mir(expr_id); + ast: None, + mir: None, + } } -} - -fn encode_info_for_items<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, - rbml_w: &mut Encoder) - -> (IndexData, FnvHashMap, u32>) { - let krate = ecx.tcx.map.krate(); - - rbml_w.start_tag(tag_items_data); - - let fields = { - let mut index = 
IndexBuilder::new(ecx, rbml_w); - index.record(DefId::local(CRATE_DEF_INDEX), - ItemContentBuilder::encode_info_for_mod, - FromId(CRATE_NODE_ID, (&krate.module, - &[], - syntax::parse::token::intern(&ecx.link_meta.crate_name), - &hir::Public))); - krate.visit_all_items(&mut EncodeVisitor { - index: &mut index, - }); - index.into_fields() - }; - - rbml_w.end_tag(); - - fields -} - -fn encode_item_index(rbml_w: &mut Encoder, index: IndexData) { - rbml_w.start_tag(tag_index); - index.write_index(rbml_w.writer); - rbml_w.end_tag(); -} - -fn encode_attributes(rbml_w: &mut Encoder, attrs: &[ast::Attribute]) { - rbml_w.start_tag(tag_attributes); - rbml_w.emit_opaque(|opaque_encoder| { - attrs.encode(opaque_encoder) - }).unwrap(); - rbml_w.end_tag(); -} - -fn encode_unsafety(rbml_w: &mut Encoder, unsafety: hir::Unsafety) { - let byte: u8 = match unsafety { - hir::Unsafety::Normal => 0, - hir::Unsafety::Unsafe => 1, - }; - rbml_w.wr_tagged_u8(tag_unsafety, byte); -} - -fn encode_paren_sugar(rbml_w: &mut Encoder, paren_sugar: bool) { - let byte: u8 = if paren_sugar {1} else {0}; - rbml_w.wr_tagged_u8(tag_paren_sugar, byte); -} - -fn encode_defaulted(rbml_w: &mut Encoder, is_defaulted: bool) { - let byte: u8 = if is_defaulted {1} else {0}; - rbml_w.wr_tagged_u8(tag_defaulted_trait, byte); -} -fn encode_associated_type_names(rbml_w: &mut Encoder, names: &[Name]) { - rbml_w.start_tag(tag_associated_type_names); - for &name in names { - rbml_w.wr_tagged_str(tag_associated_type_name, &name.as_str()); - } - rbml_w.end_tag(); -} + fn encode_info_for_closure(&mut self, def_id: DefId) -> Entry<'tcx> { + let tcx = self.tcx; -fn encode_polarity(rbml_w: &mut Encoder, polarity: hir::ImplPolarity) { - let byte: u8 = match polarity { - hir::ImplPolarity::Positive => 0, - hir::ImplPolarity::Negative => 1, - }; - rbml_w.wr_tagged_u8(tag_polarity, byte); -} + let data = ClosureData { + kind: tcx.closure_kind(def_id), + ty: self.lazy(&tcx.tables.borrow().closure_tys[&def_id]), + }; -fn encode_crate_deps(rbml_w: &mut Encoder, cstore: &cstore::CStore) { - fn get_ordered_deps(cstore: &cstore::CStore) - -> Vec<(CrateNum, Rc)> { - // Pull the cnums and name,vers,hash out of cstore - let mut deps = Vec::new(); - cstore.iter_crate_data(|cnum, val| { - deps.push((cnum, val.clone())); - }); + Entry { + kind: EntryKind::Closure(self.lazy(&data)), + visibility: ty::Visibility::Public, + def_key: self.encode_def_key(def_id), + attributes: self.encode_attributes(&tcx.get_attrs(def_id)), + children: LazySeq::empty(), + stability: None, + deprecation: None, - // Sort by cnum - deps.sort_by(|kv1, kv2| kv1.0.cmp(&kv2.0)); + ty: None, + inherent_impls: LazySeq::empty(), + variances: LazySeq::empty(), + generics: None, + predicates: None, - // Sanity-check the crate numbers - let mut expected_cnum = 1; - for &(n, _) in &deps { - assert_eq!(n, expected_cnum); - expected_cnum += 1; + ast: None, + mir: self.encode_mir(def_id), } - - deps } - // We're just going to write a list of crate 'name-hash-version's, with - // the assumption that they are numbered 1 to n. - // FIXME (#2166): This is not nearly enough to support correct versioning - // but is enough to get transitive crate dependencies working. 
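Both the old and new `encode_crate_deps` lean on the invariant restated in the comment above: dependencies are emitted as a flat list assumed to be numbered contiguously from 1, so a reader can recover the crate number from list position alone. A minimal sketch of the sort-and-sanity-check step, with plain `u32` crate numbers standing in for `CrateNum`:

```rust
/// Order dependencies by crate number and check that they form the
/// contiguous range 1..n, the invariant the flat dependency list relies on.
fn ordered_deps(mut deps: Vec<(u32, String)>) -> Vec<(u32, String)> {
    deps.sort_by(|a, b| a.0.cmp(&b.0));
    for (i, dep) in deps.iter().enumerate() {
        assert_eq!(dep.0, (i + 1) as u32, "crate numbers must start at 1 with no gaps");
    }
    deps
}

fn main() {
    let deps = ordered_deps(vec![(2, "libc".to_string()), (1, "core".to_string())]);
    assert_eq!(deps[0], (1, "core".to_string()));
    assert_eq!(deps[1], (2, "libc".to_string()));
}
```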
- rbml_w.start_tag(tag_crate_deps); - for (_cnum, dep) in get_ordered_deps(cstore) { - encode_crate_dep(rbml_w, &dep); + fn encode_info_for_items(&mut self) -> Index { + let krate = self.tcx.map.krate(); + let mut index = IndexBuilder::new(self); + index.record(DefId::local(CRATE_DEF_INDEX), + EncodeContext::encode_info_for_mod, + FromId(CRATE_NODE_ID, (&krate.module, &krate.attrs, &hir::Public))); + let mut visitor = EncodeVisitor { index: index }; + krate.visit_all_items(&mut visitor); + visitor.index.into_items() } - rbml_w.end_tag(); -} - -fn encode_lang_items(ecx: &EncodeContext, rbml_w: &mut Encoder) { - rbml_w.start_tag(tag_lang_items); - for (i, &opt_def_id) in ecx.tcx.lang_items.items().iter().enumerate() { - if let Some(def_id) = opt_def_id { - if def_id.is_local() { - rbml_w.start_tag(tag_lang_items_item); - rbml_w.wr_tagged_u32(tag_lang_items_item_id, i as u32); - rbml_w.wr_tagged_u32(tag_lang_items_item_index, def_id.index.as_u32()); - rbml_w.end_tag(); - } - } + fn encode_attributes(&mut self, attrs: &[ast::Attribute]) -> LazySeq { + self.lazy_seq_ref(attrs) } - for i in &ecx.tcx.lang_items.missing { - rbml_w.wr_tagged_u32(tag_lang_items_missing, *i as u32); - } + fn encode_crate_deps(&mut self) -> LazySeq { + fn get_ordered_deps(cstore: &cstore::CStore) -> Vec<(CrateNum, Rc)> { + // Pull the cnums and name,vers,hash out of cstore + let mut deps = Vec::new(); + cstore.iter_crate_data(|cnum, val| { + deps.push((cnum, val.clone())); + }); - rbml_w.end_tag(); // tag_lang_items -} + // Sort by cnum + deps.sort_by(|kv1, kv2| kv1.0.cmp(&kv2.0)); -fn encode_native_libraries(ecx: &EncodeContext, rbml_w: &mut Encoder) { - rbml_w.start_tag(tag_native_libraries); - - for &(ref lib, kind) in ecx.tcx.sess.cstore.used_libraries().iter() { - match kind { - cstore::NativeStatic => {} // these libraries are not propagated - cstore::NativeFramework | cstore::NativeUnknown => { - rbml_w.start_tag(tag_native_libraries_lib); - rbml_w.wr_tagged_u32(tag_native_libraries_kind, kind as u32); - rbml_w.wr_tagged_str(tag_native_libraries_name, lib); - rbml_w.end_tag(); + // Sanity-check the crate numbers + let mut expected_cnum = 1; + for &(n, _) in &deps { + assert_eq!(n, CrateNum::new(expected_cnum)); + expected_cnum += 1; } - } - } - - rbml_w.end_tag(); -} - -fn encode_plugin_registrar_fn(ecx: &EncodeContext, rbml_w: &mut Encoder) { - match ecx.tcx.sess.plugin_registrar_fn.get() { - Some(id) => { - let def_id = ecx.tcx.map.local_def_id(id); - rbml_w.wr_tagged_u32(tag_plugin_registrar_fn, def_id.index.as_u32()); - } - None => {} - } -} - -fn encode_codemap(ecx: &EncodeContext, rbml_w: &mut Encoder) { - rbml_w.start_tag(tag_codemap); - let codemap = ecx.tcx.sess.codemap(); - for filemap in &codemap.files.borrow()[..] { - - if filemap.lines.borrow().is_empty() || filemap.is_imported() { - // No need to export empty filemaps, as they can't contain spans - // that need translation. - // Also no need to re-export imported filemaps, as any downstream - // crate will import them from their original source. 
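The lang-item table is encoded the same way before and after this change: walk the whole table together with its slot positions, keep only the items defined in the local crate, and record each as a (definition, table-slot) pair so the loader can restore it to the right slot. A standalone sketch over a toy table, where strings and booleans replace `DefId` and the `is_local()` check:

```rust
/// Keep only locally-defined lang items, remembering where each one sits in
/// the full lang-item table so a reader can put it back in the right slot.
fn local_lang_items(table: &[Option<(&'static str, bool)>]) -> Vec<(&'static str, usize)> {
    table
        .iter()
        .enumerate()
        .filter_map(|(i, entry)| match *entry {
            Some((name, is_local)) if is_local => Some((name, i)),
            _ => None,
        })
        .collect()
}

fn main() {
    // (name, defined-in-this-crate?); `None` means the lang item is missing.
    let table = [
        Some(("sized", false)),
        None,
        Some(("drop", true)),
    ];
    assert_eq!(local_lang_items(&table), vec![("drop", 2)]);
}
```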
- continue; + deps } - rbml_w.start_tag(tag_codemap_filemap); - rbml_w.emit_opaque(|opaque_encoder| { - filemap.encode(opaque_encoder) - }).unwrap(); - rbml_w.end_tag(); - } - - rbml_w.end_tag(); -} - -/// Serialize the text of the exported macros -fn encode_macro_defs(rbml_w: &mut Encoder, - krate: &hir::Crate, - tcx: TyCtxt) { - rbml_w.start_tag(tag_macro_defs); - for def in &krate.exported_macros { - rbml_w.start_tag(tag_macro_def); - - encode_name(rbml_w, def.name); - encode_attributes(rbml_w, &def.attrs); - let &BytePos(lo) = &def.span.lo; - let &BytePos(hi) = &def.span.hi; - rbml_w.wr_tagged_u32(tag_macro_def_span_lo, lo); - rbml_w.wr_tagged_u32(tag_macro_def_span_hi, hi); - - rbml_w.wr_tagged_str(tag_macro_def_body, - &::syntax::print::pprust::tts_to_string(&def.body)); - - rbml_w.end_tag(); - } - rbml_w.end_tag(); - - if tcx.sess.crate_types.borrow().contains(&CrateTypeRustcMacro) { - let id = tcx.sess.derive_registrar_fn.get().unwrap(); - let did = tcx.map.local_def_id(id); - rbml_w.wr_tagged_u32(tag_macro_derive_registrar, did.index.as_u32()); + // We're just going to write a list of crate 'name-hash-version's, with + // the assumption that they are numbered 1 to n. + // FIXME (#2166): This is not nearly enough to support correct versioning + // but is enough to get transitive crate dependencies working. + let deps = get_ordered_deps(self.cstore); + self.lazy_seq(deps.iter().map(|&(_, ref dep)| { + CrateDep { + name: syntax::parse::token::intern(dep.name()), + hash: dep.hash(), + explicitly_linked: dep.explicitly_linked.get(), + } + })) } -} -fn encode_struct_field_attrs(ecx: &EncodeContext, - rbml_w: &mut Encoder, - krate: &hir::Crate) { - struct StructFieldVisitor<'a, 'b:'a, 'c:'a, 'tcx:'b> { - ecx: &'a EncodeContext<'b, 'tcx>, - rbml_w: &'a mut Encoder<'c>, + fn encode_lang_items(&mut self) -> (LazySeq<(DefIndex, usize)>, LazySeq) { + let tcx = self.tcx; + let lang_items = tcx.lang_items.items().iter(); + (self.lazy_seq(lang_items.enumerate().filter_map(|(i, &opt_def_id)| { + if let Some(def_id) = opt_def_id { + if def_id.is_local() { + return Some((def_id.index, i)); + } + } + None + })), + self.lazy_seq_ref(&tcx.lang_items.missing)) } - impl<'a, 'b, 'c, 'tcx, 'v> Visitor<'v> for StructFieldVisitor<'a, 'b, 'c, 'tcx> { - fn visit_struct_field(&mut self, field: &hir::StructField) { - self.rbml_w.start_tag(tag_struct_field); - let def_id = self.ecx.tcx.map.local_def_id(field.id); - encode_def_id(self.rbml_w, def_id); - encode_attributes(self.rbml_w, &field.attrs); - self.rbml_w.end_tag(); - } + fn encode_native_libraries(&mut self) -> LazySeq<(NativeLibraryKind, String)> { + let used_libraries = self.tcx.sess.cstore.used_libraries(); + self.lazy_seq(used_libraries.into_iter().filter_map(|(lib, kind)| { + match kind { + cstore::NativeStatic => None, // these libraries are not propagated + cstore::NativeFramework | cstore::NativeUnknown => Some((kind, lib)), + } + })) + } + + fn encode_codemap(&mut self) -> LazySeq { + let codemap = self.tcx.sess.codemap(); + let all_filemaps = codemap.files.borrow(); + self.lazy_seq_ref(all_filemaps.iter() + .filter(|filemap| { + // No need to export empty filemaps, as they can't contain spans + // that need translation. + // Also no need to re-export imported filemaps, as any downstream + // crate will import them from their original source. 
+ !filemap.lines.borrow().is_empty() && !filemap.is_imported() + }) + .map(|filemap| &**filemap)) + } + + /// Serialize the text of the exported macros + fn encode_macro_defs(&mut self) -> LazySeq { + let tcx = self.tcx; + self.lazy_seq(tcx.map.krate().exported_macros.iter().map(|def| { + MacroDef { + name: def.name, + attrs: def.attrs.to_vec(), + span: def.span, + body: ::syntax::print::pprust::tts_to_string(&def.body), + } + })) } - - rbml_w.start_tag(tag_struct_fields); - krate.visit_all_items(&mut StructFieldVisitor { ecx: ecx, rbml_w: rbml_w }); - rbml_w.end_tag(); } - - -struct ImplVisitor<'a, 'tcx:'a> { +struct ImplVisitor<'a, 'tcx: 'a> { tcx: TyCtxt<'a, 'tcx, 'tcx>, - impls: FnvHashMap> + impls: FnvHashMap>, } impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> { @@ -1635,310 +1151,228 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ImplVisitor<'a, 'tcx> { if let hir::ItemImpl(..) = item.node { let impl_id = self.tcx.map.local_def_id(item.id); if let Some(trait_ref) = self.tcx.impl_trait_ref(impl_id) { - self.impls.entry(trait_ref.def_id) + self.impls + .entry(trait_ref.def_id) .or_insert(vec![]) - .push(impl_id); + .push(impl_id.index); } } } } -/// Encodes an index, mapping each trait to its (local) implementations. -fn encode_impls<'a>(ecx: &'a EncodeContext, - krate: &hir::Crate, - rbml_w: &'a mut Encoder) { - let mut visitor = ImplVisitor { - tcx: ecx.tcx, - impls: FnvHashMap() - }; - krate.visit_all_items(&mut visitor); - - rbml_w.start_tag(tag_impls); - for (trait_, trait_impls) in visitor.impls { - rbml_w.start_tag(tag_impls_trait); - encode_def_id(rbml_w, trait_); - for impl_ in trait_impls { - rbml_w.wr_tagged_u64(tag_impls_trait_impl, def_to_u64(impl_)); - } - rbml_w.end_tag(); - } - rbml_w.end_tag(); -} - -// Encodes all reachable symbols in this crate into the metadata. -// -// This pass is seeded off the reachability list calculated in the -// middle::reachable module but filters out items that either don't have a -// symbol associated with them (they weren't translated) or if they're an FFI -// definition (as that's not defined in this crate). -fn encode_reachable(ecx: &EncodeContext, rbml_w: &mut Encoder) { - rbml_w.start_tag(tag_reachable_ids); - for &id in ecx.reachable { - let def_id = ecx.tcx.map.local_def_id(id); - rbml_w.wr_tagged_u32(tag_reachable_id, def_id.index.as_u32()); - } - rbml_w.end_tag(); -} - -fn encode_crate_dep(rbml_w: &mut Encoder, - dep: &cstore::CrateMetadata) { - rbml_w.start_tag(tag_crate_dep); - rbml_w.wr_tagged_str(tag_crate_dep_crate_name, &dep.name()); - let hash = decoder::get_crate_hash(dep.data()); - rbml_w.wr_tagged_u64(tag_crate_dep_hash, hash.as_u64()); - rbml_w.wr_tagged_u8(tag_crate_dep_explicitly_linked, - dep.explicitly_linked.get() as u8); - rbml_w.end_tag(); -} - -fn encode_hash(rbml_w: &mut Encoder, hash: &Svh) { - rbml_w.wr_tagged_u64(tag_crate_hash, hash.as_u64()); -} - -fn encode_rustc_version(rbml_w: &mut Encoder) { - rbml_w.wr_tagged_str(tag_rustc_version, &rustc_version()); -} - -fn encode_crate_name(rbml_w: &mut Encoder, crate_name: &str) { - rbml_w.wr_tagged_str(tag_crate_crate_name, crate_name); -} - -fn encode_crate_disambiguator(rbml_w: &mut Encoder, crate_disambiguator: &str) { - rbml_w.wr_tagged_str(tag_crate_disambiguator, crate_disambiguator); -} +impl<'a, 'tcx> EncodeContext<'a, 'tcx> { + /// Encodes an index, mapping each trait to its (local) implementations. 
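`ImplVisitor` above builds the coherence index by visiting every item and, for each trait impl, pushing the impl's id into a per-trait bucket keyed by the trait's `DefId`. A reduced sketch of that accumulation with `HashMap::entry`, using plain integers for both kinds of id:

```rust
use std::collections::HashMap;

/// Group local impls by the trait they implement, mirroring the
/// `ImplVisitor` accumulation above. Inherent impls (no trait) are skipped.
fn impls_by_trait(items: &[(u32, Option<u32>)]) -> HashMap<u32, Vec<u32>> {
    let mut impls: HashMap<u32, Vec<u32>> = HashMap::new();
    for &(impl_id, trait_id) in items {
        if let Some(trait_id) = trait_id {
            impls.entry(trait_id).or_insert_with(Vec::new).push(impl_id);
        }
    }
    impls
}

fn main() {
    // (impl id, Some(trait id) for trait impls, None for inherent impls)
    let items = [(1, Some(100)), (2, None), (3, Some(100))];
    let map = impls_by_trait(&items);
    assert_eq!(map[&100], vec![1, 3]);
    assert!(!map.contains_key(&2));
}
```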
+ fn encode_impls(&mut self) -> LazySeq { + let mut visitor = ImplVisitor { + tcx: self.tcx, + impls: FnvHashMap(), + }; + self.tcx.map.krate().visit_all_items(&mut visitor); -fn encode_crate_triple(rbml_w: &mut Encoder, triple: &str) { - rbml_w.wr_tagged_str(tag_crate_triple, triple); -} + let all_impls: Vec<_> = visitor.impls + .into_iter() + .map(|(trait_def_id, impls)| { + TraitImpls { + trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index), + impls: self.lazy_seq(impls), + } + }) + .collect(); -fn encode_dylib_dependency_formats(rbml_w: &mut Encoder, ecx: &EncodeContext) { - let tag = tag_dylib_dependency_formats; - match ecx.tcx.sess.dependency_formats.borrow().get(&config::CrateTypeDylib) { - Some(arr) => { - let s = arr.iter().enumerate().filter_map(|(i, slot)| { - let kind = match *slot { - Linkage::NotLinked | - Linkage::IncludedFromDylib => return None, - Linkage::Dynamic => "d", - Linkage::Static => "s", - }; - Some(format!("{}:{}", i + 1, kind)) - }).collect::>(); - rbml_w.wr_tagged_str(tag, &s.join(",")); - } - None => { - rbml_w.wr_tagged_str(tag, ""); - } + self.lazy_seq(all_impls) } -} -fn encode_panic_strategy(rbml_w: &mut Encoder, ecx: &EncodeContext) { - match ecx.tcx.sess.opts.cg.panic { - PanicStrategy::Unwind => { - rbml_w.wr_tagged_u8(tag_panic_strategy, b'U'); - } - PanicStrategy::Abort => { - rbml_w.wr_tagged_u8(tag_panic_strategy, b'A'); + // Encodes all reachable symbols in this crate into the metadata. + // + // This pass is seeded off the reachability list calculated in the + // middle::reachable module but filters out items that either don't have a + // symbol associated with them (they weren't translated) or if they're an FFI + // definition (as that's not defined in this crate). + fn encode_reachable(&mut self) -> LazySeq { + let reachable = self.reachable; + let tcx = self.tcx; + self.lazy_seq(reachable.iter().map(|&id| tcx.map.local_def_id(id).index)) + } + + fn encode_dylib_dependency_formats(&mut self) -> LazySeq> { + match self.tcx.sess.dependency_formats.borrow().get(&config::CrateTypeDylib) { + Some(arr) => { + self.lazy_seq(arr.iter().map(|slot| { + match *slot { + Linkage::NotLinked | + Linkage::IncludedFromDylib => None, + + Linkage::Dynamic => Some(LinkagePreference::RequireDynamic), + Linkage::Static => Some(LinkagePreference::RequireStatic), + } + })) + } + None => LazySeq::empty(), } } -} - -pub fn encode_metadata(ecx: EncodeContext, krate: &hir::Crate) -> Vec { - let mut wr = Cursor::new(Vec::new()); - { - let mut rbml_w = Encoder::new(&mut wr); - encode_metadata_inner(&mut rbml_w, &ecx, krate) - } + fn encode_crate_root(&mut self) -> Lazy { + let mut i = self.position(); + let crate_deps = self.encode_crate_deps(); + let dylib_dependency_formats = self.encode_dylib_dependency_formats(); + let dep_bytes = self.position() - i; + + // Encode the language items. + i = self.position(); + let (lang_items, lang_items_missing) = self.encode_lang_items(); + let lang_item_bytes = self.position() - i; + + // Encode the native libraries used + i = self.position(); + let native_libraries = self.encode_native_libraries(); + let native_lib_bytes = self.position() - i; + + // Encode codemap + i = self.position(); + let codemap = self.encode_codemap(); + let codemap_bytes = self.position() - i; + + // Encode macro definitions + i = self.position(); + let macro_defs = self.encode_macro_defs(); + let macro_defs_bytes = self.position() - i; + + // Encode the def IDs of impls, for coherence checking. 
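`encode_crate_root` above sizes each metadata section by snapshotting the output position before and after writing it; those deltas feed the `meta_stats` printout later in this hunk. A minimal sketch of the same bookkeeping over a `std::io::Cursor`:

```rust
use std::io::{Cursor, Write};

/// Write one "section" and return how many bytes it occupied, by diffing the
/// cursor position around the write -- the same pattern as the
/// `self.position()` bookkeeping in `encode_crate_root`.
fn write_section(out: &mut Cursor<Vec<u8>>, payload: &[u8]) -> u64 {
    let start = out.position();
    out.write_all(payload).unwrap();
    out.position() - start
}

fn main() {
    let mut out = Cursor::new(Vec::new());
    let dep_bytes = write_section(&mut out, b"deps...");
    let item_bytes = write_section(&mut out, b"items.........");
    println!("  dep bytes: {}", dep_bytes);
    println!(" item bytes: {}", item_bytes);
    println!("total bytes: {}", out.position());
}
```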
+ i = self.position(); + let impls = self.encode_impls(); + let impl_bytes = self.position() - i; + + // Encode reachability info. + i = self.position(); + let reachable_ids = self.encode_reachable(); + let reachable_bytes = self.position() - i; + + // Encode and index the items. + i = self.position(); + let items = self.encode_info_for_items(); + let item_bytes = self.position() - i; + + i = self.position(); + let index = items.write_index(&mut self.opaque.cursor); + let index_bytes = self.position() - i; + + let tcx = self.tcx; + let link_meta = self.link_meta; + let is_proc_macro = tcx.sess.crate_types.borrow().contains(&CrateTypeProcMacro); + let root = self.lazy(&CrateRoot { + rustc_version: rustc_version(), + name: link_meta.crate_name.clone(), + triple: tcx.sess.opts.target_triple.clone(), + hash: link_meta.crate_hash, + disambiguator: tcx.sess.local_crate_disambiguator().to_string(), + panic_strategy: tcx.sess.panic_strategy(), + plugin_registrar_fn: tcx.sess + .plugin_registrar_fn + .get() + .map(|id| tcx.map.local_def_id(id).index), + macro_derive_registrar: if is_proc_macro { + let id = tcx.sess.derive_registrar_fn.get().unwrap(); + Some(tcx.map.local_def_id(id).index) + } else { + None + }, + + crate_deps: crate_deps, + dylib_dependency_formats: dylib_dependency_formats, + lang_items: lang_items, + lang_items_missing: lang_items_missing, + native_libraries: native_libraries, + codemap: codemap, + macro_defs: macro_defs, + impls: impls, + reachable_ids: reachable_ids, + index: index, + }); - // RBML compacts the encoded bytes whenever appropriate, - // so there are some garbages left after the end of the data. - let metalen = wr.seek(SeekFrom::Current(0)).unwrap() as usize; - let mut v = wr.into_inner(); - v.truncate(metalen); - assert_eq!(v.len(), metalen); - - // And here we run into yet another obscure archive bug: in which metadata - // loaded from archives may have trailing garbage bytes. Awhile back one of - // our tests was failing sporadically on the OSX 64-bit builders (both nopt - // and opt) by having rbml generate an out-of-bounds panic when looking at - // metadata. - // - // Upon investigation it turned out that the metadata file inside of an rlib - // (and ar archive) was being corrupted. Some compilations would generate a - // metadata file which would end in a few extra bytes, while other - // compilations would not have these extra bytes appended to the end. These - // extra bytes were interpreted by rbml as an extra tag, so they ended up - // being interpreted causing the out-of-bounds. - // - // The root cause of why these extra bytes were appearing was never - // discovered, and in the meantime the solution we're employing is to insert - // the length of the metadata to the start of the metadata. Later on this - // will allow us to slice the metadata to the precise length that we just - // generated regardless of trailing bytes that end up in it. - // - // We also need to store the metadata encoding version here, because - // rlibs don't have it. To get older versions of rustc to ignore - // this metadata, there are 4 zero bytes at the start, which are - // treated as a length of 0 by old compilers. 
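The removed comment block above explains the old framing: four zero bytes (old compilers read them as a zero length and ignore the blob), the encoding-version magic, a 4-byte big-endian payload length, and then the data, which lets the loader slice off trailing archive garbage. A standalone sketch of writing and re-slicing that frame, with a made-up version magic in place of `metadata_encoding_version`:

```rust
// Illustrative stand-in for `metadata_encoding_version`; not the real magic.
const VERSION_MAGIC: &'static [u8] = b"rust-meta-vX";

/// Frame the payload the way the removed code did: 4 zero bytes, version
/// magic, 4-byte big-endian length, then the payload itself.
fn frame(payload: &[u8]) -> Vec<u8> {
    let len = payload.len() as u32;
    let mut out = vec![0, 0, 0, 0];
    out.extend_from_slice(VERSION_MAGIC);
    out.extend_from_slice(&[(len >> 24) as u8, (len >> 16) as u8, (len >> 8) as u8, len as u8]);
    out.extend_from_slice(payload);
    out
}

/// Recover the exact payload even if the archive appended trailing garbage.
fn unframe(bytes: &[u8]) -> &[u8] {
    let start = 4 + VERSION_MAGIC.len();
    let len_bytes = &bytes[start..start + 4];
    let len = ((len_bytes[0] as usize) << 24)
        | ((len_bytes[1] as usize) << 16)
        | ((len_bytes[2] as usize) << 8)
        | (len_bytes[3] as usize);
    &bytes[start + 4..start + 4 + len]
}

fn main() {
    let mut blob = frame(b"metadata payload");
    blob.extend_from_slice(b"\0\0trailing archive garbage");
    assert_eq!(unframe(&blob), b"metadata payload");
}
```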
- - let len = v.len(); - let mut result = vec![]; - result.push(0); - result.push(0); - result.push(0); - result.push(0); - result.extend(metadata_encoding_version.iter().cloned()); - result.push((len >> 24) as u8); - result.push((len >> 16) as u8); - result.push((len >> 8) as u8); - result.push((len >> 0) as u8); - result.extend(v); - result -} + let total_bytes = self.position(); -fn encode_metadata_inner(rbml_w: &mut Encoder, - ecx: &EncodeContext, - krate: &hir::Crate) { - struct Stats { - attr_bytes: u64, - dep_bytes: u64, - lang_item_bytes: u64, - native_lib_bytes: u64, - plugin_registrar_fn_bytes: u64, - codemap_bytes: u64, - macro_defs_bytes: u64, - impl_bytes: u64, - reachable_bytes: u64, - item_bytes: u64, - index_bytes: u64, - xref_bytes: u64, - zero_bytes: u64, - total_bytes: u64, - } - let mut stats = Stats { - attr_bytes: 0, - dep_bytes: 0, - lang_item_bytes: 0, - native_lib_bytes: 0, - plugin_registrar_fn_bytes: 0, - codemap_bytes: 0, - macro_defs_bytes: 0, - impl_bytes: 0, - reachable_bytes: 0, - item_bytes: 0, - index_bytes: 0, - xref_bytes: 0, - zero_bytes: 0, - total_bytes: 0, - }; - - encode_rustc_version(rbml_w); - encode_crate_name(rbml_w, &ecx.link_meta.crate_name); - encode_crate_triple(rbml_w, &ecx.tcx.sess.opts.target_triple); - encode_hash(rbml_w, &ecx.link_meta.crate_hash); - encode_crate_disambiguator(rbml_w, &ecx.tcx.sess.local_crate_disambiguator()); - encode_dylib_dependency_formats(rbml_w, &ecx); - encode_panic_strategy(rbml_w, &ecx); - - let mut i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_attributes(rbml_w, &krate.attrs); - stats.attr_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_crate_deps(rbml_w, ecx.cstore); - stats.dep_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - // Encode the language items. - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_lang_items(&ecx, rbml_w); - stats.lang_item_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - // Encode the native libraries used - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_native_libraries(&ecx, rbml_w); - stats.native_lib_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - // Encode the plugin registrar function - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_plugin_registrar_fn(&ecx, rbml_w); - stats.plugin_registrar_fn_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - // Encode codemap - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_codemap(&ecx, rbml_w); - stats.codemap_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - // Encode macro definitions - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_macro_defs(rbml_w, krate, ecx.tcx); - stats.macro_defs_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - // Encode the def IDs of impls, for coherence checking. - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_impls(&ecx, krate, rbml_w); - stats.impl_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - // Encode reachability info. - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_reachable(&ecx, rbml_w); - stats.reachable_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - // Encode and index the items. 
- rbml_w.start_tag(tag_items); - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - let (items, xrefs) = encode_info_for_items(&ecx, rbml_w); - stats.item_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - rbml_w.end_tag(); - - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_item_index(rbml_w, items); - stats.index_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - encode_xrefs(&ecx, rbml_w, xrefs); - stats.xref_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap() - i; - - encode_struct_field_attrs(&ecx, rbml_w, krate); - - stats.total_bytes = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); - - if ecx.tcx.sess.meta_stats() { - for e in rbml_w.writer.get_ref() { - if *e == 0 { - stats.zero_bytes += 1; + if self.tcx.sess.meta_stats() { + let mut zero_bytes = 0; + for e in self.opaque.cursor.get_ref() { + if *e == 0 { + zero_bytes += 1; + } } + + println!("metadata stats:"); + println!(" dep bytes: {}", dep_bytes); + println!(" lang item bytes: {}", lang_item_bytes); + println!(" native bytes: {}", native_lib_bytes); + println!(" codemap bytes: {}", codemap_bytes); + println!(" macro def bytes: {}", macro_defs_bytes); + println!(" impl bytes: {}", impl_bytes); + println!(" reachable bytes: {}", reachable_bytes); + println!(" item bytes: {}", item_bytes); + println!(" index bytes: {}", index_bytes); + println!(" zero bytes: {}", zero_bytes); + println!(" total bytes: {}", total_bytes); } - println!("metadata stats:"); - println!(" attribute bytes: {}", stats.attr_bytes); - println!(" dep bytes: {}", stats.dep_bytes); - println!(" lang item bytes: {}", stats.lang_item_bytes); - println!(" native bytes: {}", stats.native_lib_bytes); - println!("plugin registrar bytes: {}", stats.plugin_registrar_fn_bytes); - println!(" codemap bytes: {}", stats.codemap_bytes); - println!(" macro def bytes: {}", stats.macro_defs_bytes); - println!(" impl bytes: {}", stats.impl_bytes); - println!(" reachable bytes: {}", stats.reachable_bytes); - println!(" item bytes: {}", stats.item_bytes); - println!(" index bytes: {}", stats.index_bytes); - println!(" xref bytes: {}", stats.xref_bytes); - println!(" zero bytes: {}", stats.zero_bytes); - println!(" total bytes: {}", stats.total_bytes); + root } } -// Get the encoded string for a type -pub fn encoded_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - t: Ty<'tcx>, - def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String) - -> Vec { - let mut wr = Cursor::new(Vec::new()); - tyencode::enc_ty(&mut wr, &tyencode::ctxt { - diag: tcx.sess.diagnostic(), - ds: def_id_to_string, - tcx: tcx, - abbrevs: &RefCell::new(FnvHashMap()) - }, t); - wr.into_inner() +// NOTE(eddyb) The following comment was preserved for posterity, even +// though it's no longer relevant as EBML (which uses nested & tagged +// "documents") was replaced with a scheme that can't go out of bounds. +// +// And here we run into yet another obscure archive bug: in which metadata +// loaded from archives may have trailing garbage bytes. Awhile back one of +// our tests was failing sporadically on the OSX 64-bit builders (both nopt +// and opt) by having ebml generate an out-of-bounds panic when looking at +// metadata. +// +// Upon investigation it turned out that the metadata file inside of an rlib +// (and ar archive) was being corrupted. 
Some compilations would generate a +// metadata file which would end in a few extra bytes, while other +// compilations would not have these extra bytes appended to the end. These +// extra bytes were interpreted by ebml as an extra tag, so they ended up +// being interpreted causing the out-of-bounds. +// +// The root cause of why these extra bytes were appearing was never +// discovered, and in the meantime the solution we're employing is to insert +// the length of the metadata to the start of the metadata. Later on this +// will allow us to slice the metadata to the precise length that we just +// generated regardless of trailing bytes that end up in it. + +pub fn encode_metadata<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + cstore: &cstore::CStore, + reexports: &def::ExportMap, + link_meta: &LinkMeta, + reachable: &NodeSet) + -> Vec { + let mut cursor = Cursor::new(vec![]); + cursor.write_all(METADATA_HEADER).unwrap(); + + // Will be filed with the root position after encoding everything. + cursor.write_all(&[0, 0, 0, 0]).unwrap(); + + let root = EncodeContext { + opaque: opaque::Encoder::new(&mut cursor), + tcx: tcx, + reexports: reexports, + link_meta: link_meta, + cstore: cstore, + reachable: reachable, + lazy_state: LazyState::NoNode, + type_shorthands: Default::default(), + predicate_shorthands: Default::default(), + } + .encode_crate_root(); + let mut result = cursor.into_inner(); + + // Encode the root position. + let header = METADATA_HEADER.len(); + let pos = root.position; + result[header + 0] = (pos >> 24) as u8; + result[header + 1] = (pos >> 16) as u8; + result[header + 2] = (pos >> 8) as u8; + result[header + 3] = (pos >> 0) as u8; + + result } diff --git a/src/librustc_metadata/index.rs b/src/librustc_metadata/index.rs index b850073462f5a..53e6988c756c9 100644 --- a/src/librustc_metadata/index.rs +++ b/src/librustc_metadata/index.rs @@ -8,53 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use schema::*; + use rustc::hir::def_id::{DefId, DefIndex}; -use rbml; use std::io::{Cursor, Write}; use std::slice; use std::u32; -/// As part of the metadata, we generate an index that stores, for -/// each DefIndex, the position of the corresponding RBML document (if -/// any). This is just a big `[u32]` slice, where an entry of -/// `u32::MAX` indicates that there is no RBML document. This little -/// struct just stores the offsets within the metadata of the start -/// and end of this slice. These are actually part of an RBML -/// document, but for looking things up in the metadata, we just -/// discard the RBML positioning and jump directly to the data. -pub struct Index { - data_start: usize, - data_end: usize, -} - -impl Index { - /// Given the RBML doc representing the index, save the offests - /// for later. - pub fn from_rbml(index: rbml::Doc) -> Index { - Index { data_start: index.start, data_end: index.end } - } - - /// Given the metadata, extract out the offset of a particular - /// DefIndex (if any). 
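The new `encode_metadata` above reserves four bytes right after `METADATA_HEADER`, encodes the whole crate, and then backpatches those bytes with the big-endian position of the `CrateRoot`, so a reader can jump straight to the root. A reduced sketch of that reserve-then-backpatch scheme; the header constant here is illustrative, not the real one from `schema.rs`:

```rust
// Illustrative header; the real METADATA_HEADER lives in schema.rs.
const HEADER: &'static [u8] = b"rust\0meta";

/// Write header + placeholder, append the body, then backpatch the
/// placeholder with the root's absolute byte offset (big-endian), as in
/// `encode_metadata` above.
fn encode(body: &[u8], root_offset_in_body: usize) -> Vec<u8> {
    let mut out = Vec::new();
    out.extend_from_slice(HEADER);
    out.extend_from_slice(&[0, 0, 0, 0]); // filled with the root position below
    out.extend_from_slice(body);
    let pos = HEADER.len() + 4 + root_offset_in_body;
    out[HEADER.len() + 0] = (pos >> 24) as u8;
    out[HEADER.len() + 1] = (pos >> 16) as u8;
    out[HEADER.len() + 2] = (pos >> 8) as u8;
    out[HEADER.len() + 3] = (pos >> 0) as u8;
    out
}

/// Read the root position back out of an encoded blob.
fn root_position(blob: &[u8]) -> usize {
    let p = &blob[HEADER.len()..HEADER.len() + 4];
    ((p[0] as usize) << 24) | ((p[1] as usize) << 16) | ((p[2] as usize) << 8) | (p[3] as usize)
}

fn main() {
    // Pretend the crate root was encoded 10 bytes into the body.
    let blob = encode(&[0xAAu8; 32], 10);
    assert_eq!(root_position(&blob), HEADER.len() + 4 + 10);
}
```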
- #[inline(never)] - pub fn lookup_item(&self, bytes: &[u8], def_index: DefIndex) -> Option { - let words = bytes_to_words(&bytes[self.data_start..self.data_end]); - let index = def_index.as_usize(); - - debug!("lookup_item: index={:?} words.len={:?}", - index, words.len()); - - let position = u32::from_be(words[index]); - if position == u32::MAX { - debug!("lookup_item: position=u32::MAX"); - None - } else { - debug!("lookup_item: position={:?}", position); - Some(position) - } - } -} - /// While we are generating the metadata, we also track the position /// of each DefIndex. It is not required that all definitions appear /// in the metadata, nor that they are serialized in order, and @@ -62,84 +22,83 @@ impl Index { /// `u32::MAX`. Whenever an index is visited, we fill in the /// appropriate spot by calling `record_position`. We should never /// visit the same index twice. -pub struct IndexData { +pub struct Index { positions: Vec, } -impl IndexData { - pub fn new(max_index: usize) -> IndexData { - IndexData { - positions: vec![u32::MAX; max_index] - } +impl Index { + pub fn new(max_index: usize) -> Index { + Index { positions: vec![u32::MAX; max_index] } } - pub fn record(&mut self, def_id: DefId, position: u64) { + pub fn record(&mut self, def_id: DefId, entry: Lazy) { assert!(def_id.is_local()); - self.record_index(def_id.index, position); + self.record_index(def_id.index, entry); } - pub fn record_index(&mut self, item: DefIndex, position: u64) { + pub fn record_index(&mut self, item: DefIndex, entry: Lazy) { let item = item.as_usize(); - assert!(position < (u32::MAX as u64)); - let position = position as u32; + assert!(entry.position < (u32::MAX as usize)); + let position = entry.position as u32; assert!(self.positions[item] == u32::MAX, "recorded position for item {:?} twice, first at {:?} and now at {:?}", - item, self.positions[item], position); + item, + self.positions[item], + position); - self.positions[item] = position; + self.positions[item] = position.to_le(); } - pub fn write_index(&self, buf: &mut Cursor>) { - for &position in &self.positions { - write_be_u32(buf, position); - } + pub fn write_index(&self, buf: &mut Cursor>) -> LazySeq { + let pos = buf.position(); + buf.write_all(words_to_bytes(&self.positions)).unwrap(); + LazySeq::with_position_and_length(pos as usize, self.positions.len()) } } -/// A dense index with integer keys. Different API from IndexData (should -/// these be merged?) -pub struct DenseIndex { - start: usize, - end: usize -} +impl<'tcx> LazySeq { + /// Given the metadata, extract out the offset of a particular + /// DefIndex (if any). 
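Old and new index agree on the layout described in these doc comments: one `u32` slot per `DefIndex`, with `u32::MAX` meaning "no entry", filled in as definitions are visited and read back later by plain array indexing. A self-contained sketch of that dense index, skipping the byte-order details the real one serializes with:

```rust
const NO_ENTRY: u32 = u32::MAX;

/// Dense map from DefIndex (here just `usize`) to a byte position in the
/// metadata, with `u32::MAX` marking definitions that have no entry.
struct PositionIndex {
    positions: Vec<u32>,
}

impl PositionIndex {
    fn new(num_defs: usize) -> PositionIndex {
        PositionIndex { positions: vec![NO_ENTRY; num_defs] }
    }

    /// Record a position; recording the same definition twice is a bug.
    fn record(&mut self, def_index: usize, position: u32) {
        assert_eq!(self.positions[def_index], NO_ENTRY,
                   "recorded position for item {:?} twice", def_index);
        self.positions[def_index] = position;
    }

    fn lookup(&self, def_index: usize) -> Option<u32> {
        match self.positions[def_index] {
            NO_ENTRY => None,
            pos => Some(pos),
        }
    }
}

fn main() {
    let mut index = PositionIndex::new(4);
    index.record(2, 1234);
    assert_eq!(index.lookup(2), Some(1234));
    assert_eq!(index.lookup(3), None);
}
```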
+ #[inline(never)] + pub fn lookup(&self, bytes: &[u8], def_index: DefIndex) -> Option>> { + let words = &bytes_to_words(&bytes[self.position..])[..self.len]; + let index = def_index.as_usize(); -impl DenseIndex { - pub fn lookup(&self, buf: &[u8], ix: u32) -> Option { - let data = bytes_to_words(&buf[self.start..self.end]); - data.get(ix as usize).map(|d| u32::from_be(*d)) - } - pub fn from_buf(buf: &[u8], start: usize, end: usize) -> Self { - assert!((end-start)%4 == 0 && start <= end && end <= buf.len()); - DenseIndex { - start: start, - end: end + debug!("Index::lookup: index={:?} words.len={:?}", + index, + words.len()); + + let position = u32::from_le(words[index]); + if position == u32::MAX { + debug!("Index::lookup: position=u32::MAX"); + None + } else { + debug!("Index::lookup: position={:?}", position); + Some(Lazy::with_position(position as usize)) } } -} -pub fn write_dense_index(entries: Vec, buf: &mut Cursor>) { - let elen = entries.len(); - assert!(elen < u32::MAX as usize); - - for entry in entries { - write_be_u32(buf, entry); + pub fn iter_enumerated<'a>(&self, + bytes: &'a [u8]) + -> impl Iterator>)> + 'a { + let words = &bytes_to_words(&bytes[self.position..])[..self.len]; + words.iter().enumerate().filter_map(|(index, &position)| { + if position == u32::MAX { + None + } else { + let position = u32::from_le(position) as usize; + Some((DefIndex::new(index), Lazy::with_position(position))) + } + }) } - - info!("write_dense_index: {} entries", elen); } -fn write_be_u32(w: &mut W, u: u32) { - let _ = w.write_all(&[ - (u >> 24) as u8, - (u >> 16) as u8, - (u >> 8) as u8, - (u >> 0) as u8, - ]); +fn bytes_to_words(b: &[u8]) -> &[u32] { + unsafe { slice::from_raw_parts(b.as_ptr() as *const u32, b.len() / 4) } } -fn bytes_to_words(b: &[u8]) -> &[u32] { - assert!(b.len() % 4 == 0); - unsafe { slice::from_raw_parts(b.as_ptr() as *const u32, b.len()/4) } +fn words_to_bytes(w: &[u32]) -> &[u8] { + unsafe { slice::from_raw_parts(w.as_ptr() as *const u8, w.len() * 4) } } diff --git a/src/librustc_metadata/index_builder.rs b/src/librustc_metadata/index_builder.rs index 1d3d09d6bc2d7..9938e20d1861d 100644 --- a/src/librustc_metadata/index_builder.rs +++ b/src/librustc_metadata/index_builder.rs @@ -28,10 +28,9 @@ //! incremental compilation purposes. //! //! The `IndexBuilder` facilitates both of these. It is created -//! with an RBML encoder isntance (`rbml_w`) along with an -//! `EncodingContext` (`ecx`), which it encapsulates. It has one main -//! method, `record()`. You invoke `record` like so to create a new -//! `data_item` element in the list: +//! with an `EncodingContext` (`ecx`), which it encapsulates. +//! It has one main method, `record()`. You invoke `record` +//! like so to create a new `data_item` element in the list: //! //! ``` //! index.record(some_def_id, callback_fn, data) @@ -39,17 +38,11 @@ //! //! What record will do is to (a) record the current offset, (b) emit //! the `common::data_item` tag, and then call `callback_fn` with the -//! given data as well as an `ItemContentBuilder`. Once `callback_fn` +//! given data as well as the `EncodingContext`. Once `callback_fn` //! returns, the `common::data_item` tag will be closed. //! -//! The `ItemContentBuilder` is another type that just offers access -//! to the `ecx` and `rbml_w` that were given in, as well as -//! maintaining a list of `xref` instances, which are used to extract -//! common data so it is not re-serialized. -//! -//! `ItemContentBuilder` is a distinct type which does not offer the -//! 
`record` method, so that we can ensure that `common::data_item` elements -//! are never nested. +//! `EncodingContext` does not offer the `record` method, so that we +//! can ensure that `common::data_item` elements are never nested. //! //! In addition, while the `callback_fn` is executing, we will push a //! task `MetaData(some_def_id)`, which can then observe the @@ -62,59 +55,51 @@ //! give a callback fn, rather than taking a closure: it allows us to //! easily control precisely what data is given to that fn. -use common::tag_items_data_item; use encoder::EncodeContext; -use index::IndexData; -use rbml::writer::Encoder; +use index::Index; +use schema::*; + use rustc::dep_graph::DepNode; use rustc::hir; use rustc::hir::def_id::DefId; -use rustc::ty::{self, TyCtxt}; -use rustc_data_structures::fnv::FnvHashMap; +use rustc::ty::TyCtxt; use syntax::ast; +use std::ops::{Deref, DerefMut}; + /// Builder that can encode new items, adding them into the index. /// Item encoding cannot be nested. -pub struct IndexBuilder<'a, 'tcx: 'a, 'encoder: 'a> { - items: IndexData, - builder: ItemContentBuilder<'a, 'tcx, 'encoder>, +pub struct IndexBuilder<'a, 'b: 'a, 'tcx: 'b> { + items: Index, + pub ecx: &'a mut EncodeContext<'b, 'tcx>, } -/// Builder that can encode the content of items, but can't start a -/// new item itself. Most code is attached to here. -pub struct ItemContentBuilder<'a, 'tcx: 'a, 'encoder: 'a> { - xrefs: FnvHashMap, u32>, // sequentially-assigned - pub ecx: &'a EncodeContext<'a, 'tcx>, - pub rbml_w: &'a mut Encoder<'encoder>, +impl<'a, 'b, 'tcx> Deref for IndexBuilder<'a, 'b, 'tcx> { + type Target = EncodeContext<'b, 'tcx>; + fn deref(&self) -> &Self::Target { + self.ecx + } } -/// "interned" entries referenced by id -#[derive(PartialEq, Eq, Hash)] -pub enum XRef<'tcx> { Predicate(ty::Predicate<'tcx>) } +impl<'a, 'b, 'tcx> DerefMut for IndexBuilder<'a, 'b, 'tcx> { + fn deref_mut(&mut self) -> &mut Self::Target { + self.ecx + } +} -impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> { - pub fn new(ecx: &'a EncodeContext<'a, 'tcx>, - rbml_w: &'a mut Encoder<'encoder>) - -> Self { +impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> { + pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self { IndexBuilder { - items: IndexData::new(ecx.tcx.map.num_local_def_ids()), - builder: ItemContentBuilder { - ecx: ecx, - xrefs: FnvHashMap(), - rbml_w: rbml_w, - }, + items: Index::new(ecx.tcx.map.num_local_def_ids()), + ecx: ecx, } } - pub fn ecx(&self) -> &'a EncodeContext<'a, 'tcx> { - self.builder.ecx() - } - /// Emit the data for a def-id to the metadata. The function to /// emit the data is `op`, and it will be given `data` as - /// arguments. This `record` function will start/end an RBML tag - /// and record the current offset for use in the index, calling - /// `op` to generate the data in the RBML tag. + /// arguments. This `record` function will call `op` to generate + /// the `Entry` (which may point to other encoded information) + /// and will then record the `Lazy` for use in the index. /// /// In addition, it will setup a dep-graph task to track what data /// `op` accesses to generate the metadata, which is later used by @@ -129,32 +114,18 @@ impl<'a, 'tcx, 'encoder> IndexBuilder<'a, 'tcx, 'encoder> { /// content system. 
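The rewritten docs above keep the original design point: `record` takes a plain `fn` pointer plus explicit data rather than a closure, so everything the callback reads is visible at the call site and can be tracked by the dep-graph task pushed around it. A stripped-down sketch of that calling convention with a toy context and entry type:

```rust
struct Ctx {
    log: Vec<String>,
}

struct Entry {
    description: String,
}

struct Builder {
    ctx: Ctx,
    entries: Vec<(u32, Entry)>,
}

impl Builder {
    /// Like `IndexBuilder::record`: a plain `fn` pointer plus explicit data,
    /// so the inputs to the callback are exactly what the caller passed in.
    fn record<D>(&mut self, id: u32, op: fn(&mut Ctx, D) -> Entry, data: D) {
        let entry = op(&mut self.ctx, data);
        self.entries.push((id, entry));
    }
}

fn encode_widget(ctx: &mut Ctx, (name, size): (&str, u32)) -> Entry {
    ctx.log.push(format!("encoding {}", name));
    Entry { description: format!("{} ({} bytes)", name, size) }
}

fn main() {
    let mut builder = Builder { ctx: Ctx { log: Vec::new() }, entries: Vec::new() };
    builder.record(1, encode_widget, ("gear", 16u32));
    assert_eq!(builder.entries[0].1.description, "gear (16 bytes)");
}
```

Because `op` is a function pointer rather than a closure, it cannot capture state behind the builder's back, which is what makes the dependency-tracking described in the surrounding doc comments feasible.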
pub fn record(&mut self, id: DefId, - op: fn(&mut ItemContentBuilder<'a, 'tcx, 'encoder>, DATA), + op: fn(&mut EncodeContext<'b, 'tcx>, DATA) -> Entry<'tcx>, data: DATA) where DATA: DepGraphRead { - let position = self.builder.rbml_w.mark_stable_position(); - self.items.record(id, position); - let _task = self.ecx().tcx.dep_graph.in_task(DepNode::MetaData(id)); - self.builder.rbml_w.start_tag(tag_items_data_item).unwrap(); - data.read(self.ecx().tcx); - op(&mut self.builder, data); - self.builder.rbml_w.end_tag().unwrap(); - } - - pub fn into_fields(self) -> (IndexData, FnvHashMap, u32>) { - (self.items, self.builder.xrefs) - } -} - -impl<'a, 'tcx, 'encoder> ItemContentBuilder<'a, 'tcx, 'encoder> { - pub fn ecx(&self) -> &'a EncodeContext<'a, 'tcx> { - self.ecx + let _task = self.tcx.dep_graph.in_task(DepNode::MetaData(id)); + data.read(self.tcx); + let entry = op(&mut self.ecx, data); + self.items.record(id, self.ecx.lazy(&entry)); } - pub fn add_xref(&mut self, xref: XRef<'tcx>) -> u32 { - let old_len = self.xrefs.len() as u32; - *self.xrefs.entry(xref).or_insert(old_len) + pub fn into_items(self) -> Index { + self.items } } @@ -167,11 +138,11 @@ pub trait DepGraphRead { } impl DepGraphRead for DefId { - fn read(&self, _tcx: TyCtxt) { } + fn read(&self, _tcx: TyCtxt) {} } impl DepGraphRead for ast::NodeId { - fn read(&self, _tcx: TyCtxt) { } + fn read(&self, _tcx: TyCtxt) {} } impl DepGraphRead for Option @@ -208,8 +179,8 @@ macro_rules! read_tuple { } } } -read_tuple!(A,B); -read_tuple!(A,B,C); +read_tuple!(A, B); +read_tuple!(A, B, C); macro_rules! read_hir { ($t:ty) => { @@ -237,7 +208,7 @@ read_hir!(hir::ForeignItem); pub struct Untracked(pub T); impl DepGraphRead for Untracked { - fn read(&self, _tcx: TyCtxt) { } + fn read(&self, _tcx: TyCtxt) {} } /// Newtype that can be used to package up misc data extracted from a diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index 84323d4646607..ef81dbd7f29ec 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -18,53 +18,48 @@ #![cfg_attr(not(stage0), deny(warnings))] #![feature(box_patterns)] +#![feature(conservative_impl_trait)] +#![feature(core_intrinsics)] #![feature(dotdot_in_tuple_patterns)] -#![feature(enumset)] -#![feature(question_mark)] +#![feature(proc_macro_internals)] +#![feature(proc_macro_lib)] +#![cfg_attr(stage0, feature(question_mark))] #![feature(quote)] #![feature(rustc_diagnostic_macros)] -#![feature(rustc_macro_lib)] -#![feature(rustc_macro_internals)] #![feature(rustc_private)] +#![feature(specialization)] #![feature(staged_api)] -#[macro_use] extern crate log; -#[macro_use] extern crate syntax; -#[macro_use] #[no_link] extern crate rustc_bitflags; +#[macro_use] +extern crate log; +#[macro_use] +extern crate syntax; extern crate syntax_pos; extern crate flate; -extern crate rbml; extern crate serialize as rustc_serialize; // used by deriving extern crate rustc_errors as errors; extern crate syntax_ext; +extern crate proc_macro; #[macro_use] extern crate rustc; -extern crate rustc_data_structures; extern crate rustc_back; -extern crate rustc_llvm; -extern crate rustc_macro; extern crate rustc_const_math; +extern crate rustc_data_structures; +extern crate rustc_llvm; -pub use rustc::middle; - -#[macro_use] -mod macros; - -pub mod diagnostics; +mod diagnostics; -pub mod astencode; -pub mod common; -pub mod def_key; -pub mod tyencode; -pub mod tydecode; -pub mod encoder; +mod astencode; mod index_builder; -pub mod decoder; +mod index; +mod encoder; +mod decoder; +mod 
cstore_impl; +mod schema; + pub mod creader; -pub mod csearch; pub mod cstore; -pub mod index; -pub mod loader; -pub mod macro_import; -pub mod tls_context; +pub mod locator; + +__build_diagnostic_array! { librustc_metadata, DIAGNOSTICS } diff --git a/src/librustc_metadata/loader.rs b/src/librustc_metadata/locator.rs similarity index 78% rename from src/librustc_metadata/loader.rs rename to src/librustc_metadata/locator.rs index a4f8ee4779905..0461d7ec061d4 100644 --- a/src/librustc_metadata/loader.rs +++ b/src/librustc_metadata/locator.rs @@ -210,11 +210,11 @@ //! //! That's the general overview of loading crates in the compiler, but it's by //! no means all of the necessary details. Take a look at the rest of -//! metadata::loader or metadata::creader for all the juicy details! +//! metadata::locator or metadata::creader for all the juicy details! -use cstore::{MetadataBlob, MetadataVec, MetadataArchive}; -use common::{metadata_encoding_version, rustc_version}; -use decoder; +use cstore::MetadataBlob; +use creader::Library; +use schema::{METADATA_HEADER, rustc_version}; use rustc::hir::svh::Svh; use rustc::session::Session; @@ -264,12 +264,6 @@ pub struct Context<'a> { pub should_match_name: bool, } -pub struct Library { - pub dylib: Option<(PathBuf, PathKind)>, - pub rlib: Option<(PathBuf, PathKind)>, - pub metadata: MetadataBlob, -} - pub struct ArchiveMetadata { _archive: ArchiveRO, // points into self._archive @@ -279,7 +273,7 @@ pub struct ArchiveMetadata { pub struct CratePaths { pub ident: String, pub dylib: Option, - pub rlib: Option + pub rlib: Option, } pub const METADATA_FILENAME: &'static str = "rust.metadata.bin"; @@ -287,14 +281,14 @@ pub const METADATA_FILENAME: &'static str = "rust.metadata.bin"; #[derive(Copy, Clone, PartialEq)] enum CrateFlavor { Rlib, - Dylib + Dylib, } impl fmt::Display for CrateFlavor { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(match *self { CrateFlavor::Rlib => "rlib", - CrateFlavor::Dylib => "dylib" + CrateFlavor::Dylib => "dylib", }) } } @@ -302,10 +296,10 @@ impl fmt::Display for CrateFlavor { impl CratePaths { fn paths(&self) -> Vec { match (&self.dylib, &self.rlib) { - (&None, &None) => vec!(), + (&None, &None) => vec![], (&Some(ref p), &None) | - (&None, &Some(ref p)) => vec!(p.clone()), - (&Some(ref p1), &Some(ref p2)) => vec!(p1.clone(), p2.clone()), + (&None, &Some(ref p)) => vec![p.clone()], + (&Some(ref p1), &Some(ref p2)) => vec![p1.clone(), p2.clone()], } } } @@ -316,59 +310,78 @@ impl<'a> Context<'a> { } pub fn load_library_crate(&mut self) -> Library { - self.find_library_crate().unwrap_or_else(|| self.report_load_errs()) + self.find_library_crate().unwrap_or_else(|| self.report_errs()) } - pub fn report_load_errs(&mut self) -> ! { + pub fn report_errs(&mut self) -> ! 
{ let add = match self.root { &None => String::new(), - &Some(ref r) => format!(" which `{}` depends on", - r.ident) + &Some(ref r) => format!(" which `{}` depends on", r.ident), }; let mut err = if !self.rejected_via_hash.is_empty() { - struct_span_err!(self.sess, self.span, E0460, + struct_span_err!(self.sess, + self.span, + E0460, "found possibly newer version of crate `{}`{}", - self.ident, add) + self.ident, + add) } else if !self.rejected_via_triple.is_empty() { - struct_span_err!(self.sess, self.span, E0461, + struct_span_err!(self.sess, + self.span, + E0461, "couldn't find crate `{}` with expected target triple {}{}", - self.ident, self.triple, add) + self.ident, + self.triple, + add) } else if !self.rejected_via_kind.is_empty() { - struct_span_err!(self.sess, self.span, E0462, + struct_span_err!(self.sess, + self.span, + E0462, "found staticlib `{}` instead of rlib or dylib{}", - self.ident, add) + self.ident, + add) } else if !self.rejected_via_version.is_empty() { - struct_span_err!(self.sess, self.span, E0514, + struct_span_err!(self.sess, + self.span, + E0514, "found crate `{}` compiled by an incompatible version of rustc{}", - self.ident, add) + self.ident, + add) } else { - let mut err = struct_span_err!(self.sess, self.span, E0463, + let mut err = struct_span_err!(self.sess, + self.span, + E0463, "can't find crate for `{}`{}", - self.ident, add); + self.ident, + add); err.span_label(self.span, &format!("can't find crate")); err }; if !self.rejected_via_triple.is_empty() { let mismatches = self.rejected_via_triple.iter(); - for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() { + for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() { err.note(&format!("crate `{}`, path #{}, triple {}: {}", - self.ident, i+1, got, path.display())); + self.ident, + i + 1, + got, + path.display())); } } if !self.rejected_via_hash.is_empty() { err.note("perhaps that crate needs to be recompiled?"); let mismatches = self.rejected_via_hash.iter(); - for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() { - err.note(&format!("crate `{}` path #{}: {}", - self.ident, i+1, path.display())); + for (i, &CrateMismatch { ref path, .. }) in mismatches.enumerate() { + err.note(&format!("crate `{}` path #{}: {}", self.ident, i + 1, path.display())); } match self.root { &None => {} &Some(ref r) => { for (i, path) in r.paths().iter().enumerate() { err.note(&format!("crate `{}` path #{}: {}", - r.ident, i+1, path.display())); + r.ident, + i + 1, + path.display())); } } } @@ -377,8 +390,7 @@ impl<'a> Context<'a> { err.help("please recompile that crate using --crate-type lib"); let mismatches = self.rejected_via_kind.iter(); for (i, &CrateMismatch { ref path, .. 
}) in mismatches.enumerate() { - err.note(&format!("crate `{}` path #{}: {}", - self.ident, i+1, path.display())); + err.note(&format!("crate `{}` path #{}: {}", self.ident, i + 1, path.display())); } } if !self.rejected_via_version.is_empty() { @@ -387,7 +399,10 @@ impl<'a> Context<'a> { let mismatches = self.rejected_via_version.iter(); for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() { err.note(&format!("crate `{}` path #{}: {} compiled by {:?}", - self.ident, i+1, path.display(), got)); + self.ident, + i + 1, + path.display(), + got)); } } @@ -416,7 +431,7 @@ impl<'a> Context<'a> { let staticlib_prefix = format!("{}{}", staticpair.0, self.crate_name); let mut candidates = FnvHashMap(); - let mut staticlibs = vec!(); + let mut staticlibs = vec![]; // First, find all possible candidate rlibs and dylibs purely based on // the name of the files themselves. We're trying to match against an @@ -436,38 +451,36 @@ impl<'a> Context<'a> { None => return FileDoesntMatch, Some(file) => file, }; - let (hash, rlib) = if file.starts_with(&rlib_prefix[..]) && - file.ends_with(".rlib") { - (&file[(rlib_prefix.len()) .. (file.len() - ".rlib".len())], - true) + let (hash, rlib) = if file.starts_with(&rlib_prefix[..]) && file.ends_with(".rlib") { + (&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], true) } else if file.starts_with(&dylib_prefix) && - file.ends_with(&dypair.1) { - (&file[(dylib_prefix.len()) .. (file.len() - dypair.1.len())], - false) + file.ends_with(&dypair.1) { + (&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], false) } else { - if file.starts_with(&staticlib_prefix[..]) && - file.ends_with(&staticpair.1) { + if file.starts_with(&staticlib_prefix[..]) && file.ends_with(&staticpair.1) { staticlibs.push(CrateMismatch { path: path.to_path_buf(), - got: "static".to_string() + got: "static".to_string(), }); } - return FileDoesntMatch + return FileDoesntMatch; }; info!("lib candidate: {}", path.display()); let hash_str = hash.to_string(); let slot = candidates.entry(hash_str) - .or_insert_with(|| (FnvHashMap(), FnvHashMap())); + .or_insert_with(|| (FnvHashMap(), FnvHashMap())); let (ref mut rlibs, ref mut dylibs) = *slot; - fs::canonicalize(path).map(|p| { - if rlib { - rlibs.insert(p, kind); - } else { - dylibs.insert(p, kind); - } - FileMatches - }).unwrap_or(FileDoesntMatch) + fs::canonicalize(path) + .map(|p| { + if rlib { + rlibs.insert(p, kind); + } else { + dylibs.insert(p, kind); + } + FileMatches + }) + .unwrap_or(FileDoesntMatch) }); self.rejected_via_kind.extend(staticlibs); @@ -485,11 +498,12 @@ impl<'a> Context<'a> { let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot); if let Some((h, m)) = slot { - libraries.insert(h, Library { - dylib: dylib, - rlib: rlib, - metadata: m, - }); + libraries.insert(h, + Library { + dylib: dylib, + rlib: rlib, + metadata: m, + }); } } @@ -500,7 +514,9 @@ impl<'a> Context<'a> { 0 => None, 1 => Some(libraries.into_iter().next().unwrap().1), _ => { - let mut err = struct_span_err!(self.sess, self.span, E0464, + let mut err = struct_span_err!(self.sess, + self.span, + E0464, "multiple matching crates for `{}`", self.crate_name); err.note("candidates:"); @@ -511,9 +527,7 @@ impl<'a> Context<'a> { if let Some((ref p, _)) = lib.rlib { err.note(&format!("path: {}", p.display())); } - let data = lib.metadata.as_slice(); - let name = decoder::get_crate_name(data); - note_crate_name(&mut err, &name); + note_crate_name(&mut err, 
&lib.metadata.get_root().name); } err.emit(); None @@ -529,8 +543,11 @@ impl<'a> Context<'a> { // read the metadata from it if `*slot` is `None`. If the metadata couldn't // be read, it is assumed that the file isn't a valid rust library (no // errors are emitted). - fn extract_one(&mut self, m: FnvHashMap, flavor: CrateFlavor, - slot: &mut Option<(Svh, MetadataBlob)>) -> Option<(PathBuf, PathKind)> { + fn extract_one(&mut self, + m: FnvHashMap, + flavor: CrateFlavor, + slot: &mut Option<(Svh, MetadataBlob)>) + -> Option<(PathBuf, PathKind)> { let mut ret: Option<(PathBuf, PathKind)> = None; let mut error = 0; @@ -540,9 +557,9 @@ impl<'a> Context<'a> { // read both, but reading dylib metadata is quite // slow. if m.is_empty() { - return None + return None; } else if m.len() == 1 { - return Some(m.into_iter().next().unwrap()) + return Some(m.into_iter().next().unwrap()); } } @@ -551,27 +568,32 @@ impl<'a> Context<'a> { info!("{} reading metadata from: {}", flavor, lib.display()); let (hash, metadata) = match get_metadata_section(self.target, flavor, &lib) { Ok(blob) => { - if let Some(h) = self.crate_matches(blob.as_slice(), &lib) { + if let Some(h) = self.crate_matches(&blob, &lib) { (h, blob) } else { info!("metadata mismatch"); - continue + continue; } } Err(err) => { info!("no metadata found: {}", err); - continue + continue; } }; // If we see multiple hashes, emit an error about duplicate candidates. if slot.as_ref().map_or(false, |s| s.0 != hash) { - let mut e = struct_span_err!(self.sess, self.span, E0465, + let mut e = struct_span_err!(self.sess, + self.span, + E0465, "multiple {} candidates for `{}` found", - flavor, self.crate_name); + flavor, + self.crate_name); e.span_note(self.span, &format!(r"candidate #1: {}", - ret.as_ref().unwrap().0 - .display())); + ret.as_ref() + .unwrap() + .0 + .display())); if let Some(ref mut e) = err { e.emit(); } @@ -582,9 +604,10 @@ impl<'a> Context<'a> { if error > 0 { error += 1; err.as_mut().unwrap().span_note(self.span, - &format!(r"candidate #{}: {}", error, + &format!(r"candidate #{}: {}", + error, lib.display())); - continue + continue; } *slot = Some((hash, metadata)); ret = Some((lib, kind)); @@ -598,54 +621,50 @@ impl<'a> Context<'a> { } } - fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> Option { - let crate_rustc_version = decoder::crate_rustc_version(crate_data); - if crate_rustc_version != Some(rustc_version()) { - let message = crate_rustc_version.unwrap_or(format!("an unknown compiler")); - info!("Rejecting via version: expected {} got {}", rustc_version(), message); + fn crate_matches(&mut self, metadata: &MetadataBlob, libpath: &Path) -> Option { + let root = metadata.get_root(); + let rustc_version = rustc_version(); + if root.rustc_version != rustc_version { + info!("Rejecting via version: expected {} got {}", + rustc_version, + root.rustc_version); self.rejected_via_version.push(CrateMismatch { path: libpath.to_path_buf(), - got: message + got: root.rustc_version, }); return None; } if self.should_match_name { - match decoder::maybe_get_crate_name(crate_data) { - Some(ref name) if self.crate_name == *name => {} - _ => { info!("Rejecting via crate name"); return None } + if self.crate_name != root.name { + info!("Rejecting via crate name"); + return None; } } - let hash = match decoder::maybe_get_crate_hash(crate_data) { - None => { info!("Rejecting via lack of crate hash"); return None; } - Some(h) => h, - }; - let triple = match decoder::get_crate_triple(crate_data) { - None => { debug!("triple not 
present"); return None } - Some(t) => t, - }; - if triple != self.triple { - info!("Rejecting via crate triple: expected {} got {}", self.triple, triple); + if root.triple != self.triple { + info!("Rejecting via crate triple: expected {} got {}", + self.triple, + root.triple); self.rejected_via_triple.push(CrateMismatch { path: libpath.to_path_buf(), - got: triple.to_string() + got: root.triple, }); return None; } if let Some(myhash) = self.hash { - if *myhash != hash { - info!("Rejecting via hash: expected {} got {}", *myhash, hash); + if *myhash != root.hash { + info!("Rejecting via hash: expected {} got {}", *myhash, root.hash); self.rejected_via_hash.push(CrateMismatch { path: libpath.to_path_buf(), - got: myhash.to_string() + got: myhash.to_string(), }); return None; } } - Some(hash) + Some(root.hash) } @@ -663,8 +682,8 @@ impl<'a> Context<'a> { (t.options.staticlib_prefix.clone(), t.options.staticlib_suffix.clone()) } - fn find_commandline_library<'b, LOCS> (&mut self, locs: LOCS) -> Option - where LOCS: Iterator + fn find_commandline_library<'b, LOCS>(&mut self, locs: LOCS) -> Option + where LOCS: Iterator { // First, filter out all libraries that look suspicious. We only accept // files which actually exist that have the correct naming scheme for @@ -677,30 +696,33 @@ impl<'a> Context<'a> { let locs = locs.map(|l| PathBuf::from(l)).filter(|loc| { if !loc.exists() { sess.err(&format!("extern location for {} does not exist: {}", - self.crate_name, loc.display())); + self.crate_name, + loc.display())); return false; } let file = match loc.file_name().and_then(|s| s.to_str()) { Some(file) => file, None => { sess.err(&format!("extern location for {} is not a file: {}", - self.crate_name, loc.display())); + self.crate_name, + loc.display())); return false; } }; if file.starts_with("lib") && file.ends_with(".rlib") { - return true + return true; } else { let (ref prefix, ref suffix) = dylibname; - if file.starts_with(&prefix[..]) && - file.ends_with(&suffix[..]) { - return true + if file.starts_with(&prefix[..]) && file.ends_with(&suffix[..]) { + return true; } } sess.struct_err(&format!("extern location for {} is of an unknown type: {}", - self.crate_name, loc.display())) + self.crate_name, + loc.display())) .help(&format!("file name should be lib*.rlib or {}*.{}", - dylibname.0, dylibname.1)) + dylibname.0, + dylibname.1)) .emit(); false }); @@ -709,11 +731,9 @@ impl<'a> Context<'a> { // there's at most one rlib and at most one dylib. 
for loc in locs { if loc.file_name().unwrap().to_str().unwrap().ends_with(".rlib") { - rlibs.insert(fs::canonicalize(&loc).unwrap(), - PathKind::ExternFlag); + rlibs.insert(fs::canonicalize(&loc).unwrap(), PathKind::ExternFlag); } else { - dylibs.insert(fs::canonicalize(&loc).unwrap(), - PathKind::ExternFlag); + dylibs.insert(fs::canonicalize(&loc).unwrap(), PathKind::ExternFlag); } } }; @@ -723,13 +743,17 @@ impl<'a> Context<'a> { let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot); - if rlib.is_none() && dylib.is_none() { return None } + if rlib.is_none() && dylib.is_none() { + return None; + } match slot { - Some((_, metadata)) => Some(Library { - dylib: dylib, - rlib: rlib, - metadata: metadata, - }), + Some((_, metadata)) => { + Some(Library { + dylib: dylib, + rlib: rlib, + metadata: metadata, + }) + } None => None, } } @@ -742,9 +766,9 @@ pub fn note_crate_name(err: &mut DiagnosticBuilder, name: &str) { impl ArchiveMetadata { fn new(ar: ArchiveRO) -> Option { let data = { - let section = ar.iter().filter_map(|s| s.ok()).find(|sect| { - sect.name() == Some(METADATA_FILENAME) - }); + let section = ar.iter() + .filter_map(|s| s.ok()) + .find(|sect| sect.name() == Some(METADATA_FILENAME)); match section { Some(s) => s.data() as *const [u8], None => { @@ -760,17 +784,15 @@ impl ArchiveMetadata { }) } - pub fn as_slice<'a>(&'a self) -> &'a [u8] { unsafe { &*self.data } } + pub fn as_slice<'a>(&'a self) -> &'a [u8] { + unsafe { &*self.data } + } } -fn verify_decompressed_encoding_version(blob: &MetadataBlob, filename: &Path) - -> Result<(), String> -{ - let data = blob.as_slice_raw(); - if data.len() < 4+metadata_encoding_version.len() || - !<[u8]>::eq(&data[..4], &[0, 0, 0, 0]) || - &data[4..4+metadata_encoding_version.len()] != metadata_encoding_version - { +fn verify_decompressed_encoding_version(blob: &MetadataBlob, + filename: &Path) + -> Result<(), String> { + if !blob.is_compatible() { Err((format!("incompatible metadata version found: '{}'", filename.display()))) } else { @@ -779,16 +801,21 @@ fn verify_decompressed_encoding_version(blob: &MetadataBlob, filename: &Path) } // Just a small wrapper to time how long reading metadata takes. 
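// Aside (assumption, sketched for clarity): `MetadataBlob::is_compatible`,
// called by `verify_decompressed_encoding_version` above, lives in the decoder
// half of this patch and is expected to amount to a prefix check against
// METADATA_HEADER from schema.rs, roughly:
fn is_compatible_sketch(raw_bytes: &[u8], metadata_header: &[u8]) -> bool {
    // The header already carries the 4 leading zero bytes and METADATA_VERSION,
    // so a simple prefix comparison is enough to reject older formats.
    raw_bytes.starts_with(metadata_header)
}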
-fn get_metadata_section(target: &Target, flavor: CrateFlavor, filename: &Path) +fn get_metadata_section(target: &Target, + flavor: CrateFlavor, + filename: &Path) -> Result { let start = Instant::now(); let ret = get_metadata_section_imp(target, flavor, filename); - info!("reading {:?} => {:?}", filename.file_name().unwrap(), + info!("reading {:?} => {:?}", + filename.file_name().unwrap(), start.elapsed()); - return ret + return ret; } -fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Path) +fn get_metadata_section_imp(target: &Target, + flavor: CrateFlavor, + filename: &Path) -> Result { if !filename.exists() { return Err(format!("no such file: '{}'", filename.display())); @@ -801,13 +828,11 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat Some(ar) => ar, None => { debug!("llvm didn't like `{}`", filename.display()); - return Err(format!("failed to read rlib metadata: '{}'", - filename.display())); + return Err(format!("failed to read rlib metadata: '{}'", filename.display())); } }; - return match ArchiveMetadata::new(archive).map(|ar| MetadataArchive(ar)) { - None => Err(format!("failed to read rlib metadata: '{}'", - filename.display())), + return match ArchiveMetadata::new(archive).map(|ar| MetadataBlob::Archive(ar)) { + None => Err(format!("failed to read rlib metadata: '{}'", filename.display())), Some(blob) => { verify_decompressed_encoding_version(&blob, filename)?; Ok(blob) @@ -818,46 +843,41 @@ fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Pat let buf = common::path2cstr(filename); let mb = llvm::LLVMRustCreateMemoryBufferWithContentsOfFile(buf.as_ptr()); if mb as isize == 0 { - return Err(format!("error reading library: '{}'", - filename.display())) + return Err(format!("error reading library: '{}'", filename.display())); } let of = match ObjectFile::new(mb) { Some(of) => of, _ => { - return Err((format!("provided path not an object file: '{}'", - filename.display()))) + return Err((format!("provided path not an object file: '{}'", filename.display()))) } }; let si = mk_section_iter(of.llof); while llvm::LLVMIsSectionIteratorAtEnd(of.llof, si.llsi) == False { let mut name_buf = ptr::null(); let name_len = llvm::LLVMRustGetSectionName(si.llsi, &mut name_buf); - let name = slice::from_raw_parts(name_buf as *const u8, - name_len as usize).to_vec(); + let name = slice::from_raw_parts(name_buf as *const u8, name_len as usize).to_vec(); let name = String::from_utf8(name).unwrap(); debug!("get_metadata_section: name {}", name); if read_meta_section_name(target) == name { let cbuf = llvm::LLVMGetSectionContents(si.llsi); let csz = llvm::LLVMGetSectionSize(si.llsi) as usize; let cvbuf: *const u8 = cbuf as *const u8; - let vlen = metadata_encoding_version.len(); - debug!("checking {} bytes of metadata-version stamp", - vlen); + let vlen = METADATA_HEADER.len(); + debug!("checking {} bytes of metadata-version stamp", vlen); let minsz = cmp::min(vlen, csz); let buf0 = slice::from_raw_parts(cvbuf, minsz); - let version_ok = buf0 == metadata_encoding_version; + let version_ok = buf0 == METADATA_HEADER; if !version_ok { return Err((format!("incompatible metadata version found: '{}'", filename.display()))); } let cvbuf1 = cvbuf.offset(vlen as isize); - debug!("inflating {} bytes of compressed metadata", - csz - vlen); + debug!("inflating {} bytes of compressed metadata", csz - vlen); let bytes = slice::from_raw_parts(cvbuf1, csz - vlen); match flate::inflate_bytes(bytes) { Ok(inflated) => { - let blob 
= MetadataVec(inflated); + let blob = MetadataBlob::Inflated(inflated); verify_decompressed_encoding_version(&blob, filename)?; return Ok(blob); } @@ -897,14 +917,15 @@ pub fn read_meta_section_name(_target: &Target) -> &'static str { } // A diagnostic function for dumping crate metadata to an output stream -pub fn list_file_metadata(target: &Target, path: &Path, - out: &mut io::Write) -> io::Result<()> { +pub fn list_file_metadata(target: &Target, path: &Path, out: &mut io::Write) -> io::Result<()> { let filename = path.file_name().unwrap().to_str().unwrap(); - let flavor = if filename.ends_with(".rlib") { CrateFlavor::Rlib } else { CrateFlavor::Dylib }; + let flavor = if filename.ends_with(".rlib") { + CrateFlavor::Rlib + } else { + CrateFlavor::Dylib + }; match get_metadata_section(target, flavor, path) { - Ok(bytes) => decoder::list_crate_metadata(bytes.as_slice(), out), - Err(msg) => { - write!(out, "{}\n", msg) - } + Ok(metadata) => metadata.list_crate_metadata(out), + Err(msg) => write!(out, "{}\n", msg), } } diff --git a/src/librustc_metadata/macro_import.rs b/src/librustc_metadata/macro_import.rs deleted file mode 100644 index e41f076d64a80..0000000000000 --- a/src/librustc_metadata/macro_import.rs +++ /dev/null @@ -1,236 +0,0 @@ -// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Used by `rustc` when loading a crate with exported macros. - -use std::collections::HashSet; -use std::env; -use std::mem; - -use creader::{CrateReader, Macros}; -use cstore::CStore; - -use rustc::hir::def_id::DefIndex; -use rustc::middle; -use rustc::session::Session; -use rustc::util::nodemap::FnvHashMap; -use rustc_back::dynamic_lib::DynamicLibrary; -use rustc_macro::TokenStream; -use rustc_macro::__internal::Registry; -use syntax::ast; -use syntax::attr; -use syntax::ext::base::LoadedMacro; -use syntax::parse::token; -use syntax_ext::deriving::custom::CustomDerive; -use syntax_pos::Span; - -pub struct MacroLoader<'a> { - sess: &'a Session, - reader: CrateReader<'a>, -} - -impl<'a> MacroLoader<'a> { - pub fn new(sess: &'a Session, - cstore: &'a CStore, - crate_name: &str, - crate_config: ast::CrateConfig) - -> MacroLoader<'a> { - MacroLoader { - sess: sess, - reader: CrateReader::new(sess, cstore, crate_name, crate_config), - } - } -} - -pub fn call_bad_macro_reexport(a: &Session, b: Span) { - span_err!(a, b, E0467, "bad macro reexport"); -} - -pub type MacroSelection = FnvHashMap; - -impl<'a> middle::cstore::MacroLoader for MacroLoader<'a> { - fn load_crate(&mut self, - extern_crate: &ast::Item, - allows_macros: bool) -> Vec { - // Parse the attributes relating to macros. - let mut import = Some(FnvHashMap()); // None => load all - let mut reexport = FnvHashMap(); - - for attr in &extern_crate.attrs { - let mut used = true; - match &attr.name()[..] 
{ - "macro_use" => { - let names = attr.meta_item_list(); - if names.is_none() { - // no names => load all - import = None; - } - if let (Some(sel), Some(names)) = (import.as_mut(), names) { - for attr in names { - if let Some(word) = attr.word() { - sel.insert(word.name().clone(), attr.span()); - } else { - span_err!(self.sess, attr.span(), E0466, "bad macro import"); - } - } - } - } - "macro_reexport" => { - let names = match attr.meta_item_list() { - Some(names) => names, - None => { - call_bad_macro_reexport(self.sess, attr.span); - continue; - } - }; - - for attr in names { - if let Some(word) = attr.word() { - reexport.insert(word.name().clone(), attr.span()); - } else { - call_bad_macro_reexport(self.sess, attr.span()); - } - } - } - _ => used = false, - } - if used { - attr::mark_used(attr); - } - } - - self.load_macros(extern_crate, allows_macros, import, reexport) - } -} - -impl<'a> MacroLoader<'a> { - fn load_macros<'b>(&mut self, - vi: &ast::Item, - allows_macros: bool, - import: Option, - reexport: MacroSelection) - -> Vec { - if let Some(sel) = import.as_ref() { - if sel.is_empty() && reexport.is_empty() { - return Vec::new(); - } - } - - if !allows_macros { - span_err!(self.sess, vi.span, E0468, - "an `extern crate` loading macros must be at the crate root"); - return Vec::new(); - } - - let mut macros = self.reader.read_macros(vi); - let mut ret = Vec::new(); - let mut seen = HashSet::new(); - - for mut def in macros.macro_rules.drain(..) { - let name = def.ident.name.as_str(); - - def.use_locally = match import.as_ref() { - None => true, - Some(sel) => sel.contains_key(&name), - }; - def.export = reexport.contains_key(&name); - def.allow_internal_unstable = attr::contains_name(&def.attrs, - "allow_internal_unstable"); - debug!("load_macros: loaded: {:?}", def); - ret.push(LoadedMacro::Def(def)); - seen.insert(name); - } - - if let Some(index) = macros.custom_derive_registrar { - // custom derive crates currently should not have any macro_rules! - // exported macros, enforced elsewhere - assert_eq!(ret.len(), 0); - - if import.is_some() { - self.sess.span_err(vi.span, "`rustc-macro` crates cannot be \ - selectively imported from, must \ - use `#[macro_use]`"); - } - - if reexport.len() > 0 { - self.sess.span_err(vi.span, "`rustc-macro` crates cannot be \ - reexported from"); - } - - self.load_derive_macros(vi.span, ¯os, index, &mut ret); - } - - if let Some(sel) = import.as_ref() { - for (name, span) in sel { - if !seen.contains(&name) { - span_err!(self.sess, *span, E0469, - "imported macro not found"); - } - } - } - - for (name, span) in &reexport { - if !seen.contains(&name) { - span_err!(self.sess, *span, E0470, - "reexported macro not found"); - } - } - - return ret - } - - /// Load the custom derive macros into the list of macros we're loading. - /// - /// Note that this is intentionally similar to how we load plugins today, - /// but also intentionally separate. Plugins are likely always going to be - /// implemented as dynamic libraries, but we have a possible future where - /// custom derive (and other macro-1.1 style features) are implemented via - /// executables and custom IPC. - fn load_derive_macros(&mut self, - span: Span, - macros: &Macros, - index: DefIndex, - ret: &mut Vec) { - // Make sure the path contains a / or the linker will search for it. 
- let path = macros.dylib.as_ref().unwrap(); - let path = env::current_dir().unwrap().join(path); - let lib = match DynamicLibrary::open(Some(&path)) { - Ok(lib) => lib, - Err(err) => self.sess.span_fatal(span, &err), - }; - - let sym = self.sess.generate_derive_registrar_symbol(¯os.svh, index); - let registrar = unsafe { - let sym = match lib.symbol(&sym) { - Ok(f) => f, - Err(err) => self.sess.span_fatal(span, &err), - }; - mem::transmute::<*mut u8, fn(&mut Registry)>(sym) - }; - - struct MyRegistrar<'a>(&'a mut Vec); - - impl<'a> Registry for MyRegistrar<'a> { - fn register_custom_derive(&mut self, - trait_name: &str, - expand: fn(TokenStream) -> TokenStream) { - let derive = Box::new(CustomDerive::new(expand)); - self.0.push(LoadedMacro::CustomDerive(trait_name.to_string(), - derive)); - } - } - - registrar(&mut MyRegistrar(ret)); - - // Intentionally leak the dynamic library. We can't ever unload it - // since the library can make things that will live arbitrarily long. - mem::forget(lib); - } -} diff --git a/src/librustc_metadata/macros.rs b/src/librustc_metadata/macros.rs deleted file mode 100644 index ed764ebd9f95d..0000000000000 --- a/src/librustc_metadata/macros.rs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -macro_rules! enum_from_u32 { - ($(#[$attr:meta])* pub enum $name:ident { - $($variant:ident = $e:expr,)* - }) => { - $(#[$attr])* - pub enum $name { - $($variant = $e),* - } - - impl $name { - pub fn from_u32(u: u32) -> Option<$name> { - $(if u == $name::$variant as u32 { - return Some($name::$variant) - })* - None - } - } - }; - ($(#[$attr:meta])* pub enum $name:ident { - $($variant:ident,)* - }) => { - $(#[$attr])* - pub enum $name { - $($variant,)* - } - - impl $name { - pub fn from_u32(u: u32) -> Option<$name> { - $(if u == $name::$variant as u32 { - return Some($name::$variant) - })* - None - } - } - } -} diff --git a/src/librustc_metadata/schema.rs b/src/librustc_metadata/schema.rs new file mode 100644 index 0000000000000..3d1bd77d8bc28 --- /dev/null +++ b/src/librustc_metadata/schema.rs @@ -0,0 +1,345 @@ +// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use astencode; +use index; + +use rustc::hir; +use rustc::hir::def::{self, CtorKind}; +use rustc::hir::def_id::{DefIndex, DefId}; +use rustc::middle::cstore::{LinkagePreference, NativeLibraryKind}; +use rustc::middle::lang_items; +use rustc::mir; +use rustc::ty::{self, Ty}; +use rustc_back::PanicStrategy; + +use rustc_serialize as serialize; +use syntax::{ast, attr}; +use syntax_pos::{self, Span}; + +use std::marker::PhantomData; + +pub fn rustc_version() -> String { + format!("rustc {}", + option_env!("CFG_VERSION").unwrap_or("unknown version")) +} + +/// Metadata encoding version. +/// NB: increment this if you change the format of metadata such that +/// the rustc version can't be found to compare with `RUSTC_VERSION`. 
+pub const METADATA_VERSION: u8 = 3; + +/// Metadata header which includes `METADATA_VERSION`. +/// To get older versions of rustc to ignore this metadata, +/// there are 4 zero bytes at the start, which are treated +/// as a length of 0 by old compilers. +/// +/// This header is followed by the position of the `CrateRoot`. +pub const METADATA_HEADER: &'static [u8; 12] = + &[0, 0, 0, 0, b'r', b'u', b's', b't', 0, 0, 0, METADATA_VERSION]; + +/// The shorthand encoding uses an enum's variant index `usize` +/// and is offset by this value so it never matches a real variant. +/// This offset is also chosen so that the first byte is never < 0x80. +pub const SHORTHAND_OFFSET: usize = 0x80; + +/// A value of type T referred to by its absolute position +/// in the metadata, and which can be decoded lazily. +/// +/// Metadata is effective a tree, encoded in post-order, +/// and with the root's position written next to the header. +/// That means every single `Lazy` points to some previous +/// location in the metadata and is part of a larger node. +/// +/// The first `Lazy` in a node is encoded as the backwards +/// distance from the position where the containing node +/// starts and where the `Lazy` points to, while the rest +/// use the forward distance from the previous `Lazy`. +/// Distances start at 1, as 0-byte nodes are invalid. +/// Also invalid are nodes being referred in a different +/// order than they were encoded in. +#[must_use] +pub struct Lazy { + pub position: usize, + _marker: PhantomData, +} + +impl Lazy { + pub fn with_position(position: usize) -> Lazy { + Lazy { + position: position, + _marker: PhantomData, + } + } + + /// Returns the minimum encoded size of a value of type `T`. + // FIXME(eddyb) Give better estimates for certain types. + pub fn min_size() -> usize { + 1 + } +} + +impl Copy for Lazy {} +impl Clone for Lazy { + fn clone(&self) -> Self { + *self + } +} + +impl serialize::UseSpecializedEncodable for Lazy {} +impl serialize::UseSpecializedDecodable for Lazy {} + +/// A sequence of type T referred to by its absolute position +/// in the metadata and length, and which can be decoded lazily. +/// The sequence is a single node for the purposes of `Lazy`. +/// +/// Unlike `Lazy>`, the length is encoded next to the +/// position, not at the position, which means that the length +/// doesn't need to be known before encoding all the elements. +/// +/// If the length is 0, no position is encoded, but otherwise, +/// the encoding is that of `Lazy`, with the distinction that +/// the minimal distance the length of the sequence, i.e. +/// it's assumed there's no 0-byte element in the sequence. +#[must_use] +pub struct LazySeq { + pub len: usize, + pub position: usize, + _marker: PhantomData, +} + +impl LazySeq { + pub fn empty() -> LazySeq { + LazySeq::with_position_and_length(0, 0) + } + + pub fn with_position_and_length(position: usize, len: usize) -> LazySeq { + LazySeq { + len: len, + position: position, + _marker: PhantomData, + } + } + + /// Returns the minimum encoded size of `length` values of type `T`. + pub fn min_size(length: usize) -> usize { + length + } +} + +impl Copy for LazySeq {} +impl Clone for LazySeq { + fn clone(&self) -> Self { + *self + } +} + +impl serialize::UseSpecializedEncodable for LazySeq {} +impl serialize::UseSpecializedDecodable for LazySeq {} + +/// Encoding / decoding state for `Lazy` and `LazySeq`. +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum LazyState { + /// Outside of a metadata node. 
+ NoNode, + + /// Inside a metadata node, and before any `Lazy` or `LazySeq`. + /// The position is that of the node itself. + NodeStart(usize), + + /// Inside a metadata node, with a previous `Lazy` or `LazySeq`. + /// The position is a conservative estimate of where that + /// previous `Lazy` / `LazySeq` would end (see their comments). + Previous(usize), +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct CrateRoot { + pub rustc_version: String, + pub name: String, + pub triple: String, + pub hash: hir::svh::Svh, + pub disambiguator: String, + pub panic_strategy: PanicStrategy, + pub plugin_registrar_fn: Option, + pub macro_derive_registrar: Option, + + pub crate_deps: LazySeq, + pub dylib_dependency_formats: LazySeq>, + pub lang_items: LazySeq<(DefIndex, usize)>, + pub lang_items_missing: LazySeq, + pub native_libraries: LazySeq<(NativeLibraryKind, String)>, + pub codemap: LazySeq, + pub macro_defs: LazySeq, + pub impls: LazySeq, + pub reachable_ids: LazySeq, + pub index: LazySeq, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct CrateDep { + pub name: ast::Name, + pub hash: hir::svh::Svh, + pub explicitly_linked: bool, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct TraitImpls { + pub trait_id: (u32, DefIndex), + pub impls: LazySeq, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct MacroDef { + pub name: ast::Name, + pub attrs: Vec, + pub span: Span, + pub body: String, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct Entry<'tcx> { + pub kind: EntryKind<'tcx>, + pub visibility: ty::Visibility, + pub def_key: Lazy, + pub attributes: LazySeq, + pub children: LazySeq, + pub stability: Option>, + pub deprecation: Option>, + + pub ty: Option>>, + pub inherent_impls: LazySeq, + pub variances: LazySeq, + pub generics: Option>>, + pub predicates: Option>>, + + pub ast: Option>>, + pub mir: Option>>, +} + +#[derive(Copy, Clone, RustcEncodable, RustcDecodable)] +pub enum EntryKind<'tcx> { + Const, + ImmStatic, + MutStatic, + ForeignImmStatic, + ForeignMutStatic, + ForeignMod, + Type, + Enum, + Field, + Variant(Lazy), + Struct(Lazy), + Union(Lazy), + Fn(Lazy), + ForeignFn(Lazy), + Mod(Lazy), + Closure(Lazy>), + Trait(Lazy>), + Impl(Lazy>), + DefaultImpl(Lazy>), + Method(Lazy>), + AssociatedType(AssociatedContainer), + AssociatedConst(AssociatedContainer), +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct ModData { + pub reexports: LazySeq, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct FnData { + pub constness: hir::Constness, + pub arg_names: LazySeq, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct VariantData { + pub ctor_kind: CtorKind, + pub disr: u64, + + /// If this is a struct's only variant, this + /// is the index of the "struct ctor" item. + pub struct_ctor: Option, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct TraitData<'tcx> { + pub unsafety: hir::Unsafety, + pub paren_sugar: bool, + pub has_default_impl: bool, + pub trait_ref: Lazy>, + pub super_predicates: Lazy>, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct ImplData<'tcx> { + pub polarity: hir::ImplPolarity, + pub parent_impl: Option, + pub coerce_unsized_kind: Option, + pub trait_ref: Option>>, +} + +/// Describes whether the container of an associated item +/// is a trait or an impl and whether, in a trait, it has +/// a default, or an in impl, whether it's marked "default". 
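+// Aside (sketch only): a literal reading of the Lazy/LazySeq distance scheme
+// documented above, ignoring the per-type `min_size` bookkeeping the real
+// encoder and decoder perform. The match arms follow the LazyState variants
+// defined earlier in this file.
+//
+//     fn lazy_distance_sketch(state: &mut LazyState, lazy_position: usize) -> usize {
+//         let distance = match *state {
+//             LazyState::NoNode => panic!("Lazy encoded outside of a metadata node"),
+//             // First Lazy in the node: backwards distance from the node's start.
+//             LazyState::NodeStart(node_start) => node_start - lazy_position,
+//             // Later Lazys: forward distance from the previous Lazy's estimated end.
+//             LazyState::Previous(previous) => lazy_position - previous,
+//         };
+//         // Conservatively assume the value takes at least one byte (Lazy::min_size).
+//         *state = LazyState::Previous(lazy_position + 1);
+//         distance
+//     }
+//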
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable)] +pub enum AssociatedContainer { + TraitRequired, + TraitWithDefault, + ImplDefault, + ImplFinal, +} + +impl AssociatedContainer { + pub fn with_def_id(&self, def_id: DefId) -> ty::ImplOrTraitItemContainer { + match *self { + AssociatedContainer::TraitRequired | + AssociatedContainer::TraitWithDefault => ty::TraitContainer(def_id), + + AssociatedContainer::ImplDefault | + AssociatedContainer::ImplFinal => ty::ImplContainer(def_id), + } + } + + pub fn has_body(&self) -> bool { + match *self { + AssociatedContainer::TraitRequired => false, + + AssociatedContainer::TraitWithDefault | + AssociatedContainer::ImplDefault | + AssociatedContainer::ImplFinal => true, + } + } + + pub fn defaultness(&self) -> hir::Defaultness { + match *self { + AssociatedContainer::TraitRequired | + AssociatedContainer::TraitWithDefault | + AssociatedContainer::ImplDefault => hir::Defaultness::Default, + + AssociatedContainer::ImplFinal => hir::Defaultness::Final, + } + } +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct MethodData<'tcx> { + pub fn_data: FnData, + pub container: AssociatedContainer, + pub explicit_self: Lazy>, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct ClosureData<'tcx> { + pub kind: ty::ClosureKind, + pub ty: Lazy>, +} diff --git a/src/librustc_metadata/tls_context.rs b/src/librustc_metadata/tls_context.rs deleted file mode 100644 index 6e78cbcd28e73..0000000000000 --- a/src/librustc_metadata/tls_context.rs +++ /dev/null @@ -1,102 +0,0 @@ -// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// This module provides implementations for the thread-local encoding and -// decoding context traits in rustc::middle::cstore::tls. 
- -use rbml::opaque::Encoder as OpaqueEncoder; -use rbml::opaque::Decoder as OpaqueDecoder; -use rustc::middle::cstore::tls; -use rustc::hir::def_id::DefId; -use rustc::ty::subst::Substs; -use rustc::ty::{self, TyCtxt}; - -use decoder::{self, Cmd}; -use encoder; -use tydecode::TyDecoder; -use tyencode; - -impl<'a, 'tcx: 'a> tls::EncodingContext<'tcx> for encoder::EncodeContext<'a, 'tcx> { - - fn tcx<'s>(&'s self) -> TyCtxt<'s, 'tcx, 'tcx> { - self.tcx - } - - fn encode_ty(&self, encoder: &mut OpaqueEncoder, t: ty::Ty<'tcx>) { - tyencode::enc_ty(encoder.cursor, &self.ty_str_ctxt(), t); - } - - fn encode_substs(&self, encoder: &mut OpaqueEncoder, substs: &Substs<'tcx>) { - tyencode::enc_substs(encoder.cursor, &self.ty_str_ctxt(), substs); - } -} - -pub struct DecodingContext<'a, 'tcx: 'a> { - pub crate_metadata: Cmd<'a>, - pub tcx: TyCtxt<'a, 'tcx, 'tcx>, -} - -impl<'a, 'tcx: 'a> tls::DecodingContext<'tcx> for DecodingContext<'a, 'tcx> { - - fn tcx<'s>(&'s self) -> TyCtxt<'s, 'tcx, 'tcx> { - self.tcx - } - - fn decode_ty(&self, decoder: &mut OpaqueDecoder) -> ty::Ty<'tcx> { - let def_id_convert = &mut |did| { - decoder::translate_def_id(self.crate_metadata, did) - }; - - let starting_position = decoder.position(); - - let mut ty_decoder = TyDecoder::new( - self.crate_metadata.data.as_slice(), - self.crate_metadata.cnum, - starting_position, - self.tcx, - def_id_convert); - - let ty = ty_decoder.parse_ty(); - - let end_position = ty_decoder.position(); - - // We can just reuse the tydecode implementation for parsing types, but - // we have to make sure to leave the rbml reader at the position just - // after the type. - decoder.advance(end_position - starting_position); - ty - } - - fn decode_substs(&self, decoder: &mut OpaqueDecoder) -> &'tcx Substs<'tcx> { - let def_id_convert = &mut |did| { - decoder::translate_def_id(self.crate_metadata, did) - }; - - let starting_position = decoder.position(); - - let mut ty_decoder = TyDecoder::new( - self.crate_metadata.data.as_slice(), - self.crate_metadata.cnum, - starting_position, - self.tcx, - def_id_convert); - - let substs = ty_decoder.parse_substs(); - - let end_position = ty_decoder.position(); - - decoder.advance(end_position - starting_position); - substs - } - - fn translate_def_id(&self, def_id: DefId) -> DefId { - decoder::translate_def_id(self.crate_metadata, def_id) - } -} diff --git a/src/librustc_metadata/tydecode.rs b/src/librustc_metadata/tydecode.rs deleted file mode 100644 index bcaf1640bc41b..0000000000000 --- a/src/librustc_metadata/tydecode.rs +++ /dev/null @@ -1,750 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - - -// Type decoding - -// tjc note: Would be great to have a `match check` macro equivalent -// for some of these - -#![allow(non_camel_case_types)] - -use rustc::hir; - -use rustc::hir::def_id::{DefId, DefIndex}; -use middle::region; -use rustc::ty::subst::{Kind, Substs}; -use rustc::ty::{self, ToPredicate, Ty, TyCtxt, TypeFoldable}; - -use rbml; -use rbml::leb128; -use std::str; -use syntax::abi; -use syntax::ast; -use syntax::parse::token; - -// Compact string representation for Ty values. API TyStr & -// parse_from_str. Extra parameters are for converting to/from def_ids in the -// data buffer. 
Whatever format you choose should not contain pipe characters. - -pub type DefIdConvert<'a> = &'a mut FnMut(DefId) -> DefId; - -pub struct TyDecoder<'a, 'tcx: 'a> { - data: &'a [u8], - krate: ast::CrateNum, - pos: usize, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - conv_def_id: DefIdConvert<'a>, -} - -impl<'a,'tcx> TyDecoder<'a,'tcx> { - pub fn with_doc(tcx: TyCtxt<'a, 'tcx, 'tcx>, - crate_num: ast::CrateNum, - doc: rbml::Doc<'a>, - conv: DefIdConvert<'a>) - -> TyDecoder<'a,'tcx> { - TyDecoder::new(doc.data, crate_num, doc.start, tcx, conv) - } - - pub fn new(data: &'a [u8], - crate_num: ast::CrateNum, - pos: usize, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - conv: DefIdConvert<'a>) - -> TyDecoder<'a, 'tcx> { - TyDecoder { - data: data, - krate: crate_num, - pos: pos, - tcx: tcx, - conv_def_id: conv, - } - } - - pub fn position(&self) -> usize { - self.pos - } - - fn peek(&self) -> char { - self.data[self.pos] as char - } - - fn next(&mut self) -> char { - let ch = self.data[self.pos] as char; - self.pos = self.pos + 1; - return ch; - } - - fn next_byte(&mut self) -> u8 { - let b = self.data[self.pos]; - self.pos = self.pos + 1; - return b; - } - - fn scan(&mut self, mut is_last: F) -> &'a [u8] - where F: FnMut(char) -> bool, - { - let start_pos = self.pos; - debug!("scan: '{}' (start)", self.data[self.pos] as char); - while !is_last(self.data[self.pos] as char) { - self.pos += 1; - debug!("scan: '{}'", self.data[self.pos] as char); - } - let end_pos = self.pos; - self.pos += 1; - return &self.data[start_pos..end_pos]; - } - - fn parse_vuint(&mut self) -> usize { - let (value, bytes_read) = leb128::read_unsigned_leb128(self.data, - self.pos); - self.pos += bytes_read; - value as usize - } - - fn parse_name(&mut self, last: char) -> ast::Name { - fn is_last(b: char, c: char) -> bool { return c == b; } - let bytes = self.scan(|a| is_last(last, a)); - token::intern(str::from_utf8(bytes).unwrap()) - } - - fn parse_size(&mut self) -> Option { - assert_eq!(self.next(), '/'); - - if self.peek() == '|' { - assert_eq!(self.next(), '|'); - None - } else { - let n = self.parse_uint(); - assert_eq!(self.next(), '|'); - Some(n) - } - } - - pub fn parse_substs(&mut self) -> &'tcx Substs<'tcx> { - let mut params = vec![]; - assert_eq!(self.next(), '['); - while self.peek() != ']' { - let k = match self.next() { - 'r' => Kind::from(self.parse_region()), - 't' => Kind::from(self.parse_ty()), - _ => bug!() - }; - params.push(k); - } - assert_eq!(self.next(), ']'); - - Substs::new(self.tcx, params) - } - - pub fn parse_generics(&mut self) -> &'tcx ty::Generics<'tcx> { - let parent = self.parse_opt(|this| this.parse_def()); - let parent_regions = self.parse_u32(); - assert_eq!(self.next(), '|'); - let parent_types = self.parse_u32(); - - let mut regions = vec![]; - let mut types = vec![]; - assert_eq!(self.next(), '['); - while self.peek() != '|' { - regions.push(self.parse_region_param_def()); - } - assert_eq!(self.next(), '|'); - while self.peek() != ']' { - types.push(self.parse_type_param_def()); - } - assert_eq!(self.next(), ']'); - - self.tcx.alloc_generics(ty::Generics { - parent: parent, - parent_regions: parent_regions, - parent_types: parent_types, - regions: regions, - types: types, - has_self: self.next() == 'S' - }) - } - - fn parse_bound_region(&mut self) -> ty::BoundRegion { - match self.next() { - 'a' => { - let id = self.parse_u32(); - assert_eq!(self.next(), '|'); - ty::BrAnon(id) - } - '[' => { - let def = self.parse_def(); - let name = token::intern(&self.parse_str('|')); - let issue32330 = match self.next() { - 
'n' => { - assert_eq!(self.next(), ']'); - ty::Issue32330::WontChange - } - 'y' => { - ty::Issue32330::WillChange { - fn_def_id: self.parse_def(), - region_name: token::intern(&self.parse_str(']')), - } - } - c => panic!("expected n or y not {}", c) - }; - ty::BrNamed(def, name, issue32330) - } - 'f' => { - let id = self.parse_u32(); - assert_eq!(self.next(), '|'); - ty::BrFresh(id) - } - 'e' => ty::BrEnv, - _ => bug!("parse_bound_region: bad input") - } - } - - pub fn parse_region(&mut self) -> &'tcx ty::Region { - self.tcx.mk_region(match self.next() { - 'b' => { - assert_eq!(self.next(), '['); - let id = ty::DebruijnIndex::new(self.parse_u32()); - assert_eq!(self.next(), '|'); - let br = self.parse_bound_region(); - assert_eq!(self.next(), ']'); - ty::ReLateBound(id, br) - } - 'B' => { - assert_eq!(self.next(), '['); - let index = self.parse_u32(); - assert_eq!(self.next(), '|'); - let name = token::intern(&self.parse_str(']')); - ty::ReEarlyBound(ty::EarlyBoundRegion { - index: index, - name: name - }) - } - 'f' => { - assert_eq!(self.next(), '['); - let scope = self.parse_scope(); - assert_eq!(self.next(), '|'); - let br = self.parse_bound_region(); - assert_eq!(self.next(), ']'); - ty::ReFree(ty::FreeRegion { scope: scope, - bound_region: br}) - } - 's' => { - let scope = self.parse_scope(); - assert_eq!(self.next(), '|'); - ty::ReScope(scope) - } - 't' => ty::ReStatic, - 'e' => ty::ReEmpty, - 'E' => ty::ReErased, - _ => bug!("parse_region: bad input") - }) - } - - fn parse_scope(&mut self) -> region::CodeExtent { - self.tcx.region_maps.bogus_code_extent(match self.next() { - // This creates scopes with the wrong NodeId. This isn't - // actually a problem because scopes only exist *within* - // functions, and functions aren't loaded until trans which - // doesn't care about regions. - // - // May still be worth fixing though. - 'C' => { - assert_eq!(self.next(), '['); - let fn_id = self.parse_uint() as ast::NodeId; - assert_eq!(self.next(), '|'); - let body_id = self.parse_uint() as ast::NodeId; - assert_eq!(self.next(), ']'); - region::CodeExtentData::CallSiteScope { - fn_id: fn_id, body_id: body_id - } - } - // This creates scopes with the wrong NodeId. (See note above.) 
- 'P' => { - assert_eq!(self.next(), '['); - let fn_id = self.parse_uint() as ast::NodeId; - assert_eq!(self.next(), '|'); - let body_id = self.parse_uint() as ast::NodeId; - assert_eq!(self.next(), ']'); - region::CodeExtentData::ParameterScope { - fn_id: fn_id, body_id: body_id - } - } - 'M' => { - let node_id = self.parse_uint() as ast::NodeId; - region::CodeExtentData::Misc(node_id) - } - 'D' => { - let node_id = self.parse_uint() as ast::NodeId; - region::CodeExtentData::DestructionScope(node_id) - } - 'B' => { - assert_eq!(self.next(), '['); - let node_id = self.parse_uint() as ast::NodeId; - assert_eq!(self.next(), '|'); - let first_stmt_index = self.parse_u32(); - assert_eq!(self.next(), ']'); - let block_remainder = region::BlockRemainder { - block: node_id, first_statement_index: first_stmt_index, - }; - region::CodeExtentData::Remainder(block_remainder) - } - _ => bug!("parse_scope: bad input") - }) - } - - fn parse_opt(&mut self, f: F) -> Option - where F: FnOnce(&mut TyDecoder<'a, 'tcx>) -> T, - { - match self.next() { - 'n' => None, - 's' => Some(f(self)), - _ => bug!("parse_opt: bad input") - } - } - - fn parse_str(&mut self, term: char) -> String { - let mut result = String::new(); - while self.peek() != term { - unsafe { - result.as_mut_vec().extend_from_slice(&[self.next_byte()]) - } - } - self.next(); - result - } - - pub fn parse_trait_ref(&mut self) -> ty::TraitRef<'tcx> { - ty::TraitRef { - def_id: self.parse_def(), - substs: self.parse_substs() - } - } - - pub fn parse_existential_trait_ref(&mut self) -> ty::ExistentialTraitRef<'tcx> { - ty::ExistentialTraitRef { - def_id: self.parse_def(), - substs: self.parse_substs() - } - } - - pub fn parse_ty(&mut self) -> Ty<'tcx> { - let tcx = self.tcx; - match self.next() { - 'b' => return tcx.types.bool, - '!' => return tcx.types.never, - 'i' => { /* eat the s of is */ self.next(); return tcx.types.isize }, - 'u' => { /* eat the s of us */ self.next(); return tcx.types.usize }, - 'M' => { - match self.next() { - 'b' => return tcx.types.u8, - 'w' => return tcx.types.u16, - 'l' => return tcx.types.u32, - 'd' => return tcx.types.u64, - 'B' => return tcx.types.i8, - 'W' => return tcx.types.i16, - 'L' => return tcx.types.i32, - 'D' => return tcx.types.i64, - 'f' => return tcx.types.f32, - 'F' => return tcx.types.f64, - _ => bug!("parse_ty: bad numeric type") - } - } - 'c' => return tcx.types.char, - 'x' => { - assert_eq!(self.next(), '['); - let trait_ref = ty::Binder(self.parse_existential_trait_ref()); - let builtin_bounds = self.parse_builtin_bounds(); - let region_bound = self.parse_region(); - let mut projection_bounds = Vec::new(); - - loop { - match self.next() { - 'P' => { - let bound = self.parse_existential_projection(); - projection_bounds.push(ty::Binder(bound)); - } - '.' 
=> { break; } - c => { - bug!("parse_bounds: bad bounds ('{}')", c) - } - } - } - assert_eq!(self.next(), ']'); - return tcx.mk_trait(ty::TraitObject { - principal: trait_ref, - region_bound: region_bound, - builtin_bounds: builtin_bounds, - projection_bounds: projection_bounds - }); - } - 'p' => { - assert_eq!(self.next(), '['); - let index = self.parse_u32(); - assert_eq!(self.next(), '|'); - let name = token::intern(&self.parse_str(']')); - return tcx.mk_param(index, name); - } - '~' => return tcx.mk_box(self.parse_ty()), - '*' => return tcx.mk_ptr(self.parse_mt()), - '&' => { - return tcx.mk_ref(self.parse_region(), self.parse_mt()); - } - 'V' => { - let t = self.parse_ty(); - return match self.parse_size() { - Some(n) => tcx.mk_array(t, n), - None => tcx.mk_slice(t) - }; - } - 'v' => { - return tcx.mk_str(); - } - 'T' => { - assert_eq!(self.next(), '['); - let mut params = Vec::new(); - while self.peek() != ']' { params.push(self.parse_ty()); } - self.pos = self.pos + 1; - return tcx.mk_tup(params); - } - 'F' => { - let def_id = self.parse_def(); - let substs = self.parse_substs(); - return tcx.mk_fn_def(def_id, substs, self.parse_bare_fn_ty()); - } - 'G' => { - return tcx.mk_fn_ptr(self.parse_bare_fn_ty()); - } - '#' => { - // This is a hacky little caching scheme. The idea is that if we encode - // the same type twice, the second (and third, and fourth...) time we will - // just write `#123`, where `123` is the offset in the metadata of the - // first appearance. Now when we are *decoding*, if we see a `#123`, we - // can first check a cache (`tcx.rcache`) for that offset. If we find something, - // we return it (modulo closure types, see below). But if not, then we - // jump to offset 123 and read the type from there. - - let pos = self.parse_vuint(); - let key = ty::CReaderCacheKey { cnum: self.krate, pos: pos }; - if let Some(tt) = tcx.rcache.borrow().get(&key).cloned() { - // If there is a closure buried in the type some where, then we - // need to re-convert any def ids (see case 'k', below). That means - // we can't reuse the cached version. - if !tt.has_closure_types() { - return tt; - } - } - - let mut substate = TyDecoder::new(self.data, - self.krate, - pos, - self.tcx, - self.conv_def_id); - let tt = substate.parse_ty(); - tcx.rcache.borrow_mut().insert(key, tt); - return tt; - } - '\"' => { - let _ = self.parse_def(); - let inner = self.parse_ty(); - inner - } - 'a' => { - assert_eq!(self.next(), '['); - let did = self.parse_def(); - let substs = self.parse_substs(); - assert_eq!(self.next(), ']'); - let def = self.tcx.lookup_adt_def(did); - return self.tcx.mk_adt(def, substs); - } - 'k' => { - assert_eq!(self.next(), '['); - let did = self.parse_def(); - let substs = self.parse_substs(); - let mut tys = vec![]; - while self.peek() != '.' 
{ - tys.push(self.parse_ty()); - } - assert_eq!(self.next(), '.'); - assert_eq!(self.next(), ']'); - return self.tcx.mk_closure(did, substs, tys); - } - 'P' => { - assert_eq!(self.next(), '['); - let trait_ref = self.parse_trait_ref(); - let name = token::intern(&self.parse_str(']')); - return tcx.mk_projection(trait_ref, name); - } - 'A' => { - assert_eq!(self.next(), '['); - let def_id = self.parse_def(); - let substs = self.parse_substs(); - assert_eq!(self.next(), ']'); - return self.tcx.mk_anon(def_id, substs); - } - 'e' => { - return tcx.types.err; - } - c => { bug!("unexpected char in type string: {}", c);} - } - } - - fn parse_mutability(&mut self) -> hir::Mutability { - match self.peek() { - 'm' => { self.next(); hir::MutMutable } - _ => { hir::MutImmutable } - } - } - - fn parse_mt(&mut self) -> ty::TypeAndMut<'tcx> { - let m = self.parse_mutability(); - ty::TypeAndMut { ty: self.parse_ty(), mutbl: m } - } - - fn parse_def(&mut self) -> DefId { - let def_id = parse_defid(self.scan(|c| c == '|')); - return (self.conv_def_id)(def_id); - } - - fn parse_uint(&mut self) -> usize { - let mut n = 0; - loop { - let cur = self.peek(); - if cur < '0' || cur > '9' { return n; } - self.pos = self.pos + 1; - n *= 10; - n += (cur as usize) - ('0' as usize); - }; - } - - fn parse_u32(&mut self) -> u32 { - let n = self.parse_uint(); - let m = n as u32; - assert_eq!(m as usize, n); - m - } - - fn parse_abi_set(&mut self) -> abi::Abi { - assert_eq!(self.next(), '['); - let bytes = self.scan(|c| c == ']'); - let abi_str = str::from_utf8(bytes).unwrap(); - abi::lookup(&abi_str[..]).expect(abi_str) - } - - pub fn parse_closure_ty(&mut self) -> ty::ClosureTy<'tcx> { - let unsafety = parse_unsafety(self.next()); - let sig = self.parse_sig(); - let abi = self.parse_abi_set(); - ty::ClosureTy { - unsafety: unsafety, - sig: sig, - abi: abi, - } - } - - pub fn parse_bare_fn_ty(&mut self) -> &'tcx ty::BareFnTy<'tcx> { - let unsafety = parse_unsafety(self.next()); - let abi = self.parse_abi_set(); - let sig = self.parse_sig(); - self.tcx.mk_bare_fn(ty::BareFnTy { - unsafety: unsafety, - abi: abi, - sig: sig - }) - } - - fn parse_sig(&mut self) -> ty::PolyFnSig<'tcx> { - assert_eq!(self.next(), '['); - let mut inputs = Vec::new(); - while self.peek() != ']' { - inputs.push(self.parse_ty()); - } - self.pos += 1; // eat the ']' - let variadic = match self.next() { - 'V' => true, - 'N' => false, - r => bug!("bad variadic: {}", r), - }; - let output = self.parse_ty(); - ty::Binder(ty::FnSig {inputs: inputs, - output: output, - variadic: variadic}) - } - - pub fn parse_predicate(&mut self) -> ty::Predicate<'tcx> { - match self.next() { - 't' => ty::Binder(self.parse_trait_ref()).to_predicate(), - 'e' => ty::Binder(ty::EquatePredicate(self.parse_ty(), - self.parse_ty())).to_predicate(), - 'r' => ty::Binder(ty::OutlivesPredicate(self.parse_region(), - self.parse_region())).to_predicate(), - 'o' => ty::Binder(ty::OutlivesPredicate(self.parse_ty(), - self.parse_region())).to_predicate(), - 'p' => ty::Binder(self.parse_projection_predicate()).to_predicate(), - 'w' => ty::Predicate::WellFormed(self.parse_ty()), - 'O' => { - let def_id = self.parse_def(); - assert_eq!(self.next(), '|'); - ty::Predicate::ObjectSafe(def_id) - } - 'c' => { - let def_id = self.parse_def(); - assert_eq!(self.next(), '|'); - let kind = match self.next() { - 'f' => ty::ClosureKind::Fn, - 'm' => ty::ClosureKind::FnMut, - 'o' => ty::ClosureKind::FnOnce, - c => bug!("Encountered invalid character in metadata: {}", c) - }; - assert_eq!(self.next(), 
'|'); - ty::Predicate::ClosureKind(def_id, kind) - } - c => bug!("Encountered invalid character in metadata: {}", c) - } - } - - fn parse_projection_predicate(&mut self) -> ty::ProjectionPredicate<'tcx> { - ty::ProjectionPredicate { - projection_ty: ty::ProjectionTy { - trait_ref: self.parse_trait_ref(), - item_name: token::intern(&self.parse_str('|')), - }, - ty: self.parse_ty(), - } - } - - fn parse_existential_projection(&mut self) -> ty::ExistentialProjection<'tcx> { - ty::ExistentialProjection { - trait_ref: self.parse_existential_trait_ref(), - item_name: token::intern(&self.parse_str('|')), - ty: self.parse_ty(), - } - } - - fn parse_type_param_def(&mut self) -> ty::TypeParameterDef<'tcx> { - let name = self.parse_name(':'); - let def_id = self.parse_def(); - let index = self.parse_u32(); - assert_eq!(self.next(), '|'); - let default_def_id = self.parse_def(); - let default = self.parse_opt(|this| this.parse_ty()); - let object_lifetime_default = self.parse_object_lifetime_default(); - - ty::TypeParameterDef { - name: name, - def_id: def_id, - index: index, - default_def_id: default_def_id, - default: default, - object_lifetime_default: object_lifetime_default, - } - } - - fn parse_region_param_def(&mut self) -> ty::RegionParameterDef<'tcx> { - let name = self.parse_name(':'); - let def_id = self.parse_def(); - let index = self.parse_u32(); - assert_eq!(self.next(), '|'); - let mut bounds = vec![]; - loop { - match self.next() { - 'R' => bounds.push(self.parse_region()), - '.' => { break; } - c => { - bug!("parse_region_param_def: bad bounds ('{}')", c) - } - } - } - ty::RegionParameterDef { - name: name, - def_id: def_id, - index: index, - bounds: bounds, - } - } - - - fn parse_object_lifetime_default(&mut self) -> ty::ObjectLifetimeDefault<'tcx> { - match self.next() { - 'a' => ty::ObjectLifetimeDefault::Ambiguous, - 'b' => ty::ObjectLifetimeDefault::BaseDefault, - 's' => { - let region = self.parse_region(); - ty::ObjectLifetimeDefault::Specific(region) - } - _ => bug!("parse_object_lifetime_default: bad input") - } - } - - fn parse_builtin_bounds(&mut self) -> ty::BuiltinBounds { - let mut builtin_bounds = ty::BuiltinBounds::empty(); - loop { - match self.next() { - 'S' => { - builtin_bounds.insert(ty::BoundSend); - } - 'Z' => { - builtin_bounds.insert(ty::BoundSized); - } - 'P' => { - builtin_bounds.insert(ty::BoundCopy); - } - 'T' => { - builtin_bounds.insert(ty::BoundSync); - } - '.' 
=> { - return builtin_bounds; - } - c => { - bug!("parse_bounds: bad builtin bounds ('{}')", c) - } - } - } - } -} - -// Rust metadata parsing -fn parse_defid(buf: &[u8]) -> DefId { - let mut colon_idx = 0; - let len = buf.len(); - while colon_idx < len && buf[colon_idx] != ':' as u8 { colon_idx += 1; } - if colon_idx == len { - error!("didn't find ':' when parsing def id"); - bug!(); - } - - let crate_part = &buf[0..colon_idx]; - let def_part = &buf[colon_idx + 1..len]; - - let crate_num = match str::from_utf8(crate_part).ok().and_then(|s| { - s.parse::().ok() - }) { - Some(cn) => cn as ast::CrateNum, - None => bug!("internal error: parse_defid: crate number expected, found {:?}", - crate_part) - }; - let def_num = match str::from_utf8(def_part).ok().and_then(|s| { - s.parse::().ok() - }) { - Some(dn) => dn, - None => bug!("internal error: parse_defid: id expected, found {:?}", - def_part) - }; - let index = DefIndex::new(def_num); - DefId { krate: crate_num, index: index } -} - -fn parse_unsafety(c: char) -> hir::Unsafety { - match c { - 'u' => hir::Unsafety::Unsafe, - 'n' => hir::Unsafety::Normal, - _ => bug!("parse_unsafety: bad unsafety {}", c) - } -} diff --git a/src/librustc_metadata/tyencode.rs b/src/librustc_metadata/tyencode.rs deleted file mode 100644 index dbefd3eacc24a..0000000000000 --- a/src/librustc_metadata/tyencode.rs +++ /dev/null @@ -1,519 +0,0 @@ -// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// Type encoding - -#![allow(unused_must_use)] // as with encoding, everything is a no-fail MemWriter -#![allow(non_camel_case_types)] - -use std::cell::RefCell; -use std::io::Cursor; -use std::io::prelude::*; - -use rustc::hir::def_id::DefId; -use middle::region; -use rustc::ty::subst::Substs; -use rustc::ty::{self, Ty, TyCtxt}; -use rustc::util::nodemap::FnvHashMap; - -use rustc::hir; - -use syntax::abi::Abi; -use syntax::ast; -use errors::Handler; - -use rbml::leb128; -use encoder; - -pub struct ctxt<'a, 'tcx: 'a> { - pub diag: &'a Handler, - // Def -> str Callback: - pub ds: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String, - // The type context. - pub tcx: TyCtxt<'a, 'tcx, 'tcx>, - pub abbrevs: &'a abbrev_map<'tcx> -} - -impl<'a, 'tcx> encoder::EncodeContext<'a, 'tcx> { - pub fn ty_str_ctxt<'b>(&'b self) -> ctxt<'b, 'tcx> { - ctxt { - diag: self.tcx.sess.diagnostic(), - ds: encoder::def_to_string, - tcx: self.tcx, - abbrevs: &self.type_abbrevs - } - } -} - -// Compact string representation for Ty values. API TyStr & parse_from_str. -// Extra parameters are for converting to/from def_ids in the string rep. -// Whatever format you choose should not contain pipe characters. 
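(Illustrative aside, not part of the patch: the deleted header comment above describes the compact, pipe-free tag strings that `enc_ty` below writes and `parse_ty` above reads back. The sketch that follows is a toy model of that scheme, with the mapping read off the visible match arms; `ToyTy` and `enc` are made-up stand-ins for the real `Ty` and `enc_ty`, not rustc APIs.)

```rust
// Toy model of the tag scheme used by the (now deleted) enc_ty/parse_ty pair.
// Encodings inferred from the match arms in this file:
//
//   bool          -> "b"          u8           -> "Mb"     i32 -> "ML"
//   str           -> "v"          (bool, char) -> "T[bc]"
//   &'static str  -> "&tv"        ('t' = the 'static region, immutable borrow)
//   [u8]          -> "VMb/|"      [u8; 4]      -> "VMb/4|"
//   *mut i32      -> "*mML"       ('m' marks a mutable pointee)

#[allow(dead_code)]
enum ToyTy {
    Bool,
    Char,
    U8,
    I32,
    Str,
    Tup(Vec<ToyTy>),
    Slice(Box<ToyTy>),
}

fn enc(t: &ToyTy, out: &mut String) {
    match t {
        ToyTy::Bool => out.push('b'),
        ToyTy::Char => out.push('c'),
        ToyTy::U8 => out.push_str("Mb"),
        ToyTy::I32 => out.push_str("ML"),
        ToyTy::Str => out.push('v'),
        ToyTy::Tup(ts) => {
            out.push_str("T[");
            for t in ts {
                enc(t, out);
            }
            out.push(']');
        }
        ToyTy::Slice(inner) => {
            out.push('V');
            enc(inner, out);
            out.push_str("/|"); // slices carry no length before the '|'
        }
    }
}

fn main() {
    let mut s = String::new();
    enc(&ToyTy::Tup(vec![ToyTy::Bool, ToyTy::Char]), &mut s);
    assert_eq!(s, "T[bc]"); // same shape as the real "T[" ... "]" arm
}
```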
-pub struct ty_abbrev { - s: Vec -} - -pub type abbrev_map<'tcx> = RefCell, ty_abbrev>>; - -pub fn enc_ty<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, t: Ty<'tcx>) { - if let Some(a) = cx.abbrevs.borrow_mut().get(&t) { - w.write_all(&a.s); - return; - } - - let pos = w.position(); - - match t.sty { - ty::TyBool => { write!(w, "b"); } - ty::TyChar => { write!(w, "c"); } - ty::TyNever => { write!(w, "!"); } - ty::TyInt(t) => { - match t { - ast::IntTy::Is => write!(w, "is"), - ast::IntTy::I8 => write!(w, "MB"), - ast::IntTy::I16 => write!(w, "MW"), - ast::IntTy::I32 => write!(w, "ML"), - ast::IntTy::I64 => write!(w, "MD") - }; - } - ty::TyUint(t) => { - match t { - ast::UintTy::Us => write!(w, "us"), - ast::UintTy::U8 => write!(w, "Mb"), - ast::UintTy::U16 => write!(w, "Mw"), - ast::UintTy::U32 => write!(w, "Ml"), - ast::UintTy::U64 => write!(w, "Md") - }; - } - ty::TyFloat(t) => { - match t { - ast::FloatTy::F32 => write!(w, "Mf"), - ast::FloatTy::F64 => write!(w, "MF"), - }; - } - ty::TyTrait(ref obj) => { - write!(w, "x["); - enc_existential_trait_ref(w, cx, obj.principal.0); - enc_builtin_bounds(w, cx, &obj.builtin_bounds); - - enc_region(w, cx, obj.region_bound); - - for tp in &obj.projection_bounds { - write!(w, "P"); - enc_existential_projection(w, cx, &tp.0); - } - - write!(w, "."); - write!(w, "]"); - } - ty::TyTuple(ts) => { - write!(w, "T["); - for t in ts { enc_ty(w, cx, *t); } - write!(w, "]"); - } - ty::TyBox(typ) => { write!(w, "~"); enc_ty(w, cx, typ); } - ty::TyRawPtr(mt) => { write!(w, "*"); enc_mt(w, cx, mt); } - ty::TyRef(r, mt) => { - write!(w, "&"); - enc_region(w, cx, r); - enc_mt(w, cx, mt); - } - ty::TyArray(t, sz) => { - write!(w, "V"); - enc_ty(w, cx, t); - write!(w, "/{}|", sz); - } - ty::TySlice(t) => { - write!(w, "V"); - enc_ty(w, cx, t); - write!(w, "/|"); - } - ty::TyStr => { - write!(w, "v"); - } - ty::TyFnDef(def_id, substs, f) => { - write!(w, "F"); - write!(w, "{}|", (cx.ds)(cx.tcx, def_id)); - enc_substs(w, cx, substs); - enc_bare_fn_ty(w, cx, f); - } - ty::TyFnPtr(f) => { - write!(w, "G"); - enc_bare_fn_ty(w, cx, f); - } - ty::TyInfer(_) => { - bug!("cannot encode inference variable types"); - } - ty::TyParam(p) => { - write!(w, "p[{}|{}]", p.idx, p.name); - } - ty::TyAdt(def, substs) => { - write!(w, "a[{}|", (cx.ds)(cx.tcx, def.did)); - enc_substs(w, cx, substs); - write!(w, "]"); - } - ty::TyClosure(def, substs) => { - write!(w, "k[{}|", (cx.ds)(cx.tcx, def)); - enc_substs(w, cx, substs.func_substs); - for ty in substs.upvar_tys { - enc_ty(w, cx, ty); - } - write!(w, "."); - write!(w, "]"); - } - ty::TyProjection(ref data) => { - write!(w, "P["); - enc_trait_ref(w, cx, data.trait_ref); - write!(w, "{}]", data.item_name); - } - ty::TyAnon(def_id, substs) => { - write!(w, "A[{}|", (cx.ds)(cx.tcx, def_id)); - enc_substs(w, cx, substs); - write!(w, "]"); - } - ty::TyError => { - write!(w, "e"); - } - } - - let end = w.position(); - let len = end - pos; - - let mut abbrev = Cursor::new(Vec::with_capacity(16)); - abbrev.write_all(b"#"); - { - let start_position = abbrev.position() as usize; - let bytes_written = leb128::write_unsigned_leb128(abbrev.get_mut(), - start_position, - pos); - abbrev.set_position((start_position + bytes_written) as u64); - } - - cx.abbrevs.borrow_mut().insert(t, ty_abbrev { - s: if abbrev.position() < len { - abbrev.get_ref()[..abbrev.position() as usize].to_owned() - } else { - // if the abbreviation is longer than the real type, - // don't use #-notation. 
However, insert it here so - // other won't have to `mark_stable_position` - w.get_ref()[pos as usize .. end as usize].to_owned() - } - }); -} - -fn enc_mutability(w: &mut Cursor>, mt: hir::Mutability) { - match mt { - hir::MutImmutable => (), - hir::MutMutable => { - write!(w, "m"); - } - }; -} - -fn enc_mt<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, - mt: ty::TypeAndMut<'tcx>) { - enc_mutability(w, mt.mutbl); - enc_ty(w, cx, mt.ty); -} - -fn enc_opt(w: &mut Cursor>, t: Option, enc_f: F) where - F: FnOnce(&mut Cursor>, T), -{ - match t { - None => { - write!(w, "n"); - } - Some(v) => { - write!(w, "s"); - enc_f(w, v); - } - } -} - -pub fn enc_substs<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, - substs: &Substs<'tcx>) { - write!(w, "["); - for &k in substs.params() { - if let Some(ty) = k.as_type() { - write!(w, "t"); - enc_ty(w, cx, ty); - } else if let Some(r) = k.as_region() { - write!(w, "r"); - enc_region(w, cx, r); - } else { - bug!() - } - } - write!(w, "]"); -} - -pub fn enc_generics<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, - generics: &ty::Generics<'tcx>) { - enc_opt(w, generics.parent, |w, def_id| { - write!(w, "{}|", (cx.ds)(cx.tcx, def_id)); - }); - write!(w, "{}|{}[", - generics.parent_regions, - generics.parent_types); - - for r in &generics.regions { - enc_region_param_def(w, cx, r) - } - write!(w, "|"); - for t in &generics.types { - enc_type_param_def(w, cx, t); - } - write!(w, "]"); - - if generics.has_self { - write!(w, "S"); - } else { - write!(w, "N"); - } -} - -pub fn enc_region(w: &mut Cursor>, cx: &ctxt, r: &ty::Region) { - match *r { - ty::ReLateBound(id, br) => { - write!(w, "b[{}|", id.depth); - enc_bound_region(w, cx, br); - write!(w, "]"); - } - ty::ReEarlyBound(ref data) => { - write!(w, "B[{}|{}]", - data.index, - data.name); - } - ty::ReFree(ref fr) => { - write!(w, "f["); - enc_scope(w, cx, fr.scope); - write!(w, "|"); - enc_bound_region(w, cx, fr.bound_region); - write!(w, "]"); - } - ty::ReScope(scope) => { - write!(w, "s"); - enc_scope(w, cx, scope); - write!(w, "|"); - } - ty::ReStatic => { - write!(w, "t"); - } - ty::ReEmpty => { - write!(w, "e"); - } - ty::ReErased => { - write!(w, "E"); - } - ty::ReVar(_) | ty::ReSkolemized(..) 
=> { - // these should not crop up after typeck - bug!("cannot encode region variables"); - } - } -} - -fn enc_scope(w: &mut Cursor>, cx: &ctxt, scope: region::CodeExtent) { - match cx.tcx.region_maps.code_extent_data(scope) { - region::CodeExtentData::CallSiteScope { - fn_id, body_id } => write!(w, "C[{}|{}]", fn_id, body_id), - region::CodeExtentData::ParameterScope { - fn_id, body_id } => write!(w, "P[{}|{}]", fn_id, body_id), - region::CodeExtentData::Misc(node_id) => write!(w, "M{}", node_id), - region::CodeExtentData::Remainder(region::BlockRemainder { - block: b, first_statement_index: i }) => write!(w, "B[{}|{}]", b, i), - region::CodeExtentData::DestructionScope(node_id) => write!(w, "D{}", node_id), - }; -} - -fn enc_bound_region(w: &mut Cursor>, cx: &ctxt, br: ty::BoundRegion) { - match br { - ty::BrAnon(idx) => { - write!(w, "a{}|", idx); - } - ty::BrNamed(d, name, issue32330) => { - write!(w, "[{}|{}|", - (cx.ds)(cx.tcx, d), - name); - - match issue32330 { - ty::Issue32330::WontChange => - write!(w, "n]"), - ty::Issue32330::WillChange { fn_def_id, region_name } => - write!(w, "y{}|{}]", (cx.ds)(cx.tcx, fn_def_id), region_name), - }; - } - ty::BrFresh(id) => { - write!(w, "f{}|", id); - } - ty::BrEnv => { - write!(w, "e|"); - } - } -} - -pub fn enc_trait_ref<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, - s: ty::TraitRef<'tcx>) { - write!(w, "{}|", (cx.ds)(cx.tcx, s.def_id)); - enc_substs(w, cx, s.substs); -} - -fn enc_existential_trait_ref<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, - s: ty::ExistentialTraitRef<'tcx>) { - write!(w, "{}|", (cx.ds)(cx.tcx, s.def_id)); - enc_substs(w, cx, s.substs); -} - -fn enc_unsafety(w: &mut Cursor>, p: hir::Unsafety) { - match p { - hir::Unsafety::Normal => write!(w, "n"), - hir::Unsafety::Unsafe => write!(w, "u"), - }; -} - -fn enc_abi(w: &mut Cursor>, abi: Abi) { - write!(w, "["); - write!(w, "{}", abi.name()); - write!(w, "]"); -} - -pub fn enc_bare_fn_ty<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, - ft: &ty::BareFnTy<'tcx>) { - enc_unsafety(w, ft.unsafety); - enc_abi(w, ft.abi); - enc_fn_sig(w, cx, &ft.sig); -} - -pub fn enc_closure_ty<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, - ft: &ty::ClosureTy<'tcx>) { - enc_unsafety(w, ft.unsafety); - enc_fn_sig(w, cx, &ft.sig); - enc_abi(w, ft.abi); -} - -fn enc_fn_sig<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, - fsig: &ty::PolyFnSig<'tcx>) { - write!(w, "["); - for ty in &fsig.0.inputs { - enc_ty(w, cx, *ty); - } - write!(w, "]"); - if fsig.0.variadic { - write!(w, "V"); - } else { - write!(w, "N"); - } - enc_ty(w, cx, fsig.0.output); -} - -fn enc_builtin_bounds(w: &mut Cursor>, _cx: &ctxt, bs: &ty::BuiltinBounds) { - for bound in bs { - match bound { - ty::BoundSend => write!(w, "S"), - ty::BoundSized => write!(w, "Z"), - ty::BoundCopy => write!(w, "P"), - ty::BoundSync => write!(w, "T"), - }; - } - - write!(w, "."); -} - -fn enc_type_param_def<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, - v: &ty::TypeParameterDef<'tcx>) { - write!(w, "{}:{}|{}|{}|", - v.name, (cx.ds)(cx.tcx, v.def_id), - v.index, (cx.ds)(cx.tcx, v.default_def_id)); - enc_opt(w, v.default, |w, t| enc_ty(w, cx, t)); - enc_object_lifetime_default(w, cx, v.object_lifetime_default); -} - -fn enc_region_param_def(w: &mut Cursor>, cx: &ctxt, - v: &ty::RegionParameterDef) { - write!(w, "{}:{}|{}|", - v.name, (cx.ds)(cx.tcx, v.def_id), v.index); - for &r in &v.bounds { - write!(w, "R"); - enc_region(w, cx, r); - } - write!(w, "."); -} - -fn enc_object_lifetime_default<'a, 'tcx>(w: &mut Cursor>, - cx: &ctxt<'a, 
'tcx>, - default: ty::ObjectLifetimeDefault) -{ - match default { - ty::ObjectLifetimeDefault::Ambiguous => { - write!(w, "a"); - } - ty::ObjectLifetimeDefault::BaseDefault => { - write!(w, "b"); - } - ty::ObjectLifetimeDefault::Specific(r) => { - write!(w, "s"); - enc_region(w, cx, r); - } - } -} - -pub fn enc_predicate<'a, 'tcx>(w: &mut Cursor>, - cx: &ctxt<'a, 'tcx>, - p: &ty::Predicate<'tcx>) -{ - match *p { - ty::Predicate::Trait(ref trait_ref) => { - write!(w, "t"); - enc_trait_ref(w, cx, trait_ref.0.trait_ref); - } - ty::Predicate::Equate(ty::Binder(ty::EquatePredicate(a, b))) => { - write!(w, "e"); - enc_ty(w, cx, a); - enc_ty(w, cx, b); - } - ty::Predicate::RegionOutlives(ty::Binder(ty::OutlivesPredicate(a, b))) => { - write!(w, "r"); - enc_region(w, cx, a); - enc_region(w, cx, b); - } - ty::Predicate::TypeOutlives(ty::Binder(ty::OutlivesPredicate(a, b))) => { - write!(w, "o"); - enc_ty(w, cx, a); - enc_region(w, cx, b); - } - ty::Predicate::Projection(ty::Binder(ref data)) => { - write!(w, "p"); - enc_trait_ref(w, cx, data.projection_ty.trait_ref); - write!(w, "{}|", data.projection_ty.item_name); - enc_ty(w, cx, data.ty); - } - ty::Predicate::WellFormed(data) => { - write!(w, "w"); - enc_ty(w, cx, data); - } - ty::Predicate::ObjectSafe(trait_def_id) => { - write!(w, "O{}|", (cx.ds)(cx.tcx, trait_def_id)); - } - ty::Predicate::ClosureKind(closure_def_id, kind) => { - let kind_char = match kind { - ty::ClosureKind::Fn => 'f', - ty::ClosureKind::FnMut => 'm', - ty::ClosureKind::FnOnce => 'o', - }; - write!(w, "c{}|{}|", (cx.ds)(cx.tcx, closure_def_id), kind_char); - } - } -} - -fn enc_existential_projection<'a, 'tcx>(w: &mut Cursor>, - cx: &ctxt<'a, 'tcx>, - data: &ty::ExistentialProjection<'tcx>) { - enc_existential_trait_ref(w, cx, data.trait_ref); - write!(w, "{}|", data.item_name); - enc_ty(w, cx, data.ty); -} diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs index 34d7973264631..b53f8c4da86f2 100644 --- a/src/librustc_mir/build/block.rs +++ b/src/librustc_mir/build/block.rs @@ -10,7 +10,7 @@ use build::{BlockAnd, BlockAndExtension, Builder}; use hair::*; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::hir; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { diff --git a/src/librustc_mir/build/cfg.rs b/src/librustc_mir/build/cfg.rs index 026a79b32b8f7..9f612175e5da7 100644 --- a/src/librustc_mir/build/cfg.rs +++ b/src/librustc_mir/build/cfg.rs @@ -14,7 +14,7 @@ //! Routines for manipulating the control-flow graph. use build::CFG; -use rustc::mir::repr::*; +use rustc::mir::*; impl<'tcx> CFG<'tcx> { pub fn block_data(&self, blk: BasicBlock) -> &BasicBlockData<'tcx> { diff --git a/src/librustc_mir/build/expr/as_constant.rs b/src/librustc_mir/build/expr/as_constant.rs index a08d14d9e2056..6230123a9ca17 100644 --- a/src/librustc_mir/build/expr/as_constant.rs +++ b/src/librustc_mir/build/expr/as_constant.rs @@ -12,7 +12,7 @@ use build::Builder; use hair::*; -use rustc::mir::repr::*; +use rustc::mir::*; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr`, yielding a compile-time constant. 
Assumes that diff --git a/src/librustc_mir/build/expr/as_lvalue.rs b/src/librustc_mir/build/expr/as_lvalue.rs index ae5ccbfd82099..58abaa0c484f2 100644 --- a/src/librustc_mir/build/expr/as_lvalue.rs +++ b/src/librustc_mir/build/expr/as_lvalue.rs @@ -13,7 +13,7 @@ use build::{BlockAnd, BlockAndExtension, Builder}; use build::expr::category::Category; use hair::*; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc_data_structures::indexed_vec::Idx; @@ -77,11 +77,11 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { success.and(slice.index(idx)) } ExprKind::SelfRef => { - block.and(Lvalue::Arg(Arg::new(0))) + block.and(Lvalue::Local(Local::new(1))) } ExprKind::VarRef { id } => { let index = this.var_indices[&id]; - block.and(Lvalue::Var(index)) + block.and(Lvalue::Local(index)) } ExprKind::StaticRef { id } => { block.and(Lvalue::Static(id)) @@ -96,6 +96,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { ExprKind::LogicalOp { .. } | ExprKind::Box { .. } | ExprKind::Cast { .. } | + ExprKind::Use { .. } | ExprKind::NeverToAny { .. } | ExprKind::ReifyFnPointer { .. } | ExprKind::UnsafeFnPointer { .. } | diff --git a/src/librustc_mir/build/expr/as_operand.rs b/src/librustc_mir/build/expr/as_operand.rs index beb9ca256abfd..09cdcc74ef63e 100644 --- a/src/librustc_mir/build/expr/as_operand.rs +++ b/src/librustc_mir/build/expr/as_operand.rs @@ -13,7 +13,7 @@ use build::{BlockAnd, BlockAndExtension, Builder}; use build::expr::category::Category; use hair::*; -use rustc::mir::repr::*; +use rustc::mir::*; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr` into a value that can be used as an operand. diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs index a40571c5d8597..490f675c3d5e3 100644 --- a/src/librustc_mir/build/expr/as_rvalue.rs +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -22,7 +22,7 @@ use hair::*; use rustc_const_math::{ConstInt, ConstIsize}; use rustc::middle::const_val::ConstVal; use rustc::ty; -use rustc::mir::repr::*; +use rustc::mir::*; use syntax::ast; use syntax_pos::Span; @@ -115,6 +115,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let source = unpack!(block = this.as_operand(block, source)); block.and(Rvalue::Cast(CastKind::Misc, source, expr.ty)) } + ExprKind::Use { source } => { + let source = unpack!(block = this.as_operand(block, source)); + block.and(Rvalue::Use(source)) + } ExprKind::ReifyFnPointer { source } => { let source = unpack!(block = this.as_operand(block, source)); block.and(Rvalue::Cast(CastKind::ReifyFnPointer, source, expr.ty)) @@ -160,7 +164,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { .map(|f| unpack!(block = this.as_operand(block, f))) .collect(); - block.and(Rvalue::Aggregate(AggregateKind::Vec, fields)) + block.and(Rvalue::Aggregate(AggregateKind::Array, fields)) } ExprKind::Tuple { fields } => { // see (*) above // first process the set of fields @@ -253,7 +257,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let source_info = self.source_info(span); let bool_ty = self.hir.bool_ty(); if self.hir.check_overflow() && op.is_checkable() && ty.is_integral() { - let result_tup = self.hir.tcx().mk_tup(vec![ty, bool_ty]); + let result_tup = self.hir.tcx().intern_tup(&[ty, bool_ty]); let result_value = self.temp(result_tup); self.cfg.push_assign(block, source_info, diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs index 85128cbbbafef..fb12e08affd2d 100644 --- a/src/librustc_mir/build/expr/as_temp.rs +++ 
b/src/librustc_mir/build/expr/as_temp.rs @@ -13,7 +13,7 @@ use build::{BlockAnd, BlockAndExtension, Builder}; use build::expr::category::Category; use hair::*; -use rustc::mir::repr::*; +use rustc::mir::*; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr` into a fresh temporary. This is used when building diff --git a/src/librustc_mir/build/expr/category.rs b/src/librustc_mir/build/expr/category.rs index c19ea0f445ac0..9671f80f48ba7 100644 --- a/src/librustc_mir/build/expr/category.rs +++ b/src/librustc_mir/build/expr/category.rs @@ -68,6 +68,7 @@ impl Category { ExprKind::Binary { .. } | ExprKind::Box { .. } | ExprKind::Cast { .. } | + ExprKind::Use { .. } | ExprKind::ReifyFnPointer { .. } | ExprKind::UnsafeFnPointer { .. } | ExprKind::Unsize { .. } | diff --git a/src/librustc_mir/build/expr/into.rs b/src/librustc_mir/build/expr/into.rs index e5930f5a62df6..5fa0844222100 100644 --- a/src/librustc_mir/build/expr/into.rs +++ b/src/librustc_mir/build/expr/into.rs @@ -14,7 +14,7 @@ use build::{BlockAnd, BlockAndExtension, Builder}; use build::expr::category::{Category, RvalueFunc}; use hair::*; use rustc::ty; -use rustc::mir::repr::*; +use rustc::mir::*; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr`, storing the result into `destination`, which @@ -249,6 +249,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { ExprKind::Binary { .. } | ExprKind::Box { .. } | ExprKind::Cast { .. } | + ExprKind::Use { .. } | ExprKind::ReifyFnPointer { .. } | ExprKind::UnsafeFnPointer { .. } | ExprKind::Unsize { .. } | diff --git a/src/librustc_mir/build/expr/stmt.rs b/src/librustc_mir/build/expr/stmt.rs index 8ae23c9103b02..4a1926e7c57d4 100644 --- a/src/librustc_mir/build/expr/stmt.rs +++ b/src/librustc_mir/build/expr/stmt.rs @@ -12,7 +12,7 @@ use build::{BlockAnd, BlockAndExtension, Builder}; use build::scope::LoopScope; use hair::*; use rustc::middle::region::CodeExtent; -use rustc::mir::repr::*; +use rustc::mir::*; use syntax_pos::Span; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { @@ -90,9 +90,13 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { } ExprKind::Return { value } => { block = match value { - Some(value) => unpack!(this.into(&Lvalue::ReturnPointer, block, value)), + Some(value) => { + unpack!(this.into(&Lvalue::Local(RETURN_POINTER), block, value)) + } None => { - this.cfg.push_assign_unit(block, source_info, &Lvalue::ReturnPointer); + this.cfg.push_assign_unit(block, + source_info, + &Lvalue::Local(RETURN_POINTER)); block } }; diff --git a/src/librustc_mir/build/into.rs b/src/librustc_mir/build/into.rs index 17ccb701c2b7a..5c133780e433b 100644 --- a/src/librustc_mir/build/into.rs +++ b/src/librustc_mir/build/into.rs @@ -16,7 +16,7 @@ use build::{BlockAnd, Builder}; use hair::*; -use rustc::mir::repr::*; +use rustc::mir::*; pub trait EvalInto<'tcx> { fn eval_into<'a, 'gcx>(self, diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index 1b64b4d0b5317..727e634ef92db 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -18,7 +18,7 @@ use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::bitvec::BitVector; use rustc::middle::const_val::ConstVal; use rustc::ty::{AdtDef, Ty}; -use rustc::mir::repr::*; +use rustc::mir::*; use hair::*; use syntax::ast::{Name, NodeId}; use syntax_pos::Span; @@ -123,7 +123,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { var, subpattern: None, .. 
} => { self.storage_live_for_bindings(block, &irrefutable_pat); - let lvalue = Lvalue::Var(self.var_indices[&var]); + let lvalue = Lvalue::Local(self.var_indices[&var]); return self.into(&lvalue, block, initializer); } _ => {} @@ -214,7 +214,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { pattern: &Pattern<'tcx>) { match *pattern.kind { PatternKind::Binding { var, ref subpattern, .. } => { - let lvalue = Lvalue::Var(self.var_indices[&var]); + let lvalue = Lvalue::Local(self.var_indices[&var]); let source_info = self.source_info(pattern.span); self.cfg.push(block, Statement { source_info: source_info, @@ -705,10 +705,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { let source_info = self.source_info(binding.span); self.cfg.push(block, Statement { source_info: source_info, - kind: StatementKind::StorageLive(Lvalue::Var(var_index)) + kind: StatementKind::StorageLive(Lvalue::Local(var_index)) }); self.cfg.push_assign(block, source_info, - &Lvalue::Var(var_index), rvalue); + &Lvalue::Local(var_index), rvalue); } } @@ -718,19 +718,19 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { name: Name, var_id: NodeId, var_ty: Ty<'tcx>) - -> Var + -> Local { debug!("declare_binding(var_id={:?}, name={:?}, var_ty={:?}, source_info={:?})", var_id, name, var_ty, source_info); - let var = self.var_decls.push(VarDecl::<'tcx> { - source_info: source_info, + let var = self.local_decls.push(LocalDecl::<'tcx> { mutability: mutability, - name: name, ty: var_ty.clone(), + name: Some(name), + source_info: Some(source_info), }); let extent = self.extent_of_innermost_scope(); - self.schedule_drop(source_info.span, extent, &Lvalue::Var(var), var_ty); + self.schedule_drop(source_info.span, extent, &Lvalue::Local(var), var_ty); self.var_indices.insert(var_id, var); debug!("declare_binding: var={:?}", var); diff --git a/src/librustc_mir/build/matches/simplify.rs b/src/librustc_mir/build/matches/simplify.rs index 8392248e3f22e..71282dcf0ba07 100644 --- a/src/librustc_mir/build/matches/simplify.rs +++ b/src/librustc_mir/build/matches/simplify.rs @@ -25,7 +25,7 @@ use build::{BlockAnd, BlockAndExtension, Builder}; use build::matches::{Binding, MatchPair, Candidate}; use hair::*; -use rustc::mir::repr::*; +use rustc::mir::*; use std::mem; diff --git a/src/librustc_mir/build/matches/test.rs b/src/librustc_mir/build/matches/test.rs index bf43bfb326a58..5984b0f7893cd 100644 --- a/src/librustc_mir/build/matches/test.rs +++ b/src/librustc_mir/build/matches/test.rs @@ -22,7 +22,7 @@ use rustc_data_structures::fnv::FnvHashMap; use rustc_data_structures::bitvec::BitVector; use rustc::middle::const_val::ConstVal; use rustc::ty::{self, Ty}; -use rustc::mir::repr::*; +use rustc::mir::*; use syntax_pos::Span; use std::cmp::Ordering; @@ -73,8 +73,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { Test { span: match_pair.pattern.span, kind: TestKind::Range { - lo: lo.clone(), - hi: hi.clone(), + lo: Literal::Value { value: lo.clone() }, + hi: Literal::Value { value: hi.clone() }, ty: match_pair.pattern.ty.clone(), }, } diff --git a/src/librustc_mir/build/matches/util.rs b/src/librustc_mir/build/matches/util.rs index 53ebf6fceb5c8..a013875b3110b 100644 --- a/src/librustc_mir/build/matches/util.rs +++ b/src/librustc_mir/build/matches/util.rs @@ -11,7 +11,7 @@ use build::Builder; use build::matches::MatchPair; use hair::*; -use rustc::mir::repr::*; +use rustc::mir::*; use std::u32; impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { diff --git a/src/librustc_mir/build/misc.rs b/src/librustc_mir/build/misc.rs index 
79a4cf73041d7..a5f51ef35b741 100644 --- a/src/librustc_mir/build/misc.rs +++ b/src/librustc_mir/build/misc.rs @@ -17,7 +17,7 @@ use rustc_const_math::{ConstInt, ConstUsize, ConstIsize}; use rustc::middle::const_val::ConstVal; use rustc::ty::{self, Ty}; -use rustc::mir::repr::*; +use rustc::mir::*; use syntax::ast; use syntax_pos::Span; @@ -28,10 +28,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// NB: **No cleanup is scheduled for this temporary.** You should /// call `schedule_drop` once the temporary is initialized. pub fn temp(&mut self, ty: Ty<'tcx>) -> Lvalue<'tcx> { - let temp = self.temp_decls.push(TempDecl { ty: ty }); - let lvalue = Lvalue::Temp(temp); + let temp = self.local_decls.push(LocalDecl::new_temp(ty)); + let lvalue = Lvalue::Local(temp); debug!("temp: created temp {:?} with type {:?}", - lvalue, self.temp_decls[temp].ty); + lvalue, self.local_decls[temp].ty); lvalue } diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 59d6cf1185969..d6fcc79a9a213 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -9,9 +9,11 @@ // except according to those terms. use hair::cx::Cx; +use hair::Pattern; + use rustc::middle::region::{CodeExtent, CodeExtentData, ROOT_CODE_EXTENT}; use rustc::ty::{self, Ty}; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::util::nodemap::NodeMap; use rustc::hir; use syntax::abi::Abi; @@ -28,6 +30,7 @@ pub struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { cfg: CFG<'tcx>, fn_span: Span, + arg_count: usize, /// the current set of scopes, updated as we traverse; /// see the `scope` module for more details @@ -49,9 +52,9 @@ pub struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { visibility_scopes: IndexVec, visibility_scope: VisibilityScope, - var_decls: IndexVec>, - var_indices: NodeMap, - temp_decls: IndexVec>, + /// Maps node ids of variable bindings to the `Local`s created for them. 
+ var_indices: NodeMap, + local_decls: IndexVec>, unit_temp: Option>, /// cached block with the RESUME terminator; this is created @@ -157,9 +160,11 @@ pub fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, -> (Mir<'tcx>, ScopeAuxiliaryVec) where A: Iterator, Option<&'gcx hir::Pat>)> { + let arguments: Vec<_> = arguments.collect(); + let tcx = hir.tcx(); let span = tcx.map.span(fn_id); - let mut builder = Builder::new(hir, span); + let mut builder = Builder::new(hir, span, arguments.len(), return_ty); let body_id = ast_block.id; let call_site_extent = @@ -169,27 +174,27 @@ pub fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, tcx.region_maps.lookup_code_extent( CodeExtentData::ParameterScope { fn_id: fn_id, body_id: body_id }); let mut block = START_BLOCK; - let mut arg_decls = unpack!(block = builder.in_scope(call_site_extent, block, |builder| { - let arg_decls = unpack!(block = builder.in_scope(arg_extent, block, |builder| { - builder.args_and_body(block, return_ty, arguments, arg_extent, ast_block) + unpack!(block = builder.in_scope(call_site_extent, block, |builder| { + unpack!(block = builder.in_scope(arg_extent, block, |builder| { + builder.args_and_body(block, return_ty, &arguments, arg_extent, ast_block) })); - - let source_info = builder.source_info(span); + // Attribute epilogue to function's closing brace + let fn_end = Span { lo: span.hi, ..span }; + let source_info = builder.source_info(fn_end); let return_block = builder.return_block(); builder.cfg.terminate(block, source_info, TerminatorKind::Goto { target: return_block }); builder.cfg.terminate(return_block, source_info, TerminatorKind::Return); - return_block.and(arg_decls) + return_block.unit() })); assert_eq!(block, builder.return_block()); + let mut spread_arg = None; match tcx.node_id_to_type(fn_id).sty { ty::TyFnDef(_, _, f) if f.abi == Abi::RustCall => { // RustCall pseudo-ABI untuples the last argument. - if let Some(last_arg) = arg_decls.last() { - arg_decls[last_arg].spread = true; - } + spread_arg = Some(Local::new(arguments.len())); } _ => {} } @@ -197,8 +202,9 @@ pub fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, // Gather the upvars of a closure, if any. 
let upvar_decls: Vec<_> = tcx.with_freevars(fn_id, |freevars| { freevars.iter().map(|fv| { + let var_id = tcx.map.as_local_node_id(fv.def.def_id()).unwrap(); let by_ref = tcx.upvar_capture(ty::UpvarId { - var_id: fv.def.var_id(), + var_id: var_id, closure_expr_id: fn_id }).map_or(false, |capture| match capture { ty::UpvarCapture::ByValue => false, @@ -208,7 +214,7 @@ pub fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, debug_name: keywords::Invalid.name(), by_ref: by_ref }; - if let Some(hir::map::NodeLocal(pat)) = tcx.map.find(fv.def.var_id()) { + if let Some(hir::map::NodeLocal(pat)) = tcx.map.find(var_id) { if let hir::PatKind::Binding(_, ref ident, _) = pat.node { decl.debug_name = ident.node; } @@ -217,7 +223,9 @@ pub fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, }).collect() }); - builder.finish(upvar_decls, arg_decls, return_ty) + let (mut mir, aux) = builder.finish(upvar_decls, return_ty); + mir.spread_arg = spread_arg; + (mir, aux) } pub fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>, @@ -225,15 +233,16 @@ pub fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>, ast_expr: &'tcx hir::Expr) -> (Mir<'tcx>, ScopeAuxiliaryVec) { let tcx = hir.tcx(); + let ty = tcx.expr_ty_adjusted(ast_expr); let span = tcx.map.span(item_id); - let mut builder = Builder::new(hir, span); + let mut builder = Builder::new(hir, span, 0, ty); let extent = tcx.region_maps.temporary_scope(ast_expr.id) .unwrap_or(ROOT_CODE_EXTENT); let mut block = START_BLOCK; let _ = builder.in_scope(extent, block, |builder| { let expr = builder.hir.mirror(ast_expr); - unpack!(block = builder.into(&Lvalue::ReturnPointer, block, expr)); + unpack!(block = builder.into(&Lvalue::Local(RETURN_POINTER), block, expr)); let source_info = builder.source_info(span); let return_block = builder.return_block(); @@ -245,23 +254,26 @@ pub fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>, return_block.unit() }); - let ty = tcx.expr_ty_adjusted(ast_expr); - builder.finish(vec![], IndexVec::new(), ty) + builder.finish(vec![], ty) } impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { - fn new(hir: Cx<'a, 'gcx, 'tcx>, span: Span) -> Builder<'a, 'gcx, 'tcx> { + fn new(hir: Cx<'a, 'gcx, 'tcx>, + span: Span, + arg_count: usize, + return_ty: Ty<'tcx>) + -> Builder<'a, 'gcx, 'tcx> { let mut builder = Builder { hir: hir, cfg: CFG { basic_blocks: IndexVec::new() }, fn_span: span, + arg_count: arg_count, scopes: vec![], visibility_scopes: IndexVec::new(), visibility_scope: ARGUMENT_VISIBILITY_SCOPE, scope_auxiliary: IndexVec::new(), loop_scopes: vec![], - temp_decls: IndexVec::new(), - var_decls: IndexVec::new(), + local_decls: IndexVec::from_elem_n(LocalDecl::new_return_pointer(return_ty), 1), var_indices: NodeMap(), unit_temp: None, cached_resume_block: None, @@ -277,7 +289,6 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { fn finish(self, upvar_decls: Vec, - arg_decls: IndexVec>, return_ty: Ty<'tcx>) -> (Mir<'tcx>, ScopeAuxiliaryVec) { for (index, block) in self.cfg.basic_blocks.iter().enumerate() { @@ -290,29 +301,47 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { self.visibility_scopes, IndexVec::new(), return_ty, - self.var_decls, - arg_decls, - self.temp_decls, + self.local_decls, + self.arg_count, upvar_decls, self.fn_span ), self.scope_auxiliary) } - fn args_and_body(&mut self, - mut block: BasicBlock, - return_ty: Ty<'tcx>, - arguments: A, - argument_extent: CodeExtent, - ast_block: &'gcx hir::Block) - -> BlockAnd>> - where A: Iterator, Option<&'gcx hir::Pat>)> + fn args_and_body(&mut self, + 
mut block: BasicBlock, + return_ty: Ty<'tcx>, + arguments: &[(Ty<'gcx>, Option<&'gcx hir::Pat>)], + argument_extent: CodeExtent, + ast_block: &'gcx hir::Block) + -> BlockAnd<()> { - // to start, translate the argument patterns and collect the argument types. + // Allocate locals for the function arguments + for &(ty, pattern) in arguments.iter() { + // If this is a simple binding pattern, give the local a nice name for debuginfo. + let mut name = None; + if let Some(pat) = pattern { + if let hir::PatKind::Binding(_, ref ident, _) = pat.node { + name = Some(ident.node); + } + } + + self.local_decls.push(LocalDecl { + mutability: Mutability::Not, + ty: ty, + source_info: None, + name: name, + }); + } + let mut scope = None; - let arg_decls = arguments.enumerate().map(|(index, (ty, pattern))| { - let lvalue = Lvalue::Arg(Arg::new(index)); + // Bind the argument patterns + for (index, &(ty, pattern)) in arguments.iter().enumerate() { + // Function arguments always get the first Local indices after the return pointer + let lvalue = Lvalue::Local(Local::new(index + 1)); + if let Some(pattern) = pattern { - let pattern = self.hir.irrefutable_pat(pattern); + let pattern = Pattern::from_hir(self.hir.tcx(), pattern); scope = self.declare_bindings(scope, ast_block.span, &pattern); unpack!(block = self.lvalue_into_pattern(block, pattern, &lvalue)); } @@ -321,19 +350,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { self.schedule_drop(pattern.as_ref().map_or(ast_block.span, |pat| pat.span), argument_extent, &lvalue, ty); - let mut name = keywords::Invalid.name(); - if let Some(pat) = pattern { - if let hir::PatKind::Binding(_, ref ident, _) = pat.node { - name = ident.node; - } - } - - ArgDecl { - ty: ty, - spread: false, - debug_name: name - } - }).collect(); + } // Enter the argument pattern bindings visibility scope, if it exists. if let Some(visibility_scope) = scope { @@ -343,9 +360,10 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { // FIXME(#32959): temporary hack for the issue at hand let return_is_unit = return_ty.is_nil(); // start the first basic block and translate the body - unpack!(block = self.ast_block(&Lvalue::ReturnPointer, return_is_unit, block, ast_block)); + unpack!(block = self.ast_block(&Lvalue::Local(RETURN_POINTER), + return_is_unit, block, ast_block)); - block.and(arg_decls) + block.unit() } fn get_unit_temp(&mut self) -> Lvalue<'tcx> { diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index 0b33e5a145083..af8170a1b8f55 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -26,7 +26,7 @@ multiple-exit (SEME) region in the control-flow graph. For now, we keep a mapping from each `CodeExtent` to its corresponding SEME region for later reference (see caveat in next paragraph). This is because region scopes are tied to -them. Eventually, when we shift to non-lexical lifetimes, three should +them. Eventually, when we shift to non-lexical lifetimes, there should be no need to remember this mapping. There is one additional wrinkle, actually, that I wanted to hide from @@ -67,7 +67,7 @@ There are numerous "normal" ways to early exit a scope: `break`, early exit occurs, the method `exit_scope` is called. It is given the current point in execution where the early exit occurs, as well as the scope you want to branch to (note that all early exits from to some -other enclosing scope). `exit_scope` will record thid exit point and +other enclosing scope). `exit_scope` will record this exit point and also add all drops. 
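(Illustrative aside: the behaviour the scope machinery models here is ordinary Rust drop order on an early exit. Leaving several nested scopes at once, e.g. via `break`, must run the drops for every scope being exited, innermost first; that is exactly the chain of drops `exit_scope` records. A minimal, self-contained sketch in plain Rust, independent of the MIR builder:)

```rust
struct Noisy(&'static str);

impl Drop for Noisy {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn main() {
    loop {
        let _outer = Noisy("outer");
        {
            let _inner = Noisy("inner");
            // Early exit: this leaves both the inner block and the loop body,
            // so drops for `_inner` and then `_outer` run on this exit edge.
            break;
        }
    }
    // Output: "dropping inner", then "dropping outer".
}
```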
Panics are handled in a similar fashion, except that a panic always @@ -89,15 +89,13 @@ should go to. use build::{BlockAnd, BlockAndExtension, Builder, CFG, ScopeAuxiliary, ScopeId}; use rustc::middle::region::{CodeExtent, CodeExtentData}; use rustc::middle::lang_items; -use rustc::ty::subst::{Kind, Substs, Subst}; +use rustc::ty::subst::{Kind, Subst}; use rustc::ty::{Ty, TyCtxt}; -use rustc::mir::repr::*; +use rustc::mir::*; use syntax_pos::Span; use rustc_data_structures::indexed_vec::Idx; use rustc_data_structures::fnv::FnvHashMap; -use std::iter; - pub struct Scope<'tcx> { /// the scope-id within the scope_auxiliary id: ScopeId, @@ -322,7 +320,11 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { self.diverge_cleanup(); let scope = self.scopes.pop().unwrap(); assert_eq!(scope.extent, extent); - unpack!(block = build_scope_drops(&mut self.cfg, &scope, &self.scopes, block)); + unpack!(block = build_scope_drops(&mut self.cfg, + &scope, + &self.scopes, + block, + self.arg_count)); self.scope_auxiliary[scope.id] .postdoms .push(self.cfg.current_location(block)); @@ -362,7 +364,11 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { scope.cached_exits.insert((target, extent), b); b }; - unpack!(block = build_scope_drops(&mut self.cfg, scope, rest, block)); + unpack!(block = build_scope_drops(&mut self.cfg, + scope, + rest, + block, + self.arg_count)); if let Some(ref free_data) = scope.free { let next = self.cfg.start_new_block(); let free = build_free(self.hir.tcx(), &tmp, free_data, next); @@ -454,7 +460,7 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { } else { // Only temps and vars need their storage dead. match *lvalue { - Lvalue::Temp(_) | Lvalue::Var(_) => DropKind::Storage, + Lvalue::Local(index) if index.index() > self.arg_count => DropKind::Storage, _ => return } }; @@ -513,8 +519,12 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { if let DropKind::Value { .. } = drop_kind { scope.needs_cleanup = true; } + let tcx = self.hir.tcx(); + let extent_span = extent.span(&tcx.region_maps, &tcx.map).unwrap(); + // Attribute scope exit drops to scope's closing brace + let scope_end = Span { lo: extent_span.hi, .. extent_span}; scope.drops.push(DropData { - span: span, + span: scope_end, location: lvalue.clone(), kind: drop_kind }); @@ -671,7 +681,8 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>, scope: &Scope<'tcx>, earlier_scopes: &[Scope<'tcx>], - mut block: BasicBlock) + mut block: BasicBlock, + arg_count: usize) -> BlockAnd<()> { let mut iter = scope.drops.iter().rev().peekable(); while let Some(drop_data) = iter.next() { @@ -703,7 +714,7 @@ fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>, DropKind::Storage => { // Only temps and vars need their storage dead. 
match drop_data.location { - Lvalue::Temp(_) | Lvalue::Var(_) => {} + Lvalue::Local(index) if index.index() > arg_count => {} _ => continue } @@ -791,7 +802,7 @@ fn build_free<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, -> TerminatorKind<'tcx> { let free_func = tcx.lang_items.require(lang_items::BoxFreeFnLangItem) .unwrap_or_else(|e| tcx.sess.fatal(&e)); - let substs = Substs::new(tcx, iter::once(Kind::from(data.item_ty))); + let substs = tcx.intern_substs(&[Kind::from(data.item_ty)]); TerminatorKind::Call { func: Operand::Constant(Constant { span: data.span, diff --git a/src/librustc_mir/def_use.rs b/src/librustc_mir/def_use.rs new file mode 100644 index 0000000000000..d20d50c561140 --- /dev/null +++ b/src/librustc_mir/def_use.rs @@ -0,0 +1,163 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Def-use analysis. + +use rustc::mir::{Local, Location, Lvalue, Mir}; +use rustc::mir::visit::{LvalueContext, MutVisitor, Visitor}; +use rustc_data_structures::indexed_vec::IndexVec; +use std::marker::PhantomData; +use std::mem; + +pub struct DefUseAnalysis<'tcx> { + info: IndexVec>, +} + +#[derive(Clone)] +pub struct Info<'tcx> { + pub defs_and_uses: Vec>, +} + +#[derive(Clone)] +pub struct Use<'tcx> { + pub context: LvalueContext<'tcx>, + pub location: Location, +} + +impl<'tcx> DefUseAnalysis<'tcx> { + pub fn new(mir: &Mir<'tcx>) -> DefUseAnalysis<'tcx> { + DefUseAnalysis { + info: IndexVec::from_elem_n(Info::new(), mir.local_decls.len()), + } + } + + pub fn analyze(&mut self, mir: &Mir<'tcx>) { + let mut finder = DefUseFinder { + info: mem::replace(&mut self.info, IndexVec::new()), + }; + finder.visit_mir(mir); + self.info = finder.info + } + + pub fn local_info(&self, local: Local) -> &Info<'tcx> { + &self.info[local] + } + + pub fn local_info_mut(&mut self, local: Local) -> &mut Info<'tcx> { + &mut self.info[local] + } + + fn mutate_defs_and_uses(&self, local: Local, mir: &mut Mir<'tcx>, mut callback: F) + where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, + LvalueContext<'tcx>, + Location) { + for lvalue_use in &self.info[local].defs_and_uses { + MutateUseVisitor::new(local, + &mut callback, + mir).visit_location(mir, lvalue_use.location) + } + } + + /// FIXME(pcwalton): This should update the def-use chains. 
+ pub fn replace_all_defs_and_uses_with(&self, + local: Local, + mir: &mut Mir<'tcx>, + new_lvalue: Lvalue<'tcx>) { + self.mutate_defs_and_uses(local, mir, |lvalue, _, _| *lvalue = new_lvalue.clone()) + } +} + +struct DefUseFinder<'tcx> { + info: IndexVec>, +} + +impl<'tcx> DefUseFinder<'tcx> { + fn lvalue_mut_info(&mut self, lvalue: &Lvalue<'tcx>) -> Option<&mut Info<'tcx>> { + let info = &mut self.info; + + if let Lvalue::Local(local) = *lvalue { + Some(&mut info[local]) + } else { + None + } + } +} + +impl<'tcx> Visitor<'tcx> for DefUseFinder<'tcx> { + fn visit_lvalue(&mut self, + lvalue: &Lvalue<'tcx>, + context: LvalueContext<'tcx>, + location: Location) { + if let Some(ref mut info) = self.lvalue_mut_info(lvalue) { + info.defs_and_uses.push(Use { + context: context, + location: location, + }) + } + self.super_lvalue(lvalue, context, location) + } +} + +impl<'tcx> Info<'tcx> { + fn new() -> Info<'tcx> { + Info { + defs_and_uses: vec![], + } + } + + pub fn def_count(&self) -> usize { + self.defs_and_uses.iter().filter(|lvalue_use| lvalue_use.context.is_mutating_use()).count() + } + + pub fn def_count_not_including_drop(&self) -> usize { + self.defs_and_uses.iter().filter(|lvalue_use| { + lvalue_use.context.is_mutating_use() && !lvalue_use.context.is_drop() + }).count() + } + + pub fn use_count(&self) -> usize { + self.defs_and_uses.iter().filter(|lvalue_use| { + lvalue_use.context.is_nonmutating_use() + }).count() + } +} + +struct MutateUseVisitor<'tcx, F> { + query: Local, + callback: F, + phantom: PhantomData<&'tcx ()>, +} + +impl<'tcx, F> MutateUseVisitor<'tcx, F> { + fn new(query: Local, callback: F, _: &Mir<'tcx>) + -> MutateUseVisitor<'tcx, F> + where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) { + MutateUseVisitor { + query: query, + callback: callback, + phantom: PhantomData, + } + } +} + +impl<'tcx, F> MutVisitor<'tcx> for MutateUseVisitor<'tcx, F> + where F: for<'a> FnMut(&'a mut Lvalue<'tcx>, LvalueContext<'tcx>, Location) { + fn visit_lvalue(&mut self, + lvalue: &mut Lvalue<'tcx>, + context: LvalueContext<'tcx>, + location: Location) { + if let Lvalue::Local(local) = *lvalue { + if local == self.query { + (self.callback)(lvalue, context, location) + } + } + self.super_lvalue(lvalue, context, location) + } +} diff --git a/src/librustc_mir/graphviz.rs b/src/librustc_mir/graphviz.rs index 72b6d7f0e5aa5..dd4dd4699d858 100644 --- a/src/librustc_mir/graphviz.rs +++ b/src/librustc_mir/graphviz.rs @@ -10,8 +10,7 @@ use dot; use rustc::hir::def_id::DefId; -use rustc::mir::repr::*; -use rustc::mir::mir_map::MirMap; +use rustc::mir::*; use rustc::ty::TyCtxt; use std::fmt::Debug; use std::io::{self, Write}; @@ -22,14 +21,13 @@ use rustc_data_structures::indexed_vec::Idx; /// Write a graphviz DOT graph of a list of MIRs. pub fn write_mir_graphviz<'a, 'b, 'tcx, W, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>, iter: I, - mir_map: &MirMap<'tcx>, w: &mut W) -> io::Result<()> where W: Write, I: Iterator { for def_id in iter { let nodeid = tcx.map.as_local_node_id(def_id).unwrap(); - let mir = &mir_map.map[&def_id]; + let mir = &tcx.item_mir(def_id); writeln!(w, "digraph Mir_{} {{", nodeid)?; @@ -136,30 +134,31 @@ fn write_graph_label<'a, 'tcx, W: Write>(tcx: TyCtxt<'a, 'tcx, 'tcx>, write!(w, " label= 0 { write!(w, ", ")?; } - write!(w, "{:?}: {}", Lvalue::Arg(Arg::new(i)), escape(&arg.ty))?; + write!(w, "{:?}: {}", Lvalue::Local(arg), escape(&mir.local_decls[arg].ty))?; } write!(w, ") -> {}", escape(mir.return_ty))?; write!(w, r#"
"#)?; - // User variable types (including the user's name in a comment). - for (i, var) in mir.var_decls.iter().enumerate() { + for local in mir.vars_and_temps_iter() { + let decl = &mir.local_decls[local]; + write!(w, "let ")?; - if var.mutability == Mutability::Mut { + if decl.mutability == Mutability::Mut { write!(w, "mut ")?; } - write!(w, r#"{:?}: {}; // {}
"#, - Lvalue::Var(Var::new(i)), escape(&var.ty), var.name)?; - } - // Compiler-introduced temporary types. - for (i, temp) in mir.temp_decls.iter().enumerate() { - write!(w, r#"let mut {:?}: {};
"#, - Lvalue::Temp(Temp::new(i)), escape(&temp.ty))?; + if let Some(name) = decl.name { + write!(w, r#"{:?}: {}; // {}
"#, + Lvalue::Local(local), escape(&decl.ty), name)?; + } else { + write!(w, r#"let mut {:?}: {};
"#, + Lvalue::Local(local), escape(&decl.ty))?; + } } writeln!(w, ">;") diff --git a/src/librustc_mir/hair/cx/block.rs b/src/librustc_mir/hair/cx/block.rs index 52d54f2cc8572..ec1136368c135 100644 --- a/src/librustc_mir/hair/cx/block.rs +++ b/src/librustc_mir/hair/cx/block.rs @@ -57,7 +57,7 @@ fn mirror_stmts<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, let remainder_extent = cx.tcx.region_maps.lookup_code_extent(remainder_extent); - let pattern = cx.irrefutable_pat(&local.pat); + let pattern = Pattern::from_hir(cx.tcx, &local.pat); result.push(StmtRef::Mirror(Box::new(Stmt { span: stmt.span, kind: StmtKind::Let { diff --git a/src/librustc_mir/hair/cx/expr.rs b/src/librustc_mir/hair/cx/expr.rs index 4518f8cb373fa..1b324ac3132fc 100644 --- a/src/librustc_mir/hair/cx/expr.rs +++ b/src/librustc_mir/hair/cx/expr.rs @@ -15,13 +15,13 @@ use hair::cx::Cx; use hair::cx::block; use hair::cx::to_ref::ToRef; use rustc::hir::map; -use rustc::hir::def::Def; +use rustc::hir::def::{Def, CtorKind}; use rustc::middle::const_val::ConstVal; use rustc_const_eval as const_eval; use rustc::middle::region::CodeExtent; use rustc::ty::{self, AdtKind, VariantDef, Ty}; use rustc::ty::cast::CastKind as TyCastKind; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::hir; use syntax::ptr::P; @@ -271,10 +271,10 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, // Tuple-like ADTs are represented as ExprCall. We convert them here. expr_ty.ty_adt_def().and_then(|adt_def|{ match cx.tcx.expect_def(fun.id) { - Def::Variant(_, variant_id) => { + Def::VariantCtor(variant_id, CtorKind::Fn) => { Some((adt_def, adt_def.variant_index_with_id(variant_id))) }, - Def::Struct(..) => { + Def::StructCtor(_, CtorKind::Fn) => { Some((adt_def, 0)) }, _ => None @@ -480,8 +480,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, } AdtKind::Enum => { match cx.tcx.expect_def(expr.id) { - Def::Variant(enum_id, variant_id) => { - debug_assert!(adt.did == enum_id); + Def::Variant(variant_id) => { assert!(base.is_none()); let index = adt.variant_index_with_id(variant_id); @@ -601,8 +600,8 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, // Check to see if this cast is a "coercion cast", where the cast is actually done // using a coercion (or is a no-op). if let Some(&TyCastKind::CoercionCast) = cx.tcx.cast_kinds.borrow().get(&source.id) { - // Skip the actual cast itexpr, as it's now a no-op. - return source.make_mirror(cx); + // Convert the lexpr to a vexpr. 
+ ExprKind::Use { source: source.to_ref() } } else { ExprKind::Cast { source: source.to_ref() } } @@ -614,7 +613,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, value: value.to_ref(), value_extents: cx.tcx.region_maps.node_extent(value.id) }, - hir::ExprVec(ref fields) => + hir::ExprArray(ref fields) => ExprKind::Vec { fields: fields.to_ref() }, hir::ExprTup(ref fields) => ExprKind::Tuple { fields: fields.to_ref() }, @@ -658,7 +657,7 @@ fn to_borrow_kind(m: hir::Mutability) -> BorrowKind { fn convert_arm<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, arm: &'tcx hir::Arm) -> Arm<'tcx> { Arm { - patterns: arm.pats.iter().map(|p| cx.refutable_pat(p)).collect(), + patterns: arm.pats.iter().map(|p| Pattern::from_hir(cx.tcx, p)).collect(), guard: arm.guard.to_ref(), body: arm.body.to_ref(), } @@ -671,43 +670,25 @@ fn convert_path_expr<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, // Otherwise there may be def_map borrow conflicts let def = cx.tcx.expect_def(expr.id); let def_id = match def { - // A regular function. - Def::Fn(def_id) | Def::Method(def_id) => def_id, - Def::Struct(def_id) => match cx.tcx.node_id_to_type(expr.id).sty { - // A tuple-struct constructor. Should only be reached if not called in the same - // expression. - ty::TyFnDef(..) => def_id, - // A unit struct which is used as a value. We return a completely different ExprKind - // here to account for this special case. + // A regular function, constructor function or a constant. + Def::Fn(def_id) | Def::Method(def_id) | + Def::StructCtor(def_id, CtorKind::Fn) | + Def::VariantCtor(def_id, CtorKind::Fn) | + Def::Const(def_id) | Def::AssociatedConst(def_id) => def_id, + + Def::StructCtor(def_id, CtorKind::Const) | + Def::VariantCtor(def_id, CtorKind::Const) => match cx.tcx.node_id_to_type(expr.id).sty { + // A unit struct/variant which is used as a value. + // We return a completely different ExprKind here to account for this special case. ty::TyAdt(adt_def, substs) => return ExprKind::Adt { adt_def: adt_def, - variant_index: 0, + variant_index: adt_def.variant_index_with_id(def_id), substs: substs, fields: vec![], - base: None + base: None, }, ref sty => bug!("unexpected sty: {:?}", sty) }, - Def::Variant(enum_id, variant_id) => match cx.tcx.node_id_to_type(expr.id).sty { - // A variant constructor. Should only be reached if not called in the same - // expression. - ty::TyFnDef(..) => variant_id, - // A unit variant, similar special case to the struct case above. 
- ty::TyAdt(adt_def, substs) => { - debug_assert!(adt_def.did == enum_id); - let index = adt_def.variant_index_with_id(variant_id); - return ExprKind::Adt { - adt_def: adt_def, - substs: substs, - variant_index: index, - fields: vec![], - base: None - }; - }, - ref sty => bug!("unexpected sty: {:?}", sty) - }, - Def::Const(def_id) | - Def::AssociatedConst(def_id) => def_id, Def::Static(node_id, _) => return ExprKind::StaticRef { id: node_id, @@ -729,13 +710,15 @@ fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id); match def { - Def::Local(_, node_id) => { + Def::Local(def_id) => { + let node_id = cx.tcx.map.as_local_node_id(def_id).unwrap(); ExprKind::VarRef { id: node_id, } } - Def::Upvar(_, id_var, index, closure_expr_id) => { + Def::Upvar(def_id, index, closure_expr_id) => { + let id_var = cx.tcx.map.as_local_node_id(def_id).unwrap(); debug!("convert_var(upvar({:?}, {:?}, {:?}))", id_var, index, closure_expr_id); let var_ty = cx.tcx.node_id_to_type(id_var); @@ -974,7 +957,7 @@ fn capture_freevar<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, freevar: &hir::Freevar, freevar_ty: Ty<'tcx>) -> ExprRef<'tcx> { - let id_var = freevar.def.var_id(); + let id_var = cx.tcx.map.as_local_node_id(freevar.def.def_id()).unwrap(); let upvar_id = ty::UpvarId { var_id: id_var, closure_expr_id: closure_expr.id, diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs index 8dd33ad2f9c7a..678db1e544cb0 100644 --- a/src/librustc_mir/hair/cx/mod.rs +++ b/src/librustc_mir/hair/cx/mod.rs @@ -16,7 +16,7 @@ */ use hair::*; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::mir::transform::MirSource; use rustc::middle::const_val::ConstVal; @@ -27,7 +27,7 @@ use rustc::hir::def_id::DefId; use rustc::hir::intravisit::FnKind; use rustc::hir::map::blocks::FnLikeNode; use rustc::infer::InferCtxt; -use rustc::ty::subst::{Subst, Substs}; +use rustc::ty::subst::Subst; use rustc::ty::{self, Ty, TyCtxt}; use syntax::parse::token; use rustc::hir; @@ -146,7 +146,7 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { params: &[Ty<'tcx>]) -> (Ty<'tcx>, Literal<'tcx>) { let method_name = token::intern(method_name); - let substs = Substs::new_trait(self.tcx, self_ty, params); + let substs = self.tcx.mk_substs_trait(self_ty, params); for trait_item in self.tcx.trait_items(trait_def_id).iter() { match *trait_item { ty::ImplOrTraitItem::MethodTraitItem(ref method) => { @@ -196,5 +196,4 @@ impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { mod block; mod expr; -mod pattern; mod to_ref; diff --git a/src/librustc_mir/hair/cx/pattern.rs b/src/librustc_mir/hair/cx/pattern.rs deleted file mode 100644 index 3639b165eb5ab..0000000000000 --- a/src/librustc_mir/hair/cx/pattern.rs +++ /dev/null @@ -1,326 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- -use hair::*; -use hair::cx::Cx; -use rustc_data_structures::indexed_vec::Idx; -use rustc_const_eval as const_eval; -use rustc::hir::def::Def; -use rustc::hir::pat_util::EnumerateAndAdjustIterator; -use rustc::ty::{self, Ty}; -use rustc::mir::repr::*; -use rustc::hir::{self, PatKind}; -use syntax::ptr::P; -use syntax_pos::Span; - -/// When there are multiple patterns in a single arm, each one has its -/// own node-ids for the bindings. References to the variables always -/// use the node-ids from the first pattern in the arm, so we just -/// remap the ids for all subsequent bindings to the first one. -/// -/// Example: -/// ``` -/// match foo { -/// Test1(flavor /* def 1 */) | -/// Test2(flavor /* def 2 */) if flavor /* ref 1 */.is_tasty() => { ... } -/// _ => { ... } -/// } -/// ``` -struct PatCx<'patcx, 'cx: 'patcx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { - cx: &'patcx mut Cx<'cx, 'gcx, 'tcx>, -} - -impl<'cx, 'gcx, 'tcx> Cx<'cx, 'gcx, 'tcx> { - pub fn irrefutable_pat(&mut self, pat: &hir::Pat) -> Pattern<'tcx> { - PatCx::new(self).to_pattern(pat) - } - - pub fn refutable_pat(&mut self, - pat: &hir::Pat) - -> Pattern<'tcx> { - PatCx::new(self).to_pattern(pat) - } -} - -impl<'patcx, 'cx, 'gcx, 'tcx> PatCx<'patcx, 'cx, 'gcx, 'tcx> { - fn new(cx: &'patcx mut Cx<'cx, 'gcx, 'tcx>) - -> PatCx<'patcx, 'cx, 'gcx, 'tcx> { - PatCx { - cx: cx, - } - } - - fn to_pattern(&mut self, pat: &hir::Pat) -> Pattern<'tcx> { - let mut ty = self.cx.tcx.node_id_to_type(pat.id); - - let kind = match pat.node { - PatKind::Wild => PatternKind::Wild, - - PatKind::Lit(ref value) => { - let value = const_eval::eval_const_expr(self.cx.tcx.global_tcx(), value); - PatternKind::Constant { value: value } - } - - PatKind::Range(ref lo, ref hi) => { - let lo = const_eval::eval_const_expr(self.cx.tcx.global_tcx(), lo); - let lo = Literal::Value { value: lo }; - let hi = const_eval::eval_const_expr(self.cx.tcx.global_tcx(), hi); - let hi = Literal::Value { value: hi }; - PatternKind::Range { lo: lo, hi: hi } - }, - - PatKind::Path(..) => { - match self.cx.tcx.expect_def(pat.id) { - Def::Const(def_id) | Def::AssociatedConst(def_id) => { - let tcx = self.cx.tcx.global_tcx(); - let substs = Some(self.cx.tcx.node_id_item_substs(pat.id).substs); - match const_eval::lookup_const_by_id(tcx, def_id, substs) { - Some((const_expr, _const_ty)) => { - match const_eval::const_expr_to_pat(tcx, - const_expr, - pat.id, - pat.span) { - Ok(pat) => - return self.to_pattern(&pat), - Err(_) => - span_bug!( - pat.span, "illegal constant"), - } - } - None => { - span_bug!( - pat.span, - "cannot eval constant: {:?}", - def_id) - } - } - } - _ => { - self.variant_or_leaf(pat, vec![]) - } - } - } - - PatKind::Ref(ref subpattern, _) | - PatKind::Box(ref subpattern) => { - PatternKind::Deref { subpattern: self.to_pattern(subpattern) } - } - - PatKind::Vec(ref prefix, ref slice, ref suffix) => { - let ty = self.cx.tcx.node_id_to_type(pat.id); - match ty.sty { - ty::TyRef(_, mt) => - PatternKind::Deref { - subpattern: Pattern { - ty: mt.ty, - span: pat.span, - kind: Box::new(self.slice_or_array_pattern(pat.span, mt.ty, prefix, - slice, suffix)), - }, - }, - - ty::TySlice(..) | - ty::TyArray(..) 
=> - self.slice_or_array_pattern(pat.span, ty, prefix, slice, suffix), - - ref sty => - span_bug!( - pat.span, - "unexpanded type for vector pattern: {:?}", - sty), - } - } - - PatKind::Tuple(ref subpatterns, ddpos) => { - match self.cx.tcx.node_id_to_type(pat.id).sty { - ty::TyTuple(ref tys) => { - let subpatterns = - subpatterns.iter() - .enumerate_and_adjust(tys.len(), ddpos) - .map(|(i, subpattern)| FieldPattern { - field: Field::new(i), - pattern: self.to_pattern(subpattern), - }) - .collect(); - - PatternKind::Leaf { subpatterns: subpatterns } - } - - ref sty => span_bug!(pat.span, "unexpected type for tuple pattern: {:?}", sty), - } - } - - PatKind::Binding(bm, ref ident, ref sub) => { - let id = self.cx.tcx.expect_def(pat.id).var_id(); - let var_ty = self.cx.tcx.node_id_to_type(pat.id); - let region = match var_ty.sty { - ty::TyRef(r, _) => Some(r), - _ => None, - }; - let (mutability, mode) = match bm { - hir::BindByValue(hir::MutMutable) => - (Mutability::Mut, BindingMode::ByValue), - hir::BindByValue(hir::MutImmutable) => - (Mutability::Not, BindingMode::ByValue), - hir::BindByRef(hir::MutMutable) => - (Mutability::Not, BindingMode::ByRef(region.unwrap(), BorrowKind::Mut)), - hir::BindByRef(hir::MutImmutable) => - (Mutability::Not, BindingMode::ByRef(region.unwrap(), BorrowKind::Shared)), - }; - - // A ref x pattern is the same node used for x, and as such it has - // x's type, which is &T, where we want T (the type being matched). - if let hir::BindByRef(_) = bm { - if let ty::TyRef(_, mt) = ty.sty { - ty = mt.ty; - } else { - bug!("`ref {}` has wrong type {}", ident.node, ty); - } - } - - PatternKind::Binding { - mutability: mutability, - mode: mode, - name: ident.node, - var: id, - ty: var_ty, - subpattern: self.to_opt_pattern(sub), - } - } - - PatKind::TupleStruct(_, ref subpatterns, ddpos) => { - let pat_ty = self.cx.tcx.node_id_to_type(pat.id); - let adt_def = match pat_ty.sty { - ty::TyAdt(adt_def, _) => adt_def, - _ => span_bug!(pat.span, "tuple struct pattern not applied to an ADT"), - }; - let variant_def = adt_def.variant_of_def(self.cx.tcx.expect_def(pat.id)); - - let subpatterns = - subpatterns.iter() - .enumerate_and_adjust(variant_def.fields.len(), ddpos) - .map(|(i, field)| FieldPattern { - field: Field::new(i), - pattern: self.to_pattern(field), - }) - .collect(); - self.variant_or_leaf(pat, subpatterns) - } - - PatKind::Struct(_, ref fields, _) => { - let pat_ty = self.cx.tcx.node_id_to_type(pat.id); - let adt_def = match pat_ty.sty { - ty::TyAdt(adt_def, _) => adt_def, - _ => { - span_bug!( - pat.span, - "struct pattern not applied to an ADT"); - } - }; - let variant_def = adt_def.variant_of_def(self.cx.tcx.expect_def(pat.id)); - - let subpatterns = - fields.iter() - .map(|field| { - let index = variant_def.index_of_field_named(field.node.name); - let index = index.unwrap_or_else(|| { - span_bug!( - pat.span, - "no field with name {:?}", - field.node.name); - }); - FieldPattern { - field: Field::new(index), - pattern: self.to_pattern(&field.node.pat), - } - }) - .collect(); - - self.variant_or_leaf(pat, subpatterns) - } - }; - - Pattern { - span: pat.span, - ty: ty, - kind: Box::new(kind), - } - } - - fn to_patterns(&mut self, pats: &[P]) -> Vec> { - pats.iter().map(|p| self.to_pattern(p)).collect() - } - - fn to_opt_pattern(&mut self, pat: &Option>) -> Option> { - pat.as_ref().map(|p| self.to_pattern(p)) - } - - fn slice_or_array_pattern(&mut self, - span: Span, - ty: Ty<'tcx>, - prefix: &[P], - slice: &Option>, - suffix: &[P]) - -> PatternKind<'tcx> { - 
match ty.sty { - ty::TySlice(..) => { - // matching a slice or fixed-length array - PatternKind::Slice { - prefix: self.to_patterns(prefix), - slice: self.to_opt_pattern(slice), - suffix: self.to_patterns(suffix), - } - } - - ty::TyArray(_, len) => { - // fixed-length array - assert!(len >= prefix.len() + suffix.len()); - PatternKind::Array { - prefix: self.to_patterns(prefix), - slice: self.to_opt_pattern(slice), - suffix: self.to_patterns(suffix), - } - } - - _ => { - span_bug!(span, "unexpanded macro or bad constant etc"); - } - } - } - - fn variant_or_leaf(&mut self, - pat: &hir::Pat, - subpatterns: Vec>) - -> PatternKind<'tcx> { - match self.cx.tcx.expect_def(pat.id) { - Def::Variant(enum_id, variant_id) => { - let adt_def = self.cx.tcx.lookup_adt_def(enum_id); - if adt_def.variants.len() > 1 { - PatternKind::Variant { - adt_def: adt_def, - variant_index: adt_def.variant_index_with_id(variant_id), - subpatterns: subpatterns, - } - } else { - PatternKind::Leaf { subpatterns: subpatterns } - } - } - - Def::Struct(..) | Def::Union(..) | - Def::TyAlias(..) | Def::AssociatedTy(..) => { - PatternKind::Leaf { subpatterns: subpatterns } - } - - def => { - span_bug!(pat.span, "inappropriate def for pattern: {:?}", def); - } - } - } -} diff --git a/src/librustc_mir/hair/mod.rs b/src/librustc_mir/hair/mod.rs index 353f243335302..e211334e5473a 100644 --- a/src/librustc_mir/hair/mod.rs +++ b/src/librustc_mir/hair/mod.rs @@ -14,9 +14,7 @@ //! unit-tested and separated from the Rust source and compiler data //! structures. -use rustc::mir::repr::{BinOp, BorrowKind, Field, Literal, Mutability, UnOp, - TypedConstVal}; -use rustc::middle::const_val::ConstVal; +use rustc::mir::{BinOp, BorrowKind, Field, Literal, UnOp, TypedConstVal}; use rustc::hir::def_id::DefId; use rustc::middle::region::CodeExtent; use rustc::ty::subst::Substs; @@ -28,6 +26,8 @@ use self::cx::Cx; pub mod cx; +pub use rustc_const_eval::pattern::{BindingMode, Pattern, PatternKind, FieldPattern}; + #[derive(Clone, Debug)] pub struct Block<'tcx> { pub extent: CodeExtent, @@ -139,6 +139,9 @@ pub enum ExprKind<'tcx> { Cast { source: ExprRef<'tcx>, }, + Use { + source: ExprRef<'tcx>, + }, // Use a lexpr to get a vexpr. NeverToAny { source: ExprRef<'tcx>, }, @@ -263,86 +266,12 @@ pub struct Arm<'tcx> { pub body: ExprRef<'tcx>, } -#[derive(Clone, Debug)] -pub struct Pattern<'tcx> { - pub ty: Ty<'tcx>, - pub span: Span, - pub kind: Box>, -} - #[derive(Copy, Clone, Debug)] pub enum LogicalOp { And, Or, } -#[derive(Clone, Debug)] -pub enum PatternKind<'tcx> { - Wild, - - /// x, ref x, x @ P, etc - Binding { - mutability: Mutability, - name: ast::Name, - mode: BindingMode<'tcx>, - var: ast::NodeId, - ty: Ty<'tcx>, - subpattern: Option>, - }, - - /// Foo(...) 
or Foo{...} or Foo, where `Foo` is a variant name from an adt with >1 variants - Variant { - adt_def: AdtDef<'tcx>, - variant_index: usize, - subpatterns: Vec>, - }, - - /// (...), Foo(...), Foo{...}, or Foo, where `Foo` is a variant name from an adt with 1 variant - Leaf { - subpatterns: Vec>, - }, - - /// box P, &P, &mut P, etc - Deref { - subpattern: Pattern<'tcx>, - }, - - Constant { - value: ConstVal, - }, - - Range { - lo: Literal<'tcx>, - hi: Literal<'tcx>, - }, - - /// matches against a slice, checking the length and extracting elements - Slice { - prefix: Vec>, - slice: Option>, - suffix: Vec>, - }, - - /// fixed match against an array, irrefutable - Array { - prefix: Vec>, - slice: Option>, - suffix: Vec>, - }, -} - -#[derive(Copy, Clone, Debug)] -pub enum BindingMode<'tcx> { - ByValue, - ByRef(&'tcx Region, BorrowKind), -} - -#[derive(Clone, Debug)] -pub struct FieldPattern<'tcx> { - pub field: Field, - pub pattern: Pattern<'tcx>, -} - /////////////////////////////////////////////////////////////////////////// // The Mirror trait diff --git a/src/librustc_mir/lib.rs b/src/librustc_mir/lib.rs index f580ceeee5d7f..02f15602d708b 100644 --- a/src/librustc_mir/lib.rs +++ b/src/librustc_mir/lib.rs @@ -26,7 +26,7 @@ Rust MIR: a lowered representation of Rust. Also: an experiment! #![feature(rustc_diagnostic_macros)] #![feature(rustc_private)] #![feature(staged_api)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #[macro_use] extern crate log; extern crate graphviz as dot; @@ -46,8 +46,10 @@ extern crate rustc_const_eval; pub mod diagnostics; pub mod build; +pub mod def_use; pub mod graphviz; mod hair; pub mod mir_map; pub mod pretty; pub mod transform; + diff --git a/src/librustc_mir/mir_map.rs b/src/librustc_mir/mir_map.rs index 5e92a057da382..b0e2d6e73d37a 100644 --- a/src/librustc_mir/mir_map.rs +++ b/src/librustc_mir/mir_map.rs @@ -19,13 +19,12 @@ use build; use rustc::dep_graph::DepNode; use rustc::hir::def_id::DefId; -use rustc::mir::repr::Mir; +use rustc::mir::Mir; use rustc::mir::transform::MirSource; use rustc::mir::visit::MutVisitor; use pretty; use hair::cx::Cx; -use rustc::mir::mir_map::MirMap; use rustc::infer::InferCtxtBuilder; use rustc::traits::Reveal; use rustc::ty::{self, Ty, TyCtxt}; @@ -37,16 +36,10 @@ use syntax_pos::Span; use std::mem; -pub fn build_mir_for_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MirMap<'tcx> { - let mut map = MirMap::new(tcx.dep_graph.clone()); - { - let mut dump = BuildMir { - tcx: tcx, - map: &mut map, - }; - tcx.visit_all_items_in_krate(DepNode::Mir, &mut dump); - } - map +pub fn build_mir_for_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { + tcx.visit_all_items_in_krate(DepNode::Mir, &mut BuildMir { + tcx: tcx + }); } /// A pass to lift all the types and substitutions in a Mir @@ -83,8 +76,7 @@ impl<'a, 'gcx: 'tcx, 'tcx> MutVisitor<'tcx> for GlobalizeMir<'a, 'gcx> { // BuildMir -- walks a crate, looking for fn items and methods to build MIR from struct BuildMir<'a, 'tcx: 'a> { - tcx: TyCtxt<'a, 'tcx, 'tcx>, - map: &'a mut MirMap<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx> } /// Helper type of a temporary returned by BuildMir::cx(...). 
@@ -93,8 +85,7 @@ struct BuildMir<'a, 'tcx: 'a> { struct CxBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { src: MirSource, def_id: DefId, - infcx: InferCtxtBuilder<'a, 'gcx, 'tcx>, - map: &'a mut MirMap<'gcx>, + infcx: InferCtxtBuilder<'a, 'gcx, 'tcx> } impl<'a, 'gcx, 'tcx> BuildMir<'a, 'gcx> { @@ -104,8 +95,7 @@ impl<'a, 'gcx, 'tcx> BuildMir<'a, 'gcx> { CxBuilder { src: src, infcx: self.tcx.infer_ctxt(None, Some(param_env), Reveal::NotSpecializable), - def_id: def_id, - map: self.map + def_id: def_id } } } @@ -114,13 +104,14 @@ impl<'a, 'gcx, 'tcx> CxBuilder<'a, 'gcx, 'tcx> { fn build(&'tcx mut self, f: F) where F: for<'b> FnOnce(Cx<'b, 'gcx, 'tcx>) -> (Mir<'tcx>, build::ScopeAuxiliaryVec) { - let src = self.src; - let mir = self.infcx.enter(|infcx| { + let (src, def_id) = (self.src, self.def_id); + self.infcx.enter(|infcx| { let (mut mir, scope_auxiliary) = f(Cx::new(&infcx, src)); // Convert the Mir to global types. + let tcx = infcx.tcx.global_tcx(); let mut globalizer = GlobalizeMir { - tcx: infcx.tcx.global_tcx(), + tcx: tcx, span: mir.span }; globalizer.visit_mir(&mut mir); @@ -128,13 +119,11 @@ impl<'a, 'gcx, 'tcx> CxBuilder<'a, 'gcx, 'tcx> { mem::transmute::>(mir) }; - pretty::dump_mir(infcx.tcx.global_tcx(), "mir_map", &0, - src, &mir, Some(&scope_auxiliary)); + pretty::dump_mir(tcx, "mir_map", &0, src, &mir, Some(&scope_auxiliary)); - mir + let mir = tcx.alloc_mir(mir); + assert!(tcx.mir_map.borrow_mut().insert(def_id, mir).is_none()); }); - - assert!(self.map.map.insert(self.def_id, mir).is_none()) } } @@ -202,7 +191,7 @@ impl<'a, 'tcx> Visitor<'tcx> for BuildMir<'a, 'tcx> { // Array lengths, i.e. [T; constant]. fn visit_ty(&mut self, ty: &'tcx hir::Ty) { - if let hir::TyFixedLengthVec(_, ref length) = ty.node { + if let hir::TyArray(_, ref length) = ty.node { self.build_const_integer(length); } intravisit::walk_ty(self, ty); diff --git a/src/librustc_mir/pretty.rs b/src/librustc_mir/pretty.rs index 01e2c6308ba01..d2fc8aeaa2eea 100644 --- a/src/librustc_mir/pretty.rs +++ b/src/librustc_mir/pretty.rs @@ -11,8 +11,7 @@ use build::{ScopeAuxiliaryVec, ScopeId}; use rustc::hir; use rustc::hir::def_id::DefId; -use rustc::mir::repr::*; -use rustc::mir::mir_map::MirMap; +use rustc::mir::*; use rustc::mir::transform::MirSource; use rustc::ty::TyCtxt; use rustc_data_structures::fnv::FnvHashMap; @@ -90,14 +89,13 @@ pub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, /// Write out a human-readable textual representation for the given MIR. pub fn write_mir_pretty<'a, 'b, 'tcx, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>, iter: I, - mir_map: &MirMap<'tcx>, w: &mut Write) -> io::Result<()> where I: Iterator, 'tcx: 'a { let mut first = true; for def_id in iter { - let mir = &mir_map.map[&def_id]; + let mir = &tcx.item_mir(def_id); if first { first = false; @@ -214,6 +212,9 @@ fn comment(tcx: TyCtxt, SourceInfo { span, scope }: SourceInfo) -> String { format!("scope {} at {}", scope.index(), tcx.sess.codemap().span_to_string(span)) } +/// Prints user-defined variables in a scope tree. +/// +/// Returns the total number of variables printed. fn write_scope_tree(tcx: TyCtxt, mir: &Mir, scope_tree: &FnvHashMap>, @@ -234,11 +235,14 @@ fn write_scope_tree(tcx: TyCtxt, writeln!(w, "{0:1$}scope {2} {{", "", indent, child.index())?; // User variable types (including the user's name in a comment). - for (id, var) in mir.var_decls.iter_enumerated() { - // Skip if not declared in this scope. 
- if var.source_info.scope != child { + for local in mir.vars_iter() { + let var = &mir.local_decls[local]; + let (name, source_info) = if var.source_info.unwrap().scope == child { + (var.name.unwrap(), var.source_info.unwrap()) + } else { + // Not a variable or not declared in this scope. continue; - } + }; let mut_str = if var.mutability == Mutability::Mut { "mut " @@ -251,13 +255,13 @@ fn write_scope_tree(tcx: TyCtxt, INDENT, indent, mut_str, - id, + local, var.ty); writeln!(w, "{0:1$} // \"{2}\" in {3}", indented_var, ALIGN, - var.name, - comment(tcx, var.source_info))?; + name, + comment(tcx, source_info))?; } write_scope_tree(tcx, mir, scope_tree, w, child, depth + 1)?; @@ -291,9 +295,23 @@ fn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } } + // Print return pointer + let indented_retptr = format!("{}let mut {:?}: {};", + INDENT, + RETURN_POINTER, + mir.return_ty); + writeln!(w, "{0:1$} // return pointer", + indented_retptr, + ALIGN)?; + write_scope_tree(tcx, mir, &scope_tree, w, ARGUMENT_VISIBILITY_SCOPE, 1)?; - write_mir_decls(mir, w) + write_temp_decls(mir, w)?; + + // Add an empty line before the first block is printed. + writeln!(w, "")?; + + Ok(()) } fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write) @@ -313,29 +331,24 @@ fn write_mir_sig(tcx: TyCtxt, src: MirSource, mir: &Mir, w: &mut Write) write!(w, "(")?; // fn argument types. - for (i, arg) in mir.arg_decls.iter_enumerated() { - if i.index() != 0 { + for (i, arg) in mir.args_iter().enumerate() { + if i != 0 { write!(w, ", ")?; } - write!(w, "{:?}: {}", Lvalue::Arg(i), arg.ty)?; + write!(w, "{:?}: {}", Lvalue::Local(arg), mir.local_decls[arg].ty)?; } write!(w, ") -> {}", mir.return_ty) } else { - assert!(mir.arg_decls.is_empty()); + assert_eq!(mir.arg_count, 0); write!(w, ": {} =", mir.return_ty) } } -fn write_mir_decls(mir: &Mir, w: &mut Write) -> io::Result<()> { +fn write_temp_decls(mir: &Mir, w: &mut Write) -> io::Result<()> { // Compiler-introduced temporary types. - for (id, temp) in mir.temp_decls.iter_enumerated() { - writeln!(w, "{}let mut {:?}: {};", INDENT, id, temp.ty)?; - } - - // Wrote any declaration? Add an empty line before the first block is printed. - if !mir.var_decls.is_empty() || !mir.temp_decls.is_empty() { - writeln!(w, "")?; + for temp in mir.temps_iter() { + writeln!(w, "{}let mut {:?}: {};", INDENT, temp, mir.local_decls[temp].ty)?; } Ok(()) diff --git a/src/librustc_mir/transform/add_call_guards.rs b/src/librustc_mir/transform/add_call_guards.rs index c028504d6f96d..89e644e4fb077 100644 --- a/src/librustc_mir/transform/add_call_guards.rs +++ b/src/librustc_mir/transform/add_call_guards.rs @@ -9,7 +9,7 @@ // except according to those terms. use rustc::ty::TyCtxt; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::mir::transform::{MirPass, MirSource, Pass}; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; diff --git a/src/librustc_mir/transform/copy_prop.rs b/src/librustc_mir/transform/copy_prop.rs new file mode 100644 index 0000000000000..8c8c42a1c7687 --- /dev/null +++ b/src/librustc_mir/transform/copy_prop.rs @@ -0,0 +1,323 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Trivial copy propagation pass. +//! +//! 
This uses def-use analysis to remove values that have exactly one def and one use, which must +//! be an assignment. +//! +//! To give an example, we look for patterns that look like: +//! +//! DEST = SRC +//! ... +//! USE(DEST) +//! +//! where `DEST` and `SRC` are both locals of some form. We replace that with: +//! +//! NOP +//! ... +//! USE(SRC) +//! +//! The assignment `DEST = SRC` must be (a) the only mutation of `DEST` and (b) the only +//! (non-mutating) use of `SRC`. These restrictions are conservative and may be relaxed in the +//! future. + +use def_use::DefUseAnalysis; +use rustc::mir::{Constant, Local, Location, Lvalue, Mir, Operand, Rvalue, StatementKind}; +use rustc::mir::transform::{MirPass, MirSource, Pass}; +use rustc::mir::visit::MutVisitor; +use rustc::ty::TyCtxt; +use transform::qualify_consts; + +pub struct CopyPropagation; + +impl Pass for CopyPropagation {} + +impl<'tcx> MirPass<'tcx> for CopyPropagation { + fn run_pass<'a>(&mut self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + source: MirSource, + mir: &mut Mir<'tcx>) { + match source { + MirSource::Const(_) => { + // Don't run on constants, because constant qualification might reject the + // optimized IR. + return + } + MirSource::Static(..) | MirSource::Promoted(..) => { + // Don't run on statics and promoted statics, because trans might not be able to + // evaluate the optimized IR. + return + } + MirSource::Fn(function_node_id) => { + if qualify_consts::is_const_fn(tcx, tcx.map.local_def_id(function_node_id)) { + // Don't run on const functions, as, again, trans might not be able to evaluate + // the optimized IR. + return + } + } + } + + // We only run when the MIR optimization level is at least 1. This avoids messing up debug + // info. + match tcx.sess.opts.debugging_opts.mir_opt_level { + Some(0) | None => return, + _ => {} + } + + loop { + let mut def_use_analysis = DefUseAnalysis::new(mir); + def_use_analysis.analyze(mir); + + let mut changed = false; + for dest_local in mir.local_decls.indices() { + debug!("Considering destination local: {:?}", dest_local); + + let action; + let location; + { + // The destination must have exactly one def. + let dest_use_info = def_use_analysis.local_info(dest_local); + let dest_def_count = dest_use_info.def_count_not_including_drop(); + if dest_def_count == 0 { + debug!(" Can't copy-propagate local: dest {:?} undefined", + dest_local); + continue + } + if dest_def_count > 1 { + debug!(" Can't copy-propagate local: dest {:?} defined {} times", + dest_local, + dest_use_info.def_count()); + continue + } + if dest_use_info.use_count() == 0 { + debug!(" Can't copy-propagate local: dest {:?} unused", + dest_local); + continue + } + let dest_lvalue_def = dest_use_info.defs_and_uses.iter().filter(|lvalue_def| { + lvalue_def.context.is_mutating_use() && !lvalue_def.context.is_drop() + }).next().unwrap(); + location = dest_lvalue_def.location; + + let basic_block = &mir[location.block]; + let statement_index = location.statement_index; + let statement = match basic_block.statements.get(statement_index) { + Some(statement) => statement, + None => { + debug!(" Can't copy-propagate local: used in terminator"); + continue + } + }; + + // That use of the source must be an assignment. 
+ match statement.kind { + StatementKind::Assign(Lvalue::Local(local), Rvalue::Use(ref operand)) if + local == dest_local => { + let maybe_action = match *operand { + Operand::Consume(ref src_lvalue) => { + Action::local_copy(&def_use_analysis, src_lvalue) + } + Operand::Constant(ref src_constant) => { + Action::constant(src_constant) + } + }; + match maybe_action { + Some(this_action) => action = this_action, + None => continue, + } + } + _ => { + debug!(" Can't copy-propagate local: source use is not an \ + assignment"); + continue + } + } + } + + changed = action.perform(mir, &def_use_analysis, dest_local, location) || changed; + // FIXME(pcwalton): Update the use-def chains to delete the instructions instead of + // regenerating the chains. + break + } + if !changed { + break + } + } + } +} + +enum Action<'tcx> { + PropagateLocalCopy(Local), + PropagateConstant(Constant<'tcx>), +} + +impl<'tcx> Action<'tcx> { + fn local_copy(def_use_analysis: &DefUseAnalysis, src_lvalue: &Lvalue<'tcx>) + -> Option> { + // The source must be a local. + let src_local = if let Lvalue::Local(local) = *src_lvalue { + local + } else { + debug!(" Can't copy-propagate local: source is not a local"); + return None; + }; + + // We're trying to copy propagate a local. + // There must be exactly one use of the source used in a statement (not in a terminator). + let src_use_info = def_use_analysis.local_info(src_local); + let src_use_count = src_use_info.use_count(); + if src_use_count == 0 { + debug!(" Can't copy-propagate local: no uses"); + return None + } + if src_use_count != 1 { + debug!(" Can't copy-propagate local: {} uses", src_use_info.use_count()); + return None + } + + // Verify that the source doesn't change in between. This is done conservatively for now, + // by ensuring that the source has exactly one mutation. The goal is to prevent things + // like: + // + // DEST = SRC; + // SRC = X; + // USE(DEST); + // + // From being misoptimized into: + // + // SRC = X; + // USE(SRC); + let src_def_count = src_use_info.def_count_not_including_drop(); + if src_def_count != 1 { + debug!(" Can't copy-propagate local: {} defs of src", + src_use_info.def_count_not_including_drop()); + return None + } + + Some(Action::PropagateLocalCopy(src_local)) + } + + fn constant(src_constant: &Constant<'tcx>) -> Option> { + Some(Action::PropagateConstant((*src_constant).clone())) + } + + fn perform(self, + mir: &mut Mir<'tcx>, + def_use_analysis: &DefUseAnalysis<'tcx>, + dest_local: Local, + location: Location) + -> bool { + match self { + Action::PropagateLocalCopy(src_local) => { + // Eliminate the destination and the assignment. + // + // First, remove all markers. + // + // FIXME(pcwalton): Don't do this. Merge live ranges instead. + debug!(" Replacing all uses of {:?} with {:?} (local)", + dest_local, + src_local); + for lvalue_use in &def_use_analysis.local_info(dest_local).defs_and_uses { + if lvalue_use.context.is_storage_marker() { + mir.make_statement_nop(lvalue_use.location) + } + } + for lvalue_use in &def_use_analysis.local_info(src_local).defs_and_uses { + if lvalue_use.context.is_storage_marker() { + mir.make_statement_nop(lvalue_use.location) + } + } + + // Replace all uses of the destination local with the source local. + let src_lvalue = Lvalue::Local(src_local); + def_use_analysis.replace_all_defs_and_uses_with(dest_local, mir, src_lvalue); + + // Finally, zap the now-useless assignment instruction. 
+ debug!(" Deleting assignment"); + mir.make_statement_nop(location); + + true + } + Action::PropagateConstant(src_constant) => { + // First, remove all markers. + // + // FIXME(pcwalton): Don't do this. Merge live ranges instead. + debug!(" Replacing all uses of {:?} with {:?} (constant)", + dest_local, + src_constant); + let dest_local_info = def_use_analysis.local_info(dest_local); + for lvalue_use in &dest_local_info.defs_and_uses { + if lvalue_use.context.is_storage_marker() { + mir.make_statement_nop(lvalue_use.location) + } + } + + // Replace all uses of the destination local with the constant. + let mut visitor = ConstantPropagationVisitor::new(dest_local, + src_constant); + for dest_lvalue_use in &dest_local_info.defs_and_uses { + visitor.visit_location(mir, dest_lvalue_use.location) + } + + // Zap the assignment instruction if we eliminated all the uses. We won't have been + // able to do that if the destination was used in a projection, because projections + // must have lvalues on their LHS. + let use_count = dest_local_info.use_count(); + if visitor.uses_replaced == use_count { + debug!(" {} of {} use(s) replaced; deleting assignment", + visitor.uses_replaced, + use_count); + mir.make_statement_nop(location); + true + } else if visitor.uses_replaced == 0 { + debug!(" No uses replaced; not deleting assignment"); + false + } else { + debug!(" {} of {} use(s) replaced; not deleting assignment", + visitor.uses_replaced, + use_count); + true + } + } + } + } +} + +struct ConstantPropagationVisitor<'tcx> { + dest_local: Local, + constant: Constant<'tcx>, + uses_replaced: usize, +} + +impl<'tcx> ConstantPropagationVisitor<'tcx> { + fn new(dest_local: Local, constant: Constant<'tcx>) + -> ConstantPropagationVisitor<'tcx> { + ConstantPropagationVisitor { + dest_local: dest_local, + constant: constant, + uses_replaced: 0, + } + } +} + +impl<'tcx> MutVisitor<'tcx> for ConstantPropagationVisitor<'tcx> { + fn visit_operand(&mut self, operand: &mut Operand<'tcx>, location: Location) { + self.super_operand(operand, location); + + match *operand { + Operand::Consume(Lvalue::Local(local)) if local == self.dest_local => {} + _ => return, + } + + *operand = Operand::Constant(self.constant.clone()); + self.uses_replaced += 1 + } +} diff --git a/src/librustc_mir/transform/deaggregator.rs b/src/librustc_mir/transform/deaggregator.rs index 77af02c18c60e..fcdeae6d6c080 100644 --- a/src/librustc_mir/transform/deaggregator.rs +++ b/src/librustc_mir/transform/deaggregator.rs @@ -9,10 +9,9 @@ // except according to those terms. 
use rustc::ty::TyCtxt; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::mir::transform::{MirPass, MirSource, Pass}; use rustc_data_structures::indexed_vec::Idx; -use rustc::ty::VariantKind; pub struct Deaggregator; @@ -129,10 +128,7 @@ fn get_aggregate_statement_index<'a, 'tcx, 'b>(start: usize, } debug!("getting variant {:?}", variant); debug!("for adt_def {:?}", adt_def); - let variant_def = &adt_def.variants[variant]; - if variant_def.kind == VariantKind::Struct { - return Some(i); - } + return Some(i); }; None } diff --git a/src/librustc_mir/transform/dump_mir.rs b/src/librustc_mir/transform/dump_mir.rs index 694b017bbd706..b8fd9fb12ab01 100644 --- a/src/librustc_mir/transform/dump_mir.rs +++ b/src/librustc_mir/transform/dump_mir.rs @@ -13,7 +13,7 @@ use std::fmt; use rustc::ty::TyCtxt; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::mir::transform::{Pass, MirPass, MirPassHook, MirSource}; use pretty; diff --git a/src/librustc_mir/transform/erase_regions.rs b/src/librustc_mir/transform/erase_regions.rs index 485ca3ea84a7e..cebd9dd9668e3 100644 --- a/src/librustc_mir/transform/erase_regions.rs +++ b/src/librustc_mir/transform/erase_regions.rs @@ -14,7 +14,7 @@ use rustc::ty::subst::Substs; use rustc::ty::{Ty, TyCtxt}; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::mir::visit::MutVisitor; use rustc::mir::transform::{MirPass, MirSource, Pass}; diff --git a/src/librustc_mir/transform/instcombine.rs b/src/librustc_mir/transform/instcombine.rs new file mode 100644 index 0000000000000..a01724d6d0e9b --- /dev/null +++ b/src/librustc_mir/transform/instcombine.rs @@ -0,0 +1,111 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Performs various peephole optimizations. + +use rustc::mir::{Location, Lvalue, Mir, Operand, ProjectionElem, Rvalue, Local}; +use rustc::mir::transform::{MirPass, MirSource, Pass}; +use rustc::mir::visit::{MutVisitor, Visitor}; +use rustc::ty::TyCtxt; +use rustc::util::nodemap::FnvHashSet; +use rustc_data_structures::indexed_vec::Idx; +use std::mem; + +pub struct InstCombine { + optimizations: OptimizationList, +} + +impl InstCombine { + pub fn new() -> InstCombine { + InstCombine { + optimizations: OptimizationList::default(), + } + } +} + +impl Pass for InstCombine {} + +impl<'tcx> MirPass<'tcx> for InstCombine { + fn run_pass<'a>(&mut self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + _: MirSource, + mir: &mut Mir<'tcx>) { + // We only run when optimizing MIR (at any level). + if tcx.sess.opts.debugging_opts.mir_opt_level == Some(0) { + return + } + + // First, find optimization opportunities. This is done in a pre-pass to keep the MIR + // read-only so that we can do global analyses on the MIR in the process (e.g. + // `Lvalue::ty()`). + { + let mut optimization_finder = OptimizationFinder::new(mir, tcx); + optimization_finder.visit_mir(mir); + self.optimizations = optimization_finder.optimizations + } + + // Then carry out those optimizations. 
+ MutVisitor::visit_mir(&mut *self, mir); + } +} + +impl<'tcx> MutVisitor<'tcx> for InstCombine { + fn visit_rvalue(&mut self, rvalue: &mut Rvalue<'tcx>, location: Location) { + if self.optimizations.and_stars.remove(&location) { + debug!("Replacing `&*`: {:?}", rvalue); + let new_lvalue = match *rvalue { + Rvalue::Ref(_, _, Lvalue::Projection(ref mut projection)) => { + // Replace with dummy + mem::replace(&mut projection.base, Lvalue::Local(Local::new(0))) + } + _ => bug!("Detected `&*` but didn't find `&*`!"), + }; + *rvalue = Rvalue::Use(Operand::Consume(new_lvalue)) + } + + self.super_rvalue(rvalue, location) + } +} + +/// Finds optimization opportunities on the MIR. +struct OptimizationFinder<'b, 'a, 'tcx:'a+'b> { + mir: &'b Mir<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + optimizations: OptimizationList, +} + +impl<'b, 'a, 'tcx:'b> OptimizationFinder<'b, 'a, 'tcx> { + fn new(mir: &'b Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> OptimizationFinder<'b, 'a, 'tcx> { + OptimizationFinder { + mir: mir, + tcx: tcx, + optimizations: OptimizationList::default(), + } + } +} + +impl<'b, 'a, 'tcx> Visitor<'tcx> for OptimizationFinder<'b, 'a, 'tcx> { + fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) { + if let Rvalue::Ref(_, _, Lvalue::Projection(ref projection)) = *rvalue { + if let ProjectionElem::Deref = projection.elem { + if projection.base.ty(self.mir, self.tcx).to_ty(self.tcx).is_region_ptr() { + self.optimizations.and_stars.insert(location); + } + } + } + + self.super_rvalue(rvalue, location) + } +} + +#[derive(Default)] +struct OptimizationList { + and_stars: FnvHashSet, +} diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs index c3485b8256da1..7bcb89b5895e7 100644 --- a/src/librustc_mir/transform/mod.rs +++ b/src/librustc_mir/transform/mod.rs @@ -18,3 +18,5 @@ pub mod promote_consts; pub mod qualify_consts; pub mod dump_mir; pub mod deaggregator; +pub mod instcombine; +pub mod copy_prop; diff --git a/src/librustc_mir/transform/no_landing_pads.rs b/src/librustc_mir/transform/no_landing_pads.rs index 32fddd293cacd..6ef5720b330c9 100644 --- a/src/librustc_mir/transform/no_landing_pads.rs +++ b/src/librustc_mir/transform/no_landing_pads.rs @@ -12,7 +12,7 @@ //! specified. use rustc::ty::TyCtxt; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::mir::visit::MutVisitor; use rustc::mir::transform::{Pass, MirPass, MirSource}; diff --git a/src/librustc_mir/transform/promote_consts.rs b/src/librustc_mir/transform/promote_consts.rs index f864f1678f236..41698574e0f1f 100644 --- a/src/librustc_mir/transform/promote_consts.rs +++ b/src/librustc_mir/transform/promote_consts.rs @@ -22,7 +22,7 @@ //! initialization and can otherwise silence errors, if //! move analysis runs after promotion on broken MIR. 
-use rustc::mir::repr::*; +use rustc::mir::*; use rustc::mir::visit::{LvalueContext, MutVisitor, Visitor}; use rustc::mir::traversal::ReversePostorder; use rustc::ty::TyCtxt; @@ -30,6 +30,7 @@ use syntax_pos::Span; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; +use std::iter; use std::mem; use std::usize; @@ -74,15 +75,24 @@ pub enum Candidate { ShuffleIndices(BasicBlock) } -struct TempCollector { - temps: IndexVec, - span: Span +struct TempCollector<'tcx> { + temps: IndexVec, + span: Span, + mir: &'tcx Mir<'tcx>, } -impl<'tcx> Visitor<'tcx> for TempCollector { - fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: LvalueContext, location: Location) { +impl<'tcx> Visitor<'tcx> for TempCollector<'tcx> { + fn visit_lvalue(&mut self, + lvalue: &Lvalue<'tcx>, + context: LvalueContext<'tcx>, + location: Location) { self.super_lvalue(lvalue, context, location); - if let Lvalue::Temp(index) = *lvalue { + if let Lvalue::Local(index) = *lvalue { + // We're only interested in temporaries + if self.mir.local_kind(index) != LocalKind::Temp { + return; + } + // Ignore drops, if the temp gets promoted, // then it's constant and thus drop is noop. // Storage live ranges are also irrelevant. @@ -126,10 +136,11 @@ impl<'tcx> Visitor<'tcx> for TempCollector { } } -pub fn collect_temps(mir: &Mir, rpo: &mut ReversePostorder) -> IndexVec { +pub fn collect_temps(mir: &Mir, rpo: &mut ReversePostorder) -> IndexVec { let mut collector = TempCollector { - temps: IndexVec::from_elem(TempState::Undefined, &mir.temp_decls), - span: mir.span + temps: IndexVec::from_elem(TempState::Undefined, &mir.local_decls), + span: mir.span, + mir: mir, }; for (bb, data) in rpo { collector.visit_basic_block_data(bb, data); @@ -140,7 +151,7 @@ pub fn collect_temps(mir: &Mir, rpo: &mut ReversePostorder) -> IndexVec { source: &'a mut Mir<'tcx>, promoted: Mir<'tcx>, - temps: &'a mut IndexVec, + temps: &'a mut IndexVec, /// If true, all nested temps are also kept in the /// source MIR, not moved to the promoted MIR. @@ -163,7 +174,7 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { }) } - fn assign(&mut self, dest: Lvalue<'tcx>, rvalue: Rvalue<'tcx>, span: Span) { + fn assign(&mut self, dest: Local, rvalue: Rvalue<'tcx>, span: Span) { let last = self.promoted.basic_blocks().last().unwrap(); let data = &mut self.promoted[last]; data.statements.push(Statement { @@ -171,13 +182,13 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { span: span, scope: ARGUMENT_VISIBILITY_SCOPE }, - kind: StatementKind::Assign(dest, rvalue) + kind: StatementKind::Assign(Lvalue::Local(dest), rvalue) }); } /// Copy the initialization of this temp to the /// promoted MIR, recursing through temps. - fn promote_temp(&mut self, temp: Temp) -> Temp { + fn promote_temp(&mut self, temp: Local) -> Local { let old_keep_original = self.keep_original; let (bb, stmt_idx) = match self.temps[temp] { TempState::Defined { @@ -259,20 +270,19 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { }); } - let new_temp = self.promoted.temp_decls.push(TempDecl { - ty: self.source.temp_decls[temp].ty - }); + let new_temp = self.promoted.local_decls.push( + LocalDecl::new_temp(self.source.local_decls[temp].ty)); // Inject the Rvalue or Call into the promoted MIR. 
if stmt_idx < no_stmts { - self.assign(Lvalue::Temp(new_temp), rvalue.unwrap(), source_info.span); + self.assign(new_temp, rvalue.unwrap(), source_info.span); } else { let last = self.promoted.basic_blocks().last().unwrap(); let new_target = self.new_block(); let mut call = call.unwrap(); match call { TerminatorKind::Call { ref mut destination, ..} => { - *destination = Some((Lvalue::Temp(new_temp), new_target)); + *destination = Some((Lvalue::Local(new_temp), new_target)); } _ => bug!() } @@ -315,11 +325,12 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { } } }; - self.visit_rvalue(&mut rvalue, Location{ + self.visit_rvalue(&mut rvalue, Location { block: BasicBlock::new(0), statement_index: usize::MAX }); - self.assign(Lvalue::ReturnPointer, rvalue, span); + + self.assign(RETURN_POINTER, rvalue, span); self.source.promoted.push(self.promoted); } } @@ -328,10 +339,12 @@ impl<'a, 'tcx> Promoter<'a, 'tcx> { impl<'a, 'tcx> MutVisitor<'tcx> for Promoter<'a, 'tcx> { fn visit_lvalue(&mut self, lvalue: &mut Lvalue<'tcx>, - context: LvalueContext, + context: LvalueContext<'tcx>, location: Location) { - if let Lvalue::Temp(ref mut temp) = *lvalue { - *temp = self.promote_temp(*temp); + if let Lvalue::Local(ref mut temp) = *lvalue { + if self.source.local_kind(*temp) == LocalKind::Temp { + *temp = self.promote_temp(*temp); + } } self.super_lvalue(lvalue, context, location); } @@ -339,7 +352,7 @@ impl<'a, 'tcx> MutVisitor<'tcx> for Promoter<'a, 'tcx> { pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>, - mut temps: IndexVec, + mut temps: IndexVec, candidates: Vec) { // Visit candidates in reverse, in case they're nested. for candidate in candidates.into_iter().rev() { @@ -353,7 +366,7 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>, "expected assignment to promote"); } }; - if let Lvalue::Temp(index) = *dest { + if let Lvalue::Local(index) = *dest { if temps[index] == TempState::PromotedOut { // Already promoted. continue; @@ -376,8 +389,10 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>, } }; + // Declare return pointer local + let initial_locals = iter::once(LocalDecl::new_return_pointer(ty)).collect(); + let mut promoter = Promoter { - source: mir, promoted: Mir::new( IndexVec::new(), Some(VisibilityScopeData { @@ -386,12 +401,12 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>, }).into_iter().collect(), IndexVec::new(), ty, - IndexVec::new(), - IndexVec::new(), - IndexVec::new(), + initial_locals, + 0, vec![], span ), + source: mir, temps: &mut temps, keep_original: false }; @@ -400,13 +415,13 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>, } // Eliminate assignments to, and drops of promoted temps. - let promoted = |index: Temp| temps[index] == TempState::PromotedOut; + let promoted = |index: Local| temps[index] == TempState::PromotedOut; for block in mir.basic_blocks_mut() { block.statements.retain(|statement| { match statement.kind { - StatementKind::Assign(Lvalue::Temp(index), _) | - StatementKind::StorageLive(Lvalue::Temp(index)) | - StatementKind::StorageDead(Lvalue::Temp(index)) => { + StatementKind::Assign(Lvalue::Local(index), _) | + StatementKind::StorageLive(Lvalue::Local(index)) | + StatementKind::StorageDead(Lvalue::Local(index)) => { !promoted(index) } _ => true @@ -414,7 +429,7 @@ pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>, }); let terminator = block.terminator_mut(); match terminator.kind { - TerminatorKind::Drop { location: Lvalue::Temp(index), target, .. 
} => { + TerminatorKind::Drop { location: Lvalue::Local(index), target, .. } => { if promoted(index) { terminator.kind = TerminatorKind::Goto { target: target diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs index a3f8f7a63eeab..b33a7060e3753 100644 --- a/src/librustc_mir/transform/qualify_consts.rs +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -16,7 +16,6 @@ use rustc_data_structures::bitvec::BitVector; use rustc_data_structures::indexed_vec::{IndexVec, Idx}; -use rustc::dep_graph::DepNode; use rustc::hir; use rustc::hir::map as hir_map; use rustc::hir::def_id::DefId; @@ -25,10 +24,9 @@ use rustc::hir::map::blocks::FnLikeNode; use rustc::traits::{self, Reveal}; use rustc::ty::{self, TyCtxt, Ty}; use rustc::ty::cast::CastTy; -use rustc::mir::repr::*; -use rustc::mir::mir_map::MirMap; -use rustc::mir::traversal::{self, ReversePostorder}; -use rustc::mir::transform::{Pass, MirMapPass, MirPassHook, MirSource}; +use rustc::mir::*; +use rustc::mir::traversal::ReversePostorder; +use rustc::mir::transform::{Pass, MirPass, MirSource}; use rustc::mir::visit::{LvalueContext, Visitor}; use rustc::util::nodemap::DefIdMap; use syntax::abi::Abi; @@ -116,7 +114,7 @@ impl fmt::Display for Mode { } } -fn is_const_fn(tcx: TyCtxt, def_id: DefId) -> bool { +pub fn is_const_fn(tcx: TyCtxt, def_id: DefId) -> bool { if let Some(node_id) = tcx.map.as_local_node_id(def_id) { let fn_like = FnLikeNode::from_node(tcx.map.get(node_id)); match fn_like.map(|f| f.kind()) { @@ -142,12 +140,11 @@ struct Qualifier<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { tcx: TyCtxt<'a, 'gcx, 'tcx>, param_env: ty::ParameterEnvironment<'tcx>, qualif_map: &'a mut DefIdMap, - mir_map: Option<&'a MirMap<'tcx>>, - temp_qualif: IndexVec>, + temp_qualif: IndexVec>, return_qualif: Option, qualif: Qualif, const_fn_arg_vars: BitVector, - temp_promotion_state: IndexVec, + temp_promotion_state: IndexVec, promotion_candidates: Vec } @@ -155,7 +152,6 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, param_env: ty::ParameterEnvironment<'tcx>, qualif_map: &'a mut DefIdMap, - mir_map: Option<&'a MirMap<'tcx>>, def_id: DefId, mir: &'a Mir<'tcx>, mode: Mode) @@ -172,11 +168,10 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { tcx: tcx, param_env: param_env, qualif_map: qualif_map, - mir_map: mir_map, - temp_qualif: IndexVec::from_elem(None, &mir.temp_decls), + temp_qualif: IndexVec::from_elem(None, &mir.local_decls), return_qualif: None, qualif: Qualif::empty(), - const_fn_arg_vars: BitVector::new(mir.var_decls.len()), + const_fn_arg_vars: BitVector::new(mir.local_decls.len()), temp_promotion_state: temps, promotion_candidates: vec![] } @@ -332,8 +327,10 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { // Only handle promotable temps in non-const functions. 
if self.mode == Mode::Fn { - if let Lvalue::Temp(index) = *dest { - if self.temp_promotion_state[index].is_promotable() { + if let Lvalue::Local(index) = *dest { + if self.mir.local_kind(index) == LocalKind::Temp + && self.temp_promotion_state[index].is_promotable() { + debug!("store to promotable temp {:?}", index); store(&mut self.temp_qualif[index]); } } @@ -341,13 +338,20 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { } match *dest { - Lvalue::Temp(index) => store(&mut self.temp_qualif[index]), - Lvalue::ReturnPointer => store(&mut self.return_qualif), + Lvalue::Local(index) if self.mir.local_kind(index) == LocalKind::Temp => { + debug!("store to temp {:?}", index); + store(&mut self.temp_qualif[index]) + } + Lvalue::Local(index) if self.mir.local_kind(index) == LocalKind::ReturnPointer => { + debug!("store to return pointer {:?}", index); + store(&mut self.return_qualif) + } Lvalue::Projection(box Projection { - base: Lvalue::Temp(index), + base: Lvalue::Local(index), elem: ProjectionElem::Deref - }) if self.mir.temp_decls[index].ty.is_unique() + }) if self.mir.local_kind(index) == LocalKind::Temp + && self.mir.local_decls[index].ty.is_unique() && self.temp_qualif[index].map_or(false, |qualif| { qualif.intersects(Qualif::NOT_CONST) }) => { @@ -366,6 +370,8 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { /// Qualify a whole const, static initializer or const fn. fn qualify_const(&mut self) -> Qualif { + debug!("qualifying {} {}", self.mode, self.tcx.item_path_str(self.def_id)); + let mir = self.mir; let mut seen_blocks = BitVector::new(mir.basic_blocks().len()); @@ -399,7 +405,7 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { TerminatorKind::Return => { // Check for unused values. This usually means // there are extra statements in the AST. - for temp in mir.temp_decls.indices() { + for temp in mir.temps_iter() { if self.temp_qualif[temp].is_none() { continue; } @@ -424,9 +430,10 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { // Make sure there are no extra unassigned variables. self.qualif = Qualif::NOT_CONST; - for index in 0..mir.var_decls.len() { - if !self.const_fn_arg_vars.contains(index) { - self.assign(&Lvalue::Var(Var::new(index)), Location { + for index in mir.vars_iter() { + if !self.const_fn_arg_vars.contains(index.index()) { + debug!("unassigned variable {:?}", index); + self.assign(&Lvalue::Local(index), Location { block: bb, statement_index: usize::MAX, }); @@ -475,25 +482,33 @@ impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { /// For functions (constant or not), it also records /// candidates for promotion in promotion_candidates. 
impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { - fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: LvalueContext, location: Location) { + fn visit_lvalue(&mut self, + lvalue: &Lvalue<'tcx>, + context: LvalueContext<'tcx>, + location: Location) { match *lvalue { - Lvalue::Arg(_) => { - self.add(Qualif::FN_ARGUMENT); - } - Lvalue::Var(_) => { - self.add(Qualif::NOT_CONST); - } - Lvalue::Temp(index) => { - if !self.temp_promotion_state[index].is_promotable() { - self.add(Qualif::NOT_PROMOTABLE); + Lvalue::Local(local) => match self.mir.local_kind(local) { + LocalKind::ReturnPointer => { + self.not_const(); + } + LocalKind::Arg => { + self.add(Qualif::FN_ARGUMENT); + } + LocalKind::Var => { + self.add(Qualif::NOT_CONST); } + LocalKind::Temp => { + if !self.temp_promotion_state[local].is_promotable() { + self.add(Qualif::NOT_PROMOTABLE); + } - if let Some(qualif) = self.temp_qualif[index] { - self.add(qualif); - } else { - self.not_const(); + if let Some(qualif) = self.temp_qualif[local] { + self.add(qualif); + } else { + self.not_const(); + } } - } + }, Lvalue::Static(_) => { self.add(Qualif::STATIC); if self.mode == Mode::Const || self.mode == Mode::ConstFn { @@ -502,9 +517,6 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { a constant instead", self.mode); } } - Lvalue::ReturnPointer => { - self.not_const(); - } Lvalue::Projection(ref proj) => { self.nest(|this| { this.super_lvalue(lvalue, context, location); @@ -578,7 +590,6 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { } else { let qualif = qualify_const_item_cached(self.tcx, self.qualif_map, - self.mir_map, def_id); self.add(qualif); } @@ -682,8 +693,10 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { if self.mode == Mode::Fn || self.mode == Mode::ConstFn { if !self.qualif.intersects(Qualif::NEVER_PROMOTE) { // We can only promote direct borrows of temps. - if let Lvalue::Temp(_) = *lvalue { - self.promotion_candidates.push(candidate); + if let Lvalue::Local(local) = *lvalue { + if self.mir.local_kind(local) == LocalKind::Temp { + self.promotion_candidates.push(candidate); + } } } } @@ -876,17 +889,21 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { self.visit_rvalue(rvalue, location); // Check the allowed const fn argument forms. - if let (Mode::ConstFn, &Lvalue::Var(index)) = (self.mode, dest) { - if self.const_fn_arg_vars.insert(index.index()) { + if let (Mode::ConstFn, &Lvalue::Local(index)) = (self.mode, dest) { + if self.mir.local_kind(index) == LocalKind::Var && + self.const_fn_arg_vars.insert(index.index()) { + // Direct use of an argument is permitted. - if let Rvalue::Use(Operand::Consume(Lvalue::Arg(_))) = *rvalue { - return; + if let Rvalue::Use(Operand::Consume(Lvalue::Local(local))) = *rvalue { + if self.mir.local_kind(local) == LocalKind::Arg { + return; + } } // Avoid a generic error for other uses of arguments. if self.qualif.intersects(Qualif::FN_ARGUMENT) { - let decl = &self.mir.var_decls[index]; - span_err!(self.tcx.sess, decl.source_info.span, E0022, + let decl = &self.mir.local_decls[index]; + span_err!(self.tcx.sess, decl.source_info.unwrap().span, E0022, "arguments of constant functions can only \ be immutable by-value bindings"); return; @@ -910,7 +927,8 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { } StatementKind::SetDiscriminant { .. 
} | StatementKind::StorageLive(_) | - StatementKind::StorageDead(_) => {} + StatementKind::StorageDead(_) | + StatementKind::Nop => {} } }); } @@ -925,7 +943,6 @@ impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { fn qualify_const_item_cached<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, qualif_map: &mut DefIdMap, - mir_map: Option<&MirMap<'tcx>>, def_id: DefId) -> Qualif { match qualif_map.entry(def_id) { @@ -936,124 +953,100 @@ fn qualify_const_item_cached<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, } } - let extern_mir; - let param_env_and_mir = if def_id.is_local() { - mir_map.and_then(|map| map.map.get(&def_id)).map(|mir| { - let node_id = tcx.map.as_local_node_id(def_id).unwrap(); - (ty::ParameterEnvironment::for_item(tcx, node_id), mir) - }) - } else if let Some(mir) = tcx.sess.cstore.maybe_get_item_mir(tcx, def_id) { - // These should only be monomorphic constants. - extern_mir = mir; - Some((tcx.empty_parameter_environment(), &extern_mir)) + let param_env = if def_id.is_local() { + let node_id = tcx.map.as_local_node_id(def_id).unwrap(); + ty::ParameterEnvironment::for_item(tcx, node_id) } else { - None + // These should only be monomorphic constants. + tcx.empty_parameter_environment() }; - let (param_env, mir) = param_env_and_mir.unwrap_or_else(|| { - bug!("missing constant MIR for {}", tcx.item_path_str(def_id)) - }); - - let mut qualifier = Qualifier::new(tcx, param_env, qualif_map, mir_map, - def_id, mir, Mode::Const); + let mir = &tcx.item_mir(def_id); + let mut qualifier = Qualifier::new(tcx, param_env, qualif_map, def_id, mir, Mode::Const); let qualif = qualifier.qualify_const(); qualifier.qualif_map.insert(def_id, qualif); qualif } -pub struct QualifyAndPromoteConstants; +#[derive(Default)] +pub struct QualifyAndPromoteConstants { + qualif_map: DefIdMap +} impl Pass for QualifyAndPromoteConstants {} -impl<'tcx> MirMapPass<'tcx> for QualifyAndPromoteConstants { - fn run_pass<'a>(&mut self, - tcx: TyCtxt<'a, 'tcx, 'tcx>, - map: &mut MirMap<'tcx>, - hooks: &mut [Box MirPassHook<'s>>]) { - let mut qualif_map = DefIdMap(); - - // First, visit `const` items, potentially recursing, to get - // accurate MUTABLE_INTERIOR and NEEDS_DROP qualifications. - let keys = map.map.keys(); - for &def_id in &keys { - let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id)); - let id = tcx.map.as_local_node_id(def_id).unwrap(); - let src = MirSource::from_node(tcx, id); - if let MirSource::Const(_) = src { - qualify_const_item_cached(tcx, &mut qualif_map, Some(map), def_id); +impl<'tcx> MirPass<'tcx> for QualifyAndPromoteConstants { + fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + src: MirSource, mir: &mut Mir<'tcx>) { + let id = src.item_id(); + let def_id = tcx.map.local_def_id(id); + let mode = match src { + MirSource::Fn(_) => { + if is_const_fn(tcx, def_id) { + Mode::ConstFn + } else { + Mode::Fn + } } - } - - // Then, handle everything else, without recursing, - // as the MIR map is not shared, since promotion - // in functions (including `const fn`) mutates it. - for &def_id in &keys { - let _task = tcx.dep_graph.in_task(DepNode::Mir(def_id)); - let id = tcx.map.as_local_node_id(def_id).unwrap(); - let src = MirSource::from_node(tcx, id); - let mode = match src { - MirSource::Fn(_) => { - if is_const_fn(tcx, def_id) { - Mode::ConstFn - } else { - Mode::Fn + MirSource::Const(_) => { + match self.qualif_map.entry(def_id) { + Entry::Occupied(_) => return, + Entry::Vacant(entry) => { + // Guard against `const` recursion. 
+ entry.insert(Qualif::RECURSIVE); } } - MirSource::Const(_) => continue, - MirSource::Static(_, hir::MutImmutable) => Mode::Static, - MirSource::Static(_, hir::MutMutable) => Mode::StaticMut, - MirSource::Promoted(..) => bug!() - }; - let param_env = ty::ParameterEnvironment::for_item(tcx, id); - - let mir = map.map.get_mut(&def_id).unwrap(); - for hook in &mut *hooks { - hook.on_mir_pass(tcx, src, mir, self, false); + Mode::Const } - - if mode == Mode::Fn || mode == Mode::ConstFn { - // This is ugly because Qualifier holds onto mir, - // which can't be mutated until its scope ends. - let (temps, candidates) = { - let mut qualifier = Qualifier::new(tcx, param_env, &mut qualif_map, - None, def_id, mir, mode); - if mode == Mode::ConstFn { - // Enforce a constant-like CFG for `const fn`. - qualifier.qualify_const(); - } else { - while let Some((bb, data)) = qualifier.rpo.next() { - qualifier.visit_basic_block_data(bb, data); - } + MirSource::Static(_, hir::MutImmutable) => Mode::Static, + MirSource::Static(_, hir::MutMutable) => Mode::StaticMut, + MirSource::Promoted(..) => return + }; + let param_env = ty::ParameterEnvironment::for_item(tcx, id); + + if mode == Mode::Fn || mode == Mode::ConstFn { + // This is ugly because Qualifier holds onto mir, + // which can't be mutated until its scope ends. + let (temps, candidates) = { + let mut qualifier = Qualifier::new(tcx, param_env, + &mut self.qualif_map, + def_id, mir, mode); + if mode == Mode::ConstFn { + // Enforce a constant-like CFG for `const fn`. + qualifier.qualify_const(); + } else { + while let Some((bb, data)) = qualifier.rpo.next() { + qualifier.visit_basic_block_data(bb, data); } + } - (qualifier.temp_promotion_state, - qualifier.promotion_candidates) - }; + (qualifier.temp_promotion_state, qualifier.promotion_candidates) + }; - // Do the actual promotion, now that we know what's viable. - promote_consts::promote_candidates(mir, tcx, temps, candidates); - } else { - let mut qualifier = Qualifier::new(tcx, param_env, &mut qualif_map, - None, def_id, mir, mode); - qualifier.qualify_const(); - } + // Do the actual promotion, now that we know what's viable. + promote_consts::promote_candidates(mir, tcx, temps, candidates); + } else { + let mut qualifier = Qualifier::new(tcx, param_env, + &mut self.qualif_map, + def_id, mir, mode); + let qualif = qualifier.qualify_const(); - for hook in &mut *hooks { - hook.on_mir_pass(tcx, src, mir, self, true); + if mode == Mode::Const { + qualifier.qualif_map.insert(def_id, qualif); } + } - // Statics must be Sync. - if mode == Mode::Static { - let ty = mir.return_ty; - tcx.infer_ctxt(None, None, Reveal::NotSpecializable).enter(|infcx| { - let cause = traits::ObligationCause::new(mir.span, id, traits::SharedStatic); - let mut fulfillment_cx = traits::FulfillmentContext::new(); - fulfillment_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause); - if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) { - infcx.report_fulfillment_errors(&err); - } - }); - } + // Statics must be Sync. 
+ if mode == Mode::Static { + let ty = mir.return_ty; + tcx.infer_ctxt(None, None, Reveal::NotSpecializable).enter(|infcx| { + let cause = traits::ObligationCause::new(mir.span, id, traits::SharedStatic); + let mut fulfillment_cx = traits::FulfillmentContext::new(); + fulfillment_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause); + if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) { + infcx.report_fulfillment_errors(&err); + } + }); } } } diff --git a/src/librustc_mir/transform/simplify_branches.rs b/src/librustc_mir/transform/simplify_branches.rs index 407e21616102c..8759a340d7e3c 100644 --- a/src/librustc_mir/transform/simplify_branches.rs +++ b/src/librustc_mir/transform/simplify_branches.rs @@ -13,7 +13,7 @@ use rustc::ty::TyCtxt; use rustc::middle::const_val::ConstVal; use rustc::mir::transform::{MirPass, MirSource, Pass}; -use rustc::mir::repr::*; +use rustc::mir::*; use std::fmt; diff --git a/src/librustc_mir/transform/simplify_cfg.rs b/src/librustc_mir/transform/simplify_cfg.rs index 8e1b7b44976f3..1a8a5fa18cf59 100644 --- a/src/librustc_mir/transform/simplify_cfg.rs +++ b/src/librustc_mir/transform/simplify_cfg.rs @@ -35,9 +35,8 @@ use rustc_data_structures::bitvec::BitVector; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; use rustc::ty::TyCtxt; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::mir::transform::{MirPass, MirSource, Pass}; -use rustc::mir::traversal; use std::fmt; pub struct SimplifyCfg<'a> { label: &'a str } @@ -50,6 +49,7 @@ impl<'a> SimplifyCfg<'a> { impl<'l, 'tcx> MirPass<'tcx> for SimplifyCfg<'l> { fn run_pass<'a>(&mut self, _tcx: TyCtxt<'a, 'tcx, 'tcx>, _src: MirSource, mir: &mut Mir<'tcx>) { + debug!("SimplifyCfg({:?}) - simplifying {:?}", self.label, mir); CfgSimplifier::new(mir).simplify(); remove_dead_blocks(mir); @@ -78,6 +78,8 @@ impl<'a, 'tcx: 'a> CfgSimplifier<'a, 'tcx> { // we can't use mir.predecessors() here because that counts // dead blocks, which we don't want to. + pred_count[START_BLOCK] = 1; + for (_, data) in traversal::preorder(mir) { if let Some(ref term) = data.terminator { for &tgt in term.successors().iter() { @@ -157,8 +159,16 @@ impl<'a, 'tcx: 'a> CfgSimplifier<'a, 'tcx> { debug!("collapsing goto chain from {:?} to {:?}", *start, target); *changed |= *start != target; - self.pred_count[target] += 1; - self.pred_count[*start] -= 1; + + if self.pred_count[*start] == 1 { + // This is the last reference to *start, so the pred-count to + // to target is moved into the current block. 
+ self.pred_count[*start] = 0; + } else { + self.pred_count[target] += 1; + self.pred_count[*start] -= 1; + } + *start = target; } diff --git a/src/librustc_mir/transform/type_check.rs b/src/librustc_mir/transform/type_check.rs index 7fda658185e07..9d3afe541cca8 100644 --- a/src/librustc_mir/transform/type_check.rs +++ b/src/librustc_mir/transform/type_check.rs @@ -15,11 +15,12 @@ use rustc::infer::{self, InferCtxt, InferOk}; use rustc::traits::{self, Reveal}; use rustc::ty::fold::TypeFoldable; use rustc::ty::{self, Ty, TyCtxt, TypeVariants}; -use rustc::mir::repr::*; +use rustc::mir::*; use rustc::mir::tcx::LvalueTy; use rustc::mir::transform::{MirPass, MirSource, Pass}; -use rustc::mir::visit::{self, Visitor}; +use rustc::mir::visit::Visitor; use std::fmt; +use syntax::ast; use syntax_pos::{Span, DUMMY_SP}; use rustc_data_structures::indexed_vec::Idx; @@ -89,14 +90,8 @@ impl<'a, 'b, 'gcx, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'gcx, 'tcx> { fn visit_mir(&mut self, mir: &Mir<'tcx>) { self.sanitize_type(&"return type", mir.return_ty); - for var_decl in &mir.var_decls { - self.sanitize_type(var_decl, var_decl.ty); - } - for (n, arg_decl) in mir.arg_decls.iter().enumerate() { - self.sanitize_type(&(n, arg_decl), arg_decl.ty); - } - for (n, tmp_decl) in mir.temp_decls.iter().enumerate() { - self.sanitize_type(&(n, tmp_decl), tmp_decl.ty); + for local_decl in &mir.local_decls { + self.sanitize_type(local_decl, local_decl.ty); } if self.errors_reported { return; @@ -130,14 +125,9 @@ impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> { fn sanitize_lvalue(&mut self, lvalue: &Lvalue<'tcx>, location: Location) -> LvalueTy<'tcx> { debug!("sanitize_lvalue: {:?}", lvalue); match *lvalue { - Lvalue::Var(index) => LvalueTy::Ty { ty: self.mir.var_decls[index].ty }, - Lvalue::Temp(index) => LvalueTy::Ty { ty: self.mir.temp_decls[index].ty }, - Lvalue::Arg(index) => LvalueTy::Ty { ty: self.mir.arg_decls[index].ty }, + Lvalue::Local(index) => LvalueTy::Ty { ty: self.mir.local_decls[index].ty }, Lvalue::Static(def_id) => LvalueTy::Ty { ty: self.tcx().lookup_item_type(def_id).ty }, - Lvalue::ReturnPointer => { - LvalueTy::Ty { ty: self.mir.return_ty } - } Lvalue::Projection(ref proj) => { let base_ty = self.sanitize_lvalue(&proj.base, location); if let LvalueTy::Ty { ty } = base_ty { @@ -379,12 +369,13 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { StatementKind::StorageLive(ref lv) | StatementKind::StorageDead(ref lv) => { match *lv { - Lvalue::Temp(_) | Lvalue::Var(_) => {} + Lvalue::Local(_) => {} _ => { - span_mirbug!(self, stmt, "bad lvalue: expected temp or var"); + span_mirbug!(self, stmt, "bad lvalue: expected local"); } } } + StatementKind::Nop => {} } } @@ -673,7 +664,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { where T: fmt::Debug + TypeFoldable<'tcx> { let mut selcx = traits::SelectionContext::new(self.infcx); - let cause = traits::ObligationCause::misc(self.last_span, 0); + let cause = traits::ObligationCause::misc(self.last_span, ast::CRATE_NODE_ID); let traits::Normalized { value, obligations } = traits::normalize(&mut selcx, cause, value); @@ -709,6 +700,8 @@ impl TypeckMir { impl<'tcx> MirPass<'tcx> for TypeckMir { fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mut Mir<'tcx>) { + debug!("run_pass: {}", tcx.node_path_str(src.item_id())); + if tcx.sess.err_count() > 0 { // compiling a broken program can obviously result in a // broken MIR, so try not to report duplicate errors. 
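
The recurring change in the MIR hunks above is the collapse of the old `Lvalue::Var`, `Lvalue::Temp`, `Lvalue::Arg`, and `Lvalue::ReturnPointer` variants into a single `Lvalue::Local`, with the old distinction recovered via `Mir::local_kind`. Below is a minimal, self-contained sketch of that dispatch shape; the types here are simplified stand-ins for illustration only, not rustc's actual `Mir`/`Lvalue` definitions.

```rust
// Editorial sketch with stand-in types: one `Local` variant plus a
// `LocalKind` lookup replaces four separate lvalue variants.

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum LocalKind { ReturnPointer, Arg, Var, Temp }

// Before: Var/Temp/Arg/ReturnPointer variants. After: Local(index) + Static.
enum Lvalue { Local(usize), Static(&'static str) }

struct Mir { local_kinds: Vec<LocalKind> }

impl Mir {
    // Stand-in for Mir::local_kind in the hunks above.
    fn local_kind(&self, local: usize) -> LocalKind {
        self.local_kinds[local]
    }
}

fn describe(mir: &Mir, lvalue: &Lvalue) -> &'static str {
    match *lvalue {
        // One arm now covers what used to be four lvalue variants,
        // with the kind lookup deciding how the local is treated.
        Lvalue::Local(local) => match mir.local_kind(local) {
            LocalKind::ReturnPointer => "return pointer",
            LocalKind::Arg => "function argument",
            LocalKind::Var => "user variable",
            LocalKind::Temp => "compiler temporary",
        },
        Lvalue::Static(_) => "static",
    }
}

fn main() {
    let mir = Mir {
        local_kinds: vec![LocalKind::ReturnPointer, LocalKind::Arg, LocalKind::Temp],
    };
    assert_eq!(describe(&mir, &Lvalue::Local(1)), "function argument");
    assert_eq!(describe(&mir, &Lvalue::Static("FOO")), "static");
}
```

This is why passes such as the `Qualifier` visitor keep a single `Lvalue::Local` arm and pair it with checks like `local_kind(local) == LocalKind::Temp` when selecting promotion candidates, rather than matching a dedicated `Lvalue::Temp` variant.
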
diff --git a/src/librustc_passes/ast_validation.rs b/src/librustc_passes/ast_validation.rs index 6275639a9adff..828efbf373131 100644 --- a/src/librustc_passes/ast_validation.rs +++ b/src/librustc_passes/ast_validation.rs @@ -53,8 +53,11 @@ impl<'a> AstValidator<'a> { span, E0449, "unnecessary visibility qualifier"); + if vis == &Visibility::Public { + err.span_label(span, &format!("`pub` not needed here")); + } if let Some(note) = note { - err.span_note(span, note); + err.note(note); } err.emit(); } @@ -187,8 +190,16 @@ impl<'a> Visitor for AstValidator<'a> { } ItemKind::Trait(.., ref trait_items) => { for trait_item in trait_items { - if let TraitItemKind::Method(ref sig, _) = trait_item.node { + if let TraitItemKind::Method(ref sig, ref block) = trait_item.node { self.check_trait_fn_not_const(sig.constness); + if block.is_none() { + self.check_decl_no_pat(&sig.decl, |span, _| { + self.session.add_lint(lint::builtin::PATTERNS_IN_FNS_WITHOUT_BODY, + trait_item.id, span, + "patterns aren't allowed in methods \ + without bodies".to_string()); + }); + } } } } diff --git a/src/librustc_passes/consts.rs b/src/librustc_passes/consts.rs index f919e42b6bd7d..8ad4d7f57a6f0 100644 --- a/src/librustc_passes/consts.rs +++ b/src/librustc_passes/consts.rs @@ -33,7 +33,7 @@ use rustc_const_eval::ErrKind::{ErroneousReferencedConstant, MiscBinaryOp, NonCo use rustc_const_eval::ErrKind::UnresolvedPath; use rustc_const_eval::EvalHint::ExprTypeChecked; use rustc_const_math::{ConstMathErr, Op}; -use rustc::hir::def::Def; +use rustc::hir::def::{Def, CtorKind}; use rustc::hir::def_id::DefId; use rustc::middle::expr_use_visitor as euv; use rustc::middle::mem_categorization as mc; @@ -489,20 +489,12 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node } hir::ExprPath(..) => { match v.tcx.expect_def(e.id) { - Def::Variant(..) => { - // Count the discriminator or function pointer. - v.add_qualif(ConstQualif::NON_ZERO_SIZED); - } - Def::Struct(..) => { - if let ty::TyFnDef(..) = node_ty.sty { - // Count the function pointer. - v.add_qualif(ConstQualif::NON_ZERO_SIZED); - } - } - Def::Fn(..) | Def::Method(..) => { - // Count the function pointer. + Def::VariantCtor(_, CtorKind::Const) => { + // Size is determined by the whole enum, may be non-zero. v.add_qualif(ConstQualif::NON_ZERO_SIZED); } + Def::VariantCtor(..) | Def::StructCtor(..) | + Def::Fn(..) | Def::Method(..) => {} Def::Static(..) => { match v.mode { Mode::Static | Mode::StaticMut => {} @@ -539,9 +531,9 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node } // The callee is an arbitrary expression, it doesn't necessarily have a definition. let is_const = match v.tcx.expect_def_or_none(callee.id) { - Some(Def::Struct(..)) => true, - Some(Def::Variant(..)) => { - // Count the discriminator. + Some(Def::StructCtor(_, CtorKind::Fn)) | + Some(Def::VariantCtor(_, CtorKind::Fn)) => { + // `NON_ZERO_SIZED` is about the call result, not about the ctor itself. v.add_qualif(ConstQualif::NON_ZERO_SIZED); true } @@ -573,9 +565,11 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node } } hir::ExprStruct(..) => { - // unsafe_cell_type doesn't necessarily exist with no_core - if Some(v.tcx.expect_def(e.id).def_id()) == v.tcx.lang_items.unsafe_cell_type() { - v.add_qualif(ConstQualif::MUTABLE_MEM); + if let ty::TyAdt(adt, ..) 
= v.tcx.expr_ty(e).sty { + // unsafe_cell_type doesn't necessarily exist with no_core + if Some(adt.did) == v.tcx.lang_items.unsafe_cell_type() { + v.add_qualif(ConstQualif::MUTABLE_MEM); + } } } @@ -602,7 +596,7 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node hir::ExprIndex(..) | hir::ExprField(..) | hir::ExprTupField(..) | - hir::ExprVec(_) | + hir::ExprArray(_) | hir::ExprType(..) | hir::ExprTup(..) => {} diff --git a/src/librustc_passes/static_recursion.rs b/src/librustc_passes/static_recursion.rs index d23f77af32155..0e0f8a8456731 100644 --- a/src/librustc_passes/static_recursion.rs +++ b/src/librustc_passes/static_recursion.rs @@ -14,7 +14,7 @@ use rustc::dep_graph::DepNode; use rustc::hir::map as ast_map; use rustc::session::{CompileResult, Session}; -use rustc::hir::def::{Def, DefMap}; +use rustc::hir::def::{Def, CtorKind, DefMap}; use rustc::util::nodemap::NodeMap; use syntax::ast; @@ -143,7 +143,7 @@ impl<'a, 'ast: 'a> CheckItemRecursionVisitor<'a, 'ast> { }); if any_static { if !self.sess.features.borrow().static_recursion { - emit_feature_err(&self.sess.parse_sess.span_diagnostic, + emit_feature_err(&self.sess.parse_sess, "static_recursion", *self.root_span, GateIssue::Language, @@ -272,20 +272,18 @@ impl<'a, 'ast: 'a> Visitor<'ast> for CheckItemRecursionVisitor<'a, 'ast> { // affect the specific variant used, but we need to check // the whole enum definition to see what expression that // might be (if any). - Some(Def::Variant(enum_id, variant_id)) => { - if let Some(enum_node_id) = self.ast_map.as_local_node_id(enum_id) { - if let hir::ItemEnum(ref enum_def, ref generics) = self.ast_map - .expect_item(enum_node_id) - .node { + Some(Def::VariantCtor(variant_id, CtorKind::Const)) => { + if let Some(variant_id) = self.ast_map.as_local_node_id(variant_id) { + let variant = self.ast_map.expect_variant(variant_id); + let enum_id = self.ast_map.get_parent(variant_id); + let enum_item = self.ast_map.expect_item(enum_id); + if let hir::ItemEnum(ref enum_def, ref generics) = enum_item.node { self.populate_enum_discriminants(enum_def); - let enum_id = self.ast_map.as_local_node_id(enum_id).unwrap(); - let variant_id = self.ast_map.as_local_node_id(variant_id).unwrap(); - let variant = self.ast_map.expect_variant(variant_id); self.visit_variant(variant, generics, enum_id); } else { span_bug!(e.span, "`check_static_recursion` found \ - non-enum in Def::Variant"); + non-enum in Def::VariantCtor"); } } } diff --git a/src/librustc_plugin/load.rs b/src/librustc_plugin/load.rs index 9e56397bc99e9..4438241999a39 100644 --- a/src/librustc_plugin/load.rs +++ b/src/librustc_plugin/load.rs @@ -11,7 +11,7 @@ //! Used by `rustc` when loading a plugin. use rustc::session::Session; -use rustc_metadata::creader::CrateReader; +use rustc_metadata::creader::CrateLoader; use rustc_metadata::cstore::CStore; use registry::Registry; @@ -33,7 +33,7 @@ pub struct PluginRegistrar { struct PluginLoader<'a> { sess: &'a Session, - reader: CrateReader<'a>, + reader: CrateLoader<'a>, plugins: Vec, } @@ -47,7 +47,7 @@ pub fn load_plugins(sess: &Session, krate: &ast::Crate, crate_name: &str, addl_plugins: Option>) -> Vec { - let mut loader = PluginLoader::new(sess, cstore, crate_name, krate.config.clone()); + let mut loader = PluginLoader::new(sess, cstore, crate_name); // do not report any error now. 
since crate attributes are // not touched by expansion, every use of plugin without @@ -89,14 +89,10 @@ pub fn load_plugins(sess: &Session, } impl<'a> PluginLoader<'a> { - fn new(sess: &'a Session, - cstore: &'a CStore, - crate_name: &str, - crate_config: ast::CrateConfig) - -> PluginLoader<'a> { + fn new(sess: &'a Session, cstore: &'a CStore, crate_name: &str) -> Self { PluginLoader { sess: sess, - reader: CrateReader::new(sess, cstore, crate_name, crate_config), + reader: CrateLoader::new(sess, cstore, crate_name), plugins: vec![], } } diff --git a/src/librustc_plugin/registry.rs b/src/librustc_plugin/registry.rs index 435196d956187..88e248e2efa38 100644 --- a/src/librustc_plugin/registry.rs +++ b/src/librustc_plugin/registry.rs @@ -15,9 +15,8 @@ use rustc::session::Session; use rustc::mir::transform::MirMapPass; -use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT}; -use syntax::ext::base::{IdentTT, MultiModifier, MultiDecorator}; -use syntax::ext::base::{MacroExpanderFn, MacroRulesTT}; +use syntax::ext::base::{SyntaxExtension, NamedSyntaxExtension, NormalTT, IdentTT}; +use syntax::ext::base::MacroExpanderFn; use syntax::parse::token; use syntax::ast; use syntax::feature_gate::AttributeType; @@ -74,12 +73,12 @@ impl<'a> Registry<'a> { sess: sess, args_hidden: None, krate_span: krate_span, - syntax_exts: vec!(), - early_lint_passes: vec!(), - late_lint_passes: vec!(), + syntax_exts: vec![], + early_lint_passes: vec![], + late_lint_passes: vec![], lint_groups: HashMap::new(), - llvm_passes: vec!(), - attributes: vec!(), + llvm_passes: vec![], + attributes: vec![], mir_passes: Vec::new(), } } @@ -102,6 +101,9 @@ impl<'a> Registry<'a> { /// /// This is the most general hook into `libsyntax`'s expansion behavior. pub fn register_syntax_extension(&mut self, name: ast::Name, extension: SyntaxExtension) { + if name.as_str() == "macro_rules" { + panic!("user-defined macros may not be named `macro_rules`"); + } self.syntax_exts.push((name, match extension { NormalTT(ext, _, allow_internal_unstable) => { NormalTT(ext, Some(self.krate_span), allow_internal_unstable) @@ -109,12 +111,7 @@ impl<'a> Registry<'a> { IdentTT(ext, _, allow_internal_unstable) => { IdentTT(ext, Some(self.krate_span), allow_internal_unstable) } - MultiDecorator(ext) => MultiDecorator(ext), - MultiModifier(ext) => MultiModifier(ext), - MacroRulesTT => { - self.sess.err("plugin tried to register a new MacroRulesTT"); - return; - } + _ => extension, })); } diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index 4012c1cb34889..77b3e76fc541f 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -28,7 +28,7 @@ extern crate syntax_pos; use rustc::dep_graph::DepNode; use rustc::hir::{self, PatKind}; -use rustc::hir::def::{self, Def}; +use rustc::hir::def::{self, Def, CtorKind}; use rustc::hir::def_id::DefId; use rustc::hir::intravisit::{self, Visitor}; use rustc::hir::pat_util::EnumerateAndAdjustIterator; @@ -286,7 +286,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EmbargoVisitor<'a, 'tcx> { if self.prev_level.is_some() { if let Some(exports) = self.export_map.get(&id) { for export in exports { - if let Some(node_id) = self.tcx.map.as_local_node_id(export.def_id) { + if let Some(node_id) = self.tcx.map.as_local_node_id(export.def.def_id()) { self.update(node_id, Some(AccessLevel::Exported)); } } @@ -323,8 +323,13 @@ impl<'b, 'a, 'tcx: 'a, 'v> Visitor<'v> for ReachEverythingInTheInterfaceVisitor< let def = self.ev.tcx.expect_def(ty.id); match def { Def::Struct(def_id) | 
Def::Union(def_id) | Def::Enum(def_id) | - Def::TyAlias(def_id) | Def::Trait(def_id) | Def::AssociatedTy(def_id, _) => { - if let Some(node_id) = self.ev.tcx.map.as_local_node_id(def_id) { + Def::TyAlias(def_id) | Def::Trait(def_id) | Def::AssociatedTy(def_id) => { + if let Some(mut node_id) = self.ev.tcx.map.as_local_node_id(def_id) { + // Check the trait for associated types. + if let hir::map::NodeTraitItem(_) = self.ev.tcx.map.get(node_id) { + node_id = self.ev.tcx.map.get_parent(node_id); + } + let item = self.ev.tcx.map.expect_item(node_id); if let Def::TyAlias(..) = def { // Type aliases are substituted. Associated type aliases are not @@ -449,36 +454,25 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> { } } hir::ExprPath(..) => { - if let Def::Struct(..) = self.tcx.expect_def(expr.id) { - let expr_ty = self.tcx.expr_ty(expr); - let def = match expr_ty.sty { - ty::TyFnDef(.., &ty::BareFnTy { sig: ty::Binder(ty::FnSig { - output: ty, .. - }), ..}) => ty, - _ => expr_ty - }.ty_adt_def().unwrap(); - - let private_indexes : Vec<_> = def.struct_variant().fields.iter().enumerate() - .filter(|&(_,f)| { - !f.vis.is_accessible_from(self.curitem, &self.tcx.map) - }).map(|(n,&_)|n).collect(); + if let def @ Def::StructCtor(_, CtorKind::Fn) = self.tcx.expect_def(expr.id) { + let adt_def = self.tcx.expect_variant_def(def); + let private_indexes = adt_def.fields.iter().enumerate().filter(|&(_, field)| { + !field.vis.is_accessible_from(self.curitem, &self.tcx.map) + }).map(|(i, _)| i).collect::>(); if !private_indexes.is_empty() { - let mut error = struct_span_err!(self.tcx.sess, expr.span, E0450, "cannot invoke tuple struct constructor \ - with private fields"); + with private fields"); error.span_label(expr.span, &format!("cannot construct with a private field")); - if let Some(def_id) = self.tcx.map.as_local_node_id(def.did) { - if let Some(hir::map::NodeItem(node)) = self.tcx.map.find(def_id) { - if let hir::Item_::ItemStruct(ref tuple_data, _) = node.node { - - for i in private_indexes { - error.span_label(tuple_data.fields()[i].span, - &format!("private field declared here")); - } + if let Some(node_id) = self.tcx.map.as_local_node_id(adt_def.did) { + let node = self.tcx.map.find(node_id); + if let Some(hir::map::NodeStructCtor(vdata)) = node { + for i in private_indexes { + error.span_label(vdata.fields()[i].span, + &format!("private field declared here")); } } } @@ -865,9 +859,6 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> // expression/block context can't possibly contain exported things. // (Making them no-ops stops us from traversing the whole AST without // having to be super careful about our `walk_...` calls above.) - // FIXME(#29524): Unfortunately this ^^^ is not true, blocks can contain - // exported items (e.g. 
impls) and actual code in rustc itself breaks - // if we don't traverse blocks in `EmbargoVisitor` fn visit_block(&mut self, _: &hir::Block) {} fn visit_expr(&mut self, _: &hir::Expr) {} } @@ -947,9 +938,14 @@ impl<'a, 'tcx: 'a, 'v> Visitor<'v> for SearchInterfaceForPrivateItemsVisitor<'a, return } Def::Struct(def_id) | Def::Union(def_id) | Def::Enum(def_id) | - Def::TyAlias(def_id) | Def::Trait(def_id) | Def::AssociatedTy(def_id, _) => { + Def::TyAlias(def_id) | Def::Trait(def_id) | Def::AssociatedTy(def_id) => { // Non-local means public (private items can't leave their crate, modulo bugs) - if let Some(node_id) = self.tcx.map.as_local_node_id(def_id) { + if let Some(mut node_id) = self.tcx.map.as_local_node_id(def_id) { + // Check the trait for associated types. + if let hir::map::NodeTraitItem(_) = self.tcx.map.get(node_id) { + node_id = self.tcx.map.get_parent(node_id); + } + let item = self.tcx.map.expect_item(node_id); let vis = match self.substituted_alias_visibility(item, path) { Some(vis) => vis, @@ -962,8 +958,10 @@ impl<'a, 'tcx: 'a, 'v> Visitor<'v> for SearchInterfaceForPrivateItemsVisitor<'a, if !vis.is_at_least(self.required_visibility, &self.tcx.map) { if self.tcx.sess.features.borrow().pub_restricted || self.old_error_set.contains(&ty.id) { - span_err!(self.tcx.sess, ty.span, E0446, + let mut err = struct_span_err!(self.tcx.sess, ty.span, E0446, "private type in public interface"); + err.span_label(ty.span, &format!("can't leak private type")); + err.emit(); } else { self.tcx.sess.add_lint(lint::builtin::PRIVATE_IN_PUBLIC, node_id, diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 83f03e7cfc5ac..d90fe769caf63 100644 --- a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -13,29 +13,35 @@ //! Here we build the "reduced graph": the graph of the module tree without //! any imports resolved. +use macros::{InvocationData, LegacyScope}; use resolve_imports::ImportDirectiveSubclass::{self, GlobImport}; -use Module; +use {Module, ModuleS, ModuleKind}; use Namespace::{self, TypeNS, ValueNS}; use {NameBinding, NameBindingKind, ToNameBinding}; -use ParentLink::{ModuleParentLink, BlockParentLink}; use Resolver; use {resolve_error, resolve_struct_error, ResolutionError}; -use rustc::middle::cstore::{ChildItem, DlDef}; +use rustc::middle::cstore::LoadedMacros; use rustc::hir::def::*; use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; -use rustc::ty::{self, VariantKind}; +use rustc::ty; +use rustc::util::nodemap::FnvHashMap; use std::cell::Cell; +use std::rc::Rc; use syntax::ast::Name; use syntax::attr; use syntax::parse::token; -use syntax::ast::{Block, Crate}; -use syntax::ast::{ForeignItem, ForeignItemKind, Item, ItemKind}; -use syntax::ast::{Mutability, StmtKind, TraitItemKind}; +use syntax::ast::{self, Block, ForeignItem, ForeignItemKind, Item, ItemKind}; +use syntax::ast::{Mutability, StmtKind, TraitItem, TraitItemKind}; use syntax::ast::{Variant, ViewPathGlob, ViewPathList, ViewPathSimple}; +use syntax::ext::base::{SyntaxExtension, Resolver as SyntaxResolver}; +use syntax::ext::expand::mark_tts; +use syntax::ext::hygiene::Mark; +use syntax::feature_gate::{self, emit_feature_err}; +use syntax::ext::tt::macro_rules; use syntax::parse::token::keywords; use syntax::visit::{self, Visitor}; @@ -53,14 +59,15 @@ impl<'a> ToNameBinding<'a> for (Def, Span, ty::Visibility) { } } -impl<'b> Resolver<'b> { - /// Constructs the reduced graph for the entire crate. 
- pub fn build_reduced_graph(&mut self, krate: &Crate) { - let no_implicit_prelude = attr::contains_name(&krate.attrs, "no_implicit_prelude"); - self.graph_root.no_implicit_prelude.set(no_implicit_prelude); - visit::walk_crate(&mut BuildReducedGraphVisitor { resolver: self }, krate); - } +#[derive(Default, PartialEq, Eq)] +struct LegacyMacroImports { + import_all: Option, + imports: Vec<(Name, Span)>, + reexports: Vec<(Name, Span)>, + no_link: bool, +} +impl<'b> Resolver<'b> { /// Defines `name` in namespace `ns` of module `parent` to be `def` if it is not yet defined; /// otherwise, reports an error. fn define(&mut self, parent: Module<'b>, name: Name, ns: Namespace, def: T) @@ -75,13 +82,19 @@ impl<'b> Resolver<'b> { fn block_needs_anonymous_module(&mut self, block: &Block) -> bool { // If any statements are items, we need to create an anonymous module block.stmts.iter().any(|statement| match statement.node { - StmtKind::Item(_) => true, + StmtKind::Item(_) | StmtKind::Mac(_) => true, _ => false, }) } + fn insert_field_names(&mut self, def_id: DefId, field_names: Vec) { + if !field_names.is_empty() { + self.field_names.insert(def_id, field_names); + } + } + /// Constructs the reduced graph for one item. - fn build_reduced_graph_for_item(&mut self, item: &Item) { + fn build_reduced_graph_for_item(&mut self, item: &Item, expansion: Mark) { let parent = self.current_module; let name = item.ident.name; let sp = item.span; @@ -92,14 +105,14 @@ impl<'b> Resolver<'b> { // Extract and intern the module part of the path. For // globs and lists, the path is found directly in the AST; // for simple paths we have to munge the path a little. - let module_path: Vec = match view_path.node { + let module_path: Vec<_> = match view_path.node { ViewPathSimple(_, ref full_path) => { full_path.segments .split_last() .unwrap() .1 .iter() - .map(|seg| seg.identifier.name) + .map(|seg| seg.identifier) .collect() } @@ -107,7 +120,7 @@ impl<'b> Resolver<'b> { ViewPathList(ref module_ident_path, _) => { module_ident_path.segments .iter() - .map(|seg| seg.identifier.name) + .map(|seg| seg.identifier) .collect() } }; @@ -117,14 +130,27 @@ impl<'b> Resolver<'b> { match view_path.node { ViewPathSimple(binding, ref full_path) => { - let source_name = full_path.segments.last().unwrap().identifier.name; - if source_name.as_str() == "mod" || source_name.as_str() == "self" { + let mut source = full_path.segments.last().unwrap().identifier; + let source_name = source.name.as_str(); + if source_name == "mod" || source_name == "self" { resolve_error(self, view_path.span, ResolutionError::SelfImportsOnlyAllowedWithin); + } else if source_name == "$crate" && full_path.segments.len() == 1 { + let crate_root = self.resolve_crate_var(source.ctxt); + let crate_name = match crate_root.kind { + ModuleKind::Def(_, name) => name, + ModuleKind::Block(..) 
=> unreachable!(), + }; + source.name = crate_name; + + self.session.struct_span_warn(item.span, "`$crate` may not be imported") + .note("`use $crate;` was erroneously allowed and \ + will become a hard error in a future release") + .emit(); } - let subclass = ImportDirectiveSubclass::single(binding.name, source_name); + let subclass = ImportDirectiveSubclass::single(binding.name, source.name); let span = view_path.span; self.add_import_directive(module_path, subclass, span, item.id, vis); } @@ -156,7 +182,7 @@ impl<'b> Resolver<'b> { (module_path.clone(), node.name.name, rename) } else { let name = match module_path.last() { - Some(name) => *name, + Some(ident) => ident.name, None => { resolve_error( self, @@ -189,29 +215,65 @@ impl<'b> Resolver<'b> { } ItemKind::ExternCrate(_) => { - // n.b. we don't need to look at the path option here, because cstore already - // did - if let Some(crate_id) = self.session.cstore.extern_mod_stmt_cnum(item.id) { + let legacy_imports = self.legacy_macro_imports(&item.attrs); + // `#[macro_use]` and `#[macro_reexport]` are only allowed at the crate root. + if self.current_module.parent.is_some() && { + legacy_imports.import_all.is_some() || !legacy_imports.imports.is_empty() || + !legacy_imports.reexports.is_empty() + } { + if self.current_module.parent.is_some() { + span_err!(self.session, item.span, E0468, + "an `extern crate` loading macros must be at the crate root"); + } + } + + let loaded_macros = if legacy_imports != LegacyMacroImports::default() { + self.crate_loader.process_item(item, &self.definitions, true) + } else { + self.crate_loader.process_item(item, &self.definitions, false) + }; + + // n.b. we don't need to look at the path option here, because cstore already did + let crate_id = self.session.cstore.extern_mod_stmt_cnum(item.id); + let module = if let Some(crate_id) = crate_id { let def_id = DefId { krate: crate_id, index: CRATE_DEF_INDEX, }; - let parent_link = ModuleParentLink(parent, name); - let def = Def::Mod(def_id); - let module = self.new_extern_crate_module(parent_link, def, item.id); + let module = self.arenas.alloc_module(ModuleS { + extern_crate_id: Some(item.id), + populated: Cell::new(false), + ..ModuleS::new(Some(parent), ModuleKind::Def(Def::Mod(def_id), name)) + }); + self.define(parent, name, TypeNS, (module, sp, vis)); + self.populate_module_if_necessary(module); + module + } else { + // Define an empty module + let def = Def::Mod(self.definitions.local_def_id(item.id)); + let module = ModuleS::new(Some(parent), ModuleKind::Def(def, name)); + let module = self.arenas.alloc_module(module); self.define(parent, name, TypeNS, (module, sp, vis)); + module + }; - self.build_reduced_graph_for_external_crate(module); + if let Some(loaded_macros) = loaded_macros { + self.import_extern_crate_macros( + item, module, loaded_macros, legacy_imports, expansion == Mark::root(), + ); } } + ItemKind::Mod(..) if item.ident == keywords::Invalid.ident() => {} // Crate root + ItemKind::Mod(..) 
=> { - let parent_link = ModuleParentLink(parent, name); let def = Def::Mod(self.definitions.local_def_id(item.id)); - let module = self.new_module(parent_link, Some(def), Some(item.id)); - module.no_implicit_prelude.set({ - parent.no_implicit_prelude.get() || + let module = self.arenas.alloc_module(ModuleS { + no_implicit_prelude: parent.no_implicit_prelude || { attr::contains_name(&item.attrs, "no_implicit_prelude") + }, + normal_ancestor_id: Some(item.id), + ..ModuleS::new(Some(parent), ModuleKind::Def(def, name)) }); self.define(parent, name, TypeNS, (module, sp, vis)); self.module_map.insert(item.id, module); @@ -220,7 +282,9 @@ impl<'b> Resolver<'b> { self.current_module = module; } - ItemKind::ForeignMod(..) => {} + ItemKind::ForeignMod(..) => { + self.crate_loader.process_item(item, &self.definitions, false); + } // These items live in the value namespace. ItemKind::Static(_, m, _) => { @@ -244,14 +308,12 @@ impl<'b> Resolver<'b> { } ItemKind::Enum(ref enum_definition, _) => { - let parent_link = ModuleParentLink(parent, name); let def = Def::Enum(self.definitions.local_def_id(item.id)); - let module = self.new_module(parent_link, Some(def), parent.normal_ancestor_id); + let module = self.new_module(parent, ModuleKind::Def(def, name), true); self.define(parent, name, TypeNS, (module, sp, vis)); for variant in &(*enum_definition).variants { - let item_def_id = self.definitions.local_def_id(item.id); - self.build_reduced_graph_for_variant(variant, item_def_id, module, vis); + self.build_reduced_graph_for_variant(variant, module, vis); } } @@ -264,91 +326,67 @@ impl<'b> Resolver<'b> { // If this is a tuple or unit struct, define a name // in the value namespace as well. if !struct_def.is_struct() { - let def = Def::Struct(self.definitions.local_def_id(struct_def.id())); - self.define(parent, name, ValueNS, (def, sp, vis)); + let ctor_def = Def::StructCtor(self.definitions.local_def_id(struct_def.id()), + CtorKind::from_ast(struct_def)); + self.define(parent, name, ValueNS, (ctor_def, sp, vis)); } - // Record the def ID and fields of this struct. - let field_names = struct_def.fields().iter().enumerate().map(|(index, field)| { + // Record field names for error reporting. + let field_names = struct_def.fields().iter().filter_map(|field| { self.resolve_visibility(&field.vis); field.ident.map(|ident| ident.name) - .unwrap_or_else(|| token::intern(&index.to_string())) }).collect(); let item_def_id = self.definitions.local_def_id(item.id); - self.structs.insert(item_def_id, field_names); + self.insert_field_names(item_def_id, field_names); } ItemKind::Union(ref vdata, _) => { let def = Def::Union(self.definitions.local_def_id(item.id)); self.define(parent, name, TypeNS, (def, sp, vis)); - // Record the def ID and fields of this union. - let field_names = vdata.fields().iter().enumerate().map(|(index, field)| { + // Record field names for error reporting. + let field_names = vdata.fields().iter().filter_map(|field| { self.resolve_visibility(&field.vis); field.ident.map(|ident| ident.name) - .unwrap_or_else(|| token::intern(&index.to_string())) }).collect(); let item_def_id = self.definitions.local_def_id(item.id); - self.structs.insert(item_def_id, field_names); + self.insert_field_names(item_def_id, field_names); } ItemKind::DefaultImpl(..) | ItemKind::Impl(..) => {} - ItemKind::Trait(.., ref items) => { + ItemKind::Trait(..) => { let def_id = self.definitions.local_def_id(item.id); // Add all the items within to a new module. 
- let parent_link = ModuleParentLink(parent, name); - let def = Def::Trait(def_id); - let module_parent = - self.new_module(parent_link, Some(def), parent.normal_ancestor_id); - self.define(parent, name, TypeNS, (module_parent, sp, vis)); - - // Add the names of all the items to the trait info. - for item in items { - let item_def_id = self.definitions.local_def_id(item.id); - let mut is_static_method = false; - let (def, ns) = match item.node { - TraitItemKind::Const(..) => (Def::AssociatedConst(item_def_id), ValueNS), - TraitItemKind::Method(ref sig, _) => { - is_static_method = !sig.decl.has_self(); - (Def::Method(item_def_id), ValueNS) - } - TraitItemKind::Type(..) => (Def::AssociatedTy(def_id, item_def_id), TypeNS), - TraitItemKind::Macro(_) => panic!("unexpanded macro in resolve!"), - }; - - self.define(module_parent, item.ident.name, ns, (def, item.span, vis)); - - self.trait_item_map.insert((item.ident.name, def_id), is_static_method); - } + let module = + self.new_module(parent, ModuleKind::Def(Def::Trait(def_id), name), true); + self.define(parent, name, TypeNS, (module, sp, vis)); + self.current_module = module; } ItemKind::Mac(_) => panic!("unexpanded macro in resolve!"), } - - visit::walk_item(&mut BuildReducedGraphVisitor { resolver: self }, item); - self.current_module = parent; } // Constructs the reduced graph for one variant. Variants exist in the // type and value namespaces. fn build_reduced_graph_for_variant(&mut self, variant: &Variant, - item_id: DefId, parent: Module<'b>, vis: ty::Visibility) { let name = variant.node.name.name; - if variant.node.data.is_struct() { - // Not adding fields for variants as they are not accessed with a self receiver - let variant_def_id = self.definitions.local_def_id(variant.node.data.id()); - self.structs.insert(variant_def_id, Vec::new()); - } + let def_id = self.definitions.local_def_id(variant.node.data.id()); - // Variants are always treated as importable to allow them to be glob used. - // All variants are defined in both type and value namespaces as future-proofing. - let def = Def::Variant(item_id, self.definitions.local_def_id(variant.node.data.id())); - self.define(parent, name, ValueNS, (def, variant.span, vis)); + // Define a name in the type namespace. + let def = Def::Variant(def_id); self.define(parent, name, TypeNS, (def, variant.span, vis)); + + // Define a constructor name in the value namespace. + // Braced variants, unlike structs, generate unusable names in + // value namespace, they are reserved for possible future use. + let ctor_kind = CtorKind::from_ast(&variant.node.data); + let ctor_def = Def::VariantCtor(def_id, ctor_kind); + self.define(parent, name, ValueNS, (ctor_def, variant.span, vis)); } /// Constructs the reduced graph for one foreign item. @@ -377,110 +415,75 @@ impl<'b> Resolver<'b> { {}", block_id); - let parent_link = BlockParentLink(parent, block_id); - let new_module = self.new_module(parent_link, None, parent.normal_ancestor_id); + let new_module = self.new_module(parent, ModuleKind::Block(block_id), true); self.module_map.insert(block_id, new_module); self.current_module = new_module; // Descend into the block. } - - visit::walk_block(&mut BuildReducedGraphVisitor { resolver: self }, block); - self.current_module = parent; } /// Builds the reduced graph for a single item in an external crate. 
- fn build_reduced_graph_for_external_crate_def(&mut self, parent: Module<'b>, xcdef: ChildItem) { - let def = match xcdef.def { - DlDef(def) => def, - _ => return, + fn build_reduced_graph_for_external_crate_def(&mut self, parent: Module<'b>, + child: Export) { + let name = child.name; + let def = child.def; + let def_id = def.def_id(); + let vis = if parent.is_trait() { + ty::Visibility::Public + } else { + self.session.cstore.visibility(def_id) }; - if let Def::ForeignMod(def_id) = def { - // Foreign modules have no names. Recur and populate eagerly. - for child in self.session.cstore.item_children(def_id) { - self.build_reduced_graph_for_external_crate_def(parent, child); - } - return; - } - - let name = xcdef.name; - let vis = if parent.is_trait() { ty::Visibility::Public } else { xcdef.vis }; - match def { - Def::Mod(_) | Def::ForeignMod(_) | Def::Enum(..) => { - debug!("(building reduced graph for external crate) building module {} {:?}", - name, vis); - let parent_link = ModuleParentLink(parent, name); - let module = self.new_module(parent_link, Some(def), None); - let _ = self.try_define(parent, name, TypeNS, (module, DUMMY_SP, vis)); - } - Def::Variant(_, variant_id) => { - debug!("(building reduced graph for external crate) building variant {}", name); - // Variants are always treated as importable to allow them to be glob used. - // All variants are defined in both type and value namespaces as future-proofing. - let _ = self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis)); - let _ = self.try_define(parent, name, ValueNS, (def, DUMMY_SP, vis)); - if self.session.cstore.variant_kind(variant_id) == Some(VariantKind::Struct) { - // Not adding fields for variants as they are not accessed with a self receiver - self.structs.insert(variant_id, Vec::new()); - } + Def::Mod(..) | Def::Enum(..) => { + let module = self.new_module(parent, ModuleKind::Def(def, name), false); + self.define(parent, name, TypeNS, (module, DUMMY_SP, vis)); + } + Def::Variant(..) => { + self.define(parent, name, TypeNS, (def, DUMMY_SP, vis)); + } + Def::VariantCtor(..) => { + self.define(parent, name, ValueNS, (def, DUMMY_SP, vis)); } Def::Fn(..) | Def::Static(..) | Def::Const(..) | Def::AssociatedConst(..) | Def::Method(..) => { - debug!("(building reduced graph for external crate) building value (fn/static) {}", - name); - let _ = self.try_define(parent, name, ValueNS, (def, DUMMY_SP, vis)); + self.define(parent, name, ValueNS, (def, DUMMY_SP, vis)); } - Def::Trait(def_id) => { - debug!("(building reduced graph for external crate) building type {}", name); - - // If this is a trait, add all the trait item names to the trait - // info. - - let trait_item_def_ids = self.session.cstore.trait_item_def_ids(def_id); - for trait_item_def in &trait_item_def_ids { - let trait_item_name = - self.session.cstore.item_name(trait_item_def.def_id()); - - debug!("(building reduced graph for external crate) ... adding trait item \ - '{}'", - trait_item_name); - + Def::Trait(..) => { + let module = self.new_module(parent, ModuleKind::Def(def, name), false); + self.define(parent, name, TypeNS, (module, DUMMY_SP, vis)); + + // If this is a trait, add all the trait item names to the trait info. 
+ let trait_item_def_ids = self.session.cstore.impl_or_trait_items(def_id); + for trait_item_def_id in trait_item_def_ids { + let trait_item_name = self.session.cstore.def_key(trait_item_def_id) + .disambiguated_data.data.get_opt_name() + .expect("opt_item_name returned None for trait"); self.trait_item_map.insert((trait_item_name, def_id), false); } - - let parent_link = ModuleParentLink(parent, name); - let module = self.new_module(parent_link, Some(def), None); - let _ = self.try_define(parent, name, TypeNS, (module, DUMMY_SP, vis)); } Def::TyAlias(..) | Def::AssociatedTy(..) => { - debug!("(building reduced graph for external crate) building type {}", name); - let _ = self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis)); - } - Def::Struct(def_id) - if self.session.cstore.tuple_struct_definition_if_ctor(def_id).is_none() => { - debug!("(building reduced graph for external crate) building type and value for {}", - name); - let _ = self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis)); - if let Some(ctor_def_id) = self.session.cstore.struct_ctor_def_id(def_id) { - let def = Def::Struct(ctor_def_id); - let _ = self.try_define(parent, name, ValueNS, (def, DUMMY_SP, vis)); - } + self.define(parent, name, TypeNS, (def, DUMMY_SP, vis)); + } + Def::Struct(..) => { + self.define(parent, name, TypeNS, (def, DUMMY_SP, vis)); - // Record the def ID and fields of this struct. - let fields = self.session.cstore.struct_field_names(def_id); - self.structs.insert(def_id, fields); + // Record field names for error reporting. + let field_names = self.session.cstore.struct_field_names(def_id); + self.insert_field_names(def_id, field_names); + } + Def::StructCtor(..) => { + self.define(parent, name, ValueNS, (def, DUMMY_SP, vis)); } - Def::Union(def_id) => { - let _ = self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis)); + Def::Union(..) => { + self.define(parent, name, TypeNS, (def, DUMMY_SP, vis)); - // Record the def ID and fields of this union. - let fields = self.session.cstore.struct_field_names(def_id); - self.structs.insert(def_id, fields); + // Record field names for error reporting. + let field_names = self.session.cstore.struct_field_names(def_id); + self.insert_field_names(def_id, field_names); } - Def::Struct(..) => {} Def::Local(..) | Def::PrimTy(..) | Def::TyParam(..) | @@ -488,20 +491,11 @@ impl<'b> Resolver<'b> { Def::Label(..) | Def::SelfTy(..) | Def::Err => { - bug!("didn't expect `{:?}`", def); + bug!("unexpected definition: {:?}", def); } } } - /// Builds the reduced graph rooted at the 'use' directive for an external - /// crate. - fn build_reduced_graph_for_external_crate(&mut self, root: Module<'b>) { - let root_cnum = root.def_id().unwrap().krate; - for child in self.session.cstore.crate_top_level_items(root_cnum) { - self.build_reduced_graph_for_external_crate_def(root, child); - } - } - /// Ensures that the reduced graph rooted at the given external module /// is built, building it if it is not. pub fn populate_module_if_necessary(&mut self, module: Module<'b>) { @@ -511,22 +505,258 @@ impl<'b> Resolver<'b> { } module.populated.set(true) } + + fn import_extern_crate_macros(&mut self, + extern_crate: &Item, + module: Module<'b>, + loaded_macros: LoadedMacros, + legacy_imports: LegacyMacroImports, + allow_shadowing: bool) { + let import_macro = |this: &mut Self, name, ext: Rc<_>, span| { + this.used_crates.insert(module.def_id().unwrap().krate); + if let SyntaxExtension::NormalTT(..) 
= *ext { + this.macro_names.insert(name); + } + if this.builtin_macros.insert(name, ext).is_some() && !allow_shadowing { + let msg = format!("`{}` is already in scope", name); + let note = + "macro-expanded `#[macro_use]`s may not shadow existing macros (see RFC 1560)"; + this.session.struct_span_err(span, &msg).note(note).emit(); + } + }; + + match loaded_macros { + LoadedMacros::MacroRules(macros) => { + let mark = Mark::fresh(); + if !macros.is_empty() { + let invocation = self.arenas.alloc_invocation_data(InvocationData { + module: Cell::new(module), + def_index: CRATE_DEF_INDEX, + const_integer: false, + legacy_scope: Cell::new(LegacyScope::Empty), + expansion: Cell::new(LegacyScope::Empty), + }); + self.invocations.insert(mark, invocation); + } + + let mut macros: FnvHashMap<_, _> = macros.into_iter().map(|mut def| { + def.body = mark_tts(&def.body, mark); + let ext = macro_rules::compile(&self.session.parse_sess, &def); + (def.ident.name, (def, Rc::new(ext))) + }).collect(); + + if let Some(span) = legacy_imports.import_all { + for (&name, &(_, ref ext)) in macros.iter() { + import_macro(self, name, ext.clone(), span); + } + } else { + for (name, span) in legacy_imports.imports { + if let Some(&(_, ref ext)) = macros.get(&name) { + import_macro(self, name, ext.clone(), span); + } else { + span_err!(self.session, span, E0469, "imported macro not found"); + } + } + } + for (name, span) in legacy_imports.reexports { + if let Some((mut def, _)) = macros.remove(&name) { + def.id = self.next_node_id(); + self.exported_macros.push(def); + } else { + span_err!(self.session, span, E0470, "reexported macro not found"); + } + } + } + + LoadedMacros::ProcMacros(macros) => { + if !self.session.features.borrow().proc_macro { + let sess = &self.session.parse_sess; + let issue = feature_gate::GateIssue::Language; + let msg = + "loading custom derive macro crates is experimentally supported"; + emit_feature_err(sess, "proc_macro", extern_crate.span, issue, msg); + } + if !legacy_imports.imports.is_empty() { + let msg = "`proc-macro` crates cannot be selectively imported from, \ + must use `#[macro_use]`"; + self.session.span_err(extern_crate.span, msg); + } + if !legacy_imports.reexports.is_empty() { + let msg = "`proc-macro` crates cannot be reexported from"; + self.session.span_err(extern_crate.span, msg); + } + if let Some(span) = legacy_imports.import_all { + for (name, ext) in macros { + import_macro(self, name, Rc::new(ext), span); + } + } + } + } + } + + // does this attribute list contain "macro_use"? 
+ fn contains_macro_use(&mut self, attrs: &[ast::Attribute]) -> bool { + for attr in attrs { + if attr.check_name("macro_escape") { + let msg = "macro_escape is a deprecated synonym for macro_use"; + let mut err = self.session.struct_span_warn(attr.span, msg); + if let ast::AttrStyle::Inner = attr.node.style { + err.help("consider an outer attribute, #[macro_use] mod ...").emit(); + } else { + err.emit(); + } + } else if !attr.check_name("macro_use") { + continue; + } + + if !attr.is_word() { + self.session.span_err(attr.span, "arguments to macro_use are not allowed here"); + } + return true; + } + + false + } + + fn legacy_macro_imports(&mut self, attrs: &[ast::Attribute]) -> LegacyMacroImports { + let mut imports = LegacyMacroImports::default(); + for attr in attrs { + if attr.check_name("macro_use") { + match attr.meta_item_list() { + Some(names) => for attr in names { + if let Some(word) = attr.word() { + imports.imports.push((token::intern(&word.name()), attr.span())); + } else { + span_err!(self.session, attr.span(), E0466, "bad macro import"); + } + }, + None => imports.import_all = Some(attr.span), + } + } else if attr.check_name("macro_reexport") { + let bad_macro_reexport = |this: &mut Self, span| { + span_err!(this.session, span, E0467, "bad macro reexport"); + }; + if let Some(names) = attr.meta_item_list() { + for attr in names { + if let Some(word) = attr.word() { + imports.reexports.push((token::intern(&word.name()), attr.span())); + } else { + bad_macro_reexport(self, attr.span()); + } + } + } else { + bad_macro_reexport(self, attr.span()); + } + } else if attr.check_name("no_link") { + imports.no_link = true; + } + } + imports + } +} + +pub struct BuildReducedGraphVisitor<'a, 'b: 'a> { + pub resolver: &'a mut Resolver<'b>, + pub legacy_scope: LegacyScope<'b>, + pub expansion: Mark, } -struct BuildReducedGraphVisitor<'a, 'b: 'a> { - resolver: &'a mut Resolver<'b>, +impl<'a, 'b> BuildReducedGraphVisitor<'a, 'b> { + fn visit_invoc(&mut self, id: ast::NodeId) -> &'b InvocationData<'b> { + let invocation = self.resolver.invocations[&Mark::from_placeholder_id(id)]; + invocation.module.set(self.resolver.current_module); + invocation.legacy_scope.set(self.legacy_scope); + invocation + } +} + +macro_rules! method { + ($visit:ident: $ty:ty, $invoc:path, $walk:ident) => { + fn $visit(&mut self, node: &$ty) { + if let $invoc(..) = node.node { + self.visit_invoc(node.id); + } else { + visit::$walk(self, node); + } + } + } } impl<'a, 'b> Visitor for BuildReducedGraphVisitor<'a, 'b> { + method!(visit_impl_item: ast::ImplItem, ast::ImplItemKind::Macro, walk_impl_item); + method!(visit_expr: ast::Expr, ast::ExprKind::Mac, walk_expr); + method!(visit_pat: ast::Pat, ast::PatKind::Mac, walk_pat); + method!(visit_ty: ast::Ty, ast::TyKind::Mac, walk_ty); + fn visit_item(&mut self, item: &Item) { - self.resolver.build_reduced_graph_for_item(item); + let macro_use = match item.node { + ItemKind::Mac(..) if item.id == ast::DUMMY_NODE_ID => return, // Scope placeholder + ItemKind::Mac(..) => { + return self.legacy_scope = LegacyScope::Expansion(self.visit_invoc(item.id)); + } + ItemKind::Mod(..) 
=> self.resolver.contains_macro_use(&item.attrs), + _ => false, + }; + + let (parent, legacy_scope) = (self.resolver.current_module, self.legacy_scope); + self.resolver.build_reduced_graph_for_item(item, self.expansion); + visit::walk_item(self, item); + self.resolver.current_module = parent; + if !macro_use { + self.legacy_scope = legacy_scope; + } + } + + fn visit_stmt(&mut self, stmt: &ast::Stmt) { + if let ast::StmtKind::Mac(..) = stmt.node { + self.legacy_scope = LegacyScope::Expansion(self.visit_invoc(stmt.id)); + } else { + visit::walk_stmt(self, stmt); + } } fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) { self.resolver.build_reduced_graph_for_foreign_item(foreign_item); + visit::walk_foreign_item(self, foreign_item); } fn visit_block(&mut self, block: &Block) { + let (parent, legacy_scope) = (self.resolver.current_module, self.legacy_scope); self.resolver.build_reduced_graph_for_block(block); + visit::walk_block(self, block); + self.resolver.current_module = parent; + self.legacy_scope = legacy_scope; + } + + fn visit_trait_item(&mut self, item: &TraitItem) { + let parent = self.resolver.current_module; + let def_id = parent.def_id().unwrap(); + + if let TraitItemKind::Macro(_) = item.node { + self.visit_invoc(item.id); + return + } + + // Add the item to the trait info. + let item_def_id = self.resolver.definitions.local_def_id(item.id); + let mut is_static_method = false; + let (def, ns) = match item.node { + TraitItemKind::Const(..) => (Def::AssociatedConst(item_def_id), ValueNS), + TraitItemKind::Method(ref sig, _) => { + is_static_method = !sig.decl.has_self(); + (Def::Method(item_def_id), ValueNS) + } + TraitItemKind::Type(..) => (Def::AssociatedTy(item_def_id), TypeNS), + TraitItemKind::Macro(_) => bug!(), // handled above + }; + + self.resolver.trait_item_map.insert((item.ident.name, def_id), is_static_method); + + let vis = ty::Visibility::Public; + self.resolver.define(parent, item.ident.name, ns, (def, item.span, vis)); + + self.resolver.current_module = parent.parent.unwrap(); // nearest normal ancestor + visit::walk_trait_item(self, item); + self.resolver.current_module = parent; } } diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs index 93abe07128fa1..e1ea40809da07 100644 --- a/src/librustc_resolve/check_unused.rs +++ b/src/librustc_resolve/check_unused.rs @@ -59,10 +59,12 @@ impl<'a, 'b> UnusedImportCheckVisitor<'a, 'b> { // Check later. return; } - self.session.add_lint(lint::builtin::UNUSED_IMPORTS, - id, - span, - "unused import".to_string()); + let msg = if let Ok(snippet) = self.session.codemap().span_to_snippet(span) { + format!("unused import: `{}`", snippet) + } else { + "unused import".to_string() + }; + self.session.add_lint(lint::builtin::UNUSED_IMPORTS, id, span, msg); } else { // This trait import is definitely used, in a way other than // method resolution. diff --git a/src/librustc_resolve/diagnostics.rs b/src/librustc_resolve/diagnostics.rs index f8f90bdb4e7f5..5eb269030a004 100644 --- a/src/librustc_resolve/diagnostics.rs +++ b/src/librustc_resolve/diagnostics.rs @@ -860,31 +860,6 @@ match (A, B, C) { ``` "##, -E0422: r##" -You are trying to use an identifier that is either undefined or not a struct. - -Erroneous code example: - -``` compile_fail,E0422 -fn main () { - let x = Foo { x: 1, y: 2 }; -} -``` - -In this case, `Foo` is undefined, so it inherently isn't anything, and -definitely not a struct. 
- -```compile_fail,E0422 -fn main () { - let foo = 1; - let x = foo { x: 1, y: 2 }; -} -``` - -In this case, `foo` is defined, but is not a struct, so Rust can't use it as -one. -"##, - E0423: r##" A `struct` variant name was used like a function name. @@ -1272,6 +1247,185 @@ impl Foo for i32 {} ``` "##, +E0466: r##" +Macro import declarations were malformed. + +Erroneous code examples: + +```compile_fail,E0466 +#[macro_use(a_macro(another_macro))] // error: invalid import declaration +extern crate core as some_crate; + +#[macro_use(i_want = "some_macros")] // error: invalid import declaration +extern crate core as another_crate; +``` + +This is a syntax error at the level of attribute declarations. The proper +syntax for macro imports is the following: + +```ignore +// In some_crate: +#[macro_export] +macro_rules! get_tacos { + ... +} + +#[macro_export] +macro_rules! get_pimientos { + ... +} + +// In your crate: +#[macro_use(get_tacos, get_pimientos)] // It imports `get_tacos` and +extern crate some_crate; // `get_pimientos` macros from some_crate +``` + +If you would like to import all exported macros, write `macro_use` with no +arguments. +"##, + +E0467: r##" +Macro reexport declarations were empty or malformed. + +Erroneous code examples: + +```compile_fail,E0467 +#[macro_reexport] // error: no macros listed for export +extern crate core as macros_for_good; + +#[macro_reexport(fun_macro = "foo")] // error: not a macro identifier +extern crate core as other_macros_for_good; +``` + +This is a syntax error at the level of attribute declarations. + +Currently, `macro_reexport` requires at least one macro name to be listed. +Unlike `macro_use`, listing no names does not reexport all macros from the +given crate. + +Decide which macros you would like to export and list them properly. + +These are proper reexport declarations: + +```ignore +#[macro_reexport(some_macro, another_macro)] +extern crate macros_for_good; +``` +"##, + +E0468: r##" +A non-root module attempts to import macros from another crate. + +Example of erroneous code: + +```compile_fail,E0468 +mod foo { + #[macro_use(helpful_macro)] // error: must be at crate root to import + extern crate core; // macros from another crate + helpful_macro!(...); +} +``` + +Only `extern crate` imports at the crate root level are allowed to import +macros. + +Either move the macro import to crate root or do without the foreign macros. +This will work: + +```ignore +#[macro_use(helpful_macro)] +extern crate some_crate; + +mod foo { + helpful_macro!(...) +} +``` +"##, + +E0469: r##" +A macro listed for import was not found. + +Erroneous code example: + +```compile_fail,E0469 +#[macro_use(drink, be_merry)] // error: imported macro not found +extern crate collections; + +fn main() { + // ... +} +``` + +Either the listed macro is not contained in the imported crate, or it is not +exported from the given crate. + +This could be caused by a typo. Did you misspell the macro's name? + +Double-check the names of the macros listed for import, and that the crate +in question exports them. + +A working version would be: + +```ignore +// In some_crate crate: +#[macro_export] +macro_rules! eat { + ... +} + +#[macro_export] +macro_rules! drink { + ... +} + +// In your crate: +#[macro_use(eat, drink)] +extern crate some_crate; //ok! +``` +"##, + +E0470: r##" +A macro listed for reexport was not found. + +Erroneous code example: + +```compile_fail,E0470 +#[macro_reexport(drink, be_merry)] +extern crate collections; + +fn main() { + // ... 
+} +``` + +Either the listed macro is not contained in the imported crate, or it is not +exported from the given crate. + +This could be caused by a typo. Did you misspell the macro's name? + +Double-check the names of the macros listed for reexport, and that the crate +in question exports them. + +A working version: + +```ignore +// In some_crate crate: +#[macro_export] +macro_rules! eat { + ... +} + +#[macro_export] +macro_rules! drink { + ... +} + +// In your_crate: +#[macro_reexport(eat, drink)] +extern crate some_crate; +``` +"##, + E0530: r##" A binding shadowed something it shouldn't. @@ -1307,6 +1461,47 @@ match r { ``` "##, +E0532: r##" +Pattern arm did not match expected kind. + +Erroneous code example: + +```compile_fail,E0532 +enum State { + Succeeded, + Failed(String), +} + +fn print_on_failure(state: &State) { + match *state { + // error: expected unit struct/variant or constant, found tuple + // variant `State::Failed` + State::Failed => println!("Failed"), + _ => () + } +} +``` + +To fix this error, ensure the match arm kind is the same as the expression +matched. + +Fixed example: + +``` +enum State { + Succeeded, + Failed(String), +} + +fn print_on_failure(state: &State) { + match *state { + State::Failed(ref msg) => println!("Failed with {}", msg), + _ => () + } +} +``` +"##, + } register_diagnostics! { @@ -1324,7 +1519,7 @@ register_diagnostics! { // E0419, merged into 531 // E0420, merged into 532 // E0421, merged into 531 +// E0422, merged into 531/532 E0531, // unresolved pattern path kind `name` - E0532, // expected pattern path kind, found another pattern path kind // E0427, merged into 530 } diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index 912b39cafff36..0b382fcbfdd51 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -41,26 +41,27 @@ use self::TypeParameters::*; use self::RibKind::*; use self::UseLexicalScopeFlag::*; use self::ModulePrefixResult::*; -use self::ParentLink::*; -use rustc::hir::map::Definitions; +use rustc::hir::map::{Definitions, DefCollector}; use rustc::hir::{self, PrimTy, TyBool, TyChar, TyFloat, TyInt, TyUint, TyStr}; -use rustc::middle::cstore::MacroLoader; +use rustc::middle::cstore::CrateLoader; use rustc::session::Session; use rustc::lint; use rustc::hir::def::*; -use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; +use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId}; use rustc::ty; use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap}; use rustc::util::nodemap::{NodeMap, NodeSet, FnvHashMap, FnvHashSet}; -use syntax::ext::hygiene::Mark; +use syntax::ext::hygiene::{Mark, SyntaxContext}; use syntax::ast::{self, FloatTy}; -use syntax::ast::{CRATE_NODE_ID, Name, NodeId, CrateNum, IntTy, UintTy}; +use syntax::ast::{CRATE_NODE_ID, Name, NodeId, Ident, SpannedIdent, IntTy, UintTy}; +use syntax::ext::base::SyntaxExtension; use syntax::parse::token::{self, keywords}; use syntax::util::lev_distance::find_best_match_for_name; use syntax::visit::{self, FnKind, Visitor}; +use syntax::attr; use syntax::ast::{Arm, BindingMode, Block, Crate, Expr, ExprKind}; use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, Generics}; use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind}; @@ -73,8 +74,10 @@ use errors::DiagnosticBuilder; use std::cell::{Cell, RefCell}; use std::fmt; use std::mem::replace; +use std::rc::Rc; use resolve_imports::{ImportDirective, NameResolution}; +use macros::{InvocationData, LegacyBinding, LegacyScope}; // NB: This module needs to be declared first so 
diagnostics are // registered before they are used. @@ -126,8 +129,6 @@ enum ResolutionError<'a> { IdentifierBoundMoreThanOnceInParameterList(&'a str), /// error E0416: identifier is bound more than once in the same pattern IdentifierBoundMoreThanOnceInSamePattern(&'a str), - /// error E0422: does not name a struct - DoesNotNameAStruct(&'a str), /// error E0423: is a struct variant name, but this expression uses it like a function name StructVariantUsedAsFunction(&'a str), /// error E0424: `self` is not available in a static method @@ -272,13 +273,15 @@ fn resolve_struct_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>, err } ResolutionError::VariableNotBoundInPattern(variable_name, from, to) => { - struct_span_err!(resolver.session, + let mut err = struct_span_err!(resolver.session, span, E0408, "variable `{}` from pattern #{} is not bound in pattern #{}", variable_name, from, - to) + to); + err.span_label(span, &format!("pattern doesn't bind `{}`", variable_name)); + err } ResolutionError::VariableBoundWithDifferentMode(variable_name, pattern_number, @@ -331,15 +334,6 @@ fn resolve_struct_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>, err.span_label(span, &format!("used in a pattern more than once")); err } - ResolutionError::DoesNotNameAStruct(name) => { - let mut err = struct_span_err!(resolver.session, - span, - E0422, - "`{}` does not name a structure", - name); - err.span_label(span, &format!("not a structure")); - err - } ResolutionError::StructVariantUsedAsFunction(path_name) => { let mut err = struct_span_err!(resolver.session, span, @@ -364,9 +358,14 @@ fn resolve_struct_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>, let mut err = struct_span_err!(resolver.session, span, E0425, - "unresolved name `{}`{}", - path, - msg); + "unresolved name `{}`", + path); + if msg != "" { + err.span_label(span, &msg); + } else { + err.span_label(span, &format!("unresolved name")); + } + match context { UnresolvedNameContext::Other => { if msg.is_empty() && is_static_method && is_field { @@ -478,7 +477,7 @@ fn resolve_struct_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>, E0531, "unresolved {} `{}`", expected_what, - path.segments.last().unwrap().identifier) + path) } ResolutionError::PatPathUnexpected(expected_what, found_what, path) => { struct_span_err!(resolver.session, @@ -487,7 +486,7 @@ fn resolve_struct_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>, "expected {}, found {} `{}`", expected_what, found_what, - path.segments.last().unwrap().identifier) + path) } } } @@ -499,7 +498,7 @@ struct BindingInfo { } // Map from the name in a pattern to its binding mode. -type BindingMap = FnvHashMap; +type BindingMap = FnvHashMap; #[derive(Copy, Clone, PartialEq, Eq, Debug)] enum PatternSource { @@ -704,7 +703,7 @@ enum ModulePrefixResult<'a> { /// One local scope. #[derive(Debug)] struct Rib<'a> { - bindings: FnvHashMap, + bindings: FnvHashMap, kind: RibKind<'a>, } @@ -753,18 +752,15 @@ impl<'a> LexicalScopeBinding<'a> { } } -/// The link from a module up to its nearest parent node. -#[derive(Clone,Debug)] -enum ParentLink<'a> { - NoParentLink, - ModuleParentLink(Module<'a>, Name), - BlockParentLink(Module<'a>, NodeId), +enum ModuleKind { + Block(NodeId), + Def(Def, Name), } /// One node in the tree of modules. pub struct ModuleS<'a> { - parent_link: ParentLink<'a>, - def: Option, + parent: Option>, + kind: ModuleKind, // The node id of the closest normal module (`mod`) ancestor (including this module). 
normal_ancestor_id: Option, @@ -775,7 +771,7 @@ pub struct ModuleS<'a> { resolutions: RefCell>>>, - no_implicit_prelude: Cell, + no_implicit_prelude: bool, glob_importers: RefCell>>, globs: RefCell>>, @@ -792,19 +788,18 @@ pub struct ModuleS<'a> { pub type Module<'a> = &'a ModuleS<'a>; impl<'a> ModuleS<'a> { - fn new(parent_link: ParentLink<'a>, def: Option, normal_ancestor_id: Option) - -> Self { + fn new(parent: Option>, kind: ModuleKind) -> Self { ModuleS { - parent_link: parent_link, - def: def, - normal_ancestor_id: normal_ancestor_id, + parent: parent, + kind: kind, + normal_ancestor_id: None, extern_crate_id: None, resolutions: RefCell::new(FnvHashMap()), - no_implicit_prelude: Cell::new(false), + no_implicit_prelude: false, glob_importers: RefCell::new(Vec::new()), globs: RefCell::new((Vec::new())), traits: RefCell::new(None), - populated: Cell::new(normal_ancestor_id.is_some()), + populated: Cell::new(true), } } @@ -814,36 +809,40 @@ impl<'a> ModuleS<'a> { } } + fn def(&self) -> Option { + match self.kind { + ModuleKind::Def(def, _) => Some(def), + _ => None, + } + } + fn def_id(&self) -> Option { - self.def.as_ref().map(Def::def_id) + self.def().as_ref().map(Def::def_id) } // `self` resolves to the first module ancestor that `is_normal`. fn is_normal(&self) -> bool { - match self.def { - Some(Def::Mod(_)) => true, + match self.kind { + ModuleKind::Def(Def::Mod(_), _) => true, _ => false, } } fn is_trait(&self) -> bool { - match self.def { - Some(Def::Trait(_)) => true, + match self.kind { + ModuleKind::Def(Def::Trait(_), _) => true, _ => false, } } - fn parent(&self) -> Option<&'a Self> { - match self.parent_link { - ModuleParentLink(parent, _) | BlockParentLink(parent, _) => Some(parent), - NoParentLink => None, - } + fn is_local(&self) -> bool { + self.normal_ancestor_id.is_some() } } impl<'a> fmt::Debug for ModuleS<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{:?}", self.def) + write!(f, "{:?}", self.def()) } } @@ -903,7 +902,7 @@ impl<'a> NameBinding<'a> { fn def(&self) -> Def { match self.kind { NameBindingKind::Def(def) => def, - NameBindingKind::Module(module) => module.def.unwrap(), + NameBindingKind::Module(module) => module.def().unwrap(), NameBindingKind::Import { binding, .. } => binding.def(), NameBindingKind::Ambiguity { .. } => Def::Err, } @@ -916,7 +915,8 @@ impl<'a> NameBinding<'a> { fn is_variant(&self) -> bool { match self.kind { - NameBindingKind::Def(Def::Variant(..)) => true, + NameBindingKind::Def(Def::Variant(..)) | + NameBindingKind::Def(Def::VariantCtor(..)) => true, _ => false, } } @@ -997,7 +997,9 @@ pub struct Resolver<'a> { trait_item_map: FnvHashMap<(Name, DefId), bool /* is static method? */>, - structs: FnvHashMap>, + // Names of fields of an item `DefId` accessible with dot syntax. + // Used for hints during error reporting. + field_names: FnvHashMap>, // All imports known to succeed or fail. determined_imports: Vec<&'a ImportDirective<'a>>, @@ -1065,16 +1067,19 @@ pub struct Resolver<'a> { privacy_errors: Vec>, ambiguity_errors: Vec>, + disallowed_shadowing: Vec<(Name, Span, LegacyScope<'a>)>, arenas: &'a ResolverArenas<'a>, dummy_binding: &'a NameBinding<'a>, new_import_semantics: bool, // true if `#![feature(item_like_imports)]` - macro_loader: &'a mut MacroLoader, + pub exported_macros: Vec, + crate_loader: &'a mut CrateLoader, macro_names: FnvHashSet, + builtin_macros: FnvHashMap>, // Maps the `Mark` of an expansion to its containing module or block. 
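The `ModuleS` changes above replace `ParentLink` with a plain `parent` pointer plus a `ModuleKind`, so anonymous block modules and named definitions are distinguished by the kind rather than by the shape of the parent link, and helpers such as `def()`, `is_normal()` and `is_trait()` simply read that kind. As a rough mental model only, here is a self-contained sketch of that shape; the index-based `Tree`, the two-variant `Def` enum and the example names are invented stand-ins for rustc's arena-allocated `Module<'a>`:

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum Def {
    Mod,
    Trait,
}

enum ModuleKind {
    // An anonymous module introduced by a block `{ ... }`.
    Block,
    // A named definition such as `mod foo` or `trait Bar`.
    Def(Def, &'static str),
}

struct Module {
    parent: Option<usize>, // index of the parent module in `Tree::modules`
    kind: ModuleKind,
}

struct Tree {
    modules: Vec<Module>,
}

impl Tree {
    // Blocks have no `Def`; named modules and traits do.
    fn def(&self, id: usize) -> Option<Def> {
        match self.modules[id].kind {
            ModuleKind::Def(def, _) => Some(def),
            ModuleKind::Block => None,
        }
    }

    // Walk `parent` links until a real `mod` is found: the "nearest normal ancestor".
    fn nearest_normal_ancestor(&self, mut id: usize) -> usize {
        loop {
            if let ModuleKind::Def(Def::Mod, _) = self.modules[id].kind {
                return id;
            }
            id = self.modules[id].parent.expect("the crate root is always a normal module");
        }
    }
}

fn main() {
    // 0: crate root, 1: `mod foo`, 2: a block inside `foo`, 3: `trait Baz` inside the block.
    let tree = Tree {
        modules: vec![
            Module { parent: None, kind: ModuleKind::Def(Def::Mod, "root") },
            Module { parent: Some(0), kind: ModuleKind::Def(Def::Mod, "foo") },
            Module { parent: Some(1), kind: ModuleKind::Block },
            Module { parent: Some(2), kind: ModuleKind::Def(Def::Trait, "Baz") },
        ],
    };
    assert_eq!(tree.def(2), None); // blocks are not definitions
    if let ModuleKind::Def(_, name) = tree.modules[tree.nearest_normal_ancestor(3)].kind {
        println!("nearest `mod` ancestor of `Baz` is `{}`", name);
    }
}
```

Walking `parent` links until a `mod` is reached is the same idea as the `normal_ancestor_id` bookkeeping above, just computed on demand instead of being cached on each module.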
- expansion_data: Vec, + invocations: FnvHashMap>, } pub struct ResolverArenas<'a> { @@ -1083,6 +1088,8 @@ pub struct ResolverArenas<'a> { name_bindings: arena::TypedArena>, import_directives: arena::TypedArena>, name_resolutions: arena::TypedArena>>, + invocation_data: arena::TypedArena>, + legacy_bindings: arena::TypedArena>, } impl<'a> ResolverArenas<'a> { @@ -1106,12 +1113,19 @@ impl<'a> ResolverArenas<'a> { fn alloc_name_resolution(&'a self) -> &'a RefCell> { self.name_resolutions.alloc(Default::default()) } + fn alloc_invocation_data(&'a self, expansion_data: InvocationData<'a>) + -> &'a InvocationData<'a> { + self.invocation_data.alloc(expansion_data) + } + fn alloc_legacy_binding(&'a self, binding: LegacyBinding<'a>) -> &'a LegacyBinding<'a> { + self.legacy_bindings.alloc(binding) + } } impl<'a> ty::NodeIdTree for Resolver<'a> { fn is_descendant_of(&self, mut node: NodeId, ancestor: NodeId) -> bool { while node != ancestor { - node = match self.module_map[&node].parent() { + node = match self.module_map[&node].parent { Some(parent) => parent.normal_ancestor_id.unwrap(), None => return false, } @@ -1151,44 +1165,54 @@ impl<'a> hir::lowering::Resolver for Resolver<'a> { self.def_map.insert(id, PathResolution::new(def)); } - fn definitions(&mut self) -> Option<&mut Definitions> { - Some(&mut self.definitions) + fn definitions(&mut self) -> &mut Definitions { + &mut self.definitions } } trait Named { - fn name(&self) -> Name; + fn ident(&self) -> Ident; } impl Named for ast::PathSegment { - fn name(&self) -> Name { - self.identifier.name + fn ident(&self) -> Ident { + self.identifier } } impl Named for hir::PathSegment { - fn name(&self) -> Name { - self.name + fn ident(&self) -> Ident { + Ident::with_empty_ctxt(self.name) } } impl<'a> Resolver<'a> { pub fn new(session: &'a Session, + krate: &Crate, make_glob_map: MakeGlobMap, - macro_loader: &'a mut MacroLoader, + crate_loader: &'a mut CrateLoader, arenas: &'a ResolverArenas<'a>) -> Resolver<'a> { - let root_def_id = DefId::local(CRATE_DEF_INDEX); - let graph_root = - ModuleS::new(NoParentLink, Some(Def::Mod(root_def_id)), Some(CRATE_NODE_ID)); - let graph_root = arenas.alloc_module(graph_root); + let root_def = Def::Mod(DefId::local(CRATE_DEF_INDEX)); + let graph_root = arenas.alloc_module(ModuleS { + normal_ancestor_id: Some(CRATE_NODE_ID), + no_implicit_prelude: attr::contains_name(&krate.attrs, "no_implicit_prelude"), + ..ModuleS::new(None, ModuleKind::Def(root_def, keywords::Invalid.name())) + }); let mut module_map = NodeMap(); module_map.insert(CRATE_NODE_ID, graph_root); + let mut definitions = Definitions::new(); + DefCollector::new(&mut definitions).collect_root(); + + let mut invocations = FnvHashMap(); + invocations.insert(Mark::root(), + arenas.alloc_invocation_data(InvocationData::root(graph_root))); + Resolver { session: session, - definitions: Definitions::new(), + definitions: definitions, macros_at_scope: FnvHashMap(), // The outermost module has def ID 0; this is not reflected in the @@ -1197,7 +1221,7 @@ impl<'a> Resolver<'a> { prelude: None, trait_item_map: FnvHashMap(), - structs: FnvHashMap(), + field_names: FnvHashMap(), determined_imports: Vec::new(), indeterminate_imports: Vec::new(), @@ -1229,6 +1253,7 @@ impl<'a> Resolver<'a> { privacy_errors: Vec::new(), ambiguity_errors: Vec::new(), + disallowed_shadowing: Vec::new(), arenas: arenas, dummy_binding: arenas.alloc_name_binding(NameBinding { @@ -1238,9 +1263,11 @@ impl<'a> Resolver<'a> { }), new_import_semantics: session.features.borrow().item_like_imports, - 
macro_loader: macro_loader, + exported_macros: Vec::new(), + crate_loader: crate_loader, macro_names: FnvHashSet(), - expansion_data: vec![macros::ExpansionData::default()], + builtin_macros: FnvHashMap(), + invocations: invocations, } } @@ -1251,31 +1278,34 @@ impl<'a> Resolver<'a> { name_bindings: arena::TypedArena::new(), import_directives: arena::TypedArena::new(), name_resolutions: arena::TypedArena::new(), + invocation_data: arena::TypedArena::new(), + legacy_bindings: arena::TypedArena::new(), } } /// Entry point to crate resolution. pub fn resolve_crate(&mut self, krate: &Crate) { + // Collect `DefId`s for exported macro defs. + for def in &krate.exported_macros { + DefCollector::new(&mut self.definitions).with_parent(CRATE_DEF_INDEX, |collector| { + collector.visit_macro_def(def) + }) + } + self.current_module = self.graph_root; visit::walk_crate(self, krate); check_unused::check_crate(self, krate); self.report_errors(); + self.crate_loader.postprocess(krate); } - fn new_module(&self, - parent_link: ParentLink<'a>, - def: Option, - normal_ancestor_id: Option) - -> Module<'a> { - self.arenas.alloc_module(ModuleS::new(parent_link, def, normal_ancestor_id)) - } - - fn new_extern_crate_module(&self, parent_link: ParentLink<'a>, def: Def, local_node_id: NodeId) - -> Module<'a> { - let mut module = ModuleS::new(parent_link, Some(def), Some(local_node_id)); - module.extern_crate_id = Some(local_node_id); - self.arenas.modules.alloc(module) + fn new_module(&self, parent: Module<'a>, kind: ModuleKind, local: bool) -> Module<'a> { + self.arenas.alloc_module(ModuleS { + normal_ancestor_id: if local { self.current_module.normal_ancestor_id } else { None }, + populated: Cell::new(local), + ..ModuleS::new(Some(parent), kind) + }) } fn get_ribs<'b>(&'b mut self, ns: Namespace) -> &'b mut Vec> { @@ -1327,7 +1357,7 @@ impl<'a> Resolver<'a> { /// Resolves the given module path from the given root `search_module`. fn resolve_module_path_from_root(&mut self, mut search_module: Module<'a>, - module_path: &[Name], + module_path: &[Ident], index: usize, span: Option) -> ResolveResult> { @@ -1335,11 +1365,10 @@ impl<'a> Resolver<'a> { -> Option> { match this.resolve_name_in_module(module, needle, TypeNS, false, None) { Success(binding) if binding.is_extern_crate() => Some(module), - _ => match module.parent_link { - ModuleParentLink(ref parent, _) => { - search_parent_externals(this, needle, parent) - } - _ => None, + _ => if let (&ModuleKind::Def(..), Some(parent)) = (&module.kind, module.parent) { + search_parent_externals(this, needle, parent) + } else { + None }, } } @@ -1351,7 +1380,7 @@ impl<'a> Resolver<'a> { // upward though scope chains; we simply resolve names directly in // modules as we go. while index < module_path_len { - let name = module_path[index]; + let name = module_path[index].name; match self.resolve_name_in_module(search_module, name, TypeNS, false, span) { Failed(_) => { let segment_name = name.as_str(); @@ -1372,7 +1401,7 @@ impl<'a> Resolver<'a> { format!("Did you mean `{}{}`?", prefix, path_str) } - None => format!("Maybe a missing `extern crate {}`?", segment_name), + None => format!("Maybe a missing `extern crate {};`?", segment_name), } } else { format!("Could not find `{}` in `{}`", segment_name, module_name) @@ -1405,7 +1434,7 @@ impl<'a> Resolver<'a> { /// Attempts to resolve the module part of an import directive or path /// rooted at the given module. 
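`resolve_module_path_from_root` above resolves a path one segment at a time: look the segment up in the current module, descend into the result, and report a "Could not find `x` in `y`" style message as soon as a segment is missing. A minimal sketch of that walk over a toy `HashMap`-based module tree follows; the `Module` type here is invented, and only the shape of the error message is borrowed from the code above:

```rust
use std::collections::HashMap;

// A toy module tree: each module knows its name and its child modules.
#[derive(Debug)]
struct Module {
    name: String,
    children: HashMap<String, Module>,
}

impl Module {
    fn new(name: &str) -> Module {
        Module { name: name.to_string(), children: HashMap::new() }
    }
}

// Resolve `path` one segment at a time, starting from `root`,
// mirroring the segment-by-segment loop above.
fn resolve_module_path<'a>(root: &'a Module, path: &[&str]) -> Result<&'a Module, String> {
    let mut search_module = root;
    for segment in path {
        search_module = match search_module.children.get(*segment) {
            Some(child) => child,
            None => {
                return Err(format!("Could not find `{}` in `{}`", segment, search_module.name))
            }
        };
    }
    Ok(search_module)
}

fn main() {
    let mut root = Module::new("root");
    let mut a = Module::new("a");
    a.children.insert("b".to_string(), Module::new("b"));
    root.children.insert("a".to_string(), a);

    assert_eq!(resolve_module_path(&root, &["a", "b"]).unwrap().name, "b");
    assert_eq!(
        resolve_module_path(&root, &["a", "c"]).unwrap_err(),
        "Could not find `c` in `a`"
    );
}
```

The real resolver also has to handle lexical scope, `self`/`super` prefixes and `$crate`, none of which the sketch attempts.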
fn resolve_module_path(&mut self, - module_path: &[Name], + module_path: &[Ident], use_lexical_scope: UseLexicalScopeFlag, span: Option) -> ResolveResult> { @@ -1443,7 +1472,7 @@ impl<'a> Resolver<'a> { // This is not a crate-relative path. We resolve the // first component of the path in the current lexical // scope and then proceed to resolve below that. - let ident = ast::Ident::with_empty_ctxt(module_path[0]); + let ident = module_path[0]; let lexical_binding = self.resolve_ident_in_lexical_scope(ident, TypeNS, span); if let Some(binding) = lexical_binding.and_then(LexicalScopeBinding::item) { @@ -1489,12 +1518,12 @@ impl<'a> Resolver<'a> { /// Invariant: This must only be called during main resolution, not during /// import resolution. fn resolve_ident_in_lexical_scope(&mut self, - mut ident: ast::Ident, + mut ident: Ident, ns: Namespace, record_used: Option) -> Option> { if ns == TypeNS { - ident = ast::Ident::with_empty_ctxt(ident.name); + ident = Ident::with_empty_ctxt(ident.name); } // Walk backwards up the ribs in scope. @@ -1515,15 +1544,13 @@ impl<'a> Resolver<'a> { return Some(LexicalScopeBinding::Item(binding)); } - // We can only see through anonymous modules - if module.def.is_some() { - return match self.prelude { - Some(prelude) if !module.no_implicit_prelude.get() => { - self.resolve_name_in_module(prelude, name, ns, false, None).success() - .map(LexicalScopeBinding::Item) - } - _ => None, - }; + if let ModuleKind::Block(..) = module.kind { // We can see through blocks + } else if !module.no_implicit_prelude { + return self.prelude.and_then(|prelude| { + self.resolve_name_in_module(prelude, name, ns, false, None).success() + }).map(LexicalScopeBinding::Item) + } else { + return None; } } @@ -1543,11 +1570,15 @@ impl<'a> Resolver<'a> { /// Resolves a "module prefix". A module prefix is one or both of (a) `self::`; /// (b) some chain of `super::`. /// grammar: (SELF MOD_SEP ) ? (SUPER MOD_SEP) * - fn resolve_module_prefix(&mut self, module_path: &[Name], span: Option) + fn resolve_module_prefix(&mut self, module_path: &[Ident], span: Option) -> ResolveResult> { + if &*module_path[0].name.as_str() == "$crate" { + return Success(PrefixFound(self.resolve_crate_var(module_path[0].ctxt), 1)); + } + // Start at the current module if we see `self` or `super`, or at the // top of the crate otherwise. - let mut i = match &*module_path[0].as_str() { + let mut i = match &*module_path[0].name.as_str() { "self" => 1, "super" => 0, _ => return Success(NoPrefixFound), @@ -1557,10 +1588,10 @@ impl<'a> Resolver<'a> { self.module_map[&self.current_module.normal_ancestor_id.unwrap()]; // Now loop through all the `super`s we find. 
- while i < module_path.len() && "super" == module_path[i].as_str() { + while i < module_path.len() && "super" == module_path[i].name.as_str() { debug!("(resolving module prefix) resolving `super` at {}", module_to_string(&containing_module)); - if let Some(parent) = containing_module.parent() { + if let Some(parent) = containing_module.parent { containing_module = self.module_map[&parent.normal_ancestor_id.unwrap()]; i += 1; } else { @@ -1575,6 +1606,14 @@ impl<'a> Resolver<'a> { return Success(PrefixFound(containing_module, i)); } + fn resolve_crate_var(&mut self, mut crate_var_ctxt: SyntaxContext) -> Module<'a> { + while crate_var_ctxt.source().0 != SyntaxContext::empty() { + crate_var_ctxt = crate_var_ctxt.source().0; + } + let module = self.invocations[&crate_var_ctxt.source().1].module.get(); + if module.is_local() { self.graph_root } else { module } + } + // AST resolution // // We maintain a list of value ribs and type ribs. @@ -1615,7 +1654,7 @@ impl<'a> Resolver<'a> { /// Searches the current set of local scopes for labels. /// Stops after meeting a closure. - fn search_label(&self, mut ident: ast::Ident) -> Option { + fn search_label(&self, mut ident: Ident) -> Option { for rib in self.label_ribs.iter().rev() { match rib.kind { NormalRibKind => { @@ -1779,7 +1818,7 @@ impl<'a> Resolver<'a> { // plain insert (no renaming) let def_id = self.definitions.local_def_id(type_parameter.id); let def = Def::TyParam(def_id); - function_type_rib.bindings.insert(ast::Ident::with_empty_ctxt(name), def); + function_type_rib.bindings.insert(Ident::with_empty_ctxt(name), def); self.record_def(type_parameter.id, PathResolution::new(def)); } self.type_ribs.push(function_type_rib); @@ -1959,7 +1998,8 @@ impl<'a> Resolver<'a> { // Resolve the self type. this.visit_ty(self_type); - this.with_self_rib(Def::SelfTy(trait_id, Some(item_id)), |this| { + let item_def_id = this.definitions.local_def_id(item_id); + this.with_self_rib(Def::SelfTy(trait_id, Some(item_def_id)), |this| { this.with_current_self_type(self_type, |this| { for impl_item in impl_items { this.resolve_visibility(&impl_item.vis); @@ -2232,18 +2272,18 @@ impl<'a> Resolver<'a> { } fn fresh_binding(&mut self, - ident: &ast::SpannedIdent, + ident: &SpannedIdent, pat_id: NodeId, outer_pat_id: NodeId, pat_src: PatternSource, - bindings: &mut FnvHashMap) + bindings: &mut FnvHashMap) -> PathResolution { // Add the binding to the local ribs, if it // doesn't already exist in the bindings map. (We // must not add it if it's in the bindings map // because that breaks the assumptions later // passes make about or-patterns.) - let mut def = Def::Local(self.definitions.local_def_id(pat_id), pat_id); + let mut def = Def::Local(self.definitions.local_def_id(pat_id)); match bindings.get(&ident.node).cloned() { Some(id) if id == outer_pat_id => { // `Variant(a, a)`, error @@ -2332,12 +2372,24 @@ impl<'a> Resolver<'a> { self.record_def(pat_id, resolution); } + fn resolve_struct_path(&mut self, node_id: NodeId, path: &Path) { + // Resolution logic is equivalent for expressions and patterns, + // reuse `resolve_pattern_path` for both. + self.resolve_pattern_path(node_id, None, path, TypeNS, |def| { + match def { + Def::Struct(..) | Def::Union(..) | Def::Variant(..) | + Def::TyAlias(..) | Def::AssociatedTy(..) | Def::SelfTy(..) => true, + _ => false, + } + }, "struct, variant or union type"); + } + fn resolve_pattern(&mut self, pat: &Pat, pat_src: PatternSource, // Maps idents to the node ID for the // outermost pattern that binds them. 
- bindings: &mut FnvHashMap) { + bindings: &mut FnvHashMap) { // Visit all direct subpatterns of this pattern. let outer_pat_id = pat.id; pat.walk(&mut |pat| { @@ -2351,15 +2403,16 @@ impl<'a> Resolver<'a> { let always_binding = !pat_src.is_refutable() || opt_pat.is_some() || bmode != BindingMode::ByValue(Mutability::Immutable); match def { - Def::Struct(..) | Def::Variant(..) | - Def::Const(..) | Def::AssociatedConst(..) if !always_binding => { - // A constant, unit variant, etc pattern. + Def::StructCtor(_, CtorKind::Const) | + Def::VariantCtor(_, CtorKind::Const) | + Def::Const(..) if !always_binding => { + // A unit struct/variant or constant pattern. let name = ident.node.name; self.record_use(name, ValueNS, binding.unwrap(), ident.span); Some(PathResolution::new(def)) } - Def::Struct(..) | Def::Variant(..) | - Def::Const(..) | Def::AssociatedConst(..) | Def::Static(..) => { + Def::StructCtor(..) | Def::VariantCtor(..) | + Def::Const(..) | Def::Static(..) => { // A fresh binding that shadows something unacceptable. resolve_error( self, @@ -2376,7 +2429,7 @@ impl<'a> Resolver<'a> { } def => { span_bug!(ident.span, "unexpected definition for an \ - identifier in pattern {:?}", def); + identifier in pattern: {:?}", def); } } }).unwrap_or_else(|| { @@ -2389,30 +2442,26 @@ impl<'a> Resolver<'a> { PatKind::TupleStruct(ref path, ..) => { self.resolve_pattern_path(pat.id, None, path, ValueNS, |def| { match def { - Def::Struct(..) | Def::Variant(..) => true, + Def::StructCtor(_, CtorKind::Fn) | + Def::VariantCtor(_, CtorKind::Fn) => true, _ => false, } - }, "variant or struct"); + }, "tuple struct/variant"); } PatKind::Path(ref qself, ref path) => { self.resolve_pattern_path(pat.id, qself.as_ref(), path, ValueNS, |def| { match def { - Def::Struct(..) | Def::Variant(..) | + Def::StructCtor(_, CtorKind::Const) | + Def::VariantCtor(_, CtorKind::Const) | Def::Const(..) | Def::AssociatedConst(..) => true, _ => false, } - }, "variant, struct or constant"); + }, "unit struct/variant or constant"); } PatKind::Struct(ref path, ..) => { - self.resolve_pattern_path(pat.id, None, path, TypeNS, |def| { - match def { - Def::Struct(..) | Def::Union(..) | Def::Variant(..) | - Def::TyAlias(..) | Def::AssociatedTy(..) => true, - _ => false, - } - }, "variant, struct or type alias"); + self.resolve_struct_path(pat.id, path); } _ => {} @@ -2520,7 +2569,8 @@ impl<'a> Resolver<'a> { let unqualified_def = resolve_identifier_with_fallback(self, None); let qualified_binding = self.resolve_module_relative_path(span, segments, namespace); match (qualified_binding, unqualified_def) { - (Ok(binding), Some(ref ud)) if binding.def() == ud.def => { + (Ok(binding), Some(ref ud)) if binding.def() == ud.def && + segments[0].identifier.name.as_str() != "$crate" => { self.session .add_lint(lint::builtin::UNUSED_QUALIFICATIONS, id, @@ -2535,7 +2585,7 @@ impl<'a> Resolver<'a> { // Resolve a single identifier fn resolve_identifier(&mut self, - identifier: ast::Ident, + identifier: Ident, namespace: Namespace, record_used: Option) -> Option { @@ -2559,7 +2609,7 @@ impl<'a> Resolver<'a> { Def::Upvar(..) => { span_bug!(span, "unexpected {:?} in bindings", def) } - Def::Local(_, node_id) => { + Def::Local(def_id) => { for rib in ribs { match rib.kind { NormalRibKind | ModuleRibKind(..) | MacroDefinition(..) 
=> { @@ -2567,13 +2617,13 @@ impl<'a> Resolver<'a> { } ClosureRibKind(function_id) => { let prev_def = def; - let node_def_id = self.definitions.local_def_id(node_id); + let node_id = self.definitions.as_local_node_id(def_id).unwrap(); let seen = self.freevars_seen .entry(function_id) .or_insert_with(|| NodeMap()); if let Some(&index) = seen.get(&node_id) { - def = Def::Upvar(node_def_id, node_id, index, function_id); + def = Def::Upvar(def_id, index, function_id); continue; } let vec = self.freevars @@ -2585,7 +2635,7 @@ impl<'a> Resolver<'a> { span: span, }); - def = Def::Upvar(node_def_id, node_id, depth, function_id); + def = Def::Upvar(def_id, depth, function_id); seen.insert(node_id, depth); } ItemRibKind | MethodRibKind(_) => { @@ -2643,12 +2693,8 @@ impl<'a> Resolver<'a> { namespace: Namespace) -> Result<&'a NameBinding<'a>, bool /* true if an error was reported */> { - let module_path = segments.split_last() - .unwrap() - .1 - .iter() - .map(|ps| ps.identifier.name) - .collect::>(); + let module_path = + segments.split_last().unwrap().1.iter().map(|ps| ps.identifier).collect::>(); let containing_module; match self.resolve_module_path(&module_path, UseLexicalScope, Some(span)) { @@ -2677,7 +2723,7 @@ impl<'a> Resolver<'a> { bool /* true if an error was reported */> where T: Named, { - let module_path = segments.split_last().unwrap().1.iter().map(T::name).collect::>(); + let module_path = segments.split_last().unwrap().1.iter().map(T::ident).collect::>(); let root_module = self.graph_root; let containing_module; @@ -2696,7 +2742,7 @@ impl<'a> Resolver<'a> { } } - let name = segments.last().unwrap().name(); + let name = segments.last().unwrap().ident().name; let result = self.resolve_name_in_module(containing_module, name, namespace, false, Some(span)); result.success().ok_or(false) @@ -2754,10 +2800,9 @@ impl<'a> Resolver<'a> { // Look for a field with the same name in the current self_type. 
if let Some(resolution) = self.def_map.get(&node_id) { match resolution.base_def { - Def::Enum(did) | Def::TyAlias(did) | Def::Union(did) | - Def::Struct(did) | Def::Variant(_, did) if resolution.depth == 0 => { - if let Some(fields) = self.structs.get(&did) { - if fields.iter().any(|&field_name| name == field_name) { + Def::Struct(did) | Def::Union(did) if resolution.depth == 0 => { + if let Some(field_names) = self.field_names.get(&did) { + if field_names.iter().any(|&field_name| name == field_name) { return Field; } } @@ -2798,11 +2843,11 @@ impl<'a> Resolver<'a> { } SuggestionType::NotFound } - fn resolve_labeled_block(&mut self, label: Option, id: NodeId, block: &Block) { + fn resolve_labeled_block(&mut self, label: Option, id: NodeId, block: &Block) { if let Some(label) = label { let def = Def::Label(id); self.with_label_rib(|this| { - this.label_ribs.last_mut().unwrap().bindings.insert(label, def); + this.label_ribs.last_mut().unwrap().bindings.insert(label.node, def); this.visit_block(block); }); } else { @@ -2824,13 +2869,11 @@ impl<'a> Resolver<'a> { if let Some(path_res) = self.resolve_possibly_assoc_item(expr.id, maybe_qself.as_ref(), path, ValueNS) { // Check if struct variant - let is_struct_variant = if let Def::Variant(_, variant_id) = path_res.base_def { - self.structs.contains_key(&variant_id) - } else { - false + let is_struct_variant = match path_res.base_def { + Def::VariantCtor(_, CtorKind::Fictive) => true, + _ => false, }; if is_struct_variant { - let _ = self.structs.contains_key(&path_res.base_def.def_id()); let path_name = path_names_to_string(path, 0); let mut err = resolve_struct_error(self, @@ -2863,9 +2906,6 @@ impl<'a> Resolver<'a> { } } else { // Be helpful if the name refers to a struct - // (The pattern matching def_tys where the id is in self.structs - // matches on regular structs while excluding tuple- and enum-like - // structs, which wouldn't result in this error.) let path_name = path_names_to_string(path, 0); let type_res = self.with_no_errors(|this| { this.resolve_path(expr.id, path, 0, TypeNS) @@ -2941,18 +2981,17 @@ impl<'a> Resolver<'a> { let mut context = UnresolvedNameContext::Other; let mut def = Def::Err; if !msg.is_empty() { - msg = format!(". Did you mean {}?", msg); + msg = format!("did you mean {}?", msg); } else { // we display a help message if this is a module - let name_path = path.segments.iter() - .map(|seg| seg.identifier.name) - .collect::>(); + let name_path: Vec<_> = + path.segments.iter().map(|seg| seg.identifier).collect(); match self.resolve_module_path(&name_path[..], UseLexicalScope, Some(expr.span)) { Success(e) => { - if let Some(def_type) = e.def { + if let Some(def_type) = e.def() { def = def_type; } context = UnresolvedNameContext::PathIsMod(parent); @@ -2980,40 +3019,11 @@ impl<'a> Resolver<'a> { } ExprKind::Struct(ref path, ..) => { - // Resolve the path to the structure it goes to. We don't - // check to ensure that the path is actually a structure; that - // is checked later during typeck. 
- match self.resolve_path(expr.id, path, 0, TypeNS) { - Ok(definition) => self.record_def(expr.id, definition), - Err(true) => self.record_def(expr.id, err_path_resolution()), - Err(false) => { - debug!("(resolving expression) didn't find struct def",); - - resolve_error(self, - path.span, - ResolutionError::DoesNotNameAStruct( - &path_names_to_string(path, 0)) - ); - self.record_def(expr.id, err_path_resolution()); - } - } + self.resolve_struct_path(expr.id, path); visit::walk_expr(self, expr); } - ExprKind::Loop(_, Some(label)) | ExprKind::While(.., Some(label)) => { - self.with_label_rib(|this| { - let def = Def::Label(expr.id); - - { - let rib = this.label_ribs.last_mut().unwrap(); - rib.bindings.insert(label.node, def); - } - - visit::walk_expr(this, expr); - }) - } - ExprKind::Break(Some(label)) | ExprKind::Continue(Some(label)) => { match self.search_label(label.node) { None => { @@ -3043,12 +3053,19 @@ impl<'a> Resolver<'a> { optional_else.as_ref().map(|expr| self.visit_expr(expr)); } + ExprKind::Loop(ref block, label) => self.resolve_labeled_block(label, expr.id, &block), + + ExprKind::While(ref subexpression, ref block, label) => { + self.visit_expr(subexpression); + self.resolve_labeled_block(label, expr.id, &block); + } + ExprKind::WhileLet(ref pattern, ref subexpression, ref block, label) => { self.visit_expr(subexpression); self.value_ribs.push(Rib::new(NormalRibKind)); self.resolve_pattern(pattern, PatternSource::WhileLet, &mut FnvHashMap()); - self.resolve_labeled_block(label.map(|l| l.node), expr.id, block); + self.resolve_labeled_block(label, expr.id, block); self.value_ribs.pop(); } @@ -3058,7 +3075,7 @@ impl<'a> Resolver<'a> { self.value_ribs.push(Rib::new(NormalRibKind)); self.resolve_pattern(pattern, PatternSource::For, &mut FnvHashMap()); - self.resolve_labeled_block(label.map(|l| l.node), expr.id, block); + self.resolve_labeled_block(label, expr.id, block); self.value_ribs.pop(); } @@ -3161,16 +3178,13 @@ impl<'a> Resolver<'a> { }; search_in_module(self, search_module); - match search_module.parent_link { - NoParentLink | ModuleParentLink(..) => { - if !search_module.no_implicit_prelude.get() { - self.prelude.map(|prelude| search_in_module(self, prelude)); - } - break; - } - BlockParentLink(parent_module, _) => { - search_module = parent_module; + if let ModuleKind::Block(..) 
= search_module.kind { + search_module = search_module.parent.unwrap(); + } else { + if !search_module.no_implicit_prelude { + self.prelude.map(|prelude| search_in_module(self, prelude)); } + break; } } @@ -3208,7 +3222,7 @@ impl<'a> Resolver<'a> { if name == lookup_name && ns == namespace { if filter_fn(name_binding.def()) { // create the path - let ident = ast::Ident::with_empty_ctxt(name); + let ident = Ident::with_empty_ctxt(name); let params = PathParameters::none(); let segment = PathSegment { identifier: ident, @@ -3238,11 +3252,11 @@ impl<'a> Resolver<'a> { // collect submodules to explore if let Ok(module) = name_binding.module() { // form the path - let path_segments = match module.parent_link { - NoParentLink => path_segments.clone(), - ModuleParentLink(_, name) => { + let path_segments = match module.kind { + _ if module.parent.is_none() => path_segments.clone(), + ModuleKind::Def(_, name) => { let mut paths = path_segments.clone(); - let ident = ast::Ident::with_empty_ctxt(name); + let ident = Ident::with_empty_ctxt(name); let params = PathParameters::none(); let segm = PathSegment { identifier: ident, @@ -3257,7 +3271,7 @@ impl<'a> Resolver<'a> { if !in_module_is_extern || name_binding.vis == ty::Visibility::Public { // add the module to the lookup let is_extern = in_module_is_extern || name_binding.is_extern_crate(); - if !worklist.iter().any(|&(m, ..)| m.def == module.def) { + if !worklist.iter().any(|&(m, ..)| m.def() == module.def()) { worklist.push((module, path_segments, is_extern)); } } @@ -3288,11 +3302,11 @@ impl<'a> Resolver<'a> { } }; - let segments: Vec<_> = path.segments.iter().map(|seg| seg.identifier.name).collect(); + let segments: Vec<_> = path.segments.iter().map(|seg| seg.identifier).collect(); let mut path_resolution = err_path_resolution(); let vis = match self.resolve_module_path(&segments, DontUseLexicalScope, Some(path.span)) { Success(module) => { - path_resolution = PathResolution::new(module.def.unwrap()); + path_resolution = PathResolution::new(module.def().unwrap()); ty::Visibility::Restricted(module.normal_ancestor_id.unwrap()) } Indeterminate => unreachable!(), @@ -3319,7 +3333,8 @@ impl<'a> Resolver<'a> { vis.is_accessible_from(module.normal_ancestor_id.unwrap(), self) } - fn report_errors(&self) { + fn report_errors(&mut self) { + self.report_shadowing_errors(); let mut reported_spans = FnvHashSet(); for &AmbiguityError { span, name, b1, b2 } in &self.ambiguity_errors { @@ -3347,6 +3362,20 @@ impl<'a> Resolver<'a> { } } + fn report_shadowing_errors(&mut self) { + let mut reported_errors = FnvHashSet(); + for (name, span, scope) in replace(&mut self.disallowed_shadowing, Vec::new()) { + if self.resolve_macro_name(scope, name, false).is_some() && + reported_errors.insert((name, span)) { + let msg = format!("`{}` is already in scope", name); + self.session.struct_span_err(span, &msg) + .note("macro-expanded `macro_rules!`s may not shadow \ + existing macros (see RFC 1560)") + .emit(); + } + } + } + fn report_conflict(&self, parent: Module, name: Name, @@ -3358,10 +3387,10 @@ impl<'a> Resolver<'a> { return self.report_conflict(parent, name, ns, old_binding, binding); } - let container = match parent.def { - Some(Def::Mod(_)) => "module", - Some(Def::Trait(_)) => "trait", - None => "block", + let container = match parent.kind { + ModuleKind::Def(Def::Mod(_), _) => "module", + ModuleKind::Def(Def::Trait(_), _) => "trait", + ModuleKind::Block(..) 
=> "block", _ => "enum", }; @@ -3425,26 +3454,24 @@ impl<'a> Resolver<'a> { } } -fn names_to_string(names: &[Name]) -> String { +fn names_to_string(names: &[Ident]) -> String { let mut first = true; let mut result = String::new(); - for name in names { + for ident in names { if first { first = false } else { result.push_str("::") } - result.push_str(&name.as_str()); + result.push_str(&ident.name.as_str()); } result } fn path_names_to_string(path: &Path, depth: usize) -> String { - let names: Vec = path.segments[..path.segments.len() - depth] - .iter() - .map(|seg| seg.identifier.name) - .collect(); - names_to_string(&names[..]) + let names: Vec<_> = + path.segments[..path.segments.len() - depth].iter().map(|seg| seg.identifier).collect(); + names_to_string(&names) } /// When an entity with a given name is not available in scope, we search for @@ -3507,18 +3534,16 @@ fn show_candidates(session: &mut DiagnosticBuilder, fn module_to_string(module: Module) -> String { let mut names = Vec::new(); - fn collect_mod(names: &mut Vec, module: Module) { - match module.parent_link { - NoParentLink => {} - ModuleParentLink(ref module, name) => { - names.push(name); - collect_mod(names, module); - } - BlockParentLink(ref module, _) => { - // danger, shouldn't be ident? - names.push(token::intern("")); - collect_mod(names, module); + fn collect_mod(names: &mut Vec, module: Module) { + if let ModuleKind::Def(_, name) = module.kind { + if let Some(parent) = module.parent { + names.push(Ident::with_empty_ctxt(name)); + collect_mod(names, parent); } + } else { + // danger, shouldn't be ident? + names.push(token::str_to_ident("")); + collect_mod(names, module.parent.unwrap()); } } collect_mod(&mut names, module); @@ -3526,7 +3551,7 @@ fn module_to_string(module: Module) -> String { if names.is_empty() { return "???".to_string(); } - names_to_string(&names.into_iter().rev().collect::>()) + names_to_string(&names.into_iter().rev().collect::>()) } fn err_path_resolution() -> PathResolution { diff --git a/src/librustc_resolve/macros.rs b/src/librustc_resolve/macros.rs index 67ee4c307d3c3..72e5823598ea1 100644 --- a/src/librustc_resolve/macros.rs +++ b/src/librustc_resolve/macros.rs @@ -8,60 +8,138 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use Resolver; +use {Module, Resolver}; +use build_reduced_graph::BuildReducedGraphVisitor; +use rustc::hir::def_id::{CRATE_DEF_INDEX, DefIndex}; +use rustc::hir::map::{self, DefCollector}; use rustc::util::nodemap::FnvHashMap; -use std::cell::RefCell; -use std::mem; +use std::cell::Cell; use std::rc::Rc; -use syntax::ast::{self, Name}; +use syntax::ast; use syntax::errors::DiagnosticBuilder; -use syntax::ext::base::{self, LoadedMacro, MultiModifier, MultiDecorator}; +use syntax::ext::base::{self, Determinacy, MultiModifier, MultiDecorator}; use syntax::ext::base::{NormalTT, SyntaxExtension}; -use syntax::ext::expand::{Expansion, Invocation, InvocationKind}; +use syntax::ext::expand::Expansion; use syntax::ext::hygiene::Mark; +use syntax::ext::tt::macro_rules; use syntax::parse::token::intern; use syntax::util::lev_distance::find_best_match_for_name; -use syntax::visit::{self, Visitor}; +use syntax_pos::Span; -#[derive(Clone, Default)] -pub struct ExpansionData { - module: Rc, +#[derive(Clone)] +pub struct InvocationData<'a> { + pub module: Cell>, + pub def_index: DefIndex, + // True if this expansion is in a `const_integer` position, for example `[u32; m!()]`. + // c.f. `DefCollector::visit_ast_const_integer`. 
+ pub const_integer: bool, + // The scope in which the invocation path is resolved. + pub legacy_scope: Cell>, + // The smallest scope that includes this invocation's expansion, + // or `Empty` if this invocation has not been expanded yet. + pub expansion: Cell>, } -// FIXME(jseyfried): merge with `::ModuleS`. -#[derive(Default)] -struct ModuleData { - parent: Option>, - macros: RefCell>>, - macros_escape: bool, +impl<'a> InvocationData<'a> { + pub fn root(graph_root: Module<'a>) -> Self { + InvocationData { + module: Cell::new(graph_root), + def_index: CRATE_DEF_INDEX, + const_integer: false, + legacy_scope: Cell::new(LegacyScope::Empty), + expansion: Cell::new(LegacyScope::Empty), + } + } } -impl<'a> base::Resolver for Resolver<'a> { - fn load_crate(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec { - self.macro_loader.load_crate(extern_crate, allows_macros) +#[derive(Copy, Clone)] +pub enum LegacyScope<'a> { + Empty, + Invocation(&'a InvocationData<'a>), // The scope of the invocation, not including its expansion + Expansion(&'a InvocationData<'a>), // The scope of the invocation, including its expansion + Binding(&'a LegacyBinding<'a>), +} + +impl<'a> LegacyScope<'a> { + fn simplify_expansion(mut invoc: &'a InvocationData<'a>) -> Self { + while let LegacyScope::Invocation(_) = invoc.expansion.get() { + match invoc.legacy_scope.get() { + LegacyScope::Expansion(new_invoc) => invoc = new_invoc, + LegacyScope::Binding(_) => break, + scope @ _ => return scope, + } + } + LegacyScope::Expansion(invoc) } +} +pub struct LegacyBinding<'a> { + parent: LegacyScope<'a>, + name: ast::Name, + ext: Rc, + span: Span, +} + +pub type LegacyImports = FnvHashMap, Span)>; + +impl<'a> base::Resolver for Resolver<'a> { fn next_node_id(&mut self) -> ast::NodeId { self.session.next_node_id() } + fn get_module_scope(&mut self, id: ast::NodeId) -> Mark { + let mark = Mark::fresh(); + let module = self.module_map[&id]; + self.invocations.insert(mark, self.arenas.alloc_invocation_data(InvocationData { + module: Cell::new(module), + def_index: module.def_id().unwrap().index, + const_integer: false, + legacy_scope: Cell::new(LegacyScope::Empty), + expansion: Cell::new(LegacyScope::Empty), + })); + mark + } + fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion) { - expansion.visit_with(&mut ExpansionVisitor { - current_module: self.expansion_data[mark.as_u32() as usize].module.clone(), + let invocation = self.invocations[&mark]; + self.collect_def_ids(invocation, expansion); + + self.current_module = invocation.module.get(); + let mut visitor = BuildReducedGraphVisitor { resolver: self, + legacy_scope: LegacyScope::Invocation(invocation), + expansion: mark, + }; + expansion.visit_with(&mut visitor); + invocation.expansion.set(visitor.legacy_scope); + } + + fn add_macro(&mut self, scope: Mark, mut def: ast::MacroDef, export: bool) { + if &def.ident.name.as_str() == "macro_rules" { + self.session.span_err(def.span, "user-defined macros may not be named `macro_rules`"); + } + + let invocation = self.invocations[&scope]; + let binding = self.arenas.alloc_legacy_binding(LegacyBinding { + parent: invocation.legacy_scope.get(), + name: def.ident.name, + ext: Rc::new(macro_rules::compile(&self.session.parse_sess, &def)), + span: def.span, }); + invocation.legacy_scope.set(LegacyScope::Binding(binding)); + self.macro_names.insert(def.ident.name); + + if export { + def.id = self.next_node_id(); + self.exported_macros.push(def); + } } - fn add_macro(&mut self, scope: Mark, ident: ast::Ident, ext: Rc) 
{ + fn add_ext(&mut self, ident: ast::Ident, ext: Rc) { if let NormalTT(..) = *ext { self.macro_names.insert(ident.name); } - - let mut module = self.expansion_data[scope.as_u32() as usize].module.clone(); - while module.macros_escape { - module = module.parent.clone().unwrap(); - } - module.macros.borrow_mut().insert(ident.name, ext); + self.builtin_macros.insert(ident.name, ext); } fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec) { @@ -71,9 +149,11 @@ impl<'a> base::Resolver for Resolver<'a> { fn find_attr_invoc(&mut self, attrs: &mut Vec) -> Option { for i in 0..attrs.len() { let name = intern(&attrs[i].name()); - match self.expansion_data[0].module.macros.borrow().get(&name) { + match self.builtin_macros.get(&name) { Some(ext) => match **ext { - MultiModifier(..) | MultiDecorator(..) => return Some(attrs.remove(i)), + MultiModifier(..) | MultiDecorator(..) | SyntaxExtension::AttrProcMacro(..) => { + return Some(attrs.remove(i)) + } _ => {} }, None => {} @@ -82,41 +162,69 @@ impl<'a> base::Resolver for Resolver<'a> { None } - fn resolve_invoc(&mut self, invoc: &Invocation) -> Option> { - let (name, span) = match invoc.kind { - InvocationKind::Bang { ref mac, .. } => { - let path = &mac.node.path; - if path.segments.len() > 1 || path.global || - !path.segments[0].parameters.is_empty() { - self.session.span_err(path.span, - "expected macro name without module separators"); - return None; - } - (path.segments[0].identifier.name, path.span) + fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, force: bool) + -> Result, Determinacy> { + if path.segments.len() > 1 || path.global || !path.segments[0].parameters.is_empty() { + self.session.span_err(path.span, "expected macro name without module separators"); + return Err(Determinacy::Determined); + } + let name = path.segments[0].identifier.name; + + let invocation = self.invocations[&scope]; + if let LegacyScope::Expansion(parent) = invocation.legacy_scope.get() { + invocation.legacy_scope.set(LegacyScope::simplify_expansion(parent)); + } + self.resolve_macro_name(invocation.legacy_scope.get(), name, true).ok_or_else(|| { + if force { + let msg = format!("macro undefined: '{}!'", name); + let mut err = self.session.struct_span_err(path.span, &msg); + self.suggest_macro_name(&name.as_str(), &mut err); + err.emit(); + Determinacy::Determined + } else { + Determinacy::Undetermined } - InvocationKind::Attr { ref attr, .. 
} => (intern(&*attr.name()), attr.span), - }; + }) + } +} - let mut module = self.expansion_data[invoc.mark().as_u32() as usize].module.clone(); +impl<'a> Resolver<'a> { + pub fn resolve_macro_name(&mut self, + mut scope: LegacyScope<'a>, + name: ast::Name, + record_used: bool) + -> Option> { + let mut relative_depth: u32 = 0; loop { - if let Some(ext) = module.macros.borrow().get(&name) { - return Some(ext.clone()); - } - match module.parent.clone() { - Some(parent) => module = parent, - None => break, - } + scope = match scope { + LegacyScope::Empty => break, + LegacyScope::Expansion(invocation) => { + if let LegacyScope::Empty = invocation.expansion.get() { + invocation.legacy_scope.get() + } else { + relative_depth += 1; + invocation.expansion.get() + } + } + LegacyScope::Invocation(invocation) => { + relative_depth = relative_depth.saturating_sub(1); + invocation.legacy_scope.get() + } + LegacyScope::Binding(binding) => { + if binding.name == name { + if record_used && relative_depth > 0 { + self.disallowed_shadowing.push((name, binding.span, binding.parent)); + } + return Some(binding.ext.clone()); + } + binding.parent + } + }; } - let mut err = - self.session.struct_span_err(span, &format!("macro undefined: '{}!'", name)); - self.suggest_macro_name(&name.as_str(), &mut err); - err.emit(); - None + self.builtin_macros.get(&name).cloned() } -} -impl<'a> Resolver<'a> { fn suggest_macro_name(&mut self, name: &str, err: &mut DiagnosticBuilder<'a>) { if let Some(suggestion) = find_best_match_for_name(self.macro_names.iter(), name, None) { if suggestion != name { @@ -126,92 +234,32 @@ impl<'a> Resolver<'a> { } } } -} -struct ExpansionVisitor<'b, 'a: 'b> { - resolver: &'b mut Resolver<'a>, - current_module: Rc, -} + fn collect_def_ids(&mut self, invocation: &'a InvocationData<'a>, expansion: &Expansion) { + let Resolver { ref mut invocations, arenas, graph_root, .. } = *self; + let InvocationData { def_index, const_integer, .. } = *invocation; -impl<'a, 'b> ExpansionVisitor<'a, 'b> { - fn visit_invoc(&mut self, id: ast::NodeId) { - assert_eq!(id, self.resolver.expansion_data.len() as u32); - self.resolver.expansion_data.push(ExpansionData { - module: self.current_module.clone(), - }); - } + let visit_macro_invoc = &mut |invoc: map::MacroInvocationData| { + invocations.entry(invoc.mark).or_insert_with(|| { + arenas.alloc_invocation_data(InvocationData { + def_index: invoc.def_index, + const_integer: invoc.const_integer, + module: Cell::new(graph_root), + expansion: Cell::new(LegacyScope::Empty), + legacy_scope: Cell::new(LegacyScope::Empty), + }) + }); + }; - // does this attribute list contain "macro_use"? 
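The `LegacyScope` chain that `resolve_macro_name` walks above is essentially a chain of parent links: each `macro_rules!` definition pushes a `Binding` whose `parent` is whatever scope preceded it, and lookup walks outward until the name is found or the chain ends at `Empty`. Below is a simplified, owned version of that chain, using `Rc` instead of arena references and omitting the invocation/expansion distinction entirely:

```rust
use std::rc::Rc;

// One link in the chain of textual macro scopes. Each `macro_rules!` definition
// prepends a `Binding`; resolution walks toward the root.
enum LegacyScope {
    Empty,
    Binding { name: String, parent: Rc<LegacyScope> },
}

fn define(parent: &Rc<LegacyScope>, name: &str) -> Rc<LegacyScope> {
    Rc::new(LegacyScope::Binding { name: name.to_string(), parent: parent.clone() })
}

// Walk outward from the innermost binding until the name is found or the chain ends.
fn resolve(mut scope: &Rc<LegacyScope>, name: &str) -> bool {
    loop {
        match **scope {
            LegacyScope::Empty => return false,
            LegacyScope::Binding { name: ref bound, ref parent } => {
                if bound == name {
                    return true;
                }
                scope = parent;
            }
        }
    }
}

fn main() {
    let root = Rc::new(LegacyScope::Empty);
    let after_foo = define(&root, "foo");
    let after_bar = define(&after_foo, "bar");

    assert!(resolve(&after_bar, "foo")); // earlier definitions remain visible
    assert!(resolve(&after_bar, "bar"));
    assert!(!resolve(&after_foo, "bar")); // `bar` is not in scope before its definition
}
```

The real walk additionally counts how many expansions it crosses, which is what feeds the `disallowed_shadowing` check (macro-expanded `macro_rules!` may not shadow existing macros, per RFC 1560); the sketch leaves that out.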
- fn contains_macro_use(&mut self, attrs: &[ast::Attribute]) -> bool { - for attr in attrs { - if attr.check_name("macro_escape") { - let msg = "macro_escape is a deprecated synonym for macro_use"; - let mut err = self.resolver.session.struct_span_warn(attr.span, msg); - if let ast::AttrStyle::Inner = attr.node.style { - err.help("consider an outer attribute, #[macro_use] mod ...").emit(); - } else { - err.emit(); + let mut def_collector = DefCollector::new(&mut self.definitions); + def_collector.visit_macro_invoc = Some(visit_macro_invoc); + def_collector.with_parent(def_index, |def_collector| { + if const_integer { + if let Expansion::Expr(ref expr) = *expansion { + def_collector.visit_ast_const_integer(expr); } - } else if !attr.check_name("macro_use") { - continue; - } - - if !attr.is_word() { - self.resolver.session.span_err(attr.span, - "arguments to macro_use are not allowed here"); } - return true; - } - - false - } -} - -macro_rules! method { - ($visit:ident: $ty:ty, $invoc:path, $walk:ident) => { - fn $visit(&mut self, node: &$ty) { - match node.node { - $invoc(..) => self.visit_invoc(node.id), - _ => visit::$walk(self, node), - } - } - } -} - -impl<'a, 'b> Visitor for ExpansionVisitor<'a, 'b> { - method!(visit_trait_item: ast::TraitItem, ast::TraitItemKind::Macro, walk_trait_item); - method!(visit_impl_item: ast::ImplItem, ast::ImplItemKind::Macro, walk_impl_item); - method!(visit_stmt: ast::Stmt, ast::StmtKind::Mac, walk_stmt); - method!(visit_expr: ast::Expr, ast::ExprKind::Mac, walk_expr); - method!(visit_pat: ast::Pat, ast::PatKind::Mac, walk_pat); - method!(visit_ty: ast::Ty, ast::TyKind::Mac, walk_ty); - - fn visit_item(&mut self, item: &ast::Item) { - match item.node { - ast::ItemKind::Mac(..) if item.id == ast::DUMMY_NODE_ID => {} // Scope placeholder - ast::ItemKind::Mac(..) => self.visit_invoc(item.id), - ast::ItemKind::Mod(..) => { - let module_data = ModuleData { - parent: Some(self.current_module.clone()), - macros: RefCell::new(FnvHashMap()), - macros_escape: self.contains_macro_use(&item.attrs), - }; - let orig_module = mem::replace(&mut self.current_module, Rc::new(module_data)); - visit::walk_item(self, item); - self.current_module = orig_module; - } - _ => visit::walk_item(self, item), - } - } - - fn visit_block(&mut self, block: &ast::Block) { - let module_data = ModuleData { - parent: Some(self.current_module.clone()), - macros: RefCell::new(FnvHashMap()), - macros_escape: false, - }; - let orig_module = mem::replace(&mut self.current_module, Rc::new(module_data)); - visit::walk_block(self, block); - self.current_module = orig_module; + expansion.visit_with(def_collector) + }); } } diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index 29add1f9b9d49..2b3945bd0d920 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use self::Determinacy::*; use self::ImportDirectiveSubclass::*; use Module; @@ -25,7 +24,8 @@ use rustc::ty; use rustc::lint::builtin::PRIVATE_IN_PUBLIC; use rustc::hir::def::*; -use syntax::ast::{NodeId, Name}; +use syntax::ast::{Ident, NodeId, Name}; +use syntax::ext::base::Determinacy::{self, Determined, Undetermined}; use syntax::util::lev_distance::find_best_match_for_name; use syntax_pos::Span; @@ -37,12 +37,6 @@ impl<'a> Resolver<'a> { } } -#[derive(Copy, Clone, Debug)] -pub enum Determinacy { - Determined, - Undetermined, -} - /// Contains data for specific types of import directives. #[derive(Clone, Debug)] pub enum ImportDirectiveSubclass<'a> { @@ -75,7 +69,7 @@ impl<'a> ImportDirectiveSubclass<'a> { pub struct ImportDirective<'a> { pub id: NodeId, parent: Module<'a>, - module_path: Vec, + module_path: Vec, imported_module: Cell>>, // the resolution of `module_path` subclass: ImportDirectiveSubclass<'a>, span: Span, @@ -197,7 +191,8 @@ impl<'a> Resolver<'a> { // If the resolution doesn't depend on glob definability, check privacy and return. if let Some(result) = self.try_result(&resolution, ns) { return result.and_then(|binding| { - if self.is_accessible(binding.vis) && !is_disallowed_private_import(binding) { + if self.is_accessible(binding.vis) && !is_disallowed_private_import(binding) || + binding.is_extern_crate() { // c.f. issue #37020 Success(binding) } else { Failed(None) @@ -257,7 +252,7 @@ impl<'a> Resolver<'a> { // Add an import directive to the current module. pub fn add_import_directive(&mut self, - module_path: Vec, + module_path: Vec, subclass: ImportDirectiveSubclass<'a>, span: Span, id: NodeId, @@ -683,9 +678,8 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> { }; match (value_result, type_result) { - // With `#![feature(item_like_imports)]`, all namespaces - // must be re-exported with extra visibility for an error to occur. - (Ok(value_binding), Ok(type_binding)) if self.new_import_semantics => { + // All namespaces must be re-exported with extra visibility for an error to occur. + (Ok(value_binding), Ok(type_binding)) => { let vis = directive.vis.get(); if !value_binding.pseudo_vis().is_at_least(vis, self) && !type_binding.pseudo_vis().is_at_least(vis, self) { @@ -733,7 +727,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> { let module = directive.imported_module.get().unwrap(); self.populate_module_if_necessary(module); - if let Some(Def::Trait(_)) = module.def { + if let Some(Def::Trait(_)) = module.def() { self.session.span_err(directive.span, "items in traits are not importable."); return; } else if module.def_id() == directive.parent.def_id() { @@ -798,7 +792,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> { (binding.is_import() || binding.is_extern_crate()) { let def = binding.def(); if def != Def::Err { - reexports.push(Export { name: name, def_id: def.def_id() }); + reexports.push(Export { name: name, def: def }); } } @@ -822,7 +816,7 @@ impl<'a, 'b:'a> ImportResolver<'a, 'b> { } } -fn import_path_to_string(names: &[Name], subclass: &ImportDirectiveSubclass) -> String { +fn import_path_to_string(names: &[Ident], subclass: &ImportDirectiveSubclass) -> String { if names.is_empty() { import_directive_subclass_to_string(subclass) } else { diff --git a/src/librustc_save_analysis/data.rs b/src/librustc_save_analysis/data.rs index 5f6e65e289fd5..fc235aaf9276b 100644 --- a/src/librustc_save_analysis/data.rs +++ b/src/librustc_save_analysis/data.rs @@ -14,8 +14,8 @@ //! retrieve the data from a crate. 
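The `Determinacy` values used above (`Determined`/`Undetermined`) reflect how import resolution proceeds: each pass re-tries the imports that are still indeterminate, and the process stops at a fixed point once a pass makes no further progress. The sketch below shows only that loop shape; the `Import` type and its "ready once the import it depends on is determined" rule are invented for illustration and are not the resolver's actual criteria:

```rust
use std::collections::HashSet;

#[derive(Clone, Copy, PartialEq)]
enum Determinacy {
    Determined,
    Undetermined,
}

// A toy import: it can be resolved once the import it depends on is resolved.
struct Import {
    name: &'static str,
    depends_on: Option<&'static str>,
}

fn resolve_imports(imports: &[Import]) -> HashSet<&'static str> {
    let mut determined: HashSet<&'static str> = HashSet::new();
    loop {
        let before = determined.len();
        for import in imports {
            let state = match import.depends_on {
                None => Determinacy::Determined,
                Some(dep) if determined.contains(dep) => Determinacy::Determined,
                Some(_) => Determinacy::Undetermined,
            };
            if state == Determinacy::Determined {
                determined.insert(import.name);
            }
        }
        // Fixed point: a full pass made no progress, so nothing else can become determined.
        if determined.len() == before {
            return determined;
        }
    }
}

fn main() {
    let imports = [
        Import { name: "b", depends_on: Some("a") },
        Import { name: "a", depends_on: None },
        Import { name: "c", depends_on: Some("missing") },
    ];
    let determined = resolve_imports(&imports);
    assert!(determined.contains("a") && determined.contains("b"));
    assert!(!determined.contains("c")); // still undetermined after the fixed point
}
```

Whatever is still undetermined once the fixed point is reached is roughly what ends up being reported as an unresolved import.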
use rustc::hir; -use rustc::hir::def_id::DefId; -use syntax::ast::{self, CrateNum, NodeId}; +use rustc::hir::def_id::{CrateNum, DefId}; +use syntax::ast::{self, NodeId}; use syntax_pos::Span; pub struct CrateData { @@ -167,7 +167,7 @@ pub struct FunctionData { pub scope: NodeId, pub value: String, pub visibility: Visibility, - pub parent: Option, + pub parent: Option, pub docs: String, } @@ -250,6 +250,7 @@ pub struct MethodData { pub scope: NodeId, pub value: String, pub decl_id: Option, + pub parent: Option, pub visibility: Visibility, pub docs: String, } @@ -300,7 +301,7 @@ pub struct StructVariantData { pub type_value: String, pub value: String, pub scope: NodeId, - pub parent: Option, + pub parent: Option, pub docs: String, } @@ -326,7 +327,7 @@ pub struct TupleVariantData { pub type_value: String, pub value: String, pub scope: NodeId, - pub parent: Option, + pub parent: Option, pub docs: String, } @@ -339,7 +340,7 @@ pub struct TypeDefData { pub qualname: String, pub value: String, pub visibility: Visibility, - pub parent: Option, + pub parent: Option, pub docs: String, } @@ -380,7 +381,7 @@ pub struct VariableData { pub qualname: String, pub span: Span, pub scope: NodeId, - pub parent: Option, + pub parent: Option, pub value: String, pub type_value: String, pub visibility: Visibility, diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index f31e12991c808..8a628289b7f94 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -27,16 +27,18 @@ //! is used for recording the output in a format-agnostic way (see CsvDumper //! for an example). +use rustc::hir; use rustc::hir::def::Def; -use rustc::hir::def_id::DefId; -use rustc::hir::map::Node; +use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; +use rustc::hir::map::{Node, NodeItem}; use rustc::session::Session; use rustc::ty::{self, TyCtxt, ImplOrTraitItem, ImplOrTraitItemContainer}; use std::collections::HashSet; +use std::collections::hash_map::DefaultHasher; use std::hash::*; -use syntax::ast::{self, NodeId, PatKind, Attribute}; +use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; use syntax::parse::token::{self, keywords}; use syntax::visit::{self, Visitor}; use syntax::print::pprust::{path_to_string, ty_to_string, bounds_to_string, generics_to_string}; @@ -47,7 +49,7 @@ use syntax_pos::*; use super::{escape, generated_code, SaveContext, PathCollector, docs_for_attrs}; use super::data::*; use super::dump::Dump; -use super::external_data::Lower; +use super::external_data::{Lower, make_def_id}; use super::span_utils::SpanUtils; use super::recorder; @@ -94,7 +96,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { analysis: analysis, dumper: dumper, span: span_utils.clone(), - cur_scope: 0, + cur_scope: CRATE_NODE_ID, mac_defs: HashSet::new(), mac_uses: HashSet::new(), } @@ -123,7 +125,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { let lo_loc = self.span.sess.codemap().lookup_char_pos(c.span.lo); ExternalCrateData { name: c.name, - num: c.number, + num: CrateNum::from_u32(c.number), file_name: SpanUtils::make_path_string(&lo_loc.file.name), } }).collect(); @@ -151,7 +153,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { // What could go wrong...? if spans.len() < path.segments.len() { if generated_code(path.span) { - return vec!(); + return vec![]; } error!("Mis-calculated spans for path '{}'. Found {} spans, expected {}. 
Found spans:", path_to_string(path), @@ -164,12 +166,13 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { loc.file.name, loc.line); } - return vec!(); + error!(" master span: {:?}: `{}`", path.span, self.span.snippet(path.span)); + return vec![]; } - let mut result: Vec<(Span, String)> = vec!(); + let mut result: Vec<(Span, String)> = vec![]; - let mut segs = vec!(); + let mut segs = vec![]; for (i, (seg, span)) in path.segments.iter().zip(&spans).enumerate() { segs.push(seg.clone()); let sub_path = ast::Path { @@ -251,7 +254,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { ref_id: None, span: *span, qualname: qualname.to_owned(), - scope: 0 + scope: CRATE_NODE_ID }.lower(self.tcx)); // write the other sub-paths @@ -269,13 +272,13 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { } } - // looks up anything, not just a type - fn lookup_type_ref(&self, ref_id: NodeId) -> Option { - match self.tcx.expect_def(ref_id) { - Def::PrimTy(..) => None, - Def::SelfTy(..) => None, - def => Some(def.def_id()), - } + fn lookup_def_id(&self, ref_id: NodeId) -> Option { + self.tcx.expect_def_or_none(ref_id).and_then(|def| { + match def { + Def::PrimTy(..) | Def::SelfTy(..) => None, + def => Some(def.def_id()), + } + }) } fn process_def_kind(&mut self, @@ -290,8 +293,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { let def = self.tcx.expect_def(ref_id); match def { - Def::Mod(_) | - Def::ForeignMod(_) => { + Def::Mod(_) => { self.dumper.mod_ref(ModRefData { span: sub_span.expect("No span found for mod ref"), ref_id: Some(def_id), @@ -300,10 +302,10 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { }.lower(self.tcx)); } Def::Struct(..) | + Def::Variant(..) | Def::Union(..) | Def::Enum(..) | Def::TyAlias(..) | - Def::AssociatedTy(..) | Def::Trait(_) => { self.dumper.type_ref(TypeRefData { span: sub_span.expect("No span found for type ref"), @@ -313,11 +315,9 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { }.lower(self.tcx)); } Def::Static(..) | - Def::Const(_) | - Def::AssociatedConst(..) | - Def::Local(..) | - Def::Variant(..) | - Def::Upvar(..) => { + Def::Const(..) | + Def::StructCtor(..) | + Def::VariantCtor(..) => { self.dumper.variable_ref(VariableRefData { span: sub_span.expect("No span found for var ref"), ref_id: def_id, @@ -332,10 +332,14 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { scope: scope }.lower(self.tcx)); } + Def::Local(..) | + Def::Upvar(..) | Def::SelfTy(..) | Def::Label(_) | Def::TyParam(..) | Def::Method(..) | + Def::AssociatedTy(..) | + Def::AssociatedConst(..) 
| Def::PrimTy(_) | Def::Err => { span_bug!(span, @@ -365,7 +369,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { qualname: format!("{}::{}", qualname, path_to_string(p)), type_value: typ, value: String::new(), - scope: 0, + scope: CRATE_NODE_ID, parent: None, visibility: Visibility::Inherited, docs: String::new(), @@ -399,20 +403,36 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { if !self.span.filter_generated(Some(method_data.span), span) { let container = self.tcx.impl_or_trait_item(self.tcx.map.local_def_id(id)).container(); - let decl_id = if let ImplOrTraitItemContainer::ImplContainer(id) = container { - self.tcx.trait_id_of_impl(id).and_then(|id| { - for item in &**self.tcx.trait_items(id) { - if let &ImplOrTraitItem::MethodTraitItem(ref m) = item { - if m.name == name { - return Some(m.def_id); + let mut trait_id; + let mut decl_id = None; + match container { + ImplOrTraitItemContainer::ImplContainer(id) => { + trait_id = self.tcx.trait_id_of_impl(id); + + match trait_id { + Some(id) => { + for item in &**self.tcx.trait_items(id) { + if let &ImplOrTraitItem::MethodTraitItem(ref m) = item { + if m.name == name { + decl_id = Some(m.def_id); + break; + } + } + } + } + None => { + if let Some(NodeItem(item)) = self.tcx.map.get_if_local(id) { + if let hir::ItemImpl(_, _, _, _, ref ty, _) = item.node { + trait_id = self.lookup_def_id(ty.id); + } } } } - None - }) - } else { - None - }; + } + ImplOrTraitItemContainer::TraitContainer(id) => { + trait_id = Some(id); + } + } self.dumper.method(MethodData { id: method_data.id, @@ -422,6 +442,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { qualname: method_data.qualname.clone(), value: sig_str, decl_id: decl_id, + parent: trait_id, visibility: vis, docs: docs_for_attrs(attrs), }.lower(self.tcx)); @@ -544,7 +565,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { span: Span, typ: &ast::Ty, expr: &ast::Expr, - parent_id: NodeId, + parent_id: DefId, vis: Visibility, attrs: &[Attribute]) { let qualname = format!("::{}", self.tcx.node_path_str(id)); @@ -659,7 +680,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { type_value: enum_data.qualname.clone(), value: val, scope: enum_data.scope, - parent: Some(item.id), + parent: Some(make_def_id(item.id, &self.tcx.map)), docs: docs_for_attrs(&variant.node.attrs), }.lower(self.tcx)); } @@ -684,7 +705,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { type_value: enum_data.qualname.clone(), value: val, scope: enum_data.scope, - parent: Some(item.id), + parent: Some(make_def_id(item.id, &self.tcx.map)), docs: docs_for_attrs(&variant.node.attrs), }.lower(self.tcx)); } @@ -738,7 +759,8 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { } self.process_generic_params(type_parameters, item.span, "", item.id); for impl_item in impl_items { - self.process_impl_item(impl_item, item.id); + let map = &self.tcx.map; + self.process_impl_item(impl_item, make_def_id(item.id, map)); } } @@ -784,7 +806,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { }; let trait_ref = &trait_ref.trait_ref; - if let Some(id) = self.lookup_type_ref(trait_ref.ref_id) { + if let Some(id) = self.lookup_def_id(trait_ref.ref_id) { let sub_span = self.span.sub_span_for_type_name(trait_ref.path.span); if !self.span.filter_generated(sub_span, trait_ref.path.span) { self.dumper.type_ref(TypeRefData { @@ -809,7 +831,8 @@ impl<'l, 'tcx: 'l, 'll, D: 
Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { // walk generics and methods self.process_generic_params(generics, item.span, &qualname, item.id); for method in methods { - self.process_trait_item(method, item.id) + let map = &self.tcx.map; + self.process_trait_item(method, make_def_id(item.id, map)) } } @@ -832,9 +855,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { let path_data = match path_data { Some(pd) => pd, None => { - span_bug!(path.span, - "Unexpected def kind while looking up path in `{}`", - self.span.snippet(path.span)) + return; } }; @@ -902,13 +923,19 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { } } } - Def::Local(..) | - Def::Static(..) | + Def::Fn(..) | Def::Const(..) | + Def::Static(..) | + Def::StructCtor(..) | + Def::VariantCtor(..) | Def::AssociatedConst(..) | + Def::Local(..) | + Def::Upvar(..) | Def::Struct(..) | + Def::Union(..) | Def::Variant(..) | - Def::Fn(..) => self.write_sub_paths_truncated(path, false), + Def::TyAlias(..) | + Def::AssociatedTy(..) => self.write_sub_paths_truncated(path, false), _ => {} } } @@ -1022,7 +1049,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { qualname: format!("{}${}", path_to_string(p), id), value: value, type_value: typ, - scope: 0, + scope: CRATE_NODE_ID, parent: None, visibility: Visibility::Inherited, docs: String::new(), @@ -1043,7 +1070,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { None => return, Some(data) => data, }; - let mut hasher = SipHasher::new(); + let mut hasher = DefaultHasher::new(); data.callee_span.hash(&mut hasher); let hash = hasher.finish(); let qualname = format!("{}::{}", data.name, hash); @@ -1076,7 +1103,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { } } - fn process_trait_item(&mut self, trait_item: &ast::TraitItem, trait_id: NodeId) { + fn process_trait_item(&mut self, trait_item: &ast::TraitItem, trait_id: DefId) { self.process_macro_use(trait_item.span, trait_item.id); match trait_item.node { ast::TraitItemKind::Const(ref ty, Some(ref expr)) => { @@ -1104,7 +1131,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { } } - fn process_impl_item(&mut self, impl_item: &ast::ImplItem, impl_id: NodeId) { + fn process_impl_item(&mut self, impl_item: &ast::ImplItem, impl_id: DefId) { self.process_macro_use(impl_item.span, impl_item.id); match impl_item.node { ast::ImplItemKind::Const(ref ty, ref expr) => { @@ -1141,7 +1168,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D> match use_item.node { ast::ViewPathSimple(ident, ref path) => { let sub_span = self.span.span_for_last_ident(path.span); - let mod_id = match self.lookup_type_ref(item.id) { + let mod_id = match self.lookup_def_id(item.id) { Some(def_id) => { let scope = self.cur_scope; self.process_def_kind(item.id, path.span, sub_span, def_id, scope); @@ -1199,7 +1226,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D> for plid in list { let scope = self.cur_scope; let id = plid.node.id; - if let Some(def_id) = self.lookup_type_ref(id) { + if let Some(def_id) = self.lookup_def_id(id) { let span = plid.span; self.process_def_kind(id, span, Some(span), def_id, scope); } @@ -1217,7 +1244,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D> let alias_span = self.span.span_for_last_ident(item.span); let cnum = match self.sess.cstore.extern_mod_stmt_cnum(item.id) { Some(cnum) => cnum, - None => 0, + 
None => LOCAL_CRATE, }; if !self.span.filter_generated(alias_span, item.span) { @@ -1294,7 +1321,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D> self.process_macro_use(t.span, t.id); match t.node { ast::TyKind::Path(_, ref path) => { - if let Some(id) = self.lookup_type_ref(t.id) { + if let Some(id) = self.lookup_def_id(t.id) { let sub_span = self.span.sub_span_for_type_name(t.span); if !self.span.filter_generated(sub_span, t.span) { self.dumper.type_ref(TypeRefData { @@ -1434,7 +1461,8 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D> // process collected paths for &(id, ref p, immut, ref_kind) in &collector.collected_paths { match self.tcx.expect_def(id) { - Def::Local(_, id) => { + Def::Local(def_id) => { + let id = self.tcx.map.as_local_node_id(def_id).unwrap(); let mut value = if immut == ast::Mutability::Immutable { self.span.snippet(p.span).to_string() } else { @@ -1456,21 +1484,20 @@ impl<'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor for DumpVisitor<'l, 'tcx, 'll, D> qualname: format!("{}${}", path_to_string(p), id), value: value, type_value: typ, - scope: 0, + scope: CRATE_NODE_ID, parent: None, visibility: Visibility::Inherited, docs: String::new(), }.lower(self.tcx)); } } - Def::Variant(..) | Def::Enum(..) | - Def::TyAlias(..) | Def::Struct(..) => { + Def::StructCtor(..) | Def::VariantCtor(..) | + Def::Const(..) | Def::AssociatedConst(..) | + Def::Struct(..) | Def::Variant(..) | + Def::TyAlias(..) | Def::AssociatedTy(..) | + Def::SelfTy(..) => { paths_to_process.push((id, p.clone(), Some(ref_kind))) } - // FIXME(nrc) what are these doing here? - Def::Static(..) | - Def::Const(..) | - Def::AssociatedConst(..) => {} def => error!("unexpected definition kind when processing collected paths: {:?}", def), } diff --git a/src/librustc_save_analysis/external_data.rs b/src/librustc_save_analysis/external_data.rs index 3642346582bb9..5847575742342 100644 --- a/src/librustc_save_analysis/external_data.rs +++ b/src/librustc_save_analysis/external_data.rs @@ -8,10 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
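The save-analysis records above switch their `parent` links (and the dumpers' crate numbers) from crate-local `NodeId`s to `DefId`s, and `external_data.rs` below follows suit. A minimal, self-contained sketch of the distinction; the types here are illustrative stand-ins, not the actual rustc definitions:

```rust
// Illustrative only: a NodeId is just an index into the current crate's AST,
// so it cannot name an item from another crate. A DefId pairs a crate number
// with a per-crate definition index, which is why parent links that may point
// into other crates now carry DefIds.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct CrateNum(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct DefIndex(u32);

#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct DefId {
    krate: CrateNum,
    index: DefIndex,
}

fn main() {
    let local = DefId { krate: CrateNum(0), index: DefIndex(7) };
    let external = DefId { krate: CrateNum(3), index: DefIndex(7) };
    // Same index, different crates: still two distinct definitions.
    assert_ne!(local, external);
    println!("{:?} vs {:?}", local, external);
}
```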
-use rustc::hir::def_id::{DefId, DefIndex};
+use rustc::hir::def_id::{CrateNum, DefId, DefIndex};
 use rustc::hir::map::Map;
 use rustc::ty::TyCtxt;
-use syntax::ast::{CrateNum, NodeId};
+use syntax::ast::NodeId;
 use syntax::codemap::CodeMap;
 use syntax_pos::Span;
@@ -23,12 +23,15 @@ pub trait Lower {
     fn lower(self, tcx: TyCtxt) -> Self::Target;
 }
-fn make_def_id(id: NodeId, map: &Map) -> DefId {
+pub fn make_def_id(id: NodeId, map: &Map) -> DefId {
     map.opt_local_def_id(id).unwrap_or(null_def_id())
 }
 pub fn null_def_id() -> DefId {
-    DefId { krate: u32::max_value(), index: DefIndex::from_u32(u32::max_value()) }
+    DefId {
+        krate: CrateNum::from_u32(u32::max_value()),
+        index: DefIndex::from_u32(u32::max_value())
+    }
 }
 #[derive(Clone, Debug, RustcEncodable)]
@@ -188,7 +191,7 @@ impl Lower for data::FunctionData {
             scope: make_def_id(self.scope, &tcx.map),
             value: self.value,
             visibility: self.visibility,
-            parent: self.parent.map(|id| make_def_id(id, &tcx.map)),
+            parent: self.parent,
             docs: self.docs,
         }
     }
@@ -353,7 +356,7 @@ impl Lower for data::MethodData {
             value: self.value,
             decl_id: self.decl_id,
             visibility: self.visibility,
-            parent: Some(make_def_id(self.scope, &tcx.map)),
+            parent: self.parent,
             docs: self.docs,
         }
     }
@@ -471,7 +474,7 @@ impl Lower for data::StructVariantData {
             type_value: self.type_value,
             value: self.value,
             scope: make_def_id(self.scope, &tcx.map),
-            parent: self.parent.map(|id| make_def_id(id, &tcx.map)),
+            parent: self.parent,
             docs: self.docs,
         }
     }
@@ -533,7 +536,7 @@ impl Lower for data::TupleVariantData {
             type_value: self.type_value,
             value: self.value,
             scope: make_def_id(self.scope, &tcx.map),
-            parent: self.parent.map(|id| make_def_id(id, &tcx.map)),
+            parent: self.parent,
             docs: self.docs,
         }
     }
@@ -563,7 +566,7 @@ impl Lower for data::TypeDefData {
             qualname: self.qualname,
             value: self.value,
             visibility: self.visibility,
-            parent: self.parent.map(|id| make_def_id(id, &tcx.map)),
+            parent: self.parent,
             docs: self.docs,
         }
     }
@@ -668,7 +671,7 @@ impl Lower for data::VariableData {
             scope: make_def_id(self.scope, &tcx.map),
             value: self.value,
             type_value: self.type_value,
-            parent: self.parent.map(|id| make_def_id(id, &tcx.map)),
+            parent: self.parent,
             visibility: self.visibility,
             docs: self.docs,
         }
diff --git a/src/librustc_save_analysis/json_api_dumper.rs b/src/librustc_save_analysis/json_api_dumper.rs
index 78eaa65872a4c..d56aae18a7cd1 100644
--- a/src/librustc_save_analysis/json_api_dumper.rs
+++ b/src/librustc_save_analysis/json_api_dumper.rs
@@ -117,7 +117,7 @@ struct Id {
 impl From<DefId> for Id {
     fn from(id: DefId) -> Id {
         Id {
-            krate: id.krate,
+            krate: id.krate.as_u32(),
             index: id.index.as_u32(),
         }
     }
diff --git a/src/librustc_save_analysis/json_dumper.rs b/src/librustc_save_analysis/json_dumper.rs
index 75abff8d37edb..eb613c3afdab3 100644
--- a/src/librustc_save_analysis/json_dumper.rs
+++ b/src/librustc_save_analysis/json_dumper.rs
@@ -120,7 +120,7 @@ struct Id {
 impl From<DefId> for Id {
     fn from(id: DefId) -> Id {
         Id {
-            krate: id.krate,
+            krate: id.krate.as_u32(),
             index: id.index.as_u32(),
         }
     }
@@ -129,7 +129,7 @@ impl From<DefId> for Id {
 #[derive(Debug, RustcEncodable)]
 struct Import {
     kind: ImportKind,
-    id: Id,
+    ref_id: Option<Id>,
     span: SpanData,
     name: String,
     value: String,
@@ -146,7 +146,7 @@ impl From<ExternCrateData> for Import {
     fn from(data: ExternCrateData) -> Import {
         Import {
             kind: ImportKind::ExternCrate,
-            id: From::from(data.id),
+            ref_id: None,
             span: data.span,
             name: data.name,
             value: String::new(),
@@ -157,7 +157,7 @@ impl From<UseData> for Import {
     fn from(data: UseData) -> Import {
         Import {
kind: ImportKind::Use, - id: From::from(data.id), + ref_id: data.mod_id.map(|id| From::from(id)), span: data.span, name: data.name, value: String::new(), @@ -168,7 +168,7 @@ impl From for Import { fn from(data: UseGlobData) -> Import { Import { kind: ImportKind::GlobUse, - id: From::from(data.id), + ref_id: None, span: data.span, name: "*".to_owned(), value: data.names.join(", "), diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 35ad2d9316cda..15c74f2ed6ab5 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -52,7 +52,7 @@ use std::env; use std::fs::{self, File}; use std::path::{Path, PathBuf}; -use syntax::ast::{self, NodeId, PatKind, Attribute}; +use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; use syntax::parse::lexer::comments::strip_doc_comment_decoration; use syntax::parse::token::{self, keywords, InternedString}; use syntax::visit::{self, Visitor}; @@ -64,6 +64,7 @@ pub use self::csv_dumper::CsvDumper; pub use self::json_api_dumper::JsonApiDumper; pub use self::json_dumper::JsonDumper; pub use self::data::*; +pub use self::external_data::make_def_id; pub use self::dump::Dump; pub use self::dump_visitor::DumpVisitor; use self::span_utils::SpanUtils; @@ -119,7 +120,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { }; result.push(CrateData { name: (&self.tcx.sess.cstore.crate_name(n)[..]).to_owned(), - number: n, + number: n.as_u32(), span: span, }); } @@ -295,7 +296,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { qualname: qualname, span: sub_span.unwrap(), scope: scope, - parent: Some(scope), + parent: Some(make_def_id(scope, &self.tcx.map)), value: "".to_owned(), type_value: typ, visibility: From::from(&field.vis), @@ -312,7 +313,8 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { name: ast::Name, span: Span) -> Option { // The qualname for a method is the trait name or name of the struct in an impl in // which the method is declared in, followed by the method's name. 
- let (qualname, vis, docs) = match self.tcx.impl_of_method(self.tcx.map.local_def_id(id)) { + let (qualname, parent_scope, vis, docs) = + match self.tcx.impl_of_method(self.tcx.map.local_def_id(id)) { Some(impl_id) => match self.tcx.map.get_if_local(impl_id) { Some(NodeItem(item)) => { match item.node { @@ -320,12 +322,13 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { let mut result = String::from("<"); result.push_str(&rustc::hir::print::ty_to_string(&ty)); - if let Some(def_id) = self.tcx.trait_id_of_impl(impl_id) { + let trait_id = self.tcx.trait_id_of_impl(impl_id); + if let Some(def_id) = trait_id { result.push_str(" as "); result.push_str(&self.tcx.item_path_str(def_id)); } result.push_str(">"); - (result, From::from(&item.vis), docs_for_attrs(&item.attrs)) + (result, trait_id, From::from(&item.vis), docs_for_attrs(&item.attrs)) } _ => { span_bug!(span, @@ -348,6 +351,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { match self.tcx.map.get_if_local(def_id) { Some(NodeItem(item)) => { (format!("::{}", self.tcx.item_path_str(def_id)), + Some(def_id), From::from(&item.vis), docs_for_attrs(&item.attrs)) } @@ -370,8 +374,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { let qualname = format!("{}::{}", qualname, name); let def_id = self.tcx.map.local_def_id(id); - let decl_id = self.tcx.trait_item_of_item(def_id).and_then(|new_id| { - let new_def_id = new_id.def_id(); + let decl_id = self.tcx.trait_item_of_item(def_id).and_then(|new_def_id| { if new_def_id != def_id { Some(new_def_id) } else { @@ -381,7 +384,6 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { let sub_span = self.span_utils.sub_span_after_keyword(span, keywords::Fn); filter!(self.span_utils, sub_span, span, None); - let parent_scope = self.enclosing_scope(id); Some(FunctionData { id: id, name: name.to_string(), @@ -392,7 +394,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { // FIXME you get better data here by using the visitor. value: String::new(), visibility: vis, - parent: Some(parent_scope), + parent: parent_scope, docs: docs, }) } @@ -505,7 +507,8 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { Def::Static(..) | Def::Const(..) | Def::AssociatedConst(..) | - Def::Variant(..) => { + Def::StructCtor(..) | + Def::VariantCtor(..) => { Some(Data::VariableRefData(VariableRefData { name: self.span_utils.snippet(sub_span.unwrap()), span: sub_span.unwrap(), @@ -514,9 +517,11 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { })) } Def::Struct(def_id) | + Def::Variant(def_id, ..) | Def::Union(def_id) | Def::Enum(def_id) | Def::TyAlias(def_id) | + Def::AssociatedTy(def_id) | Def::Trait(def_id) | Def::TyParam(def_id) => { Some(Data::TypeRefData(TypeRefData { @@ -540,16 +545,9 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { .map(|mr| mr.def_id()) } ty::ImplContainer(def_id) => { - let impl_items = self.tcx.impl_items.borrow(); - Some(impl_items.get(&def_id) - .unwrap() - .iter() - .find(|mr| { - self.tcx.impl_or_trait_item(mr.def_id()).name() == - ti.name() - }) - .unwrap() - .def_id()) + Some(*self.tcx.impl_or_trait_items(def_id).iter().find(|&&mr| { + self.tcx.impl_or_trait_item(mr).name() == ti.name() + }).unwrap()) } } } else { @@ -577,7 +575,10 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { qualname: String::new() // FIXME: generate the real qualname })) } - _ => None, + Def::PrimTy(..) | + Def::SelfTy(..) | + Def::Label(..) 
| + Def::Err => None, } } @@ -673,7 +674,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { #[inline] pub fn enclosing_scope(&self, id: NodeId) -> NodeId { - self.tcx.map.get_enclosing_scope(id).unwrap_or(0) + self.tcx.map.get_enclosing_scope(id).unwrap_or(CRATE_NODE_ID) } } diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index 953c65549195a..031b9a6a5aa51 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -139,9 +139,9 @@ impl<'a> SpanUtils<'a> { let mut prev = toks.real_token(); let mut result = None; let mut bracket_count = 0; - let mut last_span = None; + let mut prev_span = None; while prev.tok != token::Eof { - last_span = None; + prev_span = None; let mut next = toks.real_token(); if (next.tok == token::OpenDelim(token::Paren) || next.tok == token::Lt) && @@ -166,12 +166,12 @@ impl<'a> SpanUtils<'a> { }; if prev.tok.is_ident() && bracket_count == 0 { - last_span = Some(prev.sp); + prev_span = Some(prev.sp); } prev = next; } - if result.is_none() && last_span.is_some() { - return self.make_sub_span(span, last_span); + if result.is_none() && prev_span.is_some() { + return self.make_sub_span(span, prev_span); } return self.make_sub_span(span, result); } @@ -225,7 +225,7 @@ impl<'a> SpanUtils<'a> { // Nesting = 0: all idents outside of brackets: [Foo] // Nesting = 1: idents within one level of brackets: [Bar, Bar] pub fn spans_with_brackets(&self, span: Span, nesting: isize, limit: isize) -> Vec { - let mut result: Vec = vec!(); + let mut result: Vec = vec![]; let mut toks = self.retokenise_span(span); // We keep track of how many brackets we're nested in @@ -236,7 +236,7 @@ impl<'a> SpanUtils<'a> { if ts.tok == token::Eof { if bracket_count != 0 { if generated_code(span) { - return vec!(); + return vec![]; } let loc = self.sess.codemap().lookup_char_pos(span.lo); span_bug!(span, diff --git a/src/librustc_trans/abi.rs b/src/librustc_trans/abi.rs index 1a6c34b55af65..0a5b013c79ac2 100644 --- a/src/librustc_trans/abi.rs +++ b/src/librustc_trans/abi.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use llvm::{self, ValueRef}; +use llvm::{self, ValueRef, Integer, Pointer, Float, Double, Struct, Array, Vector}; use base; use build::AllocaFcx; use common::{type_is_fat_ptr, BlockAndBuilder, C_uint}; @@ -24,7 +24,7 @@ use cabi_s390x; use cabi_mips; use cabi_mips64; use cabi_asmjs; -use machine::{llalign_of_min, llsize_of, llsize_of_real, llsize_of_store}; +use machine::{llalign_of_min, llsize_of, llsize_of_alloc}; use type_::Type; use type_of; @@ -102,7 +102,7 @@ impl ArgType { // Wipe old attributes, likely not valid through indirection. self.attrs = llvm::Attributes::default(); - let llarg_sz = llsize_of_real(ccx, self.ty); + let llarg_sz = llsize_of_alloc(ccx, self.ty); // For non-immediate arguments the callee gets its own copy of // the value on the stack, so there are no aliases. It's also @@ -200,7 +200,7 @@ impl ArgType { base::call_memcpy(bcx, bcx.pointercast(dst, Type::i8p(ccx)), bcx.pointercast(llscratch, Type::i8p(ccx)), - C_uint(ccx, llsize_of_store(ccx, self.ty)), + C_uint(ccx, llsize_of_alloc(ccx, self.ty)), cmp::min(llalign_of_min(ccx, self.ty), llalign_of_min(ccx, ty)) as u32); @@ -327,7 +327,7 @@ impl FnType { if let Layout::CEnum { signed, .. 
} = *ccx.layout_of(ty) { arg.signedness = Some(signed); } - if llsize_of_real(ccx, arg.ty) == 0 { + if llsize_of_alloc(ccx, arg.ty) == 0 { // For some forsaken reason, x86_64-pc-windows-gnu // doesn't ignore zero-sized struct arguments. // The same is true for s390x-unknown-linux-gnu. @@ -358,7 +358,7 @@ impl FnType { ty::TyRef(_, ty::TypeAndMut { ty, .. }) | ty::TyBox(ty) => { let llty = type_of::sizing_type_of(ccx, ty); - let llsz = llsize_of_real(ccx, llty); + let llsz = llsize_of_alloc(ccx, llty); ret.attrs.set_dereferenceable(llsz); } _ => {} @@ -427,7 +427,7 @@ impl FnType { } else { if let Some(inner) = rust_ptr_attrs(ty, &mut arg) { let llty = type_of::sizing_type_of(ccx, inner); - let llsz = llsize_of_real(ccx, llty); + let llsz = llsize_of_alloc(ccx, llty); arg.attrs.set_dereferenceable(llsz); } args.push(arg); @@ -469,8 +469,8 @@ impl FnType { return; } - let size = llsize_of_real(ccx, llty); - if size > llsize_of_real(ccx, ccx.int_type()) { + let size = llsize_of_alloc(ccx, llty); + if size > llsize_of_alloc(ccx, ccx.int_type()) { arg.make_indirect(ccx); } else if size > 0 { // We want to pass small aggregates as immediates, but using @@ -519,6 +519,7 @@ impl FnType { "powerpc64" => cabi_powerpc64::compute_abi_info(ccx, self), "s390x" => cabi_s390x::compute_abi_info(ccx, self), "asmjs" => cabi_asmjs::compute_abi_info(ccx, self), + "wasm32" => cabi_asmjs::compute_abi_info(ccx, self), a => ccx.sess().fatal(&format!("unrecognized arch \"{}\" in target specification", a)) } @@ -598,3 +599,73 @@ impl FnType { } } } + +pub fn align_up_to(off: usize, a: usize) -> usize { + return (off + a - 1) / a * a; +} + +fn align(off: usize, ty: Type, pointer: usize) -> usize { + let a = ty_align(ty, pointer); + return align_up_to(off, a); +} + +pub fn ty_align(ty: Type, pointer: usize) -> usize { + match ty.kind() { + Integer => ((ty.int_width() as usize) + 7) / 8, + Pointer => pointer, + Float => 4, + Double => 8, + Struct => { + if ty.is_packed() { + 1 + } else { + let str_tys = ty.field_types(); + str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t, pointer))) + } + } + Array => { + let elt = ty.element_type(); + ty_align(elt, pointer) + } + Vector => { + let len = ty.vector_length(); + let elt = ty.element_type(); + ty_align(elt, pointer) * len + } + _ => bug!("ty_align: unhandled type") + } +} + +pub fn ty_size(ty: Type, pointer: usize) -> usize { + match ty.kind() { + Integer => ((ty.int_width() as usize) + 7) / 8, + Pointer => pointer, + Float => 4, + Double => 8, + Struct => { + if ty.is_packed() { + let str_tys = ty.field_types(); + str_tys.iter().fold(0, |s, t| s + ty_size(*t, pointer)) + } else { + let str_tys = ty.field_types(); + let size = str_tys.iter().fold(0, |s, t| { + align(s, *t, pointer) + ty_size(*t, pointer) + }); + align(size, ty, pointer) + } + } + Array => { + let len = ty.array_length(); + let elt = ty.element_type(); + let eltsz = ty_size(elt, pointer); + len * eltsz + } + Vector => { + let len = ty.vector_length(); + let elt = ty.element_type(); + let eltsz = ty_size(elt, pointer); + len * eltsz + }, + _ => bug!("ty_size: unhandled type") + } +} diff --git a/src/librustc_trans/adt.rs b/src/librustc_trans/adt.rs index 67e5ec2616d29..c6f3ef0a5beed 100644 --- a/src/librustc_trans/adt.rs +++ b/src/librustc_trans/adt.rs @@ -41,23 +41,19 @@ //! used unboxed and any field can have pointers (including mutable) //! taken to it, implementing them for Rust seems difficult. 
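The module comment above introduces how ADTs are represented; the simplest case handled by the code that follows, a C-like enum (`CEnum` in the removed `Repr` enum, `layout::CEnum` in the new layout-driven code), is lowered to a bare integer of the discriminant's size. A small stand-alone illustration at the language level, independent of the compiler internals in this patch:

```rust
use std::mem::size_of;

// A C-like enum: no fields, only discriminants. With an explicit `repr`,
// the whole value is just that integer.
#[repr(u8)]
#[derive(Clone, Copy, PartialEq, Debug)]
enum Color {
    Red = 1,
    Green = 2,
    Blue = 5,
}

fn main() {
    // The enum occupies exactly one byte, the size of its discriminant.
    assert_eq!(size_of::<Color>(), 1);
    // Casting yields the discriminant value the variant is translated to.
    assert_eq!(Color::Blue as u8, 5);
}
```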
-pub use self::Repr::*; use super::Disr; use std; -use std::rc::Rc; use llvm::{ValueRef, True, IntEQ, IntNE}; -use rustc::ty::subst::Substs; -use rustc::ty::{self, AdtKind, Ty, TyCtxt}; -use syntax::ast; +use rustc::ty::layout; +use rustc::ty::{self, Ty, AdtKind}; use syntax::attr; -use syntax::attr::IntType; -use abi::FAT_PTR_ADDR; use build::*; use common::*; use debuginfo::DebugLoc; use glue; +use base; use machine; use monomorphize; use type_::Type; @@ -72,66 +68,6 @@ pub enum BranchKind { type Hint = attr::ReprAttr; -/// Representations. -#[derive(Eq, PartialEq, Debug)] -pub enum Repr<'tcx> { - /// C-like enums; basically an int. - CEnum(IntType, Disr, Disr), // discriminant range (signedness based on the IntType) - /// Single-case variants, and structs/tuples/records. - Univariant(Struct<'tcx>), - /// Untagged unions. - UntaggedUnion(Union<'tcx>), - /// General-case enums: for each case there is a struct, and they - /// all start with a field for the discriminant. - General(IntType, Vec>), - /// Two cases distinguished by a nullable pointer: the case with discriminant - /// `nndiscr` must have single field which is known to be nonnull due to its type. - /// The other case is known to be zero sized. Hence we represent the enum - /// as simply a nullable pointer: if not null it indicates the `nndiscr` variant, - /// otherwise it indicates the other case. - RawNullablePointer { - nndiscr: Disr, - nnty: Ty<'tcx>, - nullfields: Vec> - }, - /// Two cases distinguished by a nullable pointer: the case with discriminant - /// `nndiscr` is represented by the struct `nonnull`, where the `discrfield`th - /// field is known to be nonnull due to its type; if that field is null, then - /// it represents the other case, which is inhabited by at most one value - /// (and all other fields are undefined/unused). - /// - /// For example, `std::option::Option` instantiated at a safe pointer type - /// is represented such that `None` is a null pointer and `Some` is the - /// identity function. - StructWrappedNullablePointer { - nonnull: Struct<'tcx>, - nndiscr: Disr, - discrfield: DiscrField, - nullfields: Vec>, - } -} - -/// For structs, and struct-like parts of anything fancier. -#[derive(Eq, PartialEq, Debug)] -pub struct Struct<'tcx> { - // If the struct is DST, then the size and alignment do not take into - // account the unsized fields of the struct. - pub size: u64, - pub align: u32, - pub sized: bool, - pub packed: bool, - pub fields: Vec>, -} - -/// For untagged unions. -#[derive(Eq, PartialEq, Debug)] -pub struct Union<'tcx> { - pub min_size: u64, - pub align: u32, - pub packed: bool, - pub fields: Vec>, -} - #[derive(Copy, Clone)] pub struct MaybeSizedValue { pub value: ValueRef, @@ -158,506 +94,33 @@ impl MaybeSizedValue { } } -/// Decides how to represent a given type. -pub fn represent_type<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - t: Ty<'tcx>) - -> Rc> { - debug!("Representing: {}", t); - if let Some(repr) = cx.adt_reprs().borrow().get(&t) { - return repr.clone(); - } - - let repr = Rc::new(represent_type_uncached(cx, t)); - debug!("Represented as: {:?}", repr); - cx.adt_reprs().borrow_mut().insert(t, repr.clone()); - repr -} - -fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - t: Ty<'tcx>) -> Repr<'tcx> { +/// Given an enum, struct, closure, or tuple, extracts fields. +/// Treats closures as a struct with one variant. +/// `empty_if_no_variants` is a switch to deal with empty enums. +/// If true, `variant_index` is disregarded and an empty Vec returned in this case. 
+fn compute_fields<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, + variant_index: usize, + empty_if_no_variants: bool) -> Vec> { match t.sty { - ty::TyTuple(ref elems) => { - Univariant(mk_struct(cx, &elems[..], false, t)) - } - ty::TyClosure(_, ref substs) => { - Univariant(mk_struct(cx, &substs.upvar_tys, false, t)) - } - ty::TyAdt(def, substs) => match def.adt_kind() { - AdtKind::Struct => { - let ftys = def.struct_variant().fields.iter().map(|field| { - monomorphize::field_ty(cx.tcx(), substs, field) - }).collect::>(); - let packed = cx.tcx().lookup_packed(def.did); - - Univariant(mk_struct(cx, &ftys[..], packed, t)) - } - AdtKind::Union => { - let ftys = def.struct_variant().fields.iter().map(|field| { - monomorphize::field_ty(cx.tcx(), substs, field) - }).collect::>(); - let packed = cx.tcx().lookup_packed(def.did); - UntaggedUnion(mk_union(cx, &ftys[..], packed, t)) - } - AdtKind::Enum => { - let cases = get_cases(cx.tcx(), def, substs); - let hint = *cx.tcx().lookup_repr_hints(def.did).get(0) - .unwrap_or(&attr::ReprAny); - - if cases.is_empty() { - // Uninhabitable; represent as unit - // (Typechecking will reject discriminant-sizing attrs.) - assert_eq!(hint, attr::ReprAny); - return Univariant(mk_struct(cx, &[], false, t)); - } - - if cases.iter().all(|c| c.tys.is_empty()) { - // All bodies empty -> intlike - let discrs: Vec<_> = cases.iter().map(|c| Disr::from(c.discr)).collect(); - let bounds = IntBounds { - ulo: discrs.iter().min().unwrap().0, - uhi: discrs.iter().max().unwrap().0, - slo: discrs.iter().map(|n| n.0 as i64).min().unwrap(), - shi: discrs.iter().map(|n| n.0 as i64).max().unwrap() - }; - return mk_cenum(cx, hint, &bounds); - } - - // Since there's at least one - // non-empty body, explicit discriminants should have - // been rejected by a checker before this point. - if !cases.iter().enumerate().all(|(i,c)| c.discr == Disr::from(i)) { - bug!("non-C-like enum {} with specified discriminants", - cx.tcx().item_path_str(def.did)); - } - - if cases.len() == 1 && hint == attr::ReprAny { - // Equivalent to a struct or tuple. - return Univariant(mk_struct(cx, &cases[0].tys, false, t)); - } - - if cases.len() == 2 && hint == attr::ReprAny { - // Nullable pointer optimization - let mut discr = 0; - while discr < 2 { - if cases[1 - discr].is_zerolen(cx, t) { - let st = mk_struct(cx, &cases[discr].tys, - false, t); - match cases[discr].find_ptr(cx) { - Some(ref df) if df.len() == 1 && st.fields.len() == 1 => { - return RawNullablePointer { - nndiscr: Disr::from(discr), - nnty: st.fields[0], - nullfields: cases[1 - discr].tys.clone() - }; - } - Some(mut discrfield) => { - discrfield.push(0); - discrfield.reverse(); - return StructWrappedNullablePointer { - nndiscr: Disr::from(discr), - nonnull: st, - discrfield: discrfield, - nullfields: cases[1 - discr].tys.clone() - }; - } - None => {} - } - } - discr += 1; - } - } - - // The general case. - assert!((cases.len() - 1) as i64 >= 0); - let bounds = IntBounds { ulo: 0, uhi: (cases.len() - 1) as u64, - slo: 0, shi: (cases.len() - 1) as i64 }; - let min_ity = range_to_inttype(cx, hint, &bounds); - - // Create the set of structs that represent each variant - // Use the minimum integer type we figured out above - let fields : Vec<_> = cases.iter().map(|c| { - let mut ftys = vec!(ty_of_inttype(cx.tcx(), min_ity)); - ftys.extend_from_slice(&c.tys); - mk_struct(cx, &ftys, false, t) - }).collect(); - - - // Check to see if we should use a different type for the - // discriminant. 
If the overall alignment of the type is - // the same as the first field in each variant, we can safely use - // an alignment-sized type. - // We increase the size of the discriminant to avoid LLVM copying - // padding when it doesn't need to. This normally causes unaligned - // load/stores and excessive memcpy/memset operations. By using a - // bigger integer size, LLVM can be sure about it's contents and - // won't be so conservative. - // This check is needed to avoid increasing the size of types when - // the alignment of the first field is smaller than the overall - // alignment of the type. - let (_, align) = union_size_and_align(&fields); - let mut use_align = true; - for st in &fields { - // Get the first non-zero-sized field - let field = st.fields.iter().skip(1).filter(|ty| { - let t = type_of::sizing_type_of(cx, **ty); - machine::llsize_of_real(cx, t) != 0 || - // This case is only relevant for zero-sized types with large alignment - machine::llalign_of_min(cx, t) != 1 - }).next(); - - if let Some(field) = field { - let field_align = type_of::align_of(cx, *field); - if field_align != align { - use_align = false; - break; - } - } - } - - // If the alignment is smaller than the chosen discriminant size, don't use the - // alignment as the final size. - let min_ty = ll_inttype(&cx, min_ity); - let min_size = machine::llsize_of_real(cx, min_ty); - if (align as u64) < min_size { - use_align = false; - } - - let ity = if use_align { - // Use the overall alignment - match align { - 1 => attr::UnsignedInt(ast::UintTy::U8), - 2 => attr::UnsignedInt(ast::UintTy::U16), - 4 => attr::UnsignedInt(ast::UintTy::U32), - 8 if machine::llalign_of_min(cx, Type::i64(cx)) == 8 => - attr::UnsignedInt(ast::UintTy::U64), - _ => min_ity // use min_ity as a fallback - } - } else { - min_ity - }; - - let fields : Vec<_> = cases.iter().map(|c| { - let mut ftys = vec!(ty_of_inttype(cx.tcx(), ity)); - ftys.extend_from_slice(&c.tys); - mk_struct(cx, &ftys[..], false, t) - }).collect(); - - ensure_enum_fits_in_address_space(cx, &fields[..], t); - - General(ity, fields) - } + ty::TyAdt(ref def, _) if def.variants.len() == 0 && empty_if_no_variants => { + Vec::default() }, - _ => bug!("adt::represent_type called on non-ADT type: {}", t) - } -} - -// this should probably all be in ty -struct Case<'tcx> { - discr: Disr, - tys: Vec> -} - -/// This represents the (GEP) indices to follow to get to the discriminant field -pub type DiscrField = Vec; - -fn find_discr_field_candidate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - ty: Ty<'tcx>, - mut path: DiscrField) - -> Option { - match ty.sty { - // Fat &T/&mut T/Box i.e. T is [T], str, or Trait - ty::TyRef(_, ty::TypeAndMut { ty, .. }) | ty::TyBox(ty) if !type_is_sized(tcx, ty) => { - path.push(FAT_PTR_ADDR); - Some(path) + ty::TyAdt(ref def, ref substs) => { + def.variants[variant_index].fields.iter().map(|f| { + monomorphize::field_ty(cx.tcx(), substs, f) + }).collect::>() }, - - // Regular thin pointer: &T/&mut T/Box - ty::TyRef(..) | ty::TyBox(..) => Some(path), - - // Function pointer: `fn() -> i32` - ty::TyFnPtr(_) => Some(path), - - // Is this the NonZero lang item wrapping a pointer or integer type? - ty::TyAdt(def, substs) if Some(def.did) == tcx.lang_items.non_zero() => { - let nonzero_fields = &def.struct_variant().fields; - assert_eq!(nonzero_fields.len(), 1); - let field_ty = monomorphize::field_ty(tcx, substs, &nonzero_fields[0]); - match field_ty.sty { - ty::TyRawPtr(ty::TypeAndMut { ty, .. 
}) if !type_is_sized(tcx, ty) => { - path.extend_from_slice(&[0, FAT_PTR_ADDR]); - Some(path) - }, - ty::TyRawPtr(..) | ty::TyInt(..) | ty::TyUint(..) => { - path.push(0); - Some(path) - }, - _ => None - } - }, - - // Perhaps one of the fields of this struct is non-zero - // let's recurse and find out - ty::TyAdt(def, substs) if def.is_struct() => { - for (j, field) in def.struct_variant().fields.iter().enumerate() { - let field_ty = monomorphize::field_ty(tcx, substs, field); - if let Some(mut fpath) = find_discr_field_candidate(tcx, field_ty, path.clone()) { - fpath.push(j); - return Some(fpath); - } - } - None - }, - - // Perhaps one of the upvars of this struct is non-zero - // Let's recurse and find out! - ty::TyClosure(_, ref substs) => { - for (j, &ty) in substs.upvar_tys.iter().enumerate() { - if let Some(mut fpath) = find_discr_field_candidate(tcx, ty, path.clone()) { - fpath.push(j); - return Some(fpath); - } - } - None - }, - - // Can we use one of the fields in this tuple? - ty::TyTuple(ref tys) => { - for (j, &ty) in tys.iter().enumerate() { - if let Some(mut fpath) = find_discr_field_candidate(tcx, ty, path.clone()) { - fpath.push(j); - return Some(fpath); - } - } - None - }, - - // Is this a fixed-size array of something non-zero - // with at least one element? - ty::TyArray(ety, d) if d > 0 => { - if let Some(mut vpath) = find_discr_field_candidate(tcx, ety, path) { - vpath.push(0); - Some(vpath) - } else { - None - } - }, - - // Anything else is not a pointer - _ => None - } -} - -impl<'tcx> Case<'tcx> { - fn is_zerolen<'a>(&self, cx: &CrateContext<'a, 'tcx>, scapegoat: Ty<'tcx>) -> bool { - mk_struct(cx, &self.tys, false, scapegoat).size == 0 - } - - fn find_ptr<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Option { - for (i, &ty) in self.tys.iter().enumerate() { - if let Some(mut path) = find_discr_field_candidate(cx.tcx(), ty, vec![]) { - path.push(i); - return Some(path); - } - } - None - } -} - -fn get_cases<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - adt: ty::AdtDef<'tcx>, - substs: &Substs<'tcx>) - -> Vec> { - adt.variants.iter().map(|vi| { - let field_tys = vi.fields.iter().map(|field| { - monomorphize::field_ty(tcx, substs, field) - }).collect(); - Case { discr: Disr::from(vi.disr_val), tys: field_tys } - }).collect() -} - -fn mk_struct<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - tys: &[Ty<'tcx>], packed: bool, - scapegoat: Ty<'tcx>) - -> Struct<'tcx> { - let sized = tys.iter().all(|&ty| type_is_sized(cx.tcx(), ty)); - let lltys : Vec = if sized { - tys.iter().map(|&ty| type_of::sizing_type_of(cx, ty)).collect() - } else { - tys.iter().filter(|&ty| type_is_sized(cx.tcx(), *ty)) - .map(|&ty| type_of::sizing_type_of(cx, ty)).collect() - }; - - ensure_struct_fits_in_address_space(cx, &lltys[..], packed, scapegoat); - - let llty_rec = Type::struct_(cx, &lltys[..], packed); - Struct { - size: machine::llsize_of_alloc(cx, llty_rec), - align: machine::llalign_of_min(cx, llty_rec), - sized: sized, - packed: packed, - fields: tys.to_vec(), - } -} - -fn mk_union<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - tys: &[Ty<'tcx>], packed: bool, - _scapegoat: Ty<'tcx>) - -> Union<'tcx> { - let mut min_size = 0; - let mut align = 0; - for llty in tys.iter().map(|&ty| type_of::sizing_type_of(cx, ty)) { - let field_size = machine::llsize_of_alloc(cx, llty); - if min_size < field_size { - min_size = field_size; - } - let field_align = machine::llalign_of_min(cx, llty); - if align < field_align { - align = field_align; - } - } - - Union { - min_size: min_size, - align: if packed { 1 } else { align 
}, - packed: packed, - fields: tys.to_vec(), - } -} - -#[derive(Debug)] -struct IntBounds { - slo: i64, - shi: i64, - ulo: u64, - uhi: u64 -} - -fn mk_cenum<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - hint: Hint, bounds: &IntBounds) - -> Repr<'tcx> { - let it = range_to_inttype(cx, hint, bounds); - match it { - attr::SignedInt(_) => CEnum(it, Disr(bounds.slo as u64), Disr(bounds.shi as u64)), - attr::UnsignedInt(_) => CEnum(it, Disr(bounds.ulo), Disr(bounds.uhi)) - } -} - -fn range_to_inttype(cx: &CrateContext, hint: Hint, bounds: &IntBounds) -> IntType { - debug!("range_to_inttype: {:?} {:?}", hint, bounds); - // Lists of sizes to try. u64 is always allowed as a fallback. - #[allow(non_upper_case_globals)] - const choose_shortest: &'static [IntType] = &[ - attr::UnsignedInt(ast::UintTy::U8), attr::SignedInt(ast::IntTy::I8), - attr::UnsignedInt(ast::UintTy::U16), attr::SignedInt(ast::IntTy::I16), - attr::UnsignedInt(ast::UintTy::U32), attr::SignedInt(ast::IntTy::I32)]; - #[allow(non_upper_case_globals)] - const at_least_32: &'static [IntType] = &[ - attr::UnsignedInt(ast::UintTy::U32), attr::SignedInt(ast::IntTy::I32)]; - - let attempts; - match hint { - attr::ReprInt(span, ity) => { - if !bounds_usable(cx, ity, bounds) { - span_bug!(span, "representation hint insufficient for discriminant range") - } - return ity; - } - attr::ReprExtern => { - attempts = match &cx.sess().target.target.arch[..] { - // WARNING: the ARM EABI has two variants; the one corresponding to `at_least_32` - // appears to be used on Linux and NetBSD, but some systems may use the variant - // corresponding to `choose_shortest`. However, we don't run on those yet...? - "arm" => at_least_32, - _ => at_least_32, - } - } - attr::ReprAny => { - attempts = choose_shortest; + ty::TyTuple(fields) => fields.to_vec(), + ty::TyClosure(_, substs) => { + if variant_index > 0 { bug!("{} is a closure, which only has one variant", t);} + substs.upvar_tys.to_vec() }, - attr::ReprPacked => { - bug!("range_to_inttype: found ReprPacked on an enum"); - } - attr::ReprSimd => { - bug!("range_to_inttype: found ReprSimd on an enum"); - } - } - for &ity in attempts { - if bounds_usable(cx, ity, bounds) { - return ity; - } - } - return attr::UnsignedInt(ast::UintTy::U64); -} - -pub fn ll_inttype(cx: &CrateContext, ity: IntType) -> Type { - match ity { - attr::SignedInt(t) => Type::int_from_ty(cx, t), - attr::UnsignedInt(t) => Type::uint_from_ty(cx, t) - } -} - -fn bounds_usable(cx: &CrateContext, ity: IntType, bounds: &IntBounds) -> bool { - debug!("bounds_usable: {:?} {:?}", ity, bounds); - match ity { - attr::SignedInt(_) => { - let lllo = C_integral(ll_inttype(cx, ity), bounds.slo as u64, true); - let llhi = C_integral(ll_inttype(cx, ity), bounds.shi as u64, true); - bounds.slo == const_to_int(lllo) as i64 && bounds.shi == const_to_int(llhi) as i64 - } - attr::UnsignedInt(_) => { - let lllo = C_integral(ll_inttype(cx, ity), bounds.ulo, false); - let llhi = C_integral(ll_inttype(cx, ity), bounds.uhi, false); - bounds.ulo == const_to_uint(lllo) as u64 && bounds.uhi == const_to_uint(llhi) as u64 - } - } -} - -pub fn ty_of_inttype<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ity: IntType) -> Ty<'tcx> { - match ity { - attr::SignedInt(t) => tcx.mk_mach_int(t), - attr::UnsignedInt(t) => tcx.mk_mach_uint(t) - } -} - -// LLVM doesn't like types that don't fit in the address space -fn ensure_struct_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - fields: &[Type], - packed: bool, - scapegoat: Ty<'tcx>) { - let mut offset = 0; - for &llty in 
fields { - // Invariant: offset < ccx.obj_size_bound() <= 1<<61 - if !packed { - let type_align = machine::llalign_of_min(ccx, llty); - offset = roundup(offset, type_align); - } - // type_align is a power-of-2, so still offset < ccx.obj_size_bound() - // llsize_of_alloc(ccx, llty) is also less than ccx.obj_size_bound() - // so the sum is less than 1<<62 (and therefore can't overflow). - offset += machine::llsize_of_alloc(ccx, llty); - - if offset >= ccx.obj_size_bound() { - ccx.report_overbig_object(scapegoat); - } - } -} - -fn union_size_and_align(sts: &[Struct]) -> (machine::llsize, machine::llalign) { - let size = sts.iter().map(|st| st.size).max().unwrap(); - let align = sts.iter().map(|st| st.align).max().unwrap(); - (roundup(size, align), align) -} - -fn ensure_enum_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - fields: &[Struct], - scapegoat: Ty<'tcx>) { - let (total_size, _) = union_size_and_align(fields); - - if total_size >= ccx.obj_size_bound() { - ccx.report_overbig_object(scapegoat); + _ => bug!("{} is not a type that can have fields.", t) } } +/// This represents the (GEP) indices to follow to get to the discriminant field +pub type DiscrField = Vec; /// LLVM-level types are a little complicated. /// @@ -667,8 +130,8 @@ fn ensure_enum_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, /// For nominal types, in some cases, we need to use LLVM named structs /// and fill in the actual contents in a second pass to prevent /// unbounded recursion; see also the comments in `trans::type_of`. -pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>) -> Type { - generic_type_of(cx, r, None, false, false) +pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Type { + generic_type_of(cx, t, None, false, false) } @@ -676,41 +139,63 @@ pub fn type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>) -> Type { // this out, but if you call this on an unsized type without realising it, you // are going to get the wrong type (it will not include the unsized parts of it). pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - r: &Repr<'tcx>, dst: bool) -> Type { - generic_type_of(cx, r, None, true, dst) + t: Ty<'tcx>, dst: bool) -> Type { + generic_type_of(cx, t, None, true, dst) } pub fn incomplete_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - r: &Repr<'tcx>, name: &str) -> Type { - generic_type_of(cx, r, Some(name), false, false) + t: Ty<'tcx>, name: &str) -> Type { + generic_type_of(cx, t, Some(name), false, false) } pub fn finish_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - r: &Repr<'tcx>, llty: &mut Type) { - match *r { - CEnum(..) | General(..) | UntaggedUnion(..) | RawNullablePointer { .. } => { } - Univariant(ref st) | StructWrappedNullablePointer { nonnull: ref st, .. } => - llty.set_struct_body(&struct_llfields(cx, st, false, false), - st.packed) + t: Ty<'tcx>, llty: &mut Type) { + let l = cx.layout_of(t); + debug!("finish_type_of: {} with layout {:#?}", t, l); + match *l { + layout::CEnum { .. } | layout::General { .. } + | layout::UntaggedUnion { .. } | layout::RawNullablePointer { .. } => { } + layout::Univariant { ..} + | layout::StructWrappedNullablePointer { .. } => { + let (nonnull_variant, packed) = match *l { + layout::Univariant { ref variant, .. } => (0, variant.packed), + layout::StructWrappedNullablePointer { nndiscr, ref nonnull, .. 
} => + (nndiscr, nonnull.packed), + _ => unreachable!() + }; + let fields = compute_fields(cx, t, nonnull_variant as usize, true); + llty.set_struct_body(&struct_llfields(cx, &fields, false, false), + packed) + }, + _ => bug!("This function cannot handle {} with layout {:#?}", t, l) } } fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - r: &Repr<'tcx>, + t: Ty<'tcx>, name: Option<&str>, sizing: bool, dst: bool) -> Type { - debug!("adt::generic_type_of r: {:?} name: {:?} sizing: {} dst: {}", - r, name, sizing, dst); - match *r { - CEnum(ity, ..) => ll_inttype(cx, ity), - RawNullablePointer { nnty, .. } => - type_of::sizing_type_of(cx, nnty), - StructWrappedNullablePointer { nonnull: ref st, .. } => { + let l = cx.layout_of(t); + debug!("adt::generic_type_of t: {:?} name: {:?} sizing: {} dst: {}", + t, name, sizing, dst); + match *l { + layout::CEnum { discr, .. } => Type::from_integer(cx, discr), + layout::RawNullablePointer { nndiscr, .. } => { + let (def, substs) = match t.sty { + ty::TyAdt(d, s) => (d, s), + _ => bug!("{} is not an ADT", t) + }; + let nnty = monomorphize::field_ty(cx.tcx(), substs, + &def.variants[nndiscr as usize].fields[0]); + type_of::sizing_type_of(cx, nnty) + } + layout::StructWrappedNullablePointer { nndiscr, ref nonnull, .. } => { + let fields = compute_fields(cx, t, nndiscr as usize, false); match name { None => { - Type::struct_(cx, &struct_llfields(cx, st, sizing, dst), - st.packed) + Type::struct_(cx, &struct_llfields(cx, &fields, sizing, dst), + nonnull.packed) } Some(name) => { assert_eq!(sizing, false); @@ -718,11 +203,14 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } } } - Univariant(ref st) => { + layout::Univariant { ref variant, .. } => { + // Note that this case also handles empty enums. + // Thus the true as the final parameter here. + let fields = compute_fields(cx, t, 0, true); match name { None => { - let fields = struct_llfields(cx, st, sizing, dst); - Type::struct_(cx, &fields, st.packed) + let fields = struct_llfields(cx, &fields, sizing, dst); + Type::struct_(cx, &fields, variant.packed) } Some(name) => { // Hypothesis: named_struct's can never need a @@ -732,35 +220,27 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } } } - UntaggedUnion(ref un) => { + layout::Vector { element, count } => { + let elem_ty = Type::from_primitive(cx, element); + Type::vector(&elem_ty, count) + } + layout::UntaggedUnion { ref variants, .. }=> { // Use alignment-sized ints to fill all the union storage. 
- let (size, align) = (roundup(un.min_size, un.align), un.align); - - let align_s = align as u64; - assert_eq!(size % align_s, 0); // Ensure division in align_units comes out evenly - let align_units = size / align_s; - let fill_ty = match align_s { - 1 => Type::array(&Type::i8(cx), align_units), - 2 => Type::array(&Type::i16(cx), align_units), - 4 => Type::array(&Type::i32(cx), align_units), - 8 if machine::llalign_of_min(cx, Type::i64(cx)) == 8 => - Type::array(&Type::i64(cx), align_units), - a if a.count_ones() == 1 => Type::array(&Type::vector(&Type::i32(cx), a / 4), - align_units), - _ => bug!("unsupported union alignment: {}", align) - }; + let size = variants.stride().bytes(); + let align = variants.align.abi(); + let fill = union_fill(cx, size, align); match name { None => { - Type::struct_(cx, &[fill_ty], un.packed) + Type::struct_(cx, &[fill], variants.packed) } Some(name) => { let mut llty = Type::named_struct(cx, name); - llty.set_struct_body(&[fill_ty], un.packed); + llty.set_struct_body(&[fill], variants.packed); llty } } } - General(ity, ref sts) => { + layout::General { discr, size, align, .. } => { // We need a representation that has: // * The alignment of the most-aligned field // * The size of the largest variant (rounded up to that alignment) @@ -773,29 +253,20 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // of the size. // // FIXME #10604: this breaks when vector types are present. - let (size, align) = union_size_and_align(&sts[..]); - let align_s = align as u64; - let discr_ty = ll_inttype(cx, ity); - let discr_size = machine::llsize_of_alloc(cx, discr_ty); - let padded_discr_size = roundup(discr_size, align); - assert_eq!(size % align_s, 0); // Ensure division in align_units comes out evenly - let align_units = (size - padded_discr_size) / align_s; - let fill_ty = match align_s { - 1 => Type::array(&Type::i8(cx), align_units), - 2 => Type::array(&Type::i16(cx), align_units), - 4 => Type::array(&Type::i32(cx), align_units), - 8 if machine::llalign_of_min(cx, Type::i64(cx)) == 8 => - Type::array(&Type::i64(cx), align_units), - a if a.count_ones() == 1 => Type::array(&Type::vector(&Type::i32(cx), a / 4), - align_units), - _ => bug!("unsupported enum alignment: {}", align) - }; - assert_eq!(machine::llalign_of_min(cx, fill_ty), align); + let size = size.bytes(); + let align = align.abi(); + let discr_ty = Type::from_integer(cx, discr); + let discr_size = discr.size().bytes(); + let padded_discr_size = roundup(discr_size, align as u32); + let variant_part_size = size-padded_discr_size; + let variant_fill = union_fill(cx, variant_part_size, align); + + assert_eq!(machine::llalign_of_min(cx, variant_fill), align as u32); assert_eq!(padded_discr_size % discr_size, 0); // Ensure discr_ty can fill pad evenly let fields: Vec = [discr_ty, Type::array(&discr_ty, (padded_discr_size - discr_size)/discr_size), - fill_ty].iter().cloned().collect(); + variant_fill].iter().cloned().collect(); match name { None => { Type::struct_(cx, &fields[..], false) @@ -807,100 +278,126 @@ fn generic_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } } } + _ => bug!("Unsupported type {} represented as {:#?}", t, l) } } -fn struct_llfields<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, st: &Struct<'tcx>, +fn union_fill(cx: &CrateContext, size: u64, align: u64) -> Type { + assert_eq!(size%align, 0); + assert_eq!(align.count_ones(), 1, "Alignment must be a power fof 2. 
Got {}", align); + let align_units = size/align; + let dl = &cx.tcx().data_layout; + let layout_align = layout::Align::from_bytes(align, align).unwrap(); + if let Some(ity) = layout::Integer::for_abi_align(dl, layout_align) { + Type::array(&Type::from_integer(cx, ity), align_units) + } else { + Type::array(&Type::vector(&Type::i32(cx), align/4), + align_units) + } +} + + +fn struct_llfields<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, fields: &Vec>, sizing: bool, dst: bool) -> Vec { if sizing { - st.fields.iter().filter(|&ty| !dst || type_is_sized(cx.tcx(), *ty)) + fields.iter().filter(|&ty| !dst || type_is_sized(cx.tcx(), *ty)) .map(|&ty| type_of::sizing_type_of(cx, ty)).collect() } else { - st.fields.iter().map(|&ty| type_of::in_memory_type_of(cx, ty)).collect() + fields.iter().map(|&ty| type_of::in_memory_type_of(cx, ty)).collect() } } /// Obtain a representation of the discriminant sufficient to translate /// destructuring; this may or may not involve the actual discriminant. pub fn trans_switch<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - r: &Repr<'tcx>, + t: Ty<'tcx>, scrutinee: ValueRef, range_assert: bool) -> (BranchKind, Option) { - match *r { - CEnum(..) | General(..) | - RawNullablePointer { .. } | StructWrappedNullablePointer { .. } => { - (BranchKind::Switch, Some(trans_get_discr(bcx, r, scrutinee, None, range_assert))) + let l = bcx.ccx().layout_of(t); + match *l { + layout::CEnum { .. } | layout::General { .. } | + layout::RawNullablePointer { .. } | layout::StructWrappedNullablePointer { .. } => { + (BranchKind::Switch, Some(trans_get_discr(bcx, t, scrutinee, None, range_assert))) } - Univariant(..) | UntaggedUnion(..) => { + layout::Univariant { .. } | layout::UntaggedUnion { .. } => { // N.B.: Univariant means <= 1 enum variants (*not* == 1 variants). (BranchKind::Single, None) - } + }, + _ => bug!("{} is not an enum.", t) } } -pub fn is_discr_signed<'tcx>(r: &Repr<'tcx>) -> bool { - match *r { - CEnum(ity, ..) => ity.is_signed(), - General(ity, _) => ity.is_signed(), - Univariant(..) | UntaggedUnion(..) => false, - RawNullablePointer { .. } => false, - StructWrappedNullablePointer { .. } => false, +pub fn is_discr_signed<'tcx>(l: &layout::Layout) -> bool { + match *l { + layout::CEnum { signed, .. }=> signed, + _ => false, } } /// Obtain the actual discriminant of a value. -pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, +pub fn trans_get_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, scrutinee: ValueRef, cast_to: Option, range_assert: bool) -> ValueRef { - debug!("trans_get_discr r: {:?}", r); - let val = match *r { - CEnum(ity, min, max) => { - load_discr(bcx, ity, scrutinee, min, max, range_assert) + let (def, substs) = match t.sty { + ty::TyAdt(ref def, substs) if def.adt_kind() == AdtKind::Enum => (def, substs), + _ => bug!("{} is not an enum", t) + }; + + debug!("trans_get_discr t: {:?}", t); + let l = bcx.ccx().layout_of(t); + + let val = match *l { + layout::CEnum { discr, min, max, .. } => { + load_discr(bcx, discr, scrutinee, min, max, range_assert) } - General(ity, ref cases) => { + layout::General { discr, .. } => { let ptr = StructGEP(bcx, scrutinee, 0); - load_discr(bcx, ity, ptr, Disr(0), Disr(cases.len() as u64 - 1), + load_discr(bcx, discr, ptr, 0, def.variants.len() as u64 - 1, range_assert) } - Univariant(..) | UntaggedUnion(..) => C_u8(bcx.ccx(), 0), - RawNullablePointer { nndiscr, nnty, .. 
} => { - let cmp = if nndiscr == Disr(0) { IntEQ } else { IntNE }; - let llptrty = type_of::sizing_type_of(bcx.ccx(), nnty); + layout::Univariant { .. } | layout::UntaggedUnion { .. } => C_u8(bcx.ccx(), 0), + layout::RawNullablePointer { nndiscr, .. } => { + let cmp = if nndiscr == 0 { IntEQ } else { IntNE }; + let llptrty = type_of::sizing_type_of(bcx.ccx(), + monomorphize::field_ty(bcx.ccx().tcx(), substs, + &def.variants[nndiscr as usize].fields[0])); ICmp(bcx, cmp, Load(bcx, scrutinee), C_null(llptrty), DebugLoc::None) } - StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => { + layout::StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => { struct_wrapped_nullable_bitdiscr(bcx, nndiscr, discrfield, scrutinee) - } + }, + _ => bug!("{} is not an enum", t) }; match cast_to { None => val, - Some(llty) => if is_discr_signed(r) { SExt(bcx, val, llty) } else { ZExt(bcx, val, llty) } + Some(llty) => if is_discr_signed(&l) { SExt(bcx, val, llty) } else { ZExt(bcx, val, llty) } } } -fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: Disr, discrfield: &DiscrField, +fn struct_wrapped_nullable_bitdiscr(bcx: Block, nndiscr: u64, discrfield: &layout::FieldPath, scrutinee: ValueRef) -> ValueRef { - let llptrptr = GEPi(bcx, scrutinee, &discrfield[..]); + let llptrptr = GEPi(bcx, scrutinee, + &discrfield.iter().map(|f| *f as usize).collect::>()[..]); let llptr = Load(bcx, llptrptr); - let cmp = if nndiscr == Disr(0) { IntEQ } else { IntNE }; + let cmp = if nndiscr == 0 { IntEQ } else { IntNE }; ICmp(bcx, cmp, llptr, C_null(val_ty(llptr)), DebugLoc::None) } /// Helper for cases where the discriminant is simply loaded. -fn load_discr(bcx: Block, ity: IntType, ptr: ValueRef, min: Disr, max: Disr, +fn load_discr(bcx: Block, ity: layout::Integer, ptr: ValueRef, min: u64, max: u64, range_assert: bool) -> ValueRef { - let llty = ll_inttype(bcx.ccx(), ity); + let llty = Type::from_integer(bcx.ccx(), ity); assert_eq!(val_ty(ptr), llty.ptr_to()); - let bits = machine::llbitsize_of_real(bcx.ccx(), llty); + let bits = ity.size().bits(); assert!(bits <= 64); let bits = bits as usize; - let mask = Disr(!0u64 >> (64 - bits)); + let mask = !0u64 >> (64 - bits); // For a (max) discr of -1, max will be `-1 as usize`, which overflows. // However, that is fine here (it would still represent the full range), - if max.wrapping_add(Disr(1)) & mask == min & mask || !range_assert { + if max.wrapping_add(1) & mask == min & mask || !range_assert { // i.e., if the range is everything. The lo==hi case would be // rejected by the LLVM verifier (it would mean either an // empty set, which is impossible, or the entire range of the @@ -909,7 +406,7 @@ fn load_discr(bcx: Block, ity: IntType, ptr: ValueRef, min: Disr, max: Disr, } else { // llvm::ConstantRange can deal with ranges that wrap around, // so an overflow on (max + 1) is fine. - LoadRangeAssert(bcx, ptr, min.0, max.0.wrapping_add(1), /* signed: */ True) + LoadRangeAssert(bcx, ptr, min, max.wrapping_add(1), /* signed: */ True) } } @@ -917,108 +414,133 @@ fn load_discr(bcx: Block, ity: IntType, ptr: ValueRef, min: Disr, max: Disr, /// discriminant-like value returned by `trans_switch`. /// /// This should ideally be less tightly tied to `_match`. -pub fn trans_case<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr, discr: Disr) +pub fn trans_case<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, value: Disr) -> ValueRef { - match *r { - CEnum(ity, ..) 
=> { - C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true) - } - General(ity, _) => { - C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true) + let l = bcx.ccx().layout_of(t); + match *l { + layout::CEnum { discr, .. } + | layout::General { discr, .. }=> { + C_integral(Type::from_integer(bcx.ccx(), discr), value.0, true) } - Univariant(..) | UntaggedUnion(..) => { - bug!("no cases for univariants, structs or unions") + layout::RawNullablePointer { .. } | + layout::StructWrappedNullablePointer { .. } => { + assert!(value == Disr(0) || value == Disr(1)); + C_bool(bcx.ccx(), value != Disr(0)) } - RawNullablePointer { .. } | - StructWrappedNullablePointer { .. } => { - assert!(discr == Disr(0) || discr == Disr(1)); - C_bool(bcx.ccx(), discr != Disr(0)) + _ => { + bug!("{} does not have a discriminant. Represented as {:#?}", t, l); } } } /// Set the discriminant for a new value of the given case of the given /// representation. -pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, - val: ValueRef, discr: Disr) { - match *r { - CEnum(ity, min, max) => { - assert_discr_in_range(ity, min, max, discr); - Store(bcx, C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true), +pub fn trans_set_discr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, + val: ValueRef, to: Disr) { + let l = bcx.ccx().layout_of(t); + match *l { + layout::CEnum{ discr, min, max, .. } => { + assert_discr_in_range(Disr(min), Disr(max), to); + Store(bcx, C_integral(Type::from_integer(bcx.ccx(), discr), to.0, true), val); } - General(ity, _) => { - Store(bcx, C_integral(ll_inttype(bcx.ccx(), ity), discr.0, true), + layout::General{ discr, .. } => { + Store(bcx, C_integral(Type::from_integer(bcx.ccx(), discr), to.0, true), StructGEP(bcx, val, 0)); } - Univariant(_) => { - assert_eq!(discr, Disr(0)); + layout::Univariant { .. } + | layout::UntaggedUnion { .. } + | layout::Vector { .. } => { + assert_eq!(to, Disr(0)); } - UntaggedUnion(..) => { - assert_eq!(discr, Disr(0)); - } - RawNullablePointer { nndiscr, nnty, ..} => { - if discr != nndiscr { + layout::RawNullablePointer { nndiscr, .. } => { + let nnty = compute_fields(bcx.ccx(), t, nndiscr as usize, false)[0]; + if to.0 != nndiscr { let llptrty = type_of::sizing_type_of(bcx.ccx(), nnty); Store(bcx, C_null(llptrty), val); } } - StructWrappedNullablePointer { nndiscr, ref discrfield, .. } => { - if discr != nndiscr { - let llptrptr = GEPi(bcx, val, &discrfield[..]); - let llptrty = val_ty(llptrptr).element_type(); - Store(bcx, C_null(llptrty), llptrptr); + layout::StructWrappedNullablePointer { nndiscr, ref discrfield, ref nonnull, .. } => { + if to.0 != nndiscr { + if target_sets_discr_via_memset(bcx) { + // Issue #34427: As workaround for LLVM bug on + // ARM, use memset of 0 on whole struct rather + // than storing null to single target field. 
+ let b = B(bcx); + let llptr = b.pointercast(val, Type::i8(b.ccx).ptr_to()); + let fill_byte = C_u8(b.ccx, 0); + let size = C_uint(b.ccx, nonnull.stride().bytes()); + let align = C_i32(b.ccx, nonnull.align.abi() as i32); + base::call_memset(&b, llptr, fill_byte, size, align, false); + } else { + let path = discrfield.iter().map(|&i| i as usize).collect::>(); + let llptrptr = GEPi(bcx, val, &path[..]); + let llptrty = val_ty(llptrptr).element_type(); + Store(bcx, C_null(llptrty), llptrptr); + } } } + _ => bug!("Cannot handle {} represented as {:#?}", t, l) } } -fn assert_discr_in_range(ity: IntType, min: Disr, max: Disr, discr: Disr) { - match ity { - attr::UnsignedInt(_) => { - assert!(min <= discr); - assert!(discr <= max); - }, - attr::SignedInt(_) => { - assert!(min.0 as i64 <= discr.0 as i64); - assert!(discr.0 as i64 <= max.0 as i64); - }, +fn target_sets_discr_via_memset<'blk, 'tcx>(bcx: Block<'blk, 'tcx>) -> bool { + bcx.sess().target.target.arch == "arm" || bcx.sess().target.target.arch == "aarch64" +} + +fn assert_discr_in_range(min: Disr, max: Disr, discr: Disr) { + if min <= max { + assert!(min <= discr && discr <= max) + } else { + assert!(min <= discr || discr <= max) } } /// Access a field, at a point when the value's case is known. -pub fn trans_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, r: &Repr<'tcx>, +pub fn trans_field_ptr<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, t: Ty<'tcx>, val: MaybeSizedValue, discr: Disr, ix: usize) -> ValueRef { - trans_field_ptr_builder(&bcx.build(), r, val, discr, ix) + trans_field_ptr_builder(&bcx.build(), t, val, discr, ix) } /// Access a field, at a point when the value's case is known. pub fn trans_field_ptr_builder<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>, - r: &Repr<'tcx>, + t: Ty<'tcx>, val: MaybeSizedValue, discr: Disr, ix: usize) -> ValueRef { + let l = bcx.ccx().layout_of(t); + debug!("trans_field_ptr_builder on {} represented as {:#?}", t, l); // Note: if this ever needs to generate conditionals (e.g., if we // decide to do some kind of cdr-coding-like non-unique repr // someday), it will need to return a possibly-new bcx as well. - match *r { - CEnum(..) => { - bug!("element access in C-like enum") - } - Univariant(ref st) => { + match *l { + layout::Univariant { ref variant, .. } => { assert_eq!(discr, Disr(0)); - struct_field_ptr(bcx, st, val, ix, false) - } - General(_, ref cases) => { - struct_field_ptr(bcx, &cases[discr.0 as usize], val, ix + 1, true) - } - UntaggedUnion(ref un) => { - let ty = type_of::in_memory_type_of(bcx.ccx(), un.fields[ix]); + struct_field_ptr(bcx, &variant, + &compute_fields(bcx.ccx(), t, 0, false), + val, ix, false) + } + layout::Vector { count, .. } => { + assert_eq!(discr.0, 0); + assert!((ix as u64) < count); + bcx.struct_gep(val.value, ix) + } + layout::General { discr: d, ref variants, .. } => { + let mut fields = compute_fields(bcx.ccx(), t, discr.0 as usize, false); + fields.insert(0, d.to_ty(&bcx.ccx().tcx(), false)); + struct_field_ptr(bcx, &variants[discr.0 as usize], + &fields, + val, ix + 1, true) + } + layout::UntaggedUnion { .. } => { + let fields = compute_fields(bcx.ccx(), t, 0, false); + let ty = type_of::in_memory_type_of(bcx.ccx(), fields[ix]); if bcx.is_unreachable() { return C_undef(ty.ptr_to()); } bcx.pointercast(val.value, ty.ptr_to()) } - RawNullablePointer { nndiscr, ref nullfields, .. } | - StructWrappedNullablePointer { nndiscr, ref nullfields, .. } if discr != nndiscr => { + layout::RawNullablePointer { nndiscr, .. } | + layout::StructWrappedNullablePointer { nndiscr, .. 
} if discr.0 != nndiscr => { + let nullfields = compute_fields(bcx.ccx(), t, (1-nndiscr) as usize, false); // The unit-like case might have a nonzero number of unit-like fields. // (e.d., Result of Either with (), as one side.) let ty = type_of::type_of(bcx.ccx(), nullfields[ix]); @@ -1028,32 +550,36 @@ pub fn trans_field_ptr_builder<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>, if bcx.is_unreachable() { return C_undef(ty.ptr_to()); } bcx.pointercast(val.value, ty.ptr_to()) } - RawNullablePointer { nndiscr, nnty, .. } => { + layout::RawNullablePointer { nndiscr, .. } => { + let nnty = compute_fields(bcx.ccx(), t, nndiscr as usize, false)[0]; assert_eq!(ix, 0); - assert_eq!(discr, nndiscr); + assert_eq!(discr.0, nndiscr); let ty = type_of::type_of(bcx.ccx(), nnty); if bcx.is_unreachable() { return C_undef(ty.ptr_to()); } bcx.pointercast(val.value, ty.ptr_to()) } - StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => { - assert_eq!(discr, nndiscr); - struct_field_ptr(bcx, nonnull, val, ix, false) + layout::StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => { + assert_eq!(discr.0, nndiscr); + struct_field_ptr(bcx, &nonnull, + &compute_fields(bcx.ccx(), t, discr.0 as usize, false), + val, ix, false) } + _ => bug!("element access in type without elements: {} represented as {:#?}", t, l) } } fn struct_field_ptr<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>, - st: &Struct<'tcx>, val: MaybeSizedValue, + st: &layout::Struct, fields: &Vec>, val: MaybeSizedValue, ix: usize, needs_cast: bool) -> ValueRef { let ccx = bcx.ccx(); - let fty = st.fields[ix]; + let fty = fields[ix]; let ll_fty = type_of::in_memory_type_of(bcx.ccx(), fty); if bcx.is_unreachable() { return C_undef(ll_fty.ptr_to()); } let ptr_val = if needs_cast { - let fields = st.fields.iter().map(|&ty| { + let fields = fields.iter().map(|&ty| { type_of::in_memory_type_of(ccx, ty) }).collect::>(); let real_ty = Type::struct_(ccx, &fields[..], st.packed); @@ -1105,14 +631,8 @@ fn struct_field_ptr<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>, let meta = val.meta; - // Calculate the unaligned offset of the unsized field. - let mut offset = 0; - for &ty in &st.fields[0..ix] { - let llty = type_of::sizing_type_of(ccx, ty); - let type_align = type_of::align_of(ccx, ty); - offset = roundup(offset, type_align); - offset += machine::llsize_of_alloc(ccx, llty); - } + + let offset = st.offsets[ix].bytes(); let unaligned_offset = C_uint(bcx.ccx(), offset); // Get the alignment of the field @@ -1160,82 +680,71 @@ fn struct_field_ptr<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>, /// Currently the returned value has the same size as the type, but /// this could be changed in the future to avoid allocating unnecessary /// space after values of shorter-than-maximum cases. -pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, r: &Repr<'tcx>, discr: Disr, +pub fn trans_const<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, discr: Disr, vals: &[ValueRef]) -> ValueRef { - match *r { - CEnum(ity, min, max) => { + let l = ccx.layout_of(t); + let dl = &ccx.tcx().data_layout; + match *l { + layout::CEnum { discr: d, min, max, .. 
} => { assert_eq!(vals.len(), 0); - assert_discr_in_range(ity, min, max, discr); - C_integral(ll_inttype(ccx, ity), discr.0, true) - } - General(ity, ref cases) => { - let case = &cases[discr.0 as usize]; - let (max_sz, _) = union_size_and_align(&cases[..]); - let lldiscr = C_integral(ll_inttype(ccx, ity), discr.0 as u64, true); - let mut f = vec![lldiscr]; - f.extend_from_slice(vals); - let mut contents = build_const_struct(ccx, case, &f[..]); - contents.extend_from_slice(&[padding(ccx, max_sz - case.size)]); + assert_discr_in_range(Disr(min), Disr(max), discr); + C_integral(Type::from_integer(ccx, d), discr.0, true) + } + layout::General { discr: d, ref variants, .. } => { + let variant = &variants[discr.0 as usize]; + let lldiscr = C_integral(Type::from_integer(ccx, d), discr.0 as u64, true); + let mut vals_with_discr = vec![lldiscr]; + vals_with_discr.extend_from_slice(vals); + let mut contents = build_const_struct(ccx, &variant, + &vals_with_discr[..]); + let needed_padding = l.size(dl).bytes() - variant.min_size.bytes(); + if needed_padding > 0 { + contents.push(padding(ccx, needed_padding)); + } C_struct(ccx, &contents[..], false) } - UntaggedUnion(ref un) => { + layout::UntaggedUnion { ref variants, .. }=> { assert_eq!(discr, Disr(0)); - let contents = build_const_union(ccx, un, vals[0]); - C_struct(ccx, &contents, un.packed) + let contents = build_const_union(ccx, variants, vals[0]); + C_struct(ccx, &contents, variants.packed) } - Univariant(ref st) => { + layout::Univariant { ref variant, .. } => { assert_eq!(discr, Disr(0)); - let contents = build_const_struct(ccx, st, vals); - C_struct(ccx, &contents[..], st.packed) + let contents = build_const_struct(ccx, + &variant, vals); + C_struct(ccx, &contents[..], variant.packed) + } + layout::Vector { .. } => { + C_vector(vals) } - RawNullablePointer { nndiscr, nnty, .. } => { - if discr == nndiscr { + layout::RawNullablePointer { nndiscr, .. } => { + let nnty = compute_fields(ccx, t, nndiscr as usize, false)[0]; + if discr.0 == nndiscr { assert_eq!(vals.len(), 1); vals[0] } else { C_null(type_of::sizing_type_of(ccx, nnty)) } } - StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => { - if discr == nndiscr { - C_struct(ccx, &build_const_struct(ccx, - nonnull, - vals), + layout::StructWrappedNullablePointer { ref nonnull, nndiscr, .. } => { + if discr.0 == nndiscr { + C_struct(ccx, &build_const_struct(ccx, &nonnull, vals), false) } else { - let vals = nonnull.fields.iter().map(|&ty| { + let fields = compute_fields(ccx, t, nndiscr as usize, false); + let vals = fields.iter().map(|&ty| { // Always use null even if it's not the `discrfield`th // field; see #8506. C_null(type_of::sizing_type_of(ccx, ty)) }).collect::>(); - C_struct(ccx, &build_const_struct(ccx, - nonnull, - &vals[..]), + C_struct(ccx, &build_const_struct(ccx, &nonnull, &vals[..]), false) } } + _ => bug!("trans_const: cannot handle type {} repreented as {:#?}", t, l) } } -/// Compute struct field offsets relative to struct begin. 
-fn compute_struct_field_offsets<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - st: &Struct<'tcx>) -> Vec { - let mut offsets = vec!(); - - let mut offset = 0; - for &ty in &st.fields { - let llty = type_of::sizing_type_of(ccx, ty); - if !st.packed { - let type_align = type_of::align_of(ccx, ty); - offset = roundup(offset, type_align); - } - offsets.push(offset); - offset += machine::llsize_of_alloc(ccx, llty); - } - assert_eq!(st.fields.len(), offsets.len()); - offsets -} - /// Building structs is a little complicated, because we might need to /// insert padding if a field's value is less aligned than its type. /// @@ -1245,21 +754,21 @@ fn compute_struct_field_offsets<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, /// a two-element struct will locate it at offset 4, and accesses to it /// will read the wrong memory. fn build_const_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - st: &Struct<'tcx>, vals: &[ValueRef]) + st: &layout::Struct, + vals: &[ValueRef]) -> Vec { - assert_eq!(vals.len(), st.fields.len()); + assert_eq!(vals.len(), st.offsets.len()); - let target_offsets = compute_struct_field_offsets(ccx, st); + if vals.len() == 0 { + return Vec::new(); + } // offset of current value let mut offset = 0; let mut cfields = Vec::new(); - for (&val, target_offset) in vals.iter().zip(target_offsets) { - if !st.packed { - let val_align = machine::llalign_of_min(ccx, val_ty(val)); - offset = roundup(offset, val_align); - } - if offset != target_offset { + let offsets = st.offsets.iter().map(|i| i.bytes()); + for (&val, target_offset) in vals.iter().zip(offsets) { + if offset < target_offset { cfields.push(padding(ccx, target_offset - offset)); offset = target_offset; } @@ -1268,22 +777,21 @@ fn build_const_struct<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, offset += machine::llsize_of_alloc(ccx, val_ty(val)); } - assert!(st.sized && offset <= st.size); - if offset != st.size { - cfields.push(padding(ccx, st.size - offset)); + if offset < st.stride().bytes() { + cfields.push(padding(ccx, st.stride().bytes() - offset)); } cfields } fn build_const_union<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - un: &Union<'tcx>, + un: &layout::Union, field_val: ValueRef) -> Vec { let mut cfields = vec![field_val]; let offset = machine::llsize_of_alloc(ccx, val_ty(field_val)); - let size = roundup(un.min_size, un.align); + let size = un.stride().bytes(); if offset != size { cfields.push(padding(ccx, size - offset)); } @@ -1304,18 +812,21 @@ fn roundup(x: u64, a: u32) -> u64 { let a = a as u64; ((x + (a - 1)) / a) * a } /// /// (Not to be confused with `common::const_get_elt`, which operates on /// raw LLVM-level structs and arrays.) -pub fn const_get_field(r: &Repr, val: ValueRef, _discr: Disr, +pub fn const_get_field<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>, + val: ValueRef, _discr: Disr, ix: usize) -> ValueRef { - match *r { - CEnum(..) => bug!("element access in C-like enum const"), - Univariant(..) => const_struct_field(val, ix), - UntaggedUnion(..) => const_struct_field(val, 0), - General(..) => const_struct_field(val, ix + 1), - RawNullablePointer { .. } => { + let l = ccx.layout_of(t); + match *l { + layout::CEnum { .. } => bug!("element access in C-like enum const"), + layout::Univariant { .. } | layout::Vector { .. } => const_struct_field(val, ix), + layout::UntaggedUnion { .. } => const_struct_field(val, 0), + layout::General { .. } => const_struct_field(val, ix + 1), + layout::RawNullablePointer { .. } => { assert_eq!(ix, 0); val }, - StructWrappedNullablePointer{ .. 
} => const_struct_field(val, ix) + layout::StructWrappedNullablePointer{ .. } => const_struct_field(val, ix), + _ => bug!("{} does not have fields.", t) } } diff --git a/src/librustc_trans/asm.rs b/src/librustc_trans/asm.rs index 308118b1fbc6c..8c704cc32993c 100644 --- a/src/librustc_trans/asm.rs +++ b/src/librustc_trans/asm.rs @@ -61,7 +61,7 @@ pub fn trans_inline_asm<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Default per-arch clobbers // Basically what clang does let arch_clobbers = match &bcx.sess().target.target.arch[..] { - "x86" | "x86_64" => vec!("~{dirflag}", "~{fpsr}", "~{flags}"), + "x86" | "x86_64" => vec!["~{dirflag}", "~{fpsr}", "~{flags}"], _ => Vec::new() }; diff --git a/src/librustc_trans/assert_module_sources.rs b/src/librustc_trans/assert_module_sources.rs index 7fe6d2bbfe24e..264ed4cd12fc1 100644 --- a/src/librustc_trans/assert_module_sources.rs +++ b/src/librustc_trans/assert_module_sources.rs @@ -134,7 +134,7 @@ impl<'a, 'tcx> AssertModuleSource<'a, 'tcx> { /// Scan for a `cfg="foo"` attribute and check whether we have a /// cfg flag called `foo`. fn check_config(&self, attr: &ast::Attribute) -> bool { - let config = &self.tcx.map.krate().config; + let config = &self.tcx.sess.parse_sess.config; let value = self.field(attr, CFG); debug!("check_config(config={:?}, value={:?})", config, value); if config.iter().any(|c| c.check_name(&value[..])) { diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 288249a7d9934..e46bdbb5ccf4a 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -26,6 +26,7 @@ use CrateTranslation; use util::common::time; use util::fs::fix_windows_verbatim_for_gcc; use rustc::dep_graph::DepNode; +use rustc::hir::def_id::CrateNum; use rustc::hir::svh::Svh; use rustc_back::tempdir::TempDir; use rustc_incremental::IncrementalHashesMap; @@ -129,7 +130,7 @@ pub fn build_link_meta(incremental_hashes_map: &IncrementalHashesMap, -> LinkMeta { let r = LinkMeta { crate_name: name.to_owned(), - crate_hash: Svh::new(incremental_hashes_map[&DepNode::Krate]), + crate_hash: Svh::new(incremental_hashes_map[&DepNode::Krate].to_smaller_hash()), }; info!("{:?}", r); return r; @@ -238,7 +239,7 @@ pub fn invalid_output_for_target(sess: &Session, match (sess.target.target.options.dynamic_linking, sess.target.target.options.executables, crate_type) { (false, _, config::CrateTypeCdylib) | - (false, _, config::CrateTypeRustcMacro) | + (false, _, config::CrateTypeProcMacro) | (false, _, config::CrateTypeDylib) => true, (_, false, config::CrateTypeExecutable) => true, _ => false @@ -262,7 +263,7 @@ pub fn filename_for_input(sess: &Session, outputs.out_directory.join(&format!("lib{}.rlib", libname)) } config::CrateTypeCdylib | - config::CrateTypeRustcMacro | + config::CrateTypeProcMacro | config::CrateTypeDylib => { let (prefix, suffix) = (&sess.target.target.options.dll_prefix, &sess.target.target.options.dll_suffix); @@ -288,18 +289,18 @@ pub fn filename_for_input(sess: &Session, } pub fn each_linked_rlib(sess: &Session, - f: &mut FnMut(ast::CrateNum, &Path)) { + f: &mut FnMut(CrateNum, &Path)) { let crates = sess.cstore.used_crates(LinkagePreference::RequireStatic).into_iter(); let fmts = sess.dependency_formats.borrow(); let fmts = fmts.get(&config::CrateTypeExecutable) .or_else(|| fmts.get(&config::CrateTypeStaticlib)) .or_else(|| fmts.get(&config::CrateTypeCdylib)) - .or_else(|| fmts.get(&config::CrateTypeRustcMacro)); + .or_else(|| fmts.get(&config::CrateTypeProcMacro)); let fmts = fmts.unwrap_or_else(|| { 
bug!("could not find formats for rlibs") }); for (cnum, path) in crates { - match fmts[cnum as usize - 1] { + match fmts[cnum.as_usize() - 1] { Linkage::NotLinked | Linkage::IncludedFromDylib => continue, _ => {} } @@ -735,7 +736,7 @@ fn link_args(cmd: &mut Linker, // executable. This metadata is in a separate object file from the main // object file, so we link that in here. if crate_type == config::CrateTypeDylib || - crate_type == config::CrateTypeRustcMacro { + crate_type == config::CrateTypeProcMacro { cmd.add_object(&outputs.with_extension("metadata.o")); } @@ -753,7 +754,8 @@ fn link_args(cmd: &mut Linker, let empty_vec = Vec::new(); let empty_str = String::new(); let args = sess.opts.cg.link_args.as_ref().unwrap_or(&empty_vec); - let mut args = args.iter().chain(used_link_args.iter()); + let more_args = &sess.opts.cg.link_arg; + let mut args = args.iter().chain(more_args.iter()).chain(used_link_args.iter()); let relocation_model = sess.opts.cg.relocation_model.as_ref() .unwrap_or(&empty_str); if (t.options.relocation_model == "pic" || *relocation_model == "pic") @@ -843,6 +845,7 @@ fn link_args(cmd: &mut Linker, if let Some(ref args) = sess.opts.cg.link_args { cmd.args(args); } + cmd.args(&sess.opts.cg.link_arg); cmd.args(&used_link_args); } @@ -933,7 +936,7 @@ fn add_upstream_rust_crates(cmd: &mut Linker, // appear statically in an existing dylib, meaning we'll pick up all the // symbols from the dylib. let src = sess.cstore.used_crate_source(cnum); - match data[cnum as usize - 1] { + match data[cnum.as_usize() - 1] { // compiler-builtins are always placed last to ensure that they're // linked correctly. _ if sess.cstore.is_compiler_builtins(cnum) => { @@ -1003,7 +1006,7 @@ fn add_upstream_rust_crates(cmd: &mut Linker, sess: &Session, tmpdir: &Path, crate_type: config::CrateType, - cnum: ast::CrateNum) { + cnum: CrateNum) { let src = sess.cstore.used_crate_source(cnum); let cratepath = &src.rlib.unwrap().0; if !sess.lto() && crate_type != config::CrateTypeDylib { diff --git a/src/librustc_trans/back/linker.rs b/src/librustc_trans/back/linker.rs index 58cad5c117f93..3222571a76e17 100644 --- a/src/librustc_trans/back/linker.rs +++ b/src/librustc_trans/back/linker.rs @@ -21,10 +21,10 @@ use monomorphize::Instance; use back::archive; use middle::dependency_format::Linkage; +use rustc::hir::def_id::CrateNum; use session::Session; use session::config::CrateType; use session::config; -use syntax::ast; /// For all the linkers we support, and information they might /// need out of the shared crate context before we get rid of it. @@ -245,7 +245,7 @@ impl<'a> Linker for GnuLinker<'a> { // have far more public symbols than we actually want to export, so we // hide them all here. if crate_type == CrateType::CrateTypeDylib || - crate_type == CrateType::CrateTypeRustcMacro { + crate_type == CrateType::CrateTypeProcMacro { return } @@ -450,7 +450,7 @@ fn exported_symbols(scx: &SharedCrateContext, // See explanation in GnuLinker::export_symbols, for // why we don't ever need dylib symbols on non-MSVC. 
if crate_type == CrateType::CrateTypeDylib || - crate_type == CrateType::CrateTypeRustcMacro { + crate_type == CrateType::CrateTypeProcMacro { if !scx.sess().target.target.options.is_like_msvc { return vec![]; } @@ -473,7 +473,7 @@ fn exported_symbols(scx: &SharedCrateContext, let deps = formats[&crate_type].iter(); symbols.extend(deps.enumerate().filter_map(|(i, f)| { if *f == Linkage::Static { - Some((i + 1) as ast::CrateNum) + Some(CrateNum::new(i + 1)) } else { None } diff --git a/src/librustc_back/rpath.rs b/src/librustc_trans/back/rpath.rs similarity index 98% rename from src/librustc_back/rpath.rs rename to src/librustc_trans/back/rpath.rs index 6cba27fcf3406..8758cdcf9d0ab 100644 --- a/src/librustc_back/rpath.rs +++ b/src/librustc_trans/back/rpath.rs @@ -12,10 +12,11 @@ use std::collections::HashSet; use std::env; use std::path::{Path, PathBuf}; use std::fs; -use syntax::ast; + +use rustc::hir::def_id::CrateNum; pub struct RPathConfig<'a> { - pub used_crates: Vec<(ast::CrateNum, Option)>, + pub used_crates: Vec<(CrateNum, Option)>, pub out_filename: PathBuf, pub is_like_osx: bool, pub has_rpath: bool, @@ -67,7 +68,7 @@ fn get_rpaths(config: &mut RPathConfig, libs: &[PathBuf]) -> Vec { let rel_rpaths = get_rpaths_relative_to_output(config, libs); // And a final backup rpath to the global library location. - let fallback_rpaths = vec!(get_install_prefix_rpath(config)); + let fallback_rpaths = vec![get_install_prefix_rpath(config)]; fn log_rpaths(desc: &str, rpaths: &[String]) { debug!("{} rpaths:", desc); diff --git a/src/librustc_trans/back/symbol_names.rs b/src/librustc_trans/back/symbol_names.rs index 76d83eee4bb41..bf2a5d76c10d4 100644 --- a/src/librustc_trans/back/symbol_names.rs +++ b/src/librustc_trans/back/symbol_names.rs @@ -97,14 +97,16 @@ //! virtually impossible. Thus, symbol hash generation exclusively relies on //! DefPaths which are much more robust in the face of changes to the code base. -use common::{CrateContext, SharedCrateContext, gensym_name}; +use common::SharedCrateContext; use monomorphize::Instance; -use util::sha2::{Digest, Sha256}; +use rustc_data_structures::fmt_wrap::FmtWrap; +use rustc_data_structures::blake2b::Blake2bHasher; -use rustc::middle::{cstore, weak_lang_items}; -use rustc::hir::def_id::DefId; +use rustc::middle::weak_lang_items; +use rustc::hir::def_id::LOCAL_CRATE; use rustc::hir::map as hir_map; -use rustc::ty::{Ty, TyCtxt, TypeFoldable}; +use rustc::ty::{self, Ty, TypeFoldable}; +use rustc::ty::fold::TypeVisitor; use rustc::ty::item_path::{self, ItemPathBuffer, RootMode}; use rustc::ty::subst::Substs; use rustc::hir::map::definitions::{DefPath, DefPathData}; @@ -112,12 +114,6 @@ use rustc::util::common::record_time; use syntax::attr; use syntax::parse::token::{self, InternedString}; -use serialize::hex::ToHex; - -pub fn def_id_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> String { - let def_path = tcx.def_path(def_id); - def_path.to_string(tcx) -} fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, @@ -132,53 +128,44 @@ fn get_symbol_hash<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, // values for generic type parameters, // if any. 
- substs: Option<&Substs<'tcx>>) + substs: Option<&'tcx Substs<'tcx>>) -> String { debug!("get_symbol_hash(def_path={:?}, parameters={:?})", def_path, substs); let tcx = scx.tcx(); - return record_time(&tcx.sess.perf_stats.symbol_hash_time, || { - let mut hash_state = scx.symbol_hasher().borrow_mut(); - - hash_state.reset(); + let mut hasher = ty::util::TypeIdHasher::new(tcx, Blake2bHasher::new(8, &[])); + record_time(&tcx.sess.perf_stats.symbol_hash_time, || { // the main symbol name is not necessarily unique; hash in the // compiler's internal def-path, guaranteeing each symbol has a // truly unique path - hash_state.input_str(&def_path.to_string(tcx)); + hasher.def_path(def_path); // Include the main item-type. Note that, in this case, the // assertions about `needs_subst` may not hold, but this item-type // ought to be the same for every reference anyway. assert!(!item_type.has_erasable_regions()); - let encoded_item_type = tcx.sess.cstore.encode_type(tcx, item_type, def_id_to_string); - hash_state.input(&encoded_item_type[..]); + hasher.visit_ty(item_type); // also include any type parameters (for generic items) if let Some(substs) = substs { - for t in substs.types() { - assert!(!t.has_erasable_regions()); - assert!(!t.needs_subst()); - let encoded_type = tcx.sess.cstore.encode_type(tcx, t, def_id_to_string); - hash_state.input(&encoded_type[..]); - } + assert!(!substs.has_erasable_regions()); + assert!(!substs.needs_subst()); + substs.visit_with(&mut hasher); } - - format!("h{}", truncated_hash_result(&mut *hash_state)) }); - fn truncated_hash_result(symbol_hasher: &mut Sha256) -> String { - let output = symbol_hasher.result_bytes(); - // 64 bits should be enough to avoid collisions. - output[.. 8].to_hex() - } + // 64 bits should be enough to avoid collisions. + let mut hasher = hasher.into_inner(); + let hash_bytes = hasher.finalize(); + format!("h{:x}", FmtWrap(hash_bytes)) } impl<'a, 'tcx> Instance<'tcx> { pub fn symbol_name(self, scx: &SharedCrateContext<'a, 'tcx>) -> String { - let Instance { def: def_id, ref substs } = self; + let Instance { def: def_id, substs } = self; debug!("symbol_name(def_id={:?}, substs={:?})", def_id, substs); @@ -273,7 +260,7 @@ impl<'a, 'tcx> Instance<'tcx> { scx.tcx().push_item_path(&mut buffer, def_id); }); - mangle(buffer.names.into_iter(), Some(&hash[..])) + mangle(buffer.names.into_iter(), &hash) } } @@ -298,27 +285,11 @@ pub fn exported_name_from_type_and_prefix<'a, 'tcx>(scx: &SharedCrateContext<'a, -> String { let empty_def_path = DefPath { data: vec![], - krate: cstore::LOCAL_CRATE, + krate: LOCAL_CRATE, }; let hash = get_symbol_hash(scx, &empty_def_path, t, None); let path = [token::intern_and_get_ident(prefix)]; - mangle(path.iter().cloned(), Some(&hash[..])) -} - -/// Only symbols that are invisible outside their compilation unit should use a -/// name generated by this function. -pub fn internal_name_from_type_and_suffix<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - t: Ty<'tcx>, - suffix: &str) - -> String { - let path = [token::intern(&t.to_string()).as_str(), - gensym_name(suffix).as_str()]; - let def_path = DefPath { - data: vec![], - krate: cstore::LOCAL_CRATE, - }; - let hash = get_symbol_hash(ccx.shared(), &def_path, t, None); - mangle(path.iter().cloned(), Some(&hash[..])) + mangle(path.iter().cloned(), &hash) } // Name sanitation. 
LLVM will happily accept identifiers with weird names, but @@ -371,7 +342,7 @@ pub fn sanitize(s: &str) -> String { return result; } -pub fn mangle>(path: PI, hash: Option<&str>) -> String { +fn mangle>(path: PI, hash: &str) -> String { // Follow C++ namespace-mangling style, see // http://en.wikipedia.org/wiki/Name_mangling for more info. // @@ -398,9 +369,7 @@ pub fn mangle>(path: PI, hash: Option<&str>) - push(&mut n, &data); } - if let Some(s) = hash { - push(&mut n, s) - } + push(&mut n, hash); n.push('E'); // End name-sequence. n diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index 04b814e2b9772..9012914deeb09 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -665,7 +665,7 @@ pub fn run_passes(sess: &Session, // Figure out what we actually need to build. let mut modules_config = ModuleConfig::new(tm, sess.opts.cg.passes.clone()); - let mut metadata_config = ModuleConfig::new(tm, vec!()); + let mut metadata_config = ModuleConfig::new(tm, vec![]); modules_config.opt_level = Some(get_llvm_opt_level(sess.opts.optimize)); modules_config.opt_size = Some(get_llvm_opt_size(sess.opts.optimize)); diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs index a6581ae605b56..977ababbf5688 100644 --- a/src/librustc_trans/base.rs +++ b/src/librustc_trans/base.rs @@ -35,6 +35,7 @@ use back::link; use back::linker::LinkerInfo; use llvm::{Linkage, ValueRef, Vector, get_param}; use llvm; +use rustc::hir::def::Def; use rustc::hir::def_id::DefId; use middle::lang_items::{LangItem, ExchangeMallocFnLangItem, StartFnLangItem}; use rustc::ty::subst::Substs; @@ -44,7 +45,6 @@ use rustc::ty::adjustment::CustomCoerceUnsized; use rustc::dep_graph::{DepNode, WorkProduct}; use rustc::hir::map as hir_map; use rustc::util::common::time; -use rustc::mir::mir_map::MirMap; use session::config::{self, NoDebugInfo}; use rustc_incremental::IncrementalHashesMap; use session::Session; @@ -79,7 +79,6 @@ use type_::Type; use type_of; use value::Value; use Disr; -use util::sha2::Sha256; use util::nodemap::{NodeSet, FnvHashMap, FnvHashSet}; use arena::TypedArena; @@ -183,6 +182,14 @@ pub fn get_dataptr(bcx: Block, fat_ptr: ValueRef) -> ValueRef { StructGEP(bcx, fat_ptr, abi::FAT_PTR_ADDR) } +pub fn get_meta_builder(b: &Builder, fat_ptr: ValueRef) -> ValueRef { + b.struct_gep(fat_ptr, abi::FAT_PTR_EXTRA) +} + +pub fn get_dataptr_builder(b: &Builder, fat_ptr: ValueRef) -> ValueRef { + b.struct_gep(fat_ptr, abi::FAT_PTR_ADDR) +} + fn require_alloc_fn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, info_ty: Ty<'tcx>, it: LangItem) -> DefId { match bcx.tcx().lang_items.require(it) { Ok(id) => id, @@ -206,7 +213,7 @@ pub fn malloc_raw_dyn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Allocate space: let def_id = require_alloc_fn(bcx, info_ty, ExchangeMallocFnLangItem); - let r = Callee::def(bcx.ccx(), def_id, Substs::empty(bcx.tcx())) + let r = Callee::def(bcx.ccx(), def_id, bcx.tcx().intern_substs(&[])) .call(bcx, debug_loc, &[size, align], None); Result::new(r.bcx, PointerCast(r.bcx, r.val, llty_ptr)) @@ -247,115 +254,6 @@ pub fn bin_op_to_fcmp_predicate(op: hir::BinOp_) -> llvm::RealPredicate { } } -pub fn compare_fat_ptrs<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - lhs_addr: ValueRef, - lhs_extra: ValueRef, - rhs_addr: ValueRef, - rhs_extra: ValueRef, - _t: Ty<'tcx>, - op: hir::BinOp_, - debug_loc: DebugLoc) - -> ValueRef { - match op { - hir::BiEq => { - let addr_eq = ICmp(bcx, llvm::IntEQ, lhs_addr, rhs_addr, debug_loc); - let extra_eq = ICmp(bcx, llvm::IntEQ, 
lhs_extra, rhs_extra, debug_loc); - And(bcx, addr_eq, extra_eq, debug_loc) - } - hir::BiNe => { - let addr_eq = ICmp(bcx, llvm::IntNE, lhs_addr, rhs_addr, debug_loc); - let extra_eq = ICmp(bcx, llvm::IntNE, lhs_extra, rhs_extra, debug_loc); - Or(bcx, addr_eq, extra_eq, debug_loc) - } - hir::BiLe | hir::BiLt | hir::BiGe | hir::BiGt => { - // a OP b ~ a.0 STRICT(OP) b.0 | (a.0 == b.0 && a.1 OP a.1) - let (op, strict_op) = match op { - hir::BiLt => (llvm::IntULT, llvm::IntULT), - hir::BiLe => (llvm::IntULE, llvm::IntULT), - hir::BiGt => (llvm::IntUGT, llvm::IntUGT), - hir::BiGe => (llvm::IntUGE, llvm::IntUGT), - _ => bug!(), - }; - - let addr_eq = ICmp(bcx, llvm::IntEQ, lhs_addr, rhs_addr, debug_loc); - let extra_op = ICmp(bcx, op, lhs_extra, rhs_extra, debug_loc); - let addr_eq_extra_op = And(bcx, addr_eq, extra_op, debug_loc); - - let addr_strict = ICmp(bcx, strict_op, lhs_addr, rhs_addr, debug_loc); - Or(bcx, addr_strict, addr_eq_extra_op, debug_loc) - } - _ => { - bug!("unexpected fat ptr binop"); - } - } -} - -pub fn compare_scalar_types<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - lhs: ValueRef, - rhs: ValueRef, - t: Ty<'tcx>, - op: hir::BinOp_, - debug_loc: DebugLoc) - -> ValueRef { - match t.sty { - ty::TyTuple(ref tys) if tys.is_empty() => { - // We don't need to do actual comparisons for nil. - // () == () holds but () < () does not. - match op { - hir::BiEq | hir::BiLe | hir::BiGe => return C_bool(bcx.ccx(), true), - hir::BiNe | hir::BiLt | hir::BiGt => return C_bool(bcx.ccx(), false), - // refinements would be nice - _ => bug!("compare_scalar_types: must be a comparison operator"), - } - } - ty::TyFnDef(..) | ty::TyFnPtr(_) | ty::TyBool | ty::TyUint(_) | ty::TyChar => { - ICmp(bcx, - bin_op_to_icmp_predicate(op, false), - lhs, - rhs, - debug_loc) - } - ty::TyRawPtr(mt) if common::type_is_sized(bcx.tcx(), mt.ty) => { - ICmp(bcx, - bin_op_to_icmp_predicate(op, false), - lhs, - rhs, - debug_loc) - } - ty::TyRawPtr(_) => { - let lhs_addr = Load(bcx, GEPi(bcx, lhs, &[0, abi::FAT_PTR_ADDR])); - let lhs_extra = Load(bcx, GEPi(bcx, lhs, &[0, abi::FAT_PTR_EXTRA])); - - let rhs_addr = Load(bcx, GEPi(bcx, rhs, &[0, abi::FAT_PTR_ADDR])); - let rhs_extra = Load(bcx, GEPi(bcx, rhs, &[0, abi::FAT_PTR_EXTRA])); - compare_fat_ptrs(bcx, - lhs_addr, - lhs_extra, - rhs_addr, - rhs_extra, - t, - op, - debug_loc) - } - ty::TyInt(_) => { - ICmp(bcx, - bin_op_to_icmp_predicate(op, true), - lhs, - rhs, - debug_loc) - } - ty::TyFloat(_) => { - FCmp(bcx, - bin_op_to_fcmp_predicate(op), - lhs, - rhs, - debug_loc) - } - // Should never get here, because t is scalar. - _ => bug!("non-scalar type passed to compare_scalar_types"), - } -} - pub fn compare_simd_types<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, lhs: ValueRef, rhs: ValueRef, @@ -466,32 +364,27 @@ pub fn coerce_unsized_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, store_fat_ptr(bcx, base, info, dst, dst_ty); } - // This can be extended to enums and tuples in the future. 
- (&ty::TyAdt(def_a, _), &ty::TyAdt(def_b, _)) => { + (&ty::TyAdt(def_a, substs_a), &ty::TyAdt(def_b, substs_b)) => { assert_eq!(def_a, def_b); - let src_repr = adt::represent_type(bcx.ccx(), src_ty); - let src_fields = match &*src_repr { - &adt::Repr::Univariant(ref s) => &s.fields, - _ => bug!("struct has non-univariant repr"), - }; - let dst_repr = adt::represent_type(bcx.ccx(), dst_ty); - let dst_fields = match &*dst_repr { - &adt::Repr::Univariant(ref s) => &s.fields, - _ => bug!("struct has non-univariant repr"), - }; + let src_fields = def_a.variants[0].fields.iter().map(|f| { + monomorphize::field_ty(bcx.tcx(), substs_a, f) + }); + let dst_fields = def_b.variants[0].fields.iter().map(|f| { + monomorphize::field_ty(bcx.tcx(), substs_b, f) + }); let src = adt::MaybeSizedValue::sized(src); let dst = adt::MaybeSizedValue::sized(dst); - let iter = src_fields.iter().zip(dst_fields).enumerate(); + let iter = src_fields.zip(dst_fields).enumerate(); for (i, (src_fty, dst_fty)) in iter { if type_is_zero_size(bcx.ccx(), dst_fty) { continue; } - let src_f = adt::trans_field_ptr(bcx, &src_repr, src, Disr(0), i); - let dst_f = adt::trans_field_ptr(bcx, &dst_repr, dst, Disr(0), i); + let src_f = adt::trans_field_ptr(bcx, src_ty, src, Disr(0), i); + let dst_f = adt::trans_field_ptr(bcx, dst_ty, dst, Disr(0), i); if src_fty == dst_fty { memcpy_ty(bcx, dst_f, src_f, src_fty); } else { @@ -511,7 +404,7 @@ pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx -> CustomCoerceUnsized { let trait_ref = ty::Binder(ty::TraitRef { def_id: scx.tcx().lang_items.coerce_unsized_trait().unwrap(), - substs: Substs::new_trait(scx.tcx(), source_ty, &[target_ty]) + substs: scx.tcx().mk_substs_trait(source_ty, &[target_ty]) }); match fulfill_obligation(scx, DUMMY_SP, trait_ref) { @@ -628,6 +521,11 @@ pub fn need_invoke(bcx: Block) -> bool { } } +pub fn call_assume<'a, 'tcx>(b: &Builder<'a, 'tcx>, val: ValueRef) { + let assume_intrinsic = b.ccx.get_intrinsic("llvm.assume"); + b.call(assume_intrinsic, &[val], None); +} + /// Helper for loading values from memory. Does the necessary conversion if the in-memory type /// differs from the type used for SSA values. Also handles various special cases where the type /// gives us better information about what we are loading. 
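The `coerce_unsized_into` hunk above drops the old `adt::Repr` lookup and reads the struct's fields directly from `def.variants[0]`, copying each field whose type is unchanged and recursing into the single field whose type is actually being unsized. A minimal, user-level sketch of the coercion this code lowers (the `Wrapper` type is hypothetical, not taken from this patch):

```rust
// Illustration only: the struct-tail unsizing coercion that
// coerce_unsized_into translates. Every field except the last is copied
// unchanged; only the last field's type changes ([u8; 4] -> [u8]).
struct Wrapper<T: ?Sized> {
    id: u32,
    data: T,
}

fn main() {
    let sized: Box<Wrapper<[u8; 4]>> = Box::new(Wrapper { id: 7, data: [1, 2, 3, 4] });
    // Implicit unsizing coercion: Box<Wrapper<[u8; 4]>> -> Box<Wrapper<[u8]>>.
    let dynamic: Box<Wrapper<[u8]>> = sized;
    assert_eq!(dynamic.id, 7);
    assert_eq!(&dynamic.data, &[1u8, 2, 3, 4][..]);
}
```

In the translated code, zero-sized destination fields are skipped, and fields whose source and destination types already match are copied with a plain `memcpy_ty`.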
@@ -681,12 +579,9 @@ pub fn store_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>, v: ValueRef, dst: ValueRef, t debug!("store_ty: {:?} : {:?} <- {:?}", Value(dst), t, Value(v)); if common::type_is_fat_ptr(cx.tcx(), t) { - Store(cx, - ExtractValue(cx, v, abi::FAT_PTR_ADDR), - get_dataptr(cx, dst)); - Store(cx, - ExtractValue(cx, v, abi::FAT_PTR_EXTRA), - get_meta(cx, dst)); + let lladdr = ExtractValue(cx, v, abi::FAT_PTR_ADDR); + let llextra = ExtractValue(cx, v, abi::FAT_PTR_EXTRA); + store_fat_ptr(cx, lladdr, llextra, dst, t); } else { Store(cx, from_immediate(cx, v), dst); } @@ -704,11 +599,36 @@ pub fn store_fat_ptr<'blk, 'tcx>(cx: Block<'blk, 'tcx>, pub fn load_fat_ptr<'blk, 'tcx>(cx: Block<'blk, 'tcx>, src: ValueRef, - _ty: Ty<'tcx>) - -> (ValueRef, ValueRef) { - // FIXME: emit metadata - (Load(cx, get_dataptr(cx, src)), - Load(cx, get_meta(cx, src))) + ty: Ty<'tcx>) + -> (ValueRef, ValueRef) +{ + if cx.unreachable.get() { + // FIXME: remove me + return (Load(cx, get_dataptr(cx, src)), + Load(cx, get_meta(cx, src))); + } + + load_fat_ptr_builder(&B(cx), src, ty) +} + +pub fn load_fat_ptr_builder<'a, 'tcx>( + b: &Builder<'a, 'tcx>, + src: ValueRef, + t: Ty<'tcx>) + -> (ValueRef, ValueRef) +{ + + let ptr = get_dataptr_builder(b, src); + let ptr = if t.is_region_ptr() || t.is_unique() { + b.load_nonnull(ptr) + } else { + b.load(ptr) + }; + + // FIXME: emit metadata on `meta`. + let meta = b.load(get_meta_builder(b, src)); + + (ptr, meta) } pub fn from_immediate(bcx: Block, val: ValueRef) -> ValueRef { @@ -927,7 +847,7 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> { common::validate_substs(instance.substs); (instance.substs, Some(instance.def)) } - None => (Substs::empty(ccx.tcx()), None) + None => (ccx.tcx().intern_substs(&[]), None) }; let local_id = def_id.and_then(|id| ccx.tcx().map.as_local_node_id(id)); @@ -945,7 +865,7 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> { false }; - let mir = def_id.and_then(|id| ccx.get_mir(id)); + let mir = def_id.map(|id| ccx.tcx().item_mir(id)); let debug_context = if let (false, Some((instance, sig, abi)), &Some(ref mir)) = (no_debug, definition, &mir) { @@ -1129,8 +1049,7 @@ pub fn trans_instance<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, instance: Instance let fn_ty = ccx.tcx().erase_regions(&fn_ty); let fn_ty = monomorphize::apply_param_substs(ccx.shared(), instance.substs, &fn_ty); - let sig = ccx.tcx().erase_late_bound_regions(fn_ty.fn_sig()); - let sig = ccx.tcx().normalize_associated_type(&sig); + let sig = ccx.tcx().erase_late_bound_regions_and_normalize(fn_ty.fn_sig()); let abi = fn_ty.fn_abi(); let lldecl = match ccx.instances().borrow().get(&instance) { @@ -1152,8 +1071,7 @@ pub fn trans_ctor_shim<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let ctor_ty = ccx.tcx().lookup_item_type(def_id).ty; let ctor_ty = monomorphize::apply_param_substs(ccx.shared(), substs, &ctor_ty); - let sig = ccx.tcx().erase_late_bound_regions(&ctor_ty.fn_sig()); - let sig = ccx.tcx().normalize_associated_type(&sig); + let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&ctor_ty.fn_sig()); let fn_ty = FnType::new(ccx, Abi::Rust, &sig, &[]); let (arena, fcx): (TypedArena<_>, FunctionContext); @@ -1164,11 +1082,10 @@ pub fn trans_ctor_shim<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, if !fcx.fn_ty.ret.is_ignore() { let dest = fcx.llretslotptr.get().unwrap(); let dest_val = adt::MaybeSizedValue::sized(dest); // Can return unsized value - let repr = adt::represent_type(ccx, sig.output); let mut llarg_idx = fcx.fn_ty.ret.is_indirect() as usize; let mut arg_idx = 0; for (i, arg_ty) 
in sig.inputs.into_iter().enumerate() { - let lldestptr = adt::trans_field_ptr(bcx, &repr, dest_val, Disr::from(disr), i); + let lldestptr = adt::trans_field_ptr(bcx, sig.output, dest_val, Disr::from(disr), i); let arg = &fcx.fn_ty.args[arg_idx]; arg_idx += 1; let b = &bcx.build(); @@ -1181,7 +1098,7 @@ pub fn trans_ctor_shim<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, arg.store_fn_arg(b, &mut llarg_idx, lldestptr); } } - adt::trans_set_discr(bcx, &repr, dest, disr); + adt::trans_set_discr(bcx, sig.output, dest, disr); } fcx.finish(bcx, DebugLoc::None); @@ -1293,7 +1210,7 @@ pub fn maybe_create_entry_wrapper(ccx: &CrateContext) { Ok(id) => id, Err(s) => ccx.sess().fatal(&s) }; - let empty_substs = Substs::empty(ccx.tcx()); + let empty_substs = ccx.tcx().intern_substs(&[]); let start_fn = Callee::def(ccx, start_def_id, empty_substs).reify(ccx); let args = { let opaque_rust_main = @@ -1332,12 +1249,27 @@ fn write_metadata(cx: &SharedCrateContext, reachable_ids: &NodeSet) -> Vec { use flate; - let any_library = cx.sess() - .crate_types - .borrow() - .iter() - .any(|ty| *ty != config::CrateTypeExecutable); - if !any_library { + #[derive(PartialEq, Eq, PartialOrd, Ord)] + enum MetadataKind { + None, + Uncompressed, + Compressed + } + + let kind = cx.sess().crate_types.borrow().iter().map(|ty| { + match *ty { + config::CrateTypeExecutable | + config::CrateTypeStaticlib | + config::CrateTypeCdylib => MetadataKind::None, + + config::CrateTypeRlib => MetadataKind::Uncompressed, + + config::CrateTypeDylib | + config::CrateTypeProcMacro => MetadataKind::Compressed, + } + }).max().unwrap(); + + if kind == MetadataKind::None { return Vec::new(); } @@ -1345,9 +1277,12 @@ fn write_metadata(cx: &SharedCrateContext, let metadata = cstore.encode_metadata(cx.tcx(), cx.export_map(), cx.link_meta(), - reachable_ids, - cx.mir_map(), - cx.tcx().map.krate()); + reachable_ids); + if kind == MetadataKind::Uncompressed { + return metadata; + } + + assert!(kind == MetadataKind::Compressed); let mut compressed = cstore.metadata_encoding_version().to_vec(); compressed.extend_from_slice(&flate::deflate_bytes(&metadata)); @@ -1421,21 +1356,7 @@ fn internalize_symbols<'a, 'tcx>(sess: &Session, .iter() .cloned() .filter(|trans_item|{ - let def_id = match *trans_item { - TransItem::DropGlue(..) => { - return false - }, - TransItem::Fn(ref instance) => { - instance.def - } - TransItem::Static(node_id) => { - tcx.map.local_def_id(node_id) - } - }; - - trans_item.explicit_linkage(tcx).is_some() || - attr::contains_extern_indicator(tcx.sess.diagnostic(), - &tcx.get_attrs(def_id)) + trans_item.explicit_linkage(tcx).is_some() }) .map(|trans_item| symbol_map.get_or_compute(scx, trans_item)) .collect(); @@ -1591,7 +1512,11 @@ pub fn filter_reachable_ids(tcx: TyCtxt, reachable: NodeSet) -> NodeSet { node: hir::ImplItemKind::Method(..), .. }) => { let def_id = tcx.map.local_def_id(id); let generics = tcx.lookup_generics(def_id); - generics.parent_types == 0 && generics.types.is_empty() + let attributes = tcx.get_attrs(def_id); + (generics.parent_types == 0 && generics.types.is_empty()) && + // Functions marked with #[inline] are only ever translated + // with "internal" linkage and are never exported. 
+ !attr::requests_inline(&attributes[..]) } _ => false @@ -1600,7 +1525,6 @@ pub fn filter_reachable_ids(tcx: TyCtxt, reachable: NodeSet) -> NodeSet { } pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - mir_map: &MirMap<'tcx>, analysis: ty::CrateAnalysis, incremental_hashes_map: &IncrementalHashesMap) -> CrateTranslation { @@ -1624,9 +1548,7 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let link_meta = link::build_link_meta(incremental_hashes_map, name); let shared_ccx = SharedCrateContext::new(tcx, - &mir_map, export_map, - Sha256::new(), link_meta.clone(), reachable, check_overflow); @@ -1789,8 +1711,21 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, // `reachable_symbols` list later on so it should be ok. for cnum in sess.cstore.crates() { let syms = sess.cstore.reachable_ids(cnum); - reachable_symbols.extend(syms.into_iter().filter(|did| { - sess.cstore.is_extern_item(shared_ccx.tcx(), *did) + reachable_symbols.extend(syms.into_iter().filter(|&def_id| { + let applicable = match sess.cstore.describe_def(def_id) { + Some(Def::Static(..)) => true, + Some(Def::Fn(_)) => { + shared_ccx.tcx().lookup_generics(def_id).types.is_empty() + } + _ => false + }; + + if applicable { + let attrs = shared_ccx.tcx().get_attrs(def_id); + attr::contains_extern_indicator(sess.diagnostic(), &attrs) + } else { + false + } }).map(|did| { symbol_for_def_id(did, &shared_ccx, &symbol_map) })); @@ -1896,8 +1831,7 @@ fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a partitioning::partition(scx, items.iter().cloned(), strategy, - &inlining_map, - scx.reachable()) + &inlining_map) }); assert!(scx.tcx().sess.opts.cg.codegen_units == codegen_units.len() || diff --git a/src/librustc_trans/builder.rs b/src/librustc_trans/builder.rs index 90f96af549691..8556e95903c18 100644 --- a/src/librustc_trans/builder.rs +++ b/src/librustc_trans/builder.rs @@ -22,6 +22,7 @@ use value::Value; use util::nodemap::FnvHashMap; use libc::{c_uint, c_char}; +use std::borrow::Cow; use std::ffi::CString; use std::ptr; use syntax_pos::Span; @@ -175,8 +176,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .collect::>() .join(", ")); - check_call("invoke", llfn, args); - + let args = self.check_call("invoke", llfn, args); let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(ptr::null_mut()); unsafe { @@ -543,6 +543,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { debug!("Store {:?} -> {:?}", Value(val), Value(ptr)); assert!(!self.llbuilder.is_null()); self.count_insn("store"); + let ptr = self.check_store(val, ptr); unsafe { llvm::LLVMBuildStore(self.llbuilder, val, ptr) } @@ -552,6 +553,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { debug!("Store {:?} -> {:?}", Value(val), Value(ptr)); assert!(!self.llbuilder.is_null()); self.count_insn("store.volatile"); + let ptr = self.check_store(val, ptr); unsafe { let insn = llvm::LLVMBuildStore(self.llbuilder, val, ptr); llvm::LLVMSetVolatile(insn, llvm::True); @@ -562,6 +564,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { pub fn atomic_store(&self, val: ValueRef, ptr: ValueRef, order: AtomicOrdering) { debug!("Store {:?} -> {:?}", Value(val), Value(ptr)); self.count_insn("store.atomic"); + let ptr = self.check_store(val, ptr); unsafe { let ty = Type::from_ref(llvm::LLVMTypeOf(ptr)); let align = llalign_of_pref(self.ccx, ty.element_type()); @@ -857,8 +860,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .collect::>() .join(", ")); - check_call("call", llfn, args); - + let args = self.check_call("call", llfn, args); let bundle = bundle.as_ref().map(|b| 
b.raw()).unwrap_or(ptr::null_mut()); unsafe { @@ -1100,10 +1102,32 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { llvm::LLVMRustBuildAtomicFence(self.llbuilder, order, scope); } } -} -fn check_call(typ: &str, llfn: ValueRef, args: &[ValueRef]) { - if cfg!(debug_assertions) { + /// Returns the ptr value that should be used for storing `val`. + fn check_store<'b>(&self, + val: ValueRef, + ptr: ValueRef) -> ValueRef { + let dest_ptr_ty = val_ty(ptr); + let stored_ty = val_ty(val); + let stored_ptr_ty = stored_ty.ptr_to(); + + assert_eq!(dest_ptr_ty.kind(), llvm::TypeKind::Pointer); + + if dest_ptr_ty == stored_ptr_ty { + ptr + } else { + debug!("Type mismatch in store. \ + Expected {:?}, got {:?}; inserting bitcast", + dest_ptr_ty, stored_ptr_ty); + self.bitcast(ptr, stored_ptr_ty) + } + } + + /// Returns the args that should be used for a call to `llfn`. + fn check_call<'b>(&self, + typ: &str, + llfn: ValueRef, + args: &'b [ValueRef]) -> Cow<'b, [ValueRef]> { let mut fn_ty = val_ty(llfn); // Strip off pointers while fn_ty.kind() == llvm::TypeKind::Pointer { @@ -1115,16 +1139,31 @@ fn check_call(typ: &str, llfn: ValueRef, args: &[ValueRef]) { let param_tys = fn_ty.func_params(); - let iter = param_tys.into_iter() - .zip(args.iter().map(|&v| val_ty(v))); - for (i, (expected_ty, actual_ty)) in iter.enumerate() { - if expected_ty != actual_ty { - bug!("Type mismatch in function call of {:?}. \ - Expected {:?} for param {}, got {:?}", - Value(llfn), - expected_ty, i, actual_ty); + let all_args_match = param_tys.iter() + .zip(args.iter().map(|&v| val_ty(v))) + .all(|(expected_ty, actual_ty)| *expected_ty == actual_ty); + + if all_args_match { + return Cow::Borrowed(args); + } + + let casted_args: Vec<_> = param_tys.into_iter() + .zip(args.iter()) + .enumerate() + .map(|(i, (expected_ty, &actual_val))| { + let actual_ty = val_ty(actual_val); + if expected_ty != actual_ty { + debug!("Type mismatch in function call of {:?}. 
\ + Expected {:?} for param {}, got {:?}; injecting bitcast", + Value(llfn), + expected_ty, i, actual_ty); + self.bitcast(actual_val, expected_ty) + } else { + actual_val + } + }) + .collect(); - } - } + return Cow::Owned(casted_args); } } diff --git a/src/librustc_trans/cabi_aarch64.rs b/src/librustc_trans/cabi_aarch64.rs index fc11e3888d3c2..59a84439950ba 100644 --- a/src/librustc_trans/cabi_aarch64.rs +++ b/src/librustc_trans/cabi_aarch64.rs @@ -11,78 +11,12 @@ #![allow(non_upper_case_globals)] use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector}; -use abi::{FnType, ArgType}; +use abi::{self, FnType, ArgType}; use context::CrateContext; use type_::Type; -use std::cmp; - -fn align_up_to(off: usize, a: usize) -> usize { - return (off + a - 1) / a * a; -} - -fn align(off: usize, ty: Type) -> usize { - let a = ty_align(ty); - return align_up_to(off, a); -} - -fn ty_align(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 8, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - 1 - } else { - let str_tys = ty.field_types(); - str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t))) - } - } - Array => { - let elt = ty.element_type(); - ty_align(elt) - } - Vector => { - let len = ty.vector_length(); - let elt = ty.element_type(); - ty_align(elt) * len - } - _ => bug!("ty_align: unhandled type") - } -} - fn ty_size(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 8, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - let str_tys = ty.field_types(); - str_tys.iter().fold(0, |s, t| s + ty_size(*t)) - } else { - let str_tys = ty.field_types(); - let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t)); - align(size, ty) - } - } - Array => { - let len = ty.array_length(); - let elt = ty.element_type(); - let eltsz = ty_size(elt); - len * eltsz - } - Vector => { - let len = ty.vector_length(); - let elt = ty.element_type(); - let eltsz = ty_size(elt); - len * eltsz - } - _ => bug!("ty_size: unhandled type") - } + abi::ty_size(ty, 8) } fn is_homogenous_aggregate_ty(ty: Type) -> Option<(Type, u64)> { diff --git a/src/librustc_trans/cabi_arm.rs b/src/librustc_trans/cabi_arm.rs index 68a2e8aa8ce95..93d43f7d96116 100644 --- a/src/librustc_trans/cabi_arm.rs +++ b/src/librustc_trans/cabi_arm.rs @@ -11,7 +11,7 @@ #![allow(non_upper_case_globals)] use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector}; -use abi::{FnType, ArgType}; +use abi::{self, align_up_to, FnType, ArgType}; use context::CrateContext; use type_::Type; @@ -24,40 +24,13 @@ pub enum Flavor { type TyAlignFn = fn(ty: Type) -> usize; -fn align_up_to(off: usize, a: usize) -> usize { - return (off + a - 1) / a * a; -} - fn align(off: usize, ty: Type, align_fn: TyAlignFn) -> usize { let a = align_fn(ty); return align_up_to(off, a); } fn general_ty_align(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 4, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - 1 - } else { - let str_tys = ty.field_types(); - str_tys.iter().fold(1, |a, t| cmp::max(a, general_ty_align(*t))) - } - } - Array => { - let elt = ty.element_type(); - general_ty_align(elt) - } - Vector => { - let len = ty.vector_length(); - let elt = ty.element_type(); - general_ty_align(elt) * len - } - _ => bug!("ty_align: unhandled type") - } + abi::ty_align(ty, 4) } // For more information see: diff --git a/src/librustc_trans/cabi_mips.rs 
b/src/librustc_trans/cabi_mips.rs index 680310e195a41..25fe53e7ef40f 100644 --- a/src/librustc_trans/cabi_mips.rs +++ b/src/librustc_trans/cabi_mips.rs @@ -13,77 +13,17 @@ use libc::c_uint; use std::cmp; use llvm; -use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector}; -use abi::{ArgType, FnType}; +use llvm::{Integer, Pointer, Float, Double, Vector}; +use abi::{self, align_up_to, ArgType, FnType}; use context::CrateContext; use type_::Type; -fn align_up_to(off: usize, a: usize) -> usize { - return (off + a - 1) / a * a; -} - -fn align(off: usize, ty: Type) -> usize { - let a = ty_align(ty); - return align_up_to(off, a); -} - fn ty_align(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 4, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - 1 - } else { - let str_tys = ty.field_types(); - str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t))) - } - } - Array => { - let elt = ty.element_type(); - ty_align(elt) - } - Vector => { - let len = ty.vector_length(); - let elt = ty.element_type(); - ty_align(elt) * len - } - _ => bug!("ty_align: unhandled type") - } + abi::ty_align(ty, 4) } fn ty_size(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 4, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - let str_tys = ty.field_types(); - str_tys.iter().fold(0, |s, t| s + ty_size(*t)) - } else { - let str_tys = ty.field_types(); - let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t)); - align(size, ty) - } - } - Array => { - let len = ty.array_length(); - let elt = ty.element_type(); - let eltsz = ty_size(elt); - len * eltsz - } - Vector => { - let len = ty.vector_length(); - let elt = ty.element_type(); - let eltsz = ty_size(elt); - len * eltsz - } - _ => bug!("ty_size: unhandled type") - } + abi::ty_size(ty, 4) } fn classify_ret_ty(ccx: &CrateContext, ret: &mut ArgType) { diff --git a/src/librustc_trans/cabi_mips64.rs b/src/librustc_trans/cabi_mips64.rs index e92ef1eaec8ec..e6b500c88dc7a 100644 --- a/src/librustc_trans/cabi_mips64.rs +++ b/src/librustc_trans/cabi_mips64.rs @@ -13,77 +13,17 @@ use libc::c_uint; use std::cmp; use llvm; -use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector}; -use abi::{ArgType, FnType}; +use llvm::{Integer, Pointer, Float, Double, Vector}; +use abi::{self, align_up_to, ArgType, FnType}; use context::CrateContext; use type_::Type; -fn align_up_to(off: usize, a: usize) -> usize { - return (off + a - 1) / a * a; -} - -fn align(off: usize, ty: Type) -> usize { - let a = ty_align(ty); - return align_up_to(off, a); -} - fn ty_align(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 8, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - 1 - } else { - let str_tys = ty.field_types(); - str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t))) - } - } - Array => { - let elt = ty.element_type(); - ty_align(elt) - } - Vector => { - let len = ty.vector_length(); - let elt = ty.element_type(); - ty_align(elt) * len - } - _ => bug!("ty_align: unhandled type") - } + abi::ty_align(ty, 8) } fn ty_size(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 8, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - let str_tys = ty.field_types(); - str_tys.iter().fold(0, |s, t| s + ty_size(*t)) - } else { - let str_tys = ty.field_types(); - let size = str_tys.iter().fold(0, |s, t| 
align(s, *t) + ty_size(*t)); - align(size, ty) - } - } - Array => { - let len = ty.array_length(); - let elt = ty.element_type(); - let eltsz = ty_size(elt); - len * eltsz - } - Vector => { - let len = ty.vector_length(); - let elt = ty.element_type(); - let eltsz = ty_size(elt); - len * eltsz - } - _ => bug!("ty_size: unhandled type") - } + abi::ty_size(ty, 8) } fn classify_ret_ty(ccx: &CrateContext, ret: &mut ArgType) { diff --git a/src/librustc_trans/cabi_powerpc.rs b/src/librustc_trans/cabi_powerpc.rs index e05c31b1d88cd..4e1d7a9337827 100644 --- a/src/librustc_trans/cabi_powerpc.rs +++ b/src/librustc_trans/cabi_powerpc.rs @@ -10,67 +10,26 @@ use libc::c_uint; use llvm; -use llvm::{Integer, Pointer, Float, Double, Struct, Array}; -use abi::{FnType, ArgType}; +use llvm::{Integer, Pointer, Float, Double, Vector}; +use abi::{self, align_up_to, FnType, ArgType}; use context::CrateContext; use type_::Type; use std::cmp; -fn align_up_to(off: usize, a: usize) -> usize { - return (off + a - 1) / a * a; -} - -fn align(off: usize, ty: Type) -> usize { - let a = ty_align(ty); - return align_up_to(off, a); -} - fn ty_align(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 4, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - 1 - } else { - let str_tys = ty.field_types(); - str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t))) - } - } - Array => { - let elt = ty.element_type(); - ty_align(elt) - } - _ => bug!("ty_size: unhandled type") + if ty.kind() == Vector { + bug!("ty_size: unhandled type") + } else { + abi::ty_align(ty, 4) } } fn ty_size(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 4, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - let str_tys = ty.field_types(); - str_tys.iter().fold(0, |s, t| s + ty_size(*t)) - } else { - let str_tys = ty.field_types(); - let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t)); - align(size, ty) - } - } - Array => { - let len = ty.array_length(); - let elt = ty.element_type(); - let eltsz = ty_size(elt); - len * eltsz - } - _ => bug!("ty_size: unhandled type") + if ty.kind() == Vector { + bug!("ty_size: unhandled type") + } else { + abi::ty_size(ty, 4) } } diff --git a/src/librustc_trans/cabi_powerpc64.rs b/src/librustc_trans/cabi_powerpc64.rs index ba54e369fd838..cdc7c1fd1afb3 100644 --- a/src/librustc_trans/cabi_powerpc64.rs +++ b/src/librustc_trans/cabi_powerpc64.rs @@ -15,67 +15,16 @@ // Alignment of 128 bit types is not currently handled, this will // need to be fixed when PowerPC vector support is added. 
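The per-target `ty_align`/`ty_size` copies being deleted in these cabi_* hunks differed only in the byte size they assumed for pointers (4 on arm/mips/powerpc, 8 on aarch64/mips64/powerpc64/x86_64), which is why they collapse into calls like `abi::ty_size(ty, 8)`. The shared helpers themselves are not shown in this diff; the sketch below reconstructs their likely shape from the deleted code, using a simplified stand-in `Ty` enum instead of the real LLVM `Type` wrapper and omitting vector types.

```rust
// Standalone sketch of the consolidated alignment/size helpers, assuming a
// toy type model in place of librustc_trans's LLVM `Type` wrapper.
enum Ty {
    Integer(u32), // bit width
    Pointer,
    Float,
    Double,
    Struct { packed: bool, fields: Vec<Ty> },
    Array { len: usize, elem: Box<Ty> },
}

fn align_up_to(off: usize, a: usize) -> usize {
    (off + a - 1) / a * a
}

fn ty_align(ty: &Ty, pointer_size: usize) -> usize {
    match *ty {
        Ty::Integer(bits) => ((bits as usize) + 7) / 8,
        Ty::Pointer => pointer_size,
        Ty::Float => 4,
        Ty::Double => 8,
        Ty::Struct { packed, ref fields } => {
            if packed {
                1
            } else {
                // A struct is aligned to its most-aligned field.
                fields.iter().fold(1, |a, t| a.max(ty_align(t, pointer_size)))
            }
        }
        Ty::Array { ref elem, .. } => ty_align(elem, pointer_size),
    }
}

fn ty_size(ty: &Ty, pointer_size: usize) -> usize {
    match *ty {
        Ty::Integer(bits) => ((bits as usize) + 7) / 8,
        Ty::Pointer => pointer_size,
        Ty::Float => 4,
        Ty::Double => 8,
        Ty::Struct { packed, ref fields } => {
            if packed {
                fields.iter().map(|t| ty_size(t, pointer_size)).sum::<usize>()
            } else {
                // Lay fields out at their natural alignment, then round the
                // total up to the struct's own alignment.
                let size = fields.iter().fold(0, |s, t| {
                    align_up_to(s, ty_align(t, pointer_size)) + ty_size(t, pointer_size)
                });
                align_up_to(size, ty_align(ty, pointer_size))
            }
        }
        Ty::Array { len, ref elem } => len * ty_size(elem, pointer_size),
    }
}

fn main() {
    // { i32, i8 } on a 64-bit target: 5 bytes of fields, rounded up to 8.
    let s = Ty::Struct { packed: false, fields: vec![Ty::Integer(32), Ty::Integer(8)] };
    assert_eq!(ty_align(&s, 8), 4);
    assert_eq!(ty_size(&s, 8), 8);
}
```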
-use llvm::{Integer, Pointer, Float, Double, Struct, Array}; -use abi::{FnType, ArgType}; +use llvm::{Integer, Pointer, Float, Double, Struct, Vector, Array}; +use abi::{self, FnType, ArgType}; use context::CrateContext; use type_::Type; -use std::cmp; - -fn align_up_to(off: usize, a: usize) -> usize { - return (off + a - 1) / a * a; -} - -fn align(off: usize, ty: Type) -> usize { - let a = ty_align(ty); - return align_up_to(off, a); -} - -fn ty_align(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 8, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - 1 - } else { - let str_tys = ty.field_types(); - str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t))) - } - } - Array => { - let elt = ty.element_type(); - ty_align(elt) - } - _ => bug!("ty_align: unhandled type") - } -} - fn ty_size(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 8, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - let str_tys = ty.field_types(); - str_tys.iter().fold(0, |s, t| s + ty_size(*t)) - } else { - let str_tys = ty.field_types(); - let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t)); - align(size, ty) - } - } - Array => { - let len = ty.array_length(); - let elt = ty.element_type(); - let eltsz = ty_size(elt); - len * eltsz - } - _ => bug!("ty_size: unhandled type") + if ty.kind() == Vector { + bug!("ty_size: unhandled type") + } else { + abi::ty_size(ty, 8) } } diff --git a/src/librustc_trans/cabi_s390x.rs b/src/librustc_trans/cabi_s390x.rs index 19404b667e1fc..5a666c6083d16 100644 --- a/src/librustc_trans/cabi_s390x.rs +++ b/src/librustc_trans/cabi_s390x.rs @@ -12,16 +12,12 @@ // for a pre-z13 machine or using -mno-vx. 
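Stepping back to the `Builder::check_store`/`check_call` rewrite near the top of this section: instead of aborting with `bug!` on a type mismatch, the builder now inserts a bitcast, and for calls it returns the argument list as a `Cow` so the common no-mismatch case stays allocation-free. A minimal standalone sketch of that borrow-or-rebuild pattern, with a small `Val` struct and plain strings standing in for `ValueRef` and LLVM types:

```rust
use std::borrow::Cow;

// Hypothetical stand-in: a value tagged with the type it was built with.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Val(&'static str);

// Models rewriting a value to the type the callee expects.
fn bitcast(_v: Val, expected: &'static str) -> Val {
    Val(expected)
}

/// Return the argument list to use for a call: borrowed if every argument
/// already has the expected type, rebuilt with casts otherwise.
fn check_call<'a>(expected: &[&'static str], args: &'a [Val]) -> Cow<'a, [Val]> {
    let all_match = expected.iter()
        .zip(args)
        .all(|(exp, arg)| *exp == arg.0);
    if all_match {
        return Cow::Borrowed(args);
    }
    let casted: Vec<Val> = expected.iter()
        .zip(args)
        .map(|(exp, &arg)| if *exp == arg.0 { arg } else { bitcast(arg, *exp) })
        .collect();
    Cow::Owned(casted)
}

fn main() {
    let args = [Val("i8*"), Val("i32")];
    // No mismatch: the original slice is reused without allocating.
    assert!(matches!(check_call(&["i8*", "i32"], &args), Cow::Borrowed(_)));
    // Mismatch: a new vector with the casted argument is produced.
    assert!(matches!(check_call(&["i64*", "i32"], &args), Cow::Owned(_)));
}
```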
use llvm::{Integer, Pointer, Float, Double, Struct, Array, Vector}; -use abi::{FnType, ArgType}; +use abi::{align_up_to, FnType, ArgType}; use context::CrateContext; use type_::Type; use std::cmp; -fn align_up_to(off: usize, a: usize) -> usize { - return (off + a - 1) / a * a; -} - fn align(off: usize, ty: Type) -> usize { let a = ty_align(ty); return align_up_to(off, a); diff --git a/src/librustc_trans/cabi_x86_64.rs b/src/librustc_trans/cabi_x86_64.rs index eb67f4ca6185a..33990148c8b7d 100644 --- a/src/librustc_trans/cabi_x86_64.rs +++ b/src/librustc_trans/cabi_x86_64.rs @@ -16,12 +16,10 @@ use self::RegClass::*; use llvm::{Integer, Pointer, Float, Double}; use llvm::{Struct, Array, Attribute, Vector}; -use abi::{ArgType, FnType}; +use abi::{self, ArgType, FnType}; use context::CrateContext; use type_::Type; -use std::cmp; - #[derive(Clone, Copy, PartialEq)] enum RegClass { NoClass, @@ -90,62 +88,11 @@ fn classify_ty(ty: Type) -> Vec { } fn ty_align(ty: Type) -> usize { - match ty.kind() { - Integer => ((ty.int_width() as usize) + 7) / 8, - Pointer => 8, - Float => 4, - Double => 8, - Struct => { - if ty.is_packed() { - 1 - } else { - let str_tys = ty.field_types(); - str_tys.iter().fold(1, |a, t| cmp::max(a, ty_align(*t))) - } - } - Array => { - let elt = ty.element_type(); - ty_align(elt) - } - Vector => { - let len = ty.vector_length(); - let elt = ty.element_type(); - ty_align(elt) * len - } - _ => bug!("ty_align: unhandled type") - } + abi::ty_align(ty, 8) } fn ty_size(ty: Type) -> usize { - match ty.kind() { - Integer => (ty.int_width() as usize + 7) / 8, - Pointer => 8, - Float => 4, - Double => 8, - Struct => { - let str_tys = ty.field_types(); - if ty.is_packed() { - str_tys.iter().fold(0, |s, t| s + ty_size(*t)) - } else { - let size = str_tys.iter().fold(0, |s, t| align(s, *t) + ty_size(*t)); - align(size, ty) - } - } - Array => { - let len = ty.array_length(); - let elt = ty.element_type(); - let eltsz = ty_size(elt); - len * eltsz - } - Vector => { - let len = ty.vector_length(); - let elt = ty.element_type(); - let eltsz = ty_size(elt); - len * eltsz - } - - _ => bug!("ty_size: unhandled type") - } + abi::ty_size(ty, 8) } fn all_mem(cls: &mut [RegClass]) { diff --git a/src/librustc_trans/callee.rs b/src/librustc_trans/callee.rs index 9785ee2455793..cd81d114eb770 100644 --- a/src/librustc_trans/callee.rs +++ b/src/librustc_trans/callee.rs @@ -17,7 +17,6 @@ pub use self::CalleeData::*; use arena::TypedArena; -use back::symbol_names; use llvm::{self, ValueRef, get_params}; use rustc::hir::def_id::DefId; use rustc::ty::subst::Substs; @@ -133,26 +132,25 @@ impl<'tcx> Callee<'tcx> { let trait_ref = tcx.normalize_associated_type(&ty::Binder(trait_ref)); match common::fulfill_obligation(ccx.shared(), DUMMY_SP, trait_ref) { traits::VtableImpl(vtable_impl) => { - let impl_did = vtable_impl.impl_def_id; - let mname = tcx.item_name(def_id); - // create a concatenated set of substitutions which includes - // those from the impl and those from the method: - let mth = meth::get_impl_method(tcx, substs, impl_did, vtable_impl.substs, mname); + let name = tcx.item_name(def_id); + let (def_id, substs) = traits::find_method(tcx, name, substs, &vtable_impl); // Translate the function, bypassing Callee::def. // That is because default methods have the same ID as the // trait method used to look up the impl method that ended // up here, so calling Callee::def would infinitely recurse. 
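The duplicated `meth::get_impl_method` lookups here and later in the collector are replaced by a shared `traits::find_method`, which resolves a trait method name against the selected impl and, per the comment above, ends up at the trait's default method when the impl does not override it. The sketch below models only that name-resolution fallback with hypothetical `DefId`/`Impl`/`Trait` stand-ins; the real helper also composes the impl and method substitutions, which is not shown here.

```rust
use std::collections::HashMap;

#[derive(Clone, Copy, PartialEq, Debug)]
struct DefId(u32);

struct Impl {
    items: HashMap<&'static str, DefId>, // methods the impl defines itself
}

struct Trait {
    defaults: HashMap<&'static str, DefId>, // default methods on the trait
}

// Prefer the impl's own definition; fall back to the trait default.
fn find_method(name: &str, imp: &Impl, tr: &Trait) -> DefId {
    imp.items
        .get(name)
        .or_else(|| tr.defaults.get(name))
        .copied()
        .expect("trait method must exist in the impl or as a default")
}

fn main() {
    let tr = Trait {
        defaults: vec![("len", DefId(1)), ("is_empty", DefId(2))].into_iter().collect(),
    };
    let imp = Impl {
        items: vec![("len", DefId(10))].into_iter().collect(),
    };
    assert_eq!(find_method("len", &imp, &tr), DefId(10));     // overridden in the impl
    assert_eq!(find_method("is_empty", &imp, &tr), DefId(2)); // trait default is used
}
```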
- let (llfn, ty) = get_fn(ccx, mth.method.def_id, mth.substs); + let (llfn, ty) = get_fn(ccx, def_id, substs); Callee::ptr(llfn, ty) } traits::VtableClosure(vtable_closure) => { // The substitutions should have no type parameters remaining // after passing through fulfill_obligation let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap(); + let instance = Instance::new(def_id, substs); let llfn = closure::trans_closure_method(ccx, vtable_closure.closure_def_id, vtable_closure.substs, + instance, trait_closure_kind); let method_ty = def_ty(ccx.shared(), def_id, substs); @@ -160,7 +158,10 @@ impl<'tcx> Callee<'tcx> { } traits::VtableFnPointer(vtable_fn_pointer) => { let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap(); - let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, vtable_fn_pointer.fn_ty); + let instance = Instance::new(def_id, substs); + let llfn = trans_fn_pointer_shim(ccx, instance, + trait_closure_kind, + vtable_fn_pointer.fn_ty); let method_ty = def_ty(ccx.shared(), def_id, substs); Callee::ptr(llfn, method_ty) @@ -183,8 +184,7 @@ impl<'tcx> Callee<'tcx> { pub fn direct_fn_type<'a>(&self, ccx: &CrateContext<'a, 'tcx>, extra_args: &[Ty<'tcx>]) -> FnType { let abi = self.ty.fn_abi(); - let sig = ccx.tcx().erase_late_bound_regions(self.ty.fn_sig()); - let sig = ccx.tcx().normalize_associated_type(&sig); + let sig = ccx.tcx().erase_late_bound_regions_and_normalize(self.ty.fn_sig()); let mut fn_ty = FnType::unadjusted(ccx, abi, &sig, extra_args); if let Virtual(_) = self.data { // Don't pass the vtable, it's not an argument of the virtual fn. @@ -217,9 +217,7 @@ impl<'tcx> Callee<'tcx> { pub fn reify<'a>(self, ccx: &CrateContext<'a, 'tcx>) -> ValueRef { match self.data { Fn(llfn) => llfn, - Virtual(idx) => { - meth::trans_object_shim(ccx, self.ty, idx) - } + Virtual(_) => meth::trans_object_shim(ccx, self), NamedTupleConstructor(disr) => match self.ty.sty { ty::TyFnDef(def_id, substs, _) => { let instance = Instance::new(def_id, substs); @@ -264,8 +262,9 @@ fn def_ty<'a, 'tcx>(shared: &SharedCrateContext<'a, 'tcx>, /// ``` /// /// but for the bare function type given. -pub fn trans_fn_pointer_shim<'a, 'tcx>( +fn trans_fn_pointer_shim<'a, 'tcx>( ccx: &'a CrateContext<'a, 'tcx>, + method_instance: Instance<'tcx>, closure_kind: ty::ClosureKind, bare_fn_ty: Ty<'tcx>) -> ValueRef @@ -303,9 +302,8 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( }; // Check if we already trans'd this shim. 
- match ccx.fn_pointer_shims().borrow().get(&bare_fn_ty_maybe_ref) { - Some(&llval) => { return llval; } - None => { } + if let Some(&llval) = ccx.fn_pointer_shims().borrow().get(&bare_fn_ty_maybe_ref) { + return llval; } debug!("trans_fn_pointer_shim(bare_fn_ty={:?})", @@ -327,9 +325,8 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( bare_fn_ty); } }; - let sig = tcx.erase_late_bound_regions(sig); - let sig = ccx.tcx().normalize_associated_type(&sig); - let tuple_input_ty = tcx.mk_tup(sig.inputs.to_vec()); + let sig = tcx.erase_late_bound_regions_and_normalize(sig); + let tuple_input_ty = tcx.intern_tup(&sig.inputs[..]); let sig = ty::FnSig { inputs: vec![bare_fn_ty_maybe_ref, tuple_input_ty], @@ -345,10 +342,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( debug!("tuple_fn_ty: {:?}", tuple_fn_ty); // - let function_name = - symbol_names::internal_name_from_type_and_suffix(ccx, - bare_fn_ty, - "fn_pointer_shim"); + let function_name = method_instance.symbol_name(ccx.shared()); let llfn = declare::define_internal_fn(ccx, &function_name, tuple_fn_ty); attributes::set_frame_pointer_elimination(ccx, llfn); // diff --git a/src/librustc_trans/cleanup.rs b/src/librustc_trans/cleanup.rs index d368ce47430b7..b9f24eba9dc1e 100644 --- a/src/librustc_trans/cleanup.rs +++ b/src/librustc_trans/cleanup.rs @@ -305,7 +305,7 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> { assert!(orig_scopes_len > 0); // Remove any scopes that do not have cleanups on panic: - let mut popped_scopes = vec!(); + let mut popped_scopes = vec![]; while !self.top_scope(|s| s.needs_invoke()) { debug!("top scope does not need invoke"); popped_scopes.push(self.pop_scope()); @@ -402,7 +402,7 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> { let orig_scopes_len = self.scopes_len(); let mut prev_llbb; - let mut popped_scopes = vec!(); + let mut popped_scopes = vec![]; let mut skip = 0; // First we pop off all the cleanup stacks that are @@ -585,8 +585,8 @@ impl<'tcx> CleanupScope<'tcx> { fn new(debug_loc: DebugLoc) -> CleanupScope<'tcx> { CleanupScope { debug_loc: debug_loc, - cleanups: vec!(), - cached_early_exits: vec!(), + cleanups: vec![], + cached_early_exits: vec![], cached_landing_pad: None, } } diff --git a/src/librustc_trans/closure.rs b/src/librustc_trans/closure.rs index 83882c27e8e7b..a1d645fb993b0 100644 --- a/src/librustc_trans/closure.rs +++ b/src/librustc_trans/closure.rs @@ -9,7 +9,6 @@ // except according to those terms. use arena::TypedArena; -use back::symbol_names; use llvm::{self, ValueRef, get_params}; use rustc::hir::def_id::DefId; use abi::{Abi, FnType}; @@ -62,8 +61,7 @@ fn get_or_create_closure_declaration<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // Compute the rust-call form of the closure call method. let sig = &tcx.closure_type(closure_id, substs).sig; - let sig = tcx.erase_late_bound_regions(sig); - let sig = tcx.normalize_associated_type(&sig); + let sig = tcx.erase_late_bound_regions_and_normalize(sig); let closure_type = tcx.mk_closure_from_closure_substs(closure_id, substs); let function_type = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Normal, @@ -127,8 +125,7 @@ pub fn trans_closure_body_via_mir<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // of the closure expression. 
let sig = &tcx.closure_type(closure_def_id, closure_substs).sig; - let sig = tcx.erase_late_bound_regions(sig); - let sig = tcx.normalize_associated_type(&sig); + let sig = tcx.erase_late_bound_regions_and_normalize(sig); let closure_type = tcx.mk_closure_from_closure_substs(closure_def_id, closure_substs); @@ -152,6 +149,7 @@ pub fn trans_closure_body_via_mir<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, pub fn trans_closure_method<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>, closure_def_id: DefId, substs: ty::ClosureSubsts<'tcx>, + method_instance: Instance<'tcx>, trait_closure_kind: ty::ClosureKind) -> ValueRef { @@ -199,7 +197,7 @@ pub fn trans_closure_method<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>, // fn call_once(mut self, ...) { call_mut(&mut self, ...) } // // These are both the same at trans time. - trans_fn_once_adapter_shim(ccx, closure_def_id, substs, llfn) + trans_fn_once_adapter_shim(ccx, closure_def_id, substs, method_instance, llfn) } _ => { bug!("trans_closure_adapter_shim: cannot convert {:?} to {:?}", @@ -213,9 +211,14 @@ fn trans_fn_once_adapter_shim<'a, 'tcx>( ccx: &'a CrateContext<'a, 'tcx>, closure_def_id: DefId, substs: ty::ClosureSubsts<'tcx>, + method_instance: Instance<'tcx>, llreffn: ValueRef) -> ValueRef { + if let Some(&llfn) = ccx.instances().borrow().get(&method_instance) { + return llfn; + } + debug!("trans_fn_once_adapter_shim(closure_def_id={:?}, substs={:?}, llreffn={:?})", closure_def_id, substs, Value(llreffn)); @@ -244,8 +247,7 @@ fn trans_fn_once_adapter_shim<'a, 'tcx>( assert_eq!(abi, Abi::RustCall); sig.0.inputs[0] = closure_ty; - let sig = tcx.erase_late_bound_regions(&sig); - let sig = tcx.normalize_associated_type(&sig); + let sig = tcx.erase_late_bound_regions_and_normalize(&sig); let fn_ty = FnType::new(ccx, abi, &sig, &[]); let llonce_fn_ty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { @@ -255,9 +257,8 @@ fn trans_fn_once_adapter_shim<'a, 'tcx>( })); // Create the by-value helper. - let function_name = - symbol_names::internal_name_from_type_and_suffix(ccx, llonce_fn_ty, "once_shim"); - let lloncefn = declare::declare_fn(ccx, &function_name, llonce_fn_ty); + let function_name = method_instance.symbol_name(ccx.shared()); + let lloncefn = declare::define_internal_fn(ccx, &function_name, llonce_fn_ty); attributes::set_frame_pointer_elimination(ccx, lloncefn); let (block_arena, fcx): (TypedArena<_>, FunctionContext); @@ -312,5 +313,7 @@ fn trans_fn_once_adapter_shim<'a, 'tcx>( fcx.finish(bcx, DebugLoc::None); + ccx.instances().borrow_mut().insert(method_instance, lloncefn); + lloncefn } diff --git a/src/librustc_trans/collector.rs b/src/librustc_trans/collector.rs index 5a8ab62a2aa2d..8348da9f7b7bf 100644 --- a/src/librustc_trans/collector.rs +++ b/src/librustc_trans/collector.rs @@ -153,7 +153,7 @@ //! The collection algorithm handles this more or less transparently. If it is //! about to create a translation item for something with an external `DefId`, //! it will take a look if the MIR for that item is available, and if so just -//! proceed normally. If the MIR is not available, it assumes that that item is +//! proceed normally. If the MIR is not available, it assumes that the item is //! just linked to and no node is created; which is exactly what we want, since //! no machine code should be generated in the current crate for such an item. //! 
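Both shim builders touched above (`trans_fn_pointer_shim` via `ccx.fn_pointer_shims()` and `trans_fn_once_adapter_shim` via `ccx.instances()`) now short-circuit through a per-context cache before generating code and register the result afterwards. A minimal sketch of that check-then-insert memoization around a `RefCell`-backed map; the `Ctxt` type and `translate_shim` body are hypothetical stand-ins, not the real context API:

```rust
use std::cell::RefCell;
use std::collections::HashMap;

// `shims` stands in for the context-owned cache; the u64 stands in for the
// generated llfn ValueRef.
struct Ctxt {
    shims: RefCell<HashMap<String, u64>>,
}

impl Ctxt {
    fn shim_for(&self, key: &str) -> u64 {
        // Check if we already translated this shim.
        if let Some(&llfn) = self.shims.borrow().get(key) {
            return llfn;
        }
        // Translate it once, then record the result for later callers.
        let llfn = translate_shim(key);
        self.shims.borrow_mut().insert(key.to_string(), llfn);
        llfn
    }
}

fn translate_shim(key: &str) -> u64 {
    key.len() as u64 // placeholder for the actual codegen work
}

fn main() {
    let ccx = Ctxt { shims: RefCell::new(HashMap::new()) };
    let a = ccx.shim_for("fn(u32) -> u32");
    let b = ccx.shim_for("fn(u32) -> u32");
    assert_eq!(a, b); // the second call hits the cache
}
```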
@@ -198,21 +198,18 @@ use rustc::traits; use rustc::ty::subst::{Substs, Subst}; use rustc::ty::{self, TypeFoldable, TyCtxt}; use rustc::ty::adjustment::CustomCoerceUnsized; -use rustc::mir::repr as mir; +use rustc::mir::{self, Location}; use rustc::mir::visit as mir_visit; use rustc::mir::visit::Visitor as MirVisitor; -use rustc::mir::repr::Location; use rustc_const_eval as const_eval; use syntax::abi::Abi; -use errors; use syntax_pos::DUMMY_SP; use base::custom_coerce_unsize_info; use context::SharedCrateContext; -use common::{fulfill_obligation, normalize_and_test_predicates, type_is_sized}; +use common::{fulfill_obligation, type_is_sized}; use glue::{self, DropGlueKind}; -use meth; use monomorphize::{self, Instance}; use util::nodemap::{FnvHashSet, FnvHashMap, DefIdMap}; @@ -348,8 +345,7 @@ fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>, // Scan the MIR in order to find function calls, closures, and // drop-glue - let mir = errors::expect(scx.sess().diagnostic(), scx.get_mir(def_id), - || format!("Could not find MIR for static: {:?}", def_id)); + let mir = scx.tcx().item_mir(def_id); let empty_substs = scx.empty_substs_for_def_id(def_id); let visitor = MirNeighborCollector { @@ -369,8 +365,7 @@ fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>, // Scan the MIR in order to find function calls, closures, and // drop-glue - let mir = errors::expect(scx.sess().diagnostic(), scx.get_mir(instance.def), - || format!("Could not find MIR for function: {}", instance)); + let mir = scx.tcx().item_mir(instance.def); let visitor = MirNeighborCollector { scx: scx, @@ -401,7 +396,7 @@ fn record_inlining_canditates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, callees: &[TransItem<'tcx>], inlining_map: &mut InliningMap<'tcx>) { let is_inlining_candidate = |trans_item: &TransItem<'tcx>| { - trans_item.is_from_extern_crate() || trans_item.requests_inline(tcx) + trans_item.needs_local_copy(tcx) }; let inlining_candidates = callees.into_iter() @@ -453,11 +448,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { match *rvalue { mir::Rvalue::Aggregate(mir::AggregateKind::Closure(def_id, ref substs), _) => { - let mir = errors::expect(self.scx.sess().diagnostic(), - self.scx.get_mir(def_id), - || { - format!("Could not find MIR for closure: {:?}", def_id) - }); + let mir = self.scx.tcx().item_mir(def_id); let concrete_substs = monomorphize::apply_param_substs(self.scx, self.param_substs, @@ -497,7 +488,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { self.output); } } - mir::Rvalue::Box(_) => { + mir::Rvalue::Box(..) => { let exchange_malloc_fn_def_id = self.scx .tcx() @@ -523,7 +514,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { fn visit_lvalue(&mut self, lvalue: &mir::Lvalue<'tcx>, - context: mir_visit::LvalueContext, + context: mir_visit::LvalueContext<'tcx>, location: Location) { debug!("visiting lvalue {:?}", *lvalue); @@ -627,25 +618,23 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { fn can_result_in_trans_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool { - if !match tcx.lookup_item_type(def_id).ty.sty { - ty::TyFnDef(def_id, ..) => { + match tcx.lookup_item_type(def_id).ty.sty { + ty::TyFnDef(def_id, _, f) => { // Some constructors also have type TyFnDef but they are // always instantiated inline and don't result in // translation item. Same for FFI functions. 
- match tcx.map.get_if_local(def_id) { - Some(hir_map::NodeVariant(_)) | - Some(hir_map::NodeStructCtor(_)) | - Some(hir_map::NodeForeignItem(_)) => false, - Some(_) => true, - None => { - tcx.sess.cstore.variant_kind(def_id).is_none() + if let Some(hir_map::NodeForeignItem(_)) = tcx.map.get_if_local(def_id) { + return false; + } + + if let Some(adt_def) = f.sig.output().skip_binder().ty_adt_def() { + if adt_def.variants.iter().any(|v| def_id == v.did) { + return false; } } } - ty::TyClosure(..) => true, - _ => false - } { - return false; + ty::TyClosure(..) => {} + _ => return false } can_have_local_instance(tcx, def_id) @@ -735,7 +724,7 @@ fn find_drop_glue_neighbors<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, create_fn_trans_item(scx, exchange_free_fn_def_id, fn_substs, - Substs::empty(scx.tcx())); + scx.tcx().intern_substs(&[])); output.push(exchange_free_fn_trans_item); } @@ -755,7 +744,7 @@ fn find_drop_glue_neighbors<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, .drop_trait() .unwrap(); - let self_type_substs = Substs::new_trait(scx.tcx(), ty, &[]); + let self_type_substs = scx.tcx().mk_substs_trait(ty, &[]); let trait_ref = ty::TraitRef { def_id: drop_trait_def_id, @@ -771,7 +760,7 @@ fn find_drop_glue_neighbors<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, let trans_item = create_fn_trans_item(scx, destructor_did, substs, - Substs::empty(scx.tcx())); + scx.tcx().intern_substs(&[])); output.push(trans_item); } @@ -901,17 +890,8 @@ fn do_static_trait_method_dispatch<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, // Now that we know which impl is being used, we can dispatch to // the actual function: match vtbl { - traits::VtableImpl(traits::VtableImplData { - impl_def_id: impl_did, - substs: impl_substs, - nested: _ }) => - { - let impl_method = meth::get_impl_method(tcx, - rcvr_substs, - impl_did, - impl_substs, - trait_method.name); - Some((impl_method.method.def_id, &impl_method.substs)) + traits::VtableImpl(impl_data) => { + Some(traits::find_method(tcx, trait_method.name, rcvr_substs, &impl_data)) } // If we have a closure or a function pointer, we will also encounter // the concrete closure/function somewhere else (during closure or fn @@ -1031,7 +1011,9 @@ fn create_fn_trans_item<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, let concrete_substs = monomorphize::apply_param_substs(scx, param_substs, &fn_substs); - assert!(concrete_substs.is_normalized_for_trans()); + assert!(concrete_substs.is_normalized_for_trans(), + "concrete_substs not normalized for trans: {:?}", + concrete_substs); TransItem::Fn(Instance::new(def_id, concrete_substs)) } @@ -1045,42 +1027,19 @@ fn create_trans_items_for_vtable_methods<'a, 'tcx>(scx: &SharedCrateContext<'a, if let ty::TyTrait(ref trait_ty) = trait_ty.sty { let poly_trait_ref = trait_ty.principal.with_self_ty(scx.tcx(), impl_ty); + let param_substs = scx.tcx().intern_substs(&[]); // Walk all methods of the trait, including those of its supertraits - for trait_ref in traits::supertraits(scx.tcx(), poly_trait_ref) { - let vtable = fulfill_obligation(scx, DUMMY_SP, trait_ref); - match vtable { - traits::VtableImpl( - traits::VtableImplData { - impl_def_id, - substs, - nested: _ }) => { - let items = meth::get_vtable_methods(scx.tcx(), impl_def_id, substs) - .into_iter() - // filter out None values - .filter_map(|opt_impl_method| opt_impl_method) - // create translation items - .filter_map(|impl_method| { - if can_have_local_instance(scx.tcx(), impl_method.method.def_id) { - Some(create_fn_trans_item(scx, - impl_method.method.def_id, - 
impl_method.substs, - Substs::empty(scx.tcx()))) - } else { - None - } - }); - - output.extend(items); - - // Also add the destructor - let dg_type = glue::get_drop_glue_type(scx.tcx(), - trait_ref.self_ty()); - output.push(TransItem::DropGlue(DropGlueKind::Ty(dg_type))); - } - _ => { /* */ } - } - } + let methods = traits::get_vtable_methods(scx.tcx(), poly_trait_ref); + let methods = methods.filter_map(|method| method) + .filter_map(|(def_id, substs)| do_static_dispatch(scx, def_id, substs, param_substs)) + .filter(|&(def_id, _)| can_have_local_instance(scx.tcx(), def_id)) + .map(|(def_id, substs)| create_fn_trans_item(scx, def_id, substs, param_substs)); + output.extend(methods); + + // Also add the destructor + let dg_type = glue::get_drop_glue_type(scx.tcx(), impl_ty); + output.push(TransItem::DropGlue(DropGlueKind::Ty(dg_type))); } } @@ -1240,25 +1199,27 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(scx: &SharedCrateContext<'a, ' let impl_substs = Substs::for_item(tcx, impl_def_id, |_, _| tcx.mk_region(ty::ReErased), |_, _| tcx.types.err); - let mth = meth::get_impl_method(tcx, - callee_substs, - impl_def_id, - impl_substs, - method.name); - - assert!(mth.is_provided); - - let predicates = mth.method.predicates.predicates.subst(tcx, &mth.substs); - if !normalize_and_test_predicates(tcx, predicates) { + let impl_data = traits::VtableImplData { + impl_def_id: impl_def_id, + substs: impl_substs, + nested: vec![] + }; + let (def_id, substs) = traits::find_method(tcx, + method.name, + callee_substs, + &impl_data); + + let predicates = tcx.lookup_predicates(def_id).predicates + .subst(tcx, substs); + if !traits::normalize_and_test_predicates(tcx, predicates) { continue; } if can_have_local_instance(tcx, method.def_id) { - let empty_substs = tcx.erase_regions(&mth.substs); let item = create_fn_trans_item(scx, method.def_id, callee_substs, - empty_substs); + tcx.erase_regions(&substs)); output.push(item); } } @@ -1280,8 +1241,7 @@ fn collect_const_item_neighbours<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, { // Scan the MIR in order to find function calls, closures, and // drop-glue - let mir = errors::expect(scx.sess().diagnostic(), scx.get_mir(def_id), - || format!("Could not find MIR for const: {:?}", def_id)); + let mir = scx.tcx().item_mir(def_id); let visitor = MirNeighborCollector { scx: scx, diff --git a/src/librustc_trans/common.rs b/src/librustc_trans/common.rs index e0de04d150ca7..464b261b08e05 100644 --- a/src/librustc_trans/common.rs +++ b/src/librustc_trans/common.rs @@ -19,6 +19,7 @@ use llvm::{True, False, Bool, OperandBundleDef}; use rustc::hir::def::Def; use rustc::hir::def_id::DefId; use rustc::infer::TransNormalize; +use rustc::mir::Mir; use rustc::util::common::MemoizationMap; use middle::lang_items::LangItem; use rustc::ty::subst::Substs; @@ -32,7 +33,6 @@ use consts; use debuginfo::{self, DebugLoc}; use declare; use machine; -use mir::CachedMir; use monomorphize; use type_::Type; use value::Value; @@ -46,7 +46,7 @@ use arena::TypedArena; use libc::{c_uint, c_char}; use std::ops::Deref; use std::ffi::CString; -use std::cell::{Cell, RefCell}; +use std::cell::{Cell, RefCell, Ref}; use syntax::ast; use syntax::parse::token::InternedString; @@ -127,7 +127,7 @@ pub fn type_is_imm_pair<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) Layout::FatPointer { .. } => true, Layout::Univariant { ref variant, .. } => { // There must be only 2 fields. 
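The vtable-method collection above is rewritten from a nested `match` over `fulfill_obligation` results into a single iterator pipeline: drop absent methods with `filter_map`, resolve each to a concrete callee, keep only those that can be instantiated locally, and `extend` the output. A small self-contained sketch of that shape; the resolution and predicate functions here are placeholders rather than the real collector API:

```rust
// Placeholder translation item; the real code works with DefIds and Substs.
#[derive(Debug, PartialEq)]
struct TransItem(&'static str);

// Stand-in for `do_static_dispatch`: resolution can fail.
fn resolve(name: &'static str) -> Option<&'static str> {
    if name.is_empty() { None } else { Some(name) }
}

// Stand-in for the "can this be instantiated in the local crate?" check.
fn can_have_local_instance(name: &str) -> bool {
    !name.starts_with("extern_")
}

fn main() {
    // Some vtable slots are `None` (e.g. unsatisfiable methods).
    let vtable_methods = vec![Some("clone"), None, Some("extern_drop"), Some("fmt")];
    let mut output: Vec<TransItem> = Vec::new();

    let items = vtable_methods.into_iter()
        .filter_map(|m| m)                           // skip empty slots
        .filter_map(resolve)                         // skip methods that fail to resolve
        .filter(|name| can_have_local_instance(name))
        .map(TransItem);                             // create translation items
    output.extend(items);

    assert_eq!(output, vec![TransItem("clone"), TransItem("fmt")]);
}
```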
- if variant.offset_after_field.len() != 2 { + if variant.offsets.len() != 2 { return false; } @@ -150,15 +150,6 @@ pub fn type_is_zero_size<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) - llsize_of_alloc(ccx, llty) == 0 } -/// Generates a unique symbol based off the name given. This is used to create -/// unique symbols for things like closures. -pub fn gensym_name(name: &str) -> ast::Name { - let num = token::gensym(name).0; - // use one colon which will get translated to a period by the mangler, and - // we're guaranteed that `num` is globally unique for this crate. - token::gensym(&format!("{}:{}", name, num)) -} - /* * A note on nomenclature of linking: "extern", "foreign", and "upcall". * @@ -259,10 +250,8 @@ pub fn validate_substs(substs: &Substs) { // Function context. Every LLVM function we create will have one of // these. pub struct FunctionContext<'a, 'tcx: 'a> { - // The MIR for this function. At present, this is optional because - // we only have MIR available for things that are local to the - // crate. - pub mir: Option>, + // The MIR for this function. + pub mir: Option>>, // The ValueRef returned from a call to llvm::LLVMAddFunction; the // address of the first instruction in the sequence of @@ -322,8 +311,8 @@ pub struct FunctionContext<'a, 'tcx: 'a> { } impl<'a, 'tcx> FunctionContext<'a, 'tcx> { - pub fn mir(&self) -> CachedMir<'a, 'tcx> { - self.mir.clone().expect("fcx.mir was empty") + pub fn mir(&self) -> Ref<'tcx, Mir<'tcx>> { + self.mir.as_ref().map(Ref::clone).expect("fcx.mir was empty") } pub fn cleanup(&self) { @@ -385,7 +374,7 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> { let tcx = ccx.tcx(); match tcx.lang_items.eh_personality() { Some(def_id) if !base::wants_msvc_seh(ccx.sess()) => { - Callee::def(ccx, def_id, Substs::empty(tcx)).reify(ccx) + Callee::def(ccx, def_id, tcx.intern_substs(&[])).reify(ccx) } _ => { if let Some(llpersonality) = ccx.eh_personality().get() { @@ -412,7 +401,7 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> { let tcx = ccx.tcx(); assert!(ccx.sess().target.target.options.custom_unwind_resume); if let Some(def_id) = tcx.lang_items.eh_unwind_resume() { - return Callee::def(ccx, def_id, Substs::empty(tcx)); + return Callee::def(ccx, def_id, tcx.intern_substs(&[])); } let ty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { @@ -499,7 +488,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> { self.set_lpad_ref(lpad.map(|p| &*self.fcx().lpad_arena.alloc(p))) } - pub fn mir(&self) -> CachedMir<'blk, 'tcx> { + pub fn mir(&self) -> Ref<'tcx, Mir<'tcx>> { self.fcx.mir() } @@ -618,7 +607,7 @@ impl<'blk, 'tcx> BlockAndBuilder<'blk, 'tcx> { self.bcx.llbb } - pub fn mir(&self) -> CachedMir<'blk, 'tcx> { + pub fn mir(&self) -> Ref<'tcx, Mir<'tcx>> { self.bcx.mir() } @@ -808,9 +797,7 @@ pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> Va s.as_ptr() as *const c_char, s.len() as c_uint, !null_terminated as Bool); - - let gsym = token::gensym("str"); - let sym = format!("str{}", gsym.0); + let sym = cx.generate_local_symbol_name("str"); let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{ bug!("symbol `{}` is already defined", sym); }); @@ -884,12 +871,6 @@ pub fn const_get_elt(v: ValueRef, us: &[c_uint]) } } -pub fn const_to_int(v: ValueRef) -> i64 { - unsafe { - llvm::LLVMConstIntGetSExtValue(v) - } -} - pub fn const_to_uint(v: ValueRef) -> u64 { unsafe { llvm::LLVMConstIntGetZExtValue(v) @@ -1002,35 +983,6 @@ pub fn fulfill_obligation<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, }) } -/// Normalizes the 
predicates and checks whether they hold. If this -/// returns false, then either normalize encountered an error or one -/// of the predicates did not hold. Used when creating vtables to -/// check for unsatisfiable methods. -pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - predicates: Vec>) - -> bool -{ - debug!("normalize_and_test_predicates(predicates={:?})", - predicates); - - tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| { - let mut selcx = SelectionContext::new(&infcx); - let mut fulfill_cx = traits::FulfillmentContext::new(); - let cause = traits::ObligationCause::dummy(); - let traits::Normalized { value: predicates, obligations } = - traits::normalize(&mut selcx, cause.clone(), &predicates); - for obligation in obligations { - fulfill_cx.register_predicate_obligation(&infcx, obligation); - } - for predicate in predicates { - let obligation = traits::Obligation::new(cause.clone(), predicate); - fulfill_cx.register_predicate_obligation(&infcx, obligation); - } - - fulfill_cx.select_all_or_error(&infcx).is_ok() - }) -} - pub fn langcall(tcx: TyCtxt, span: Option, msg: &str, diff --git a/src/librustc_trans/consts.rs b/src/librustc_trans/consts.rs index 15f7132e52d2f..0dc10aa7759ea 100644 --- a/src/librustc_trans/consts.rs +++ b/src/librustc_trans/consts.rs @@ -30,7 +30,6 @@ use rustc::hir; use std::ffi::{CStr, CString}; use syntax::ast; use syntax::attr; -use syntax::parse::token; pub fn ptrcast(val: ValueRef, ty: Type) -> ValueRef { unsafe { @@ -44,10 +43,7 @@ pub fn addr_of_mut(ccx: &CrateContext, kind: &str) -> ValueRef { unsafe { - // FIXME: this totally needs a better name generation scheme, perhaps a simple global - // counter? Also most other uses of gensym in trans. - let gsym = token::gensym("_"); - let name = format!("{}{}", kind, gsym.0); + let name = ccx.generate_local_symbol_name(kind); let gv = declare::define_global(ccx, &name[..], val_ty(cv)).unwrap_or_else(||{ bug!("symbol `{}` is already defined", name); }); diff --git a/src/librustc_trans/context.rs b/src/librustc_trans/context.rs index b10129d1019ae..fc75b1018ec35 100644 --- a/src/librustc_trans/context.rs +++ b/src/librustc_trans/context.rs @@ -15,16 +15,12 @@ use middle::cstore::LinkMeta; use rustc::hir::def::ExportMap; use rustc::hir::def_id::DefId; use rustc::traits; -use rustc::mir::mir_map::MirMap; -use rustc::mir::repr as mir; -use adt; use base; use builder::Builder; use common::BuilderRef_res; use debuginfo; use declare; use glue::DropGlueKind; -use mir::CachedMir; use monomorphize::Instance; use partitioning::CodegenUnit; @@ -36,7 +32,6 @@ use session::config::NoDebugInfo; use session::Session; use session::config; use symbol_map::SymbolMap; -use util::sha2::Sha256; use util::nodemap::{NodeSet, DefIdMap, FnvHashMap, FnvHashSet}; use std::ffi::{CStr, CString}; @@ -73,12 +68,9 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> { export_map: ExportMap, reachable: NodeSet, link_meta: LinkMeta, - symbol_hasher: RefCell, tcx: TyCtxt<'a, 'tcx, 'tcx>, stats: Stats, check_overflow: bool, - mir_map: &'a MirMap<'tcx>, - mir_cache: RefCell>>, use_dll_storage_attrs: bool, @@ -142,7 +134,6 @@ pub struct LocalCrateContext<'tcx> { lltypes: RefCell, Type>>, llsizingtypes: RefCell, Type>>, - adt_reprs: RefCell, Rc>>>, type_hashcodes: RefCell, String>>, int_type: Type, opaque_vec_type: Type, @@ -168,6 +159,9 @@ pub struct LocalCrateContext<'tcx> { type_of_depth: Cell, symbol_map: Rc>, + + /// A counter that is used for generating local symbol names + local_gen_sym_counter: Cell, } // 
Implement DepTrackingMapConfig for `trait_cache` @@ -183,19 +177,6 @@ impl<'tcx> DepTrackingMapConfig for TraitSelectionCache<'tcx> { } } -// Cache for mir loaded from metadata -struct MirCache<'tcx> { - data: PhantomData<&'tcx ()> -} - -impl<'tcx> DepTrackingMapConfig for MirCache<'tcx> { - type Key = DefId; - type Value = Rc>; - fn to_dep_node(key: &DefId) -> DepNode { - DepNode::Mir(*key) - } -} - // # Global Cache pub struct ProjectionCache<'gcx> { @@ -452,9 +433,7 @@ unsafe fn create_context_and_module(sess: &Session, mod_name: &str) -> (ContextR impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> { pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>, - mir_map: &'b MirMap<'tcx>, export_map: ExportMap, - symbol_hasher: Sha256, link_meta: LinkMeta, reachable: NodeSet, check_overflow: bool) @@ -514,10 +493,7 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> { export_map: export_map, reachable: reachable, link_meta: link_meta, - symbol_hasher: RefCell::new(symbol_hasher), tcx: tcx, - mir_map: mir_map, - mir_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())), stats: Stats { n_glues_created: Cell::new(0), n_null_glues: Cell::new(0), @@ -581,23 +557,6 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> { self.use_dll_storage_attrs } - pub fn get_mir(&self, def_id: DefId) -> Option> { - if def_id.is_local() { - self.mir_map.map.get(&def_id).map(CachedMir::Ref) - } else { - if let Some(mir) = self.mir_cache.borrow().get(&def_id).cloned() { - return Some(CachedMir::Owned(mir)); - } - - let mir = self.sess().cstore.maybe_get_item_mir(self.tcx, def_id); - let cached = mir.map(Rc::new); - if let Some(ref mir) = cached { - self.mir_cache.borrow_mut().insert(def_id, mir.clone()); - } - cached.map(CachedMir::Owned) - } - } - pub fn translation_items(&self) -> &RefCell>> { &self.translation_items } @@ -612,14 +571,6 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> { }) } - pub fn symbol_hasher(&self) -> &RefCell { - &self.symbol_hasher - } - - pub fn mir_map(&self) -> &MirMap<'tcx> { - &self.mir_map - } - pub fn metadata_symbol_name(&self) -> String { format!("rust_metadata_{}_{}", self.link_meta().crate_name, @@ -677,7 +628,6 @@ impl<'tcx> LocalCrateContext<'tcx> { statics_to_rauw: RefCell::new(Vec::new()), lltypes: RefCell::new(FnvHashMap()), llsizingtypes: RefCell::new(FnvHashMap()), - adt_reprs: RefCell::new(FnvHashMap()), type_hashcodes: RefCell::new(FnvHashMap()), int_type: Type::from_ref(ptr::null_mut()), opaque_vec_type: Type::from_ref(ptr::null_mut()), @@ -691,6 +641,7 @@ impl<'tcx> LocalCrateContext<'tcx> { n_llvm_insns: Cell::new(0), type_of_depth: Cell::new(0), symbol_map: symbol_map, + local_gen_sym_counter: Cell::new(0), }; let (int_type, opaque_vec_type, str_slice_ty, mut local_ccx) = { @@ -918,14 +869,6 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { &self.local().llsizingtypes } - pub fn adt_reprs<'a>(&'a self) -> &'a RefCell, Rc>>> { - &self.local().adt_reprs - } - - pub fn symbol_hasher<'a>(&'a self) -> &'a RefCell { - &self.shared.symbol_hasher - } - pub fn type_hashcodes<'a>(&'a self) -> &'a RefCell, String>> { &self.local().type_hashcodes } @@ -994,7 +937,11 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { pub fn layout_of(&self, ty: Ty<'tcx>) -> &'tcx ty::layout::Layout { self.tcx().infer_ctxt(None, None, traits::Reveal::All).enter(|infcx| { ty.layout(&infcx).unwrap_or_else(|e| { - bug!("failed to get layout for `{}`: {}", ty, e); + match e { + ty::layout::LayoutError::SizeOverflow(_) => + self.sess().fatal(&e.to_string()), + _ => bug!("failed to get layout for `{}`: {}", ty, e) + } }) }) } @@ -1007,10 
+954,6 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { self.shared.use_dll_storage_attrs() } - pub fn get_mir(&self, def_id: DefId) -> Option> { - self.shared.get_mir(def_id) - } - pub fn symbol_map(&self) -> &SymbolMap<'tcx> { &*self.local().symbol_map } @@ -1024,6 +967,16 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> { self.shared().empty_substs_for_def_id(item_def_id) } + + /// Generate a new symbol name with the given prefix. This symbol name must + /// only be used for definitions with `internal` or `private` linkage. + pub fn generate_local_symbol_name(&self, prefix: &str) -> String { + let idx = self.local().local_gen_sym_counter.get(); + self.local().local_gen_sym_counter.set(idx + 1); + // Include a '.' character, so there can be no accidental conflicts with + // user defined names + format!("{}.{}", prefix, idx) + } } pub struct TypeOfDepthLock<'a, 'tcx: 'a>(&'a LocalCrateContext<'tcx>); diff --git a/src/librustc_trans/debuginfo/create_scope_map.rs b/src/librustc_trans/debuginfo/create_scope_map.rs index 21716d55ac6fa..e0c1a80be394d 100644 --- a/src/librustc_trans/debuginfo/create_scope_map.rs +++ b/src/librustc_trans/debuginfo/create_scope_map.rs @@ -15,7 +15,7 @@ use super::utils::{DIB, span_start}; use llvm; use llvm::debuginfo::{DIScope, DISubprogram}; use common::{CrateContext, FunctionContext}; -use rustc::mir::repr::{Mir, VisibilityScope}; +use rustc::mir::{Mir, VisibilityScope}; use libc::c_uint; use std::ptr; @@ -45,7 +45,7 @@ impl MirDebugScope { /// Produce DIScope DIEs for each MIR Scope which has variables defined in it. /// If debuginfo is disabled, the returned vector is empty. pub fn create_mir_scopes(fcx: &FunctionContext) -> IndexVec { - let mir = fcx.mir.clone().expect("create_mir_scopes: missing MIR for fn"); + let mir = fcx.mir(); let null_scope = MirDebugScope { scope_metadata: ptr::null_mut(), file_start_pos: BytePos(0), @@ -63,8 +63,9 @@ pub fn create_mir_scopes(fcx: &FunctionContext) -> IndexVec TypeMap<'tcx> { // Get the string representation of a UniqueTypeId. This method will fail if // the id is unknown. - fn get_unique_type_id_as_string(&self, unique_type_id: UniqueTypeId) -> Rc { + fn get_unique_type_id_as_string(&self, unique_type_id: UniqueTypeId) -> Rc { let UniqueTypeId(interner_key) = unique_type_id; self.unique_id_interner.get(interner_key) } @@ -138,221 +143,33 @@ impl<'tcx> TypeMap<'tcx> { // ID will be generated and stored for later lookup. 
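The new `CrateContext::generate_local_symbol_name` shown above replaces the `token::gensym`-based names used earlier in this diff for internal globals (`str{}`, `addr_of_mut`) with a simple per-context counter and a `.` separator. A standalone sketch of that scheme; `LocalCtxt` is a stand-in for the local crate context:

```rust
use std::cell::Cell;

struct LocalCtxt {
    // Counter used only for symbols with internal/private linkage.
    local_gen_sym_counter: Cell<usize>,
}

impl LocalCtxt {
    fn generate_local_symbol_name(&self, prefix: &str) -> String {
        let idx = self.local_gen_sym_counter.get();
        self.local_gen_sym_counter.set(idx + 1);
        // The '.' cannot appear in user-written identifiers, so generated
        // names cannot collide with user-defined symbols.
        format!("{}.{}", prefix, idx)
    }
}

fn main() {
    let ccx = LocalCtxt { local_gen_sym_counter: Cell::new(0) };
    assert_eq!(ccx.generate_local_symbol_name("str"), "str.0");
    assert_eq!(ccx.generate_local_symbol_name("str"), "str.1");
    assert_eq!(ccx.generate_local_symbol_name("vtable"), "vtable.2");
}
```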
fn get_unique_type_id_of_type<'a>(&mut self, cx: &CrateContext<'a, 'tcx>, type_: Ty<'tcx>) -> UniqueTypeId { - - // basic type -> {:name of the type:} - // tuple -> {tuple_(:param-uid:)*} - // struct -> {struct_:svh: / :node-id:_<(:param-uid:),*> } - // enum -> {enum_:svh: / :node-id:_<(:param-uid:),*> } - // enum variant -> {variant_:variant-name:_:enum-uid:} - // reference (&) -> {& :pointee-uid:} - // mut reference (&mut) -> {&mut :pointee-uid:} - // ptr (*) -> {* :pointee-uid:} - // mut ptr (*mut) -> {*mut :pointee-uid:} - // unique ptr (box) -> {box :pointee-uid:} - // @-ptr (@) -> {@ :pointee-uid:} - // sized vec ([T; x]) -> {[:size:] :element-uid:} - // unsized vec ([T]) -> {[] :element-uid:} - // trait (T) -> {trait_:svh: / :node-id:_<(:param-uid:),*> } - // closure -> { :store-sigil: |(:param-uid:),* <,_...>| -> \ - // :return-type-uid: : (:bounds:)*} - // function -> { fn( (:param-uid:)* <,_...> ) -> \ - // :return-type-uid:} - + // Let's see if we already have something in the cache match self.type_to_unique_id.get(&type_).cloned() { Some(unique_type_id) => return unique_type_id, None => { /* generate one */} }; - let mut unique_type_id = String::with_capacity(256); - unique_type_id.push('{'); - - match type_.sty { - ty::TyNever | - ty::TyBool | - ty::TyChar | - ty::TyStr | - ty::TyInt(_) | - ty::TyUint(_) | - ty::TyFloat(_) => { - push_debuginfo_type_name(cx, type_, false, &mut unique_type_id); - }, - ty::TyAdt(def, substs) => { - unique_type_id.push_str(&(String::from(def.descr()) + " ")); - from_def_id_and_substs(self, cx, def.did, substs, &mut unique_type_id); - } - ty::TyTuple(component_types) if component_types.is_empty() => { - push_debuginfo_type_name(cx, type_, false, &mut unique_type_id); - }, - ty::TyTuple(component_types) => { - unique_type_id.push_str("tuple "); - for &component_type in component_types { - let component_type_id = - self.get_unique_type_id_of_type(cx, component_type); - let component_type_id = - self.get_unique_type_id_as_string(component_type_id); - unique_type_id.push_str(&component_type_id[..]); - } - }, - ty::TyBox(inner_type) => { - unique_type_id.push_str("box "); - let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); - let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[..]); - }, - ty::TyRawPtr(ty::TypeAndMut { ty: inner_type, mutbl } ) => { - unique_type_id.push('*'); - if mutbl == hir::MutMutable { - unique_type_id.push_str("mut"); - } - - let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); - let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[..]); - }, - ty::TyRef(_, ty::TypeAndMut { ty: inner_type, mutbl }) => { - unique_type_id.push('&'); - if mutbl == hir::MutMutable { - unique_type_id.push_str("mut"); - } - - let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); - let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[..]); - }, - ty::TyArray(inner_type, len) => { - unique_type_id.push_str(&format!("[{}]", len)); - - let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); - let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - unique_type_id.push_str(&inner_type_id[..]); - }, - ty::TySlice(inner_type) => { - unique_type_id.push_str("[]"); - - let inner_type_id = self.get_unique_type_id_of_type(cx, inner_type); - let inner_type_id = self.get_unique_type_id_as_string(inner_type_id); - 
unique_type_id.push_str(&inner_type_id[..]); - }, - ty::TyTrait(ref trait_data) => { - unique_type_id.push_str("trait "); - - let principal = cx.tcx().erase_late_bound_regions(&trait_data.principal); - - from_def_id_and_substs(self, - cx, - principal.def_id, - principal.substs, - &mut unique_type_id); - }, - ty::TyFnDef(.., &ty::BareFnTy{ unsafety, abi, ref sig } ) | - ty::TyFnPtr(&ty::BareFnTy{ unsafety, abi, ref sig } ) => { - if unsafety == hir::Unsafety::Unsafe { - unique_type_id.push_str("unsafe "); - } - - unique_type_id.push_str(abi.name()); + // The hasher we are using to generate the UniqueTypeId. We want + // something that provides more than the 64 bits of the DefaultHasher. + const TYPE_ID_HASH_LENGTH: usize = 20; - unique_type_id.push_str(" fn("); + let mut type_id_hasher = TypeIdHasher::new(cx.tcx(), + Blake2bHasher::new(TYPE_ID_HASH_LENGTH, &[])); + type_id_hasher.visit_ty(type_); + let mut hash_state = type_id_hasher.into_inner(); + let hash: &[u8] = hash_state.finalize(); + debug_assert!(hash.len() == TYPE_ID_HASH_LENGTH); - let sig = cx.tcx().erase_late_bound_regions(sig); - let sig = cx.tcx().normalize_associated_type(&sig); + let mut unique_type_id = String::with_capacity(TYPE_ID_HASH_LENGTH * 2); - for ¶meter_type in &sig.inputs { - let parameter_type_id = - self.get_unique_type_id_of_type(cx, parameter_type); - let parameter_type_id = - self.get_unique_type_id_as_string(parameter_type_id); - unique_type_id.push_str(¶meter_type_id[..]); - unique_type_id.push(','); - } - - if sig.variadic { - unique_type_id.push_str("..."); - } - - unique_type_id.push_str(")->"); - let return_type_id = self.get_unique_type_id_of_type(cx, sig.output); - let return_type_id = self.get_unique_type_id_as_string(return_type_id); - unique_type_id.push_str(&return_type_id[..]); - }, - ty::TyClosure(_, substs) if substs.upvar_tys.is_empty() => { - push_debuginfo_type_name(cx, type_, false, &mut unique_type_id); - }, - ty::TyClosure(_, substs) => { - unique_type_id.push_str("closure "); - for upvar_type in substs.upvar_tys { - let upvar_type_id = - self.get_unique_type_id_of_type(cx, upvar_type); - let upvar_type_id = - self.get_unique_type_id_as_string(upvar_type_id); - unique_type_id.push_str(&upvar_type_id[..]); - } - }, - _ => { - bug!("get_unique_type_id_of_type() - unexpected type: {:?}", - type_) - } - }; - - unique_type_id.push('}'); - - // Trim to size before storing permanently - unique_type_id.shrink_to_fit(); + for byte in hash.into_iter() { + write!(&mut unique_type_id, "{:x}", byte).unwrap(); + } - let key = self.unique_id_interner.intern(unique_type_id); + let key = self.unique_id_interner.intern(&unique_type_id); self.type_to_unique_id.insert(type_, UniqueTypeId(key)); return UniqueTypeId(key); - - fn from_def_id_and_substs<'a, 'tcx>(type_map: &mut TypeMap<'tcx>, - cx: &CrateContext<'a, 'tcx>, - def_id: DefId, - substs: &Substs<'tcx>, - output: &mut String) { - // First, find out the 'real' def_id of the type. Items inlined from - // other crates have to be mapped back to their source. - let def_id = if let Some(node_id) = cx.tcx().map.as_local_node_id(def_id) { - if cx.tcx().map.is_inlined_node_id(node_id) { - // The given def_id identifies the inlined copy of a - // type definition, let's take the source of the copy. - cx.defid_for_inlined_node(node_id).unwrap() - } else { - def_id - } - } else { - def_id - }; - - // Get the crate name/disambiguator as first part of the identifier. 
- let crate_name = if def_id.is_local() { - cx.tcx().crate_name.clone() - } else { - cx.sess().cstore.original_crate_name(def_id.krate) - }; - let crate_disambiguator = cx.tcx().crate_disambiguator(def_id.krate); - - output.push_str(&crate_name[..]); - output.push_str("/"); - output.push_str(&crate_disambiguator[..]); - output.push_str("/"); - // Add the def-index as the second part - output.push_str(&format!("{:x}", def_id.index.as_usize())); - - if substs.types().next().is_some() { - output.push('<'); - - for type_parameter in substs.types() { - let param_type_id = - type_map.get_unique_type_id_of_type(cx, type_parameter); - let param_type_id = - type_map.get_unique_type_id_as_string(param_type_id); - output.push_str(¶m_type_id[..]); - output.push(','); - } - - output.push('>'); - } - } } // Get the UniqueTypeId for an enum variant. Enum variants are not really @@ -367,7 +184,7 @@ impl<'tcx> TypeMap<'tcx> { let enum_variant_type_id = format!("{}::{}", &self.get_unique_type_id_as_string(enum_type_id), variant_name); - let interner_key = self.unique_id_interner.intern(enum_variant_type_id); + let interner_key = self.unique_id_interner.intern(&enum_variant_type_id); UniqueTypeId(interner_key) } } @@ -1077,10 +894,6 @@ struct StructMemberDescriptionFactory<'tcx> { impl<'tcx> StructMemberDescriptionFactory<'tcx> { fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Vec { - if self.variant.kind == ty::VariantKind::Unit { - return Vec::new(); - } - let field_size = if self.is_simd { let fty = monomorphize::field_ty(cx.tcx(), self.substs, @@ -1094,7 +907,7 @@ impl<'tcx> StructMemberDescriptionFactory<'tcx> { }; self.variant.fields.iter().enumerate().map(|(i, f)| { - let name = if self.variant.kind == ty::VariantKind::Tuple { + let name = if self.variant.ctor_kind == CtorKind::Fn { format!("__{}", i) } else { f.name.to_string() @@ -1281,7 +1094,7 @@ fn prepare_union_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // offset of zero bytes). struct EnumMemberDescriptionFactory<'tcx> { enum_type: Ty<'tcx>, - type_rep: Rc>, + type_rep: &'tcx layout::Layout, discriminant_type_metadata: Option, containing_scope: DIScope, file_metadata: DIFile, @@ -1292,11 +1105,15 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { fn create_member_descriptions<'a>(&self, cx: &CrateContext<'a, 'tcx>) -> Vec { let adt = &self.enum_type.ty_adt_def().unwrap(); + let substs = match self.enum_type.sty { + ty::TyAdt(def, ref s) if def.adt_kind() == AdtKind::Enum => s, + _ => bug!("{} is not an enum", self.enum_type) + }; match *self.type_rep { - adt::General(_, ref struct_defs) => { + layout::General { ref variants, .. } => { let discriminant_info = RegularDiscriminant(self.discriminant_type_metadata .expect("")); - struct_defs + variants .iter() .enumerate() .map(|(i, struct_def)| { @@ -1327,7 +1144,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { } }).collect() }, - adt::Univariant(ref struct_def) => { + layout::Univariant{ ref variant, .. } => { assert!(adt.variants.len() <= 1); if adt.variants.is_empty() { @@ -1338,7 +1155,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { member_description_factory) = describe_enum_variant(cx, self.enum_type, - struct_def, + variant, &adt.variants[0], NoDiscriminant, self.containing_scope, @@ -1362,16 +1179,17 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { ] } } - adt::RawNullablePointer { nndiscr: non_null_variant_index, nnty, .. } => { + layout::RawNullablePointer { nndiscr: non_null_variant_index, .. 
} => { // As far as debuginfo is concerned, the pointer this enum // represents is still wrapped in a struct. This is to make the // DWARF representation of enums uniform. // First create a description of the artificial wrapper struct: - let non_null_variant = &adt.variants[non_null_variant_index.0 as usize]; + let non_null_variant = &adt.variants[non_null_variant_index as usize]; let non_null_variant_name = non_null_variant.name.as_str(); // The llvm type and metadata of the pointer + let nnty = monomorphize::field_ty(cx.tcx(), &substs, &non_null_variant.fields[0] ); let non_null_llvm_type = type_of::type_of(cx, nnty); let non_null_type_metadata = type_metadata(cx, nnty, self.span); @@ -1383,12 +1201,12 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { // For the metadata of the wrapper struct, we need to create a // MemberDescription of the struct's single field. let sole_struct_member_description = MemberDescription { - name: match non_null_variant.kind { - ty::VariantKind::Tuple => "__0".to_string(), - ty::VariantKind::Struct => { + name: match non_null_variant.ctor_kind { + CtorKind::Fn => "__0".to_string(), + CtorKind::Fictive => { non_null_variant.fields[0].name.to_string() } - ty::VariantKind::Unit => bug!() + CtorKind::Const => bug!() }, llvm_type: non_null_llvm_type, type_metadata: non_null_type_metadata, @@ -1416,7 +1234,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { // Encode the information about the null variant in the union // member's name. - let null_variant_index = (1 - non_null_variant_index.0) as usize; + let null_variant_index = (1 - non_null_variant_index) as usize; let null_variant_name = adt.variants[null_variant_index].name; let union_member_name = format!("RUST$ENCODED$ENUM${}${}", 0, @@ -1434,7 +1252,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { } ] }, - adt::StructWrappedNullablePointer { nonnull: ref struct_def, + layout::StructWrappedNullablePointer { nonnull: ref struct_def, nndiscr, ref discrfield, ..} => { // Create a description of the non-null variant @@ -1442,7 +1260,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { describe_enum_variant(cx, self.enum_type, struct_def, - &adt.variants[nndiscr.0 as usize], + &adt.variants[nndiscr as usize], OptimizedDiscriminant, self.containing_scope, self.span); @@ -1457,7 +1275,7 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { // Encode the information about the null variant in the union // member's name. - let null_variant_index = (1 - nndiscr.0) as usize; + let null_variant_index = (1 - nndiscr) as usize; let null_variant_name = adt.variants[null_variant_index].name; let discrfield = discrfield.iter() .skip(1) @@ -1478,9 +1296,8 @@ impl<'tcx> EnumMemberDescriptionFactory<'tcx> { } ] }, - adt::CEnum(..) | adt::UntaggedUnion(..) => { - span_bug!(self.span, "This should be unreachable.") - } + layout::CEnum { .. } => span_bug!(self.span, "This should be unreachable."), + ref l @ _ => bug!("Not an enum layout: {:#?}", l) } } } @@ -1523,16 +1340,39 @@ enum EnumDiscriminantInfo { // full RecursiveTypeDescription. 
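Returning to the `TypeMap` change earlier in these hunks: the hand-assembled `{...}` unique-type-id strings are replaced by feeding the type into a `TypeIdHasher` backed by a 160-bit Blake2b state and hex-encoding the digest. The sketch below only illustrates that hash-then-hex shape: the std `DefaultHasher` stands in for Blake2b and a string stands in for the visited type, so none of the names here are the real debuginfo API.

```rust
use std::collections::hash_map::DefaultHasher;
use std::collections::HashMap;
use std::fmt::Write;
use std::hash::{Hash, Hasher};

struct TypeMap {
    // Cache so each distinct type is hashed only once.
    type_to_unique_id: HashMap<String, String>,
}

impl TypeMap {
    fn get_unique_type_id_of_type(&mut self, ty: &str) -> String {
        if let Some(id) = self.type_to_unique_id.get(ty) {
            return id.clone();
        }
        // Hash a stable representation of the type...
        let mut hasher = DefaultHasher::new();
        ty.hash(&mut hasher);
        let digest = hasher.finish().to_le_bytes();
        // ...and hex-encode the digest to get a compact, printable id.
        let mut unique_type_id = String::with_capacity(digest.len() * 2);
        for byte in &digest {
            write!(&mut unique_type_id, "{:02x}", byte).unwrap();
        }
        self.type_to_unique_id.insert(ty.to_string(), unique_type_id.clone());
        unique_type_id
    }
}

fn main() {
    let mut map = TypeMap { type_to_unique_id: HashMap::new() };
    let a = map.get_unique_type_id_of_type("&mut [u8; 4]");
    let b = map.get_unique_type_id_of_type("&mut [u8; 4]");
    assert_eq!(a, b); // same type, same id
}
```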
fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, enum_type: Ty<'tcx>, - struct_def: &adt::Struct<'tcx>, + struct_def: &layout::Struct, variant: ty::VariantDef<'tcx>, discriminant_info: EnumDiscriminantInfo, containing_scope: DIScope, span: Span) -> (DICompositeType, Type, MemberDescriptionFactory<'tcx>) { + let substs = match enum_type.sty { + ty::TyAdt(def, s) if def.adt_kind() == AdtKind::Enum => s, + ref t @ _ => bug!("{:#?} is not an enum", t) + }; + + let maybe_discr_and_signed: Option<(layout::Integer, bool)> = match *cx.layout_of(enum_type) { + layout::CEnum {discr, ..} => Some((discr, true)), + layout::General{discr, ..} => Some((discr, false)), + layout::Univariant { .. } + | layout::RawNullablePointer { .. } + | layout::StructWrappedNullablePointer { .. } => None, + ref l @ _ => bug!("This should be unreachable. Type is {:#?} layout is {:#?}", enum_type, l) + }; + + let mut field_tys = variant.fields.iter().map(|f: ty::FieldDef<'tcx>| { + monomorphize::field_ty(cx.tcx(), &substs, f) + }).collect::>(); + + if let Some((discr, signed)) = maybe_discr_and_signed { + field_tys.insert(0, discr.to_ty(&cx.tcx(), signed)); + } + + let variant_llvm_type = - Type::struct_(cx, &struct_def.fields + Type::struct_(cx, &field_tys .iter() - .map(|&t| type_of::type_of(cx, t)) + .map(|t| type_of::type_of(cx, t)) .collect::>() , struct_def.packed); @@ -1553,16 +1393,16 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, containing_scope); // Get the argument names from the enum variant info - let mut arg_names: Vec<_> = match variant.kind { - ty::VariantKind::Unit => vec![], - ty::VariantKind::Tuple => { + let mut arg_names: Vec<_> = match variant.ctor_kind { + CtorKind::Const => vec![], + CtorKind::Fn => { variant.fields .iter() .enumerate() .map(|(i, _)| format!("__{}", i)) .collect() } - ty::VariantKind::Struct => { + CtorKind::Fictive => { variant.fields .iter() .map(|f| f.name.to_string()) @@ -1578,7 +1418,7 @@ fn describe_enum_variant<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // Build an array of (field name, field type) pairs to be captured in the factory closure. 
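// Illustrative sketch (not part of the patch): how the three constructor kinds
// matched above turn into the member names recorded for a variant.
// `VariantShape` is a stand-in for `CtorKind` plus the variant's field list,
// not a rustc type.
enum VariantShape {
    Unit,                // CtorKind::Const   -> no members
    Tuple(usize),        // CtorKind::Fn      -> "__0", "__1", ...
    Struct(Vec<String>), // CtorKind::Fictive -> the declared field names
}

fn member_names(shape: &VariantShape) -> Vec<String> {
    match *shape {
        VariantShape::Unit => vec![],
        VariantShape::Tuple(n) => (0..n).map(|i| format!("__{}", i)).collect(),
        VariantShape::Struct(ref names) => names.clone(),
    }
}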
let args: Vec<(String, Ty)> = arg_names.iter() - .zip(&struct_def.fields) + .zip(field_tys.iter()) .map(|(s, &t)| (s.to_string(), t)) .collect(); @@ -1615,7 +1455,6 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let file_metadata = unknown_file_metadata(cx); let variants = &enum_type.ty_adt_def().unwrap().variants; - let enumerators_metadata: Vec = variants .iter() .map(|v| { @@ -1630,7 +1469,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, }) .collect(); - let discriminant_type_metadata = |inttype: syntax::attr::IntType| { + let discriminant_type_metadata = |inttype: layout::Integer, signed: bool| { let disr_type_key = (enum_def_id, inttype); let cached_discriminant_type_metadata = debug_context(cx).created_enum_disr_types .borrow() @@ -1638,12 +1477,12 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, match cached_discriminant_type_metadata { Some(discriminant_type_metadata) => discriminant_type_metadata, None => { - let discriminant_llvm_type = adt::ll_inttype(cx, inttype); + let discriminant_llvm_type = Type::from_integer(cx, inttype); let (discriminant_size, discriminant_align) = size_and_align_of(cx, discriminant_llvm_type); let discriminant_base_type_metadata = type_metadata(cx, - adt::ty_of_inttype(cx.tcx(), inttype), + inttype.to_ty(&cx.tcx(), signed), syntax_pos::DUMMY_SP); let discriminant_name = get_enum_discriminant_name(cx, enum_def_id); @@ -1670,16 +1509,17 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } }; - let type_rep = adt::represent_type(cx, enum_type); + let type_rep = cx.layout_of(enum_type); let discriminant_type_metadata = match *type_rep { - adt::CEnum(inttype, ..) => { - return FinalMetadata(discriminant_type_metadata(inttype)) + layout::CEnum { discr, signed, .. } => { + return FinalMetadata(discriminant_type_metadata(discr, signed)) }, - adt::RawNullablePointer { .. } | - adt::StructWrappedNullablePointer { .. } | - adt::Univariant(..) | adt::UntaggedUnion(..) => None, - adt::General(inttype, _) => Some(discriminant_type_metadata(inttype)), + layout::RawNullablePointer { .. } | + layout::StructWrappedNullablePointer { .. } | + layout::Univariant { .. } => None, + layout::General { discr, .. } => Some(discriminant_type_metadata(discr, false)), + ref l @ _ => bug!("Not an enum layout: {:#?}", l) }; let enum_llvm_type = type_of::type_of(cx, enum_type); @@ -1715,7 +1555,7 @@ fn prepare_enum_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, enum_llvm_type, EnumMDF(EnumMemberDescriptionFactory { enum_type: enum_type, - type_rep: type_rep.clone(), + type_rep: type_rep, discriminant_type_metadata: discriminant_type_metadata, containing_scope: containing_scope, file_metadata: file_metadata, @@ -1904,15 +1744,17 @@ pub fn create_global_var_metadata(cx: &CrateContext, return; } + let tcx = cx.tcx(); + // Don't create debuginfo for globals inlined from other crates. The other // crate should already contain debuginfo for it. More importantly, the // global might not even exist in un-inlined form anywhere which would lead // to a linker errors. 
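// Illustrative sketch (not part of the patch): the discriminant metadata built
// above is keyed on the integer type chosen for the tag, and for C-like enums
// that choice is directly visible in the type's size (std only):
use std::mem::size_of;

#[repr(u8)]
enum SmallTag { A, B, C }

#[repr(u32)]
enum WideTag { A, B, C }

fn discriminant_width_demo() {
    assert_eq!(size_of::<SmallTag>(), 1); // tag stored as an 8-bit integer
    assert_eq!(size_of::<WideTag>(), 4);  // tag stored as a 32-bit integer
}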
- if cx.tcx().map.is_inlined_node_id(node_id) { + if tcx.map.is_inlined_node_id(node_id) { return; } - let node_def_id = cx.tcx().map.local_def_id(node_id); + let node_def_id = tcx.map.local_def_id(node_id); let (var_scope, span) = get_namespace_and_span_for_item(cx, node_def_id); let (file_metadata, line_number) = if span != syntax_pos::DUMMY_SP { @@ -1923,9 +1765,9 @@ pub fn create_global_var_metadata(cx: &CrateContext, }; let is_local_to_unit = is_node_local_to_unit(cx, node_id); - let variable_type = cx.tcx().node_id_to_type(node_id); + let variable_type = tcx.erase_regions(&tcx.node_id_to_type(node_id)); let type_metadata = type_metadata(cx, variable_type, span); - let var_name = cx.tcx().item_name(node_def_id).to_string(); + let var_name = tcx.item_name(node_def_id).to_string(); let linkage_name = mangled_name_of_item(cx, node_def_id, ""); let var_name = CString::new(var_name).unwrap(); diff --git a/src/librustc_trans/debuginfo/mod.rs b/src/librustc_trans/debuginfo/mod.rs index bcd288671bc19..3bc5f4f3dbc4b 100644 --- a/src/librustc_trans/debuginfo/mod.rs +++ b/src/librustc_trans/debuginfo/mod.rs @@ -32,7 +32,7 @@ use abi::Abi; use common::{CrateContext, FunctionContext, Block, BlockAndBuilder}; use monomorphize::{self, Instance}; use rustc::ty::{self, Ty}; -use rustc::mir::repr as mir; +use rustc::mir; use session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo}; use util::nodemap::{DefIdMap, FnvHashMap, FnvHashSet}; @@ -43,7 +43,7 @@ use std::ptr; use syntax_pos::{self, Span, Pos}; use syntax::ast; -use syntax::attr::IntType; +use rustc::ty::layout; pub mod gdb; mod utils; @@ -69,7 +69,7 @@ pub struct CrateDebugContext<'tcx> { builder: DIBuilderRef, current_debug_location: Cell, created_files: RefCell>, - created_enum_disr_types: RefCell>, + created_enum_disr_types: RefCell>, type_map: RefCell>, namespace_map: RefCell>, @@ -482,8 +482,6 @@ pub fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, type_metadata, cx.sess().opts.optimize != config::OptLevel::No, 0, - address_operations.as_ptr(), - address_operations.len() as c_uint, argument_index) }; source_loc::set_debug_location(cx, None, diff --git a/src/librustc_trans/debuginfo/type_names.rs b/src/librustc_trans/debuginfo/type_names.rs index 7f021bee37199..956402edc1166 100644 --- a/src/librustc_trans/debuginfo/type_names.rs +++ b/src/librustc_trans/debuginfo/type_names.rs @@ -94,7 +94,8 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, output.push(']'); }, ty::TyTrait(ref trait_data) => { - let principal = cx.tcx().erase_late_bound_regions(&trait_data.principal); + let principal = cx.tcx().erase_late_bound_regions_and_normalize( + &trait_data.principal); push_item_name(cx, principal.def_id, false, output); push_type_params(cx, principal.substs, output); }, @@ -112,8 +113,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, output.push_str("fn("); - let sig = cx.tcx().erase_late_bound_regions(sig); - let sig = cx.tcx().normalize_associated_type(&sig); + let sig = cx.tcx().erase_late_bound_regions_and_normalize(sig); if !sig.inputs.is_empty() { for ¶meter_type in &sig.inputs { push_debuginfo_type_name(cx, parameter_type, true, output); diff --git a/src/librustc_trans/declare.rs b/src/librustc_trans/declare.rs index 0c1156a98743e..1ec5ca4a563a0 100644 --- a/src/librustc_trans/declare.rs +++ b/src/librustc_trans/declare.rs @@ -104,8 +104,7 @@ pub fn declare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, name: &str, fn_type: ty::Ty<'tcx>) -> ValueRef { 
debug!("declare_rust_fn(name={:?}, fn_type={:?})", name, fn_type); let abi = fn_type.fn_abi(); - let sig = ccx.tcx().erase_late_bound_regions(fn_type.fn_sig()); - let sig = ccx.tcx().normalize_associated_type(&sig); + let sig = ccx.tcx().erase_late_bound_regions_and_normalize(fn_type.fn_sig()); debug!("declare_rust_fn (after region erasure) sig={:?}", sig); let fty = FnType::new(ccx, abi, &sig, &[]); diff --git a/src/librustc_trans/glue.rs b/src/librustc_trans/glue.rs index 3073b1dbfaeeb..648dd9f3e3aea 100644 --- a/src/librustc_trans/glue.rs +++ b/src/librustc_trans/glue.rs @@ -48,7 +48,7 @@ pub fn trans_exchange_free_dyn<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let def_id = langcall(bcx.tcx(), None, "", ExchangeFreeFnLangItem); let args = [PointerCast(bcx, v, Type::i8p(bcx.ccx())), size, align]; - Callee::def(bcx.ccx(), def_id, Substs::empty(bcx.tcx())) + Callee::def(bcx.ccx(), def_id, bcx.tcx().intern_substs(&[])) .call(bcx, debug_loc, &args, None).bcx } @@ -92,6 +92,8 @@ pub fn get_drop_glue_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> { assert!(t.is_normalized_for_trans()); + let t = tcx.erase_regions(&t); + // Even if there is no dtor for t, there might be one deeper down and we // might need to pass in the vtable ptr. if !type_is_sized(tcx, t) { @@ -214,30 +216,14 @@ fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, g: DropGlueKind<'tcx>) -> ValueRef { let g = g.map_ty(|t| get_drop_glue_type(ccx.tcx(), t)); match ccx.drop_glues().borrow().get(&g) { - Some(&(glue, _)) => return glue, + Some(&(glue, _)) => glue, None => { - debug!("Could not find drop glue for {:?} -- {} -- {}. \ - Falling back to on-demand instantiation.", + bug!("Could not find drop glue for {:?} -- {} -- {}.", g, TransItem::DropGlue(g).to_raw_string(), ccx.codegen_unit().name()); } } - - // FIXME: #34151 - // Normally, getting here would indicate a bug in trans::collector, - // since it seems to have missed a translation item. When we are - // translating with non-MIR-based trans, however, the results of the - // collector are not entirely reliable since it bases its analysis - // on MIR. Thus, we'll instantiate the missing function on demand in - // this codegen unit, so that things keep working. - - TransItem::DropGlue(g).predefine(ccx, llvm::InternalLinkage); - TransItem::DropGlue(g).define(ccx); - - // Now that we made sure that the glue function is in ccx.drop_glues, - // give it another try - get_drop_glue_core(ccx, g) } pub fn implement_drop_glue<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, @@ -306,7 +292,7 @@ fn trans_custom_dtor<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let trait_ref = ty::Binder(ty::TraitRef { def_id: tcx.lang_items.drop_trait().unwrap(), - substs: Substs::new_trait(tcx, t, &[]) + substs: tcx.mk_substs_trait(t, &[]) }); let vtbl = match fulfill_obligation(bcx.ccx().shared(), DUMMY_SP, trait_ref) { traits::VtableImpl(data) => data, @@ -349,20 +335,9 @@ pub fn size_and_align_of_dst<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>, let layout = ccx.layout_of(t); debug!("DST {} layout: {:?}", t, layout); - // Returns size in bytes of all fields except the last one - // (we will be recursing on the last one). - fn local_prefix_bytes(variant: &ty::layout::Struct) -> u64 { - let fields = variant.offset_after_field.len(); - if fields > 1 { - variant.offset_after_field[fields - 2].bytes() - } else { - 0 - } - } - let (sized_size, sized_align) = match *layout { ty::layout::Layout::Univariant { ref variant, .. 
} => { - (local_prefix_bytes(variant), variant.align.abi()) + (variant.offsets.last().map_or(0, |o| o.bytes()), variant.align.abi()) } _ => { bug!("size_and_align_of_dst: expcted Univariant for `{}`, found {:#?}", @@ -527,7 +502,7 @@ fn drop_structural_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>, let _icx = push_ctxt("drop_structural_ty"); fn iter_variant<'blk, 'tcx>(cx: Block<'blk, 'tcx>, - repr: &adt::Repr<'tcx>, + t: Ty<'tcx>, av: adt::MaybeSizedValue, variant: ty::VariantDef<'tcx>, substs: &Substs<'tcx>) @@ -539,7 +514,7 @@ fn drop_structural_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>, for (i, field) in variant.fields.iter().enumerate() { let arg = monomorphize::field_ty(tcx, substs, field); cx = drop_ty(cx, - adt::trans_field_ptr(cx, repr, av, Disr::from(variant.disr_val), i), + adt::trans_field_ptr(cx, t, av, Disr::from(variant.disr_val), i), arg, DebugLoc::None); } return cx; @@ -557,9 +532,8 @@ fn drop_structural_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>, let mut cx = cx; match t.sty { ty::TyClosure(_, ref substs) => { - let repr = adt::represent_type(cx.ccx(), t); for (i, upvar_ty) in substs.upvar_tys.iter().enumerate() { - let llupvar = adt::trans_field_ptr(cx, &repr, value, Disr(0), i); + let llupvar = adt::trans_field_ptr(cx, t, value, Disr(0), i); cx = drop_ty(cx, llupvar, upvar_ty, DebugLoc::None); } } @@ -576,18 +550,16 @@ fn drop_structural_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>, |bb, vv| drop_ty(bb, vv, unit_ty, DebugLoc::None)); } ty::TyTuple(ref args) => { - let repr = adt::represent_type(cx.ccx(), t); for (i, arg) in args.iter().enumerate() { - let llfld_a = adt::trans_field_ptr(cx, &repr, value, Disr(0), i); + let llfld_a = adt::trans_field_ptr(cx, t, value, Disr(0), i); cx = drop_ty(cx, llfld_a, *arg, DebugLoc::None); } } ty::TyAdt(adt, substs) => match adt.adt_kind() { AdtKind::Struct => { - let repr = adt::represent_type(cx.ccx(), t); let VariantInfo { fields, discr } = VariantInfo::from_ty(cx.tcx(), t, None); for (i, &Field(_, field_ty)) in fields.iter().enumerate() { - let llfld_a = adt::trans_field_ptr(cx, &repr, value, Disr::from(discr), i); + let llfld_a = adt::trans_field_ptr(cx, t, value, Disr::from(discr), i); let val = if type_is_sized(cx.tcx(), field_ty) { llfld_a @@ -607,18 +579,16 @@ fn drop_structural_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>, AdtKind::Enum => { let fcx = cx.fcx; let ccx = fcx.ccx; - - let repr = adt::represent_type(ccx, t); let n_variants = adt.variants.len(); // NB: we must hit the discriminant first so that structural // comparison know not to proceed when the discriminants differ. 
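// Illustrative sketch (not part of the patch): the switch built below mirrors
// what a structural drop of an enum has to do in source terms, namely inspect
// the discriminant first and then walk the fields of whichever variant is
// live. `Payload` is an arbitrary example type, not taken from the patch.
enum Payload {
    Empty,
    Named(String, Vec<u8>),
}

fn drop_fields(p: Payload) {
    match p {
        // no fields to visit for the unit-like variant
        Payload::Empty => {}
        // each field of the live variant is dropped in order
        Payload::Named(name, bytes) => {
            drop(name);
            drop(bytes);
        }
    }
}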
- match adt::trans_switch(cx, &repr, av, false) { + match adt::trans_switch(cx, t, av, false) { (adt::BranchKind::Single, None) => { if n_variants != 0 { assert!(n_variants == 1); - cx = iter_variant(cx, &repr, adt::MaybeSizedValue::sized(av), + cx = iter_variant(cx, t, adt::MaybeSizedValue::sized(av), &adt.variants[0], substs); } } @@ -647,10 +617,10 @@ fn drop_structural_ty<'blk, 'tcx>(cx: Block<'blk, 'tcx>, let variant_cx = fcx.new_block(&format!("enum-iter-variant-{}", &variant.disr_val .to_string())); - let case_val = adt::trans_case(cx, &repr, Disr::from(variant.disr_val)); + let case_val = adt::trans_case(cx, t, Disr::from(variant.disr_val)); AddCase(llswitch, case_val, variant_cx.llbb); let variant_cx = iter_variant(variant_cx, - &repr, + t, value, variant, substs); diff --git a/src/librustc_trans/intrinsic.rs b/src/librustc_trans/intrinsic.rs index 0d919cb775701..b1b09d3ca20de 100644 --- a/src/librustc_trans/intrinsic.rs +++ b/src/librustc_trans/intrinsic.rs @@ -99,13 +99,12 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let _icx = push_ctxt("trans_intrinsic_call"); - let (def_id, substs, sig) = match callee_ty.sty { - ty::TyFnDef(def_id, substs, fty) => { - let sig = tcx.erase_late_bound_regions(&fty.sig); - (def_id, substs, tcx.normalize_associated_type(&sig)) - } + let (def_id, substs, fty) = match callee_ty.sty { + ty::TyFnDef(def_id, substs, ref fty) => (def_id, substs, fty), _ => bug!("expected fn item type, found {}", callee_ty) }; + + let sig = tcx.erase_late_bound_regions_and_normalize(&fty.sig); let arg_tys = sig.inputs; let ret_ty = sig.output; let name = tcx.item_name(def_id).as_str(); @@ -418,8 +417,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let val_ty = substs.type_at(0); match val_ty.sty { ty::TyAdt(adt, ..) if adt.is_enum() => { - let repr = adt::represent_type(ccx, val_ty); - adt::trans_get_discr(bcx, &repr, llargs[0], + adt::trans_get_discr(bcx, val_ty, llargs[0], Some(llret_ty), true) } _ => C_null(llret_ty) @@ -629,13 +627,10 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, // destructors, and the contents are SIMD // etc. 
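// Illustrative sketch (not part of the patch): `trans_case` above compares the
// loaded tag against each variant's discriminant value. For a C-like enum those
// are the values an `as` cast exposes; an enum with data reads its tag out of
// the layout instead, which is not shown here. Std only:
enum State { Idle = 0, Busy = 3, Done = 7 }

fn discriminant_value_demo() {
    assert_eq!(State::Idle as u64, 0);
    assert_eq!(State::Busy as u64, 3);
    assert_eq!(State::Done as u64, 7);
}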
assert!(!bcx.fcx.type_needs_drop(arg_type)); - - let repr = adt::represent_type(bcx.ccx(), arg_type); - let repr_ptr = &repr; let arg = adt::MaybeSizedValue::sized(llarg); (0..contents.len()) .map(|i| { - Load(bcx, adt::trans_field_ptr(bcx, repr_ptr, arg, Disr(0), i)) + Load(bcx, adt::trans_field_ptr(bcx, arg_type, arg, Disr(0), i)) }) .collect() } @@ -1112,8 +1107,7 @@ fn generic_simd_intrinsic<'blk, 'tcx, 'a> let tcx = bcx.tcx(); - let sig = tcx.erase_late_bound_regions(callee_ty.fn_sig()); - let sig = tcx.normalize_associated_type(&sig); + let sig = tcx.erase_late_bound_regions_and_normalize(callee_ty.fn_sig()); let arg_tys = sig.inputs; // every intrinsic takes a SIMD vector as its first argument diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs index 3e60369acbff3..07acc54962b87 100644 --- a/src/librustc_trans/lib.rs +++ b/src/librustc_trans/lib.rs @@ -25,6 +25,7 @@ #![feature(box_patterns)] #![feature(box_syntax)] +#![feature(cell_extras)] #![feature(const_fn)] #![feature(custom_attribute)] #![feature(dotdot_in_tuple_patterns)] @@ -36,7 +37,7 @@ #![feature(slice_patterns)] #![feature(staged_api)] #![feature(unicode)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] use rustc::dep_graph::WorkProduct; @@ -69,7 +70,6 @@ pub use base::trans_crate; pub use disr::Disr; pub mod back { - pub use rustc_back::rpath; pub use rustc::hir::svh; pub mod archive; @@ -79,6 +79,7 @@ pub mod back { pub mod symbol_names; pub mod write; pub mod msvc; + pub mod rpath; } pub mod diagnostics; diff --git a/src/librustc_trans/machine.rs b/src/librustc_trans/machine.rs index 59020b38ddcd3..cd31f02842add 100644 --- a/src/librustc_trans/machine.rs +++ b/src/librustc_trans/machine.rs @@ -24,13 +24,6 @@ pub type llalign = u32; // ______________________________________________________________________ // compute sizeof / alignof -// Returns the number of bytes clobbered by a Store to this type. -pub fn llsize_of_store(cx: &CrateContext, ty: Type) -> llsize { - unsafe { - return llvm::LLVMStoreSizeOfType(cx.td(), ty.to_ref()); - } -} - // Returns the number of bytes between successive elements of type T in an // array of T. This is the "ABI" size. It includes any ABI-mandated padding. pub fn llsize_of_alloc(cx: &CrateContext, ty: Type) -> llsize { @@ -39,28 +32,6 @@ pub fn llsize_of_alloc(cx: &CrateContext, ty: Type) -> llsize { } } -// Returns, as near as we can figure, the "real" size of a type. As in, the -// bits in this number of bytes actually carry data related to the datum -// with the type. Not junk, accidentally-damaged words, or whatever. -// Note that padding of the type will be included for structs, but not for the -// other types (i.e. SIMD types). -// Rounds up to the nearest byte though, so if you have a 1-bit -// value, we return 1 here, not 0. Most of rustc works in bytes. Be warned -// that LLVM *does* distinguish between e.g. a 1-bit value and an 8-bit value -// at the codegen level! In general you should prefer `llbitsize_of_real` -// below. -pub fn llsize_of_real(cx: &CrateContext, ty: Type) -> llsize { - unsafe { - let nbits = llvm::LLVMSizeOfTypeInBits(cx.td(), ty.to_ref()); - if nbits & 7 != 0 { - // Not an even number of bytes, spills into "next" byte. - 1 + (nbits >> 3) - } else { - nbits >> 3 - } - } -} - /// Returns the "real" size of the type in bits. 
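// Illustrative sketch (not part of the patch): the removed `llsize_of_real`
// rounded a bit-size up to whole bytes; the same arithmetic is kept here for
// reference, since callers are now expected to work in bits via
// `llbitsize_of_real` instead.
fn bits_to_bytes_rounded_up(nbits: u64) -> u64 {
    if nbits & 7 != 0 {
        1 + (nbits >> 3) // not an even number of bytes, spills into the next one
    } else {
        nbits >> 3
    }
}

fn bits_to_bytes_demo() {
    assert_eq!(bits_to_bytes_rounded_up(1), 1); // a 1-bit value still occupies a byte
    assert_eq!(bits_to_bytes_rounded_up(8), 1);
    assert_eq!(bits_to_bytes_rounded_up(9), 2);
}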
pub fn llbitsize_of_real(cx: &CrateContext, ty: Type) -> llbits { unsafe { diff --git a/src/librustc_trans/meth.rs b/src/librustc_trans/meth.rs index ee2f24d21e07c..1e687f5ff6e3a 100644 --- a/src/librustc_trans/meth.rs +++ b/src/librustc_trans/meth.rs @@ -8,33 +8,25 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use std::rc::Rc; - use attributes; use arena::TypedArena; -use back::symbol_names; use llvm::{ValueRef, get_params}; -use rustc::hir::def_id::DefId; -use rustc::ty::subst::{Subst, Substs}; -use rustc::traits::{self, Reveal}; +use rustc::traits; use abi::FnType; use base::*; use build::*; -use callee::{Callee, Virtual, trans_fn_pointer_shim}; -use closure; +use callee::Callee; use common::*; use consts; use debuginfo::DebugLoc; use declare; use glue; use machine; +use monomorphize::Instance; use type_::Type; use type_of::*; use value::Value; -use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; - -use syntax::ast::Name; -use syntax_pos::DUMMY_SP; +use rustc::ty; // drop_glue pointer, size, align. const VTABLE_OFFSET: usize = 3; @@ -73,23 +65,25 @@ pub fn get_virtual_method<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, /// In fact, all virtual calls can be thought of as normal trait calls /// that go through this shim function. pub fn trans_object_shim<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>, - method_ty: Ty<'tcx>, - vtable_index: usize) + callee: Callee<'tcx>) -> ValueRef { let _icx = push_ctxt("trans_object_shim"); let tcx = ccx.tcx(); - debug!("trans_object_shim(vtable_index={}, method_ty={:?})", - vtable_index, - method_ty); + debug!("trans_object_shim({:?})", callee); + + let (sig, abi, function_name) = match callee.ty.sty { + ty::TyFnDef(def_id, substs, f) => { + let instance = Instance::new(def_id, substs); + (&f.sig, f.abi, instance.symbol_name(ccx.shared())) + } + _ => bug!() + }; - let sig = tcx.erase_late_bound_regions(&method_ty.fn_sig()); - let sig = tcx.normalize_associated_type(&sig); - let fn_ty = FnType::new(ccx, method_ty.fn_abi(), &sig, &[]); + let sig = tcx.erase_late_bound_regions_and_normalize(sig); + let fn_ty = FnType::new(ccx, abi, &sig, &[]); - let function_name = - symbol_names::internal_name_from_type_and_suffix(ccx, method_ty, "object_shim"); - let llfn = declare::define_internal_fn(ccx, &function_name, method_ty); + let llfn = declare::define_internal_fn(ccx, &function_name, callee.ty); attributes::set_frame_pointer_elimination(ccx, llfn); let (block_arena, fcx): (TypedArena<_>, FunctionContext); @@ -98,16 +92,7 @@ pub fn trans_object_shim<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>, let mut bcx = fcx.init(false); let dest = fcx.llretslotptr.get(); - - debug!("trans_object_shim: method_offset_in_vtable={}", - vtable_index); - let llargs = get_params(fcx.llfn); - - let callee = Callee { - data: Virtual(vtable_index), - ty: method_ty - }; bcx = callee.call(bcx, DebugLoc::None, &llargs[fcx.fn_ty.ret.is_indirect() as usize..], dest).bcx; @@ -134,78 +119,28 @@ pub fn get_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, debug!("get_vtable(trait_ref={:?})", trait_ref); // Check the cache. - match ccx.vtables().borrow().get(&trait_ref) { - Some(&val) => { return val } - None => { } + if let Some(&val) = ccx.vtables().borrow().get(&trait_ref) { + return val; } // Not in the cache. Build it. - let methods = traits::supertraits(tcx, trait_ref.clone()).flat_map(|trait_ref| { - let vtable = fulfill_obligation(ccx.shared(), DUMMY_SP, trait_ref.clone()); - match vtable { - // Should default trait error here? 
- traits::VtableDefaultImpl(_) | - traits::VtableBuiltin(_) => { - Vec::new().into_iter() - } - traits::VtableImpl( - traits::VtableImplData { - impl_def_id: id, - substs, - nested: _ }) => { - let nullptr = C_null(Type::nil(ccx).ptr_to()); - get_vtable_methods(tcx, id, substs) - .into_iter() - .map(|opt_mth| opt_mth.map_or(nullptr, |mth| { - Callee::def(ccx, mth.method.def_id, &mth.substs).reify(ccx) - })) - .collect::>() - .into_iter() - } - traits::VtableClosure( - traits::VtableClosureData { - closure_def_id, - substs, - nested: _ }) => { - let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_ref.def_id()).unwrap(); - let llfn = closure::trans_closure_method(ccx, - closure_def_id, - substs, - trait_closure_kind); - vec![llfn].into_iter() - } - traits::VtableFnPointer( - traits::VtableFnPointerData { - fn_ty: bare_fn_ty, - nested: _ }) => { - let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_ref.def_id()).unwrap(); - vec![trans_fn_pointer_shim(ccx, trait_closure_kind, bare_fn_ty)].into_iter() - } - traits::VtableObject(ref data) => { - // this would imply that the Self type being erased is - // an object type; this cannot happen because we - // cannot cast an unsized type into a trait object - bug!("cannot get vtable for an object type: {:?}", - data); - } - traits::VtableParam(..) => { - bug!("resolved vtable for {:?} to bad vtable {:?} in trans", - trait_ref, - vtable); - } - } + let nullptr = C_null(Type::nil(ccx).ptr_to()); + let methods = traits::get_vtable_methods(tcx, trait_ref).map(|opt_mth| { + opt_mth.map_or(nullptr, |(def_id, substs)| { + Callee::def(ccx, def_id, substs).reify(ccx) + }) }); let size_ty = sizing_type_of(ccx, trait_ref.self_ty()); let size = machine::llsize_of_alloc(ccx, size_ty); let align = align_of(ccx, trait_ref.self_ty()); - let components: Vec<_> = vec![ + let components: Vec<_> = [ // Generate a destructor for the vtable. glue::get_drop_glue(ccx, trait_ref.self_ty()), C_uint(ccx, size), C_uint(ccx, align) - ].into_iter().chain(methods).collect(); + ].iter().cloned().chain(methods).collect(); let vtable_const = C_struct(ccx, &components, false); let align = machine::llalign_of_pref(ccx, val_ty(vtable_const)); @@ -214,126 +149,3 @@ pub fn get_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ccx.vtables().borrow_mut().insert(trait_ref, vtable); vtable } - -pub fn get_vtable_methods<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - impl_id: DefId, - substs: &'tcx Substs<'tcx>) - -> Vec>> -{ - debug!("get_vtable_methods(impl_id={:?}, substs={:?}", impl_id, substs); - - let trait_id = match tcx.impl_trait_ref(impl_id) { - Some(t_id) => t_id.def_id, - None => bug!("make_impl_vtable: don't know how to \ - make a vtable for a type impl!") - }; - - tcx.populate_implementations_for_trait_if_necessary(trait_id); - - let trait_item_def_ids = tcx.trait_item_def_ids(trait_id); - trait_item_def_ids - .iter() - - // Filter out non-method items. - .filter_map(|item_def_id| { - match *item_def_id { - ty::MethodTraitItemId(def_id) => Some(def_id), - _ => None, - } - }) - - // Now produce pointers for each remaining method. If the - // method could never be called from this object, just supply - // null. 
- .map(|trait_method_def_id| { - debug!("get_vtable_methods: trait_method_def_id={:?}", - trait_method_def_id); - - let trait_method_type = match tcx.impl_or_trait_item(trait_method_def_id) { - ty::MethodTraitItem(m) => m, - _ => bug!("should be a method, not other assoc item"), - }; - let name = trait_method_type.name; - - // Some methods cannot be called on an object; skip those. - if !tcx.is_vtable_safe_method(trait_id, &trait_method_type) { - debug!("get_vtable_methods: not vtable safe"); - return None; - } - - debug!("get_vtable_methods: trait_method_type={:?}", - trait_method_type); - - // the method may have some early-bound lifetimes, add - // regions for those - let method_substs = Substs::for_item(tcx, trait_method_def_id, - |_, _| tcx.mk_region(ty::ReErased), - |_, _| tcx.types.err); - - // The substitutions we have are on the impl, so we grab - // the method type from the impl to substitute into. - let mth = get_impl_method(tcx, method_substs, impl_id, substs, name); - - debug!("get_vtable_methods: mth={:?}", mth); - - // If this is a default method, it's possible that it - // relies on where clauses that do not hold for this - // particular set of type parameters. Note that this - // method could then never be called, so we do not want to - // try and trans it, in that case. Issue #23435. - if mth.is_provided { - let predicates = mth.method.predicates.predicates.subst(tcx, &mth.substs); - if !normalize_and_test_predicates(tcx, predicates) { - debug!("get_vtable_methods: predicates do not hold"); - return None; - } - } - - Some(mth) - }) - .collect() -} - -#[derive(Debug)] -pub struct ImplMethod<'tcx> { - pub method: Rc>, - pub substs: &'tcx Substs<'tcx>, - pub is_provided: bool -} - -/// Locates the applicable definition of a method, given its name. 
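// Illustrative sketch (not part of the patch): the vtable assembled in
// `get_vtable` above is a flat table whose first three slots are drop glue,
// size and align, with the object-safe methods (or null placeholders) after
// them, hence `VTABLE_OFFSET == 3`. `Slot` is a stand-in for a function
// pointer or constant, not rustc's representation.
type Slot = usize;

fn build_vtable(drop_glue: Slot, size: Slot, align: Slot, methods: &[Option<Slot>]) -> Vec<Slot> {
    let null = 0; // methods that cannot be called on a trait object get a null slot
    let mut table = vec![drop_glue, size, align];
    table.extend(methods.iter().cloned().map(|m| m.unwrap_or(null)));
    table
}

fn method_slot(vtable: &[Slot], vtable_index: usize) -> Slot {
    const VTABLE_OFFSET: usize = 3; // skip drop glue, size, align
    vtable[VTABLE_OFFSET + vtable_index]
}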
-pub fn get_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - substs: &'tcx Substs<'tcx>, - impl_def_id: DefId, - impl_substs: &'tcx Substs<'tcx>, - name: Name) - -> ImplMethod<'tcx> -{ - assert!(!substs.needs_infer()); - - let trait_def_id = tcx.trait_id_of_impl(impl_def_id).unwrap(); - let trait_def = tcx.lookup_trait_def(trait_def_id); - - match trait_def.ancestors(impl_def_id).fn_defs(tcx, name).next() { - Some(node_item) => { - let substs = tcx.infer_ctxt(None, None, Reveal::All).enter(|infcx| { - let substs = substs.rebase_onto(tcx, trait_def_id, impl_substs); - let substs = traits::translate_substs(&infcx, impl_def_id, - substs, node_item.node); - tcx.lift(&substs).unwrap_or_else(|| { - bug!("trans::meth::get_impl_method: translate_substs \ - returned {:?} which contains inference types/regions", - substs); - }) - }); - ImplMethod { - method: node_item.item, - substs: substs, - is_provided: node_item.node.is_from_trait(), - } - } - None => { - bug!("method {:?} not found in {:?}", name, impl_def_id) - } - } -} diff --git a/src/librustc_trans/mir/analyze.rs b/src/librustc_trans/mir/analyze.rs index e13da2531024f..a934da12b9e36 100644 --- a/src/librustc_trans/mir/analyze.rs +++ b/src/librustc_trans/mir/analyze.rs @@ -13,14 +13,11 @@ use rustc_data_structures::bitvec::BitVector; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use rustc::mir::repr as mir; -use rustc::mir::repr::TerminatorKind; -use rustc::mir::repr::Location; +use rustc::mir::{self, Location, TerminatorKind}; use rustc::mir::visit::{Visitor, LvalueContext}; use rustc::mir::traversal; use common::{self, Block, BlockAndBuilder}; use glue; -use std::iter; use super::rvalue; pub fn lvalue_locals<'bcx, 'tcx>(bcx: Block<'bcx,'tcx>, @@ -30,11 +27,7 @@ pub fn lvalue_locals<'bcx, 'tcx>(bcx: Block<'bcx,'tcx>, analyzer.visit_mir(mir); - let local_types = mir.arg_decls.iter().map(|a| a.ty) - .chain(mir.var_decls.iter().map(|v| v.ty)) - .chain(mir.temp_decls.iter().map(|t| t.ty)) - .chain(iter::once(mir.return_ty)); - for (index, ty) in local_types.enumerate() { + for (index, ty) in mir.local_decls.iter().map(|l| l.ty).enumerate() { let ty = bcx.monomorphize(&ty); debug!("local {} has type {:?}", index, ty); if ty.is_scalar() || @@ -80,12 +73,11 @@ impl<'mir, 'bcx, 'tcx> LocalAnalyzer<'mir, 'bcx, 'tcx> { fn new(mir: &'mir mir::Mir<'tcx>, bcx: &'mir BlockAndBuilder<'bcx, 'tcx>) -> LocalAnalyzer<'mir, 'bcx, 'tcx> { - let local_count = mir.count_locals(); LocalAnalyzer { mir: mir, bcx: bcx, - lvalue_locals: BitVector::new(local_count), - seen_assigned: BitVector::new(local_count) + lvalue_locals: BitVector::new(mir.local_decls.len()), + seen_assigned: BitVector::new(mir.local_decls.len()) } } @@ -109,7 +101,7 @@ impl<'mir, 'bcx, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'bcx, 'tcx> { location: Location) { debug!("visit_assign(block={:?}, lvalue={:?}, rvalue={:?})", block, lvalue, rvalue); - if let Some(index) = self.mir.local_index(lvalue) { + if let mir::Lvalue::Local(index) = *lvalue { self.mark_assigned(index); if !rvalue::rvalue_creates_operand(self.mir, self.bcx, rvalue) { self.mark_as_lvalue(index); @@ -147,13 +139,13 @@ impl<'mir, 'bcx, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'bcx, 'tcx> { fn visit_lvalue(&mut self, lvalue: &mir::Lvalue<'tcx>, - context: LvalueContext, + context: LvalueContext<'tcx>, location: Location) { debug!("visit_lvalue(lvalue={:?}, context={:?})", lvalue, context); // Allow uses of projections of immediate pair fields. 
if let mir::Lvalue::Projection(ref proj) = *lvalue { - if self.mir.local_index(&proj.base).is_some() { + if let mir::Lvalue::Local(_) = proj.base { let ty = proj.base.ty(self.mir, self.bcx.tcx()); let ty = self.bcx.monomorphize(&ty.to_ty(self.bcx.tcx())); @@ -167,7 +159,7 @@ impl<'mir, 'bcx, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'bcx, 'tcx> { } } - if let Some(index) = self.mir.local_index(lvalue) { + if let mir::Lvalue::Local(index) = *lvalue { match context { LvalueContext::Call => { self.mark_assigned(index); @@ -180,7 +172,7 @@ impl<'mir, 'bcx, 'tcx> Visitor<'tcx> for LocalAnalyzer<'mir, 'bcx, 'tcx> { LvalueContext::Store | LvalueContext::Inspect | LvalueContext::Borrow { .. } | - LvalueContext::Projection => { + LvalueContext::Projection(..) => { self.mark_as_lvalue(index); } diff --git a/src/librustc_trans/mir/block.rs b/src/librustc_trans/mir/block.rs index baeafbe3e346f..8bf27b4babfc2 100644 --- a/src/librustc_trans/mir/block.rs +++ b/src/librustc_trans/mir/block.rs @@ -12,7 +12,7 @@ use llvm::{self, ValueRef}; use rustc_const_eval::{ErrKind, ConstEvalErr, note_const_eval_err}; use rustc::middle::lang_items; use rustc::ty; -use rustc::mir::repr as mir; +use rustc::mir; use abi::{Abi, FnType, ArgType}; use adt; use base; @@ -35,15 +35,16 @@ use syntax::parse::token; use super::{MirContext, LocalRef}; use super::analyze::CleanupKind; use super::constant::Const; -use super::lvalue::{LvalueRef, load_fat_ptr}; +use super::lvalue::{LvalueRef}; use super::operand::OperandRef; -use super::operand::OperandValue::*; +use super::operand::OperandValue::{Pair, Ref, Immediate}; + +use std::cell::Ref as CellRef; impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { pub fn trans_block(&mut self, bb: mir::BasicBlock) { let mut bcx = self.bcx(bb); - let mir = self.mir.clone(); - let data = &mir[bb]; + let data = &CellRef::clone(&self.mir)[bb]; debug!("trans_block({:?}={:?})", bb, data); @@ -139,9 +140,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { mir::TerminatorKind::Switch { ref discr, ref adt_def, ref targets } => { let discr_lvalue = self.trans_lvalue(&bcx, discr); let ty = discr_lvalue.ty.to_ty(bcx.tcx()); - let repr = adt::represent_type(bcx.ccx(), ty); let discr = bcx.with_block(|bcx| - adt::trans_get_discr(bcx, &repr, discr_lvalue.llval, None, true) + adt::trans_get_discr(bcx, ty, discr_lvalue.llval, None, true) ); let mut bb_hist = FnvHashMap(); @@ -167,7 +167,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { if default_bb != Some(target) { let llbb = llblock(self, target); let llval = bcx.with_block(|bcx| adt::trans_case( - bcx, &repr, Disr::from(adt_variant.disr_val))); + bcx, ty, Disr::from(adt_variant.disr_val))); build::AddCase(switch, llval, llbb) } } @@ -193,8 +193,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } let llval = if let Some(cast_ty) = ret.cast { - let index = mir.local_index(&mir::Lvalue::ReturnPointer).unwrap(); - let op = match self.locals[index] { + let op = match self.locals[mir::RETURN_POINTER] { LocalRef::Operand(Some(op)) => op, LocalRef::Operand(None) => bug!("use of return before def"), LocalRef::Lvalue(tr_lvalue) => { @@ -219,7 +218,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } load } else { - let op = self.trans_consume(&bcx, &mir::Lvalue::ReturnPointer); + let op = self.trans_consume(&bcx, &mir::Lvalue::Local(mir::RETURN_POINTER)); op.pack_if_pair(&bcx).immediate() }; bcx.ret(llval); @@ -230,7 +229,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } mir::TerminatorKind::Drop { ref location, target, unwind } => { - let ty = location.ty(&mir, 
bcx.tcx()).to_ty(bcx.tcx()); + let ty = location.ty(&self.mir, bcx.tcx()).to_ty(bcx.tcx()); let ty = bcx.monomorphize(&ty); // Double check for necessity to drop @@ -420,7 +419,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { _ => bug!("{} is not callable", callee.ty) }; - let sig = bcx.tcx().erase_late_bound_regions(sig); + let sig = bcx.tcx().erase_late_bound_regions_and_normalize(sig); // Handle intrinsics old trans wants Expr's for, ourselves. let intrinsic = match (&callee.ty.sty, &callee.data) { @@ -701,12 +700,11 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { // Handle both by-ref and immediate tuples. match tuple.val { Ref(llval) => { - let base_repr = adt::represent_type(bcx.ccx(), tuple.ty); let base = adt::MaybeSizedValue::sized(llval); for (n, &ty) in arg_types.iter().enumerate() { - let ptr = adt::trans_field_ptr_builder(bcx, &base_repr, base, Disr(0), n); + let ptr = adt::trans_field_ptr_builder(bcx, tuple.ty, base, Disr(0), n); let val = if common::type_is_fat_ptr(bcx.tcx(), ty) { - let (lldata, llextra) = load_fat_ptr(bcx, ptr); + let (lldata, llextra) = base::load_fat_ptr_builder(bcx, ptr, ty); Pair(lldata, llextra) } else { // trans_argument will load this if it needs to @@ -846,7 +844,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { if fn_ret_ty.is_ignore() { return ReturnDest::Nothing; } - let dest = if let Some(index) = self.mir.local_index(dest) { + let dest = if let mir::Lvalue::Local(index) = *dest { let ret_ty = self.monomorphized_lvalue_ty(dest); match self.locals[index] { LocalRef::Lvalue(dest) => dest, diff --git a/src/librustc_trans/mir/constant.rs b/src/librustc_trans/mir/constant.rs index e9f324c0b08f0..3d0d889760943 100644 --- a/src/librustc_trans/mir/constant.rs +++ b/src/librustc_trans/mir/constant.rs @@ -16,16 +16,16 @@ use rustc_const_math::ConstFloat::*; use rustc_const_math::{ConstInt, ConstIsize, ConstUsize, ConstMathErr}; use rustc::hir::def_id::DefId; use rustc::infer::TransNormalize; -use rustc::mir::repr as mir; +use rustc::mir; use rustc::mir::tcx::LvalueTy; use rustc::traits; use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; use rustc::ty::cast::{CastTy, IntTy}; use rustc::ty::subst::Substs; use rustc_data_structures::indexed_vec::{Idx, IndexVec}; -use {abi, adt, base, Disr}; +use {abi, adt, base, Disr, machine}; use callee::Callee; -use common::{self, BlockAndBuilder, CrateContext, const_get_elt, val_ty}; +use common::{self, BlockAndBuilder, CrateContext, const_get_elt, val_ty, type_is_sized}; use common::{C_array, C_bool, C_bytes, C_floating_f64, C_integral}; use common::{C_null, C_struct, C_str_slice, C_undef, C_uint}; use common::{const_to_opt_int, const_to_opt_uint}; @@ -221,16 +221,17 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { fn new(ccx: &'a CrateContext<'a, 'tcx>, mir: &'a mir::Mir<'tcx>, substs: &'tcx Substs<'tcx>, - args: IndexVec>) + args: IndexVec>) -> MirConstContext<'a, 'tcx> { let mut context = MirConstContext { ccx: ccx, mir: mir, substs: substs, - locals: (0..mir.count_locals()).map(|_| None).collect(), + locals: (0..mir.local_decls.len()).map(|_| None).collect(), }; for (i, arg) in args.into_iter().enumerate() { - let index = mir.local_index(&mir::Lvalue::Arg(mir::Arg::new(i))).unwrap(); + // Locals after local 0 are the function arguments + let index = mir::Local::new(i + 1); context.locals[index] = Some(arg); } context @@ -238,7 +239,7 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { fn trans_def(ccx: &'a CrateContext<'a, 'tcx>, mut instance: Instance<'tcx>, - args: IndexVec>) + args: IndexVec>) -> Result, ConstEvalErr> { // Try to 
resolve associated constants. if let Some(trait_id) = ccx.tcx().trait_of_item(instance.def) { @@ -247,18 +248,20 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { let vtable = common::fulfill_obligation(ccx.shared(), DUMMY_SP, trait_ref); if let traits::VtableImpl(vtable_impl) = vtable { let name = ccx.tcx().item_name(instance.def); - for ac in ccx.tcx().associated_consts(vtable_impl.impl_def_id) { - if ac.name == name { - instance = Instance::new(ac.def_id, vtable_impl.substs); - break; - } + let ac = ccx.tcx().impl_or_trait_items(vtable_impl.impl_def_id) + .iter().filter_map(|&def_id| { + match ccx.tcx().impl_or_trait_item(def_id) { + ty::ConstTraitItem(ac) => Some(ac), + _ => None + } + }).find(|ic| ic.name == name); + if let Some(ac) = ac { + instance = Instance::new(ac.def_id, vtable_impl.substs); } } } - let mir = ccx.get_mir(instance.def).unwrap_or_else(|| { - bug!("missing constant MIR for {}", instance) - }); + let mir = ccx.tcx().item_mir(instance.def); MirConstContext::new(ccx, &mir, instance.substs, args).trans() } @@ -292,7 +295,8 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { } } mir::StatementKind::StorageLive(_) | - mir::StatementKind::StorageDead(_) => {} + mir::StatementKind::StorageDead(_) | + mir::StatementKind::Nop => {} mir::StatementKind::SetDiscriminant{ .. } => { span_bug!(span, "SetDiscriminant should not appear in constants?"); } @@ -306,8 +310,7 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { mir::TerminatorKind::Goto { target } => target, mir::TerminatorKind::Return => { failure?; - let index = self.mir.local_index(&mir::Lvalue::ReturnPointer).unwrap(); - return Ok(self.locals[index].unwrap_or_else(|| { + return Ok(self.locals[mir::RETURN_POINTER].unwrap_or_else(|| { span_bug!(span, "no returned value in constant"); })); } @@ -371,7 +374,7 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { } fn store(&mut self, dest: &mir::Lvalue<'tcx>, value: Const<'tcx>, span: Span) { - if let Some(index) = self.mir.local_index(dest) { + if let mir::Lvalue::Local(index) = *dest { self.locals[index] = Some(value); } else { span_bug!(span, "assignment to {:?} in constant", dest); @@ -382,17 +385,14 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { -> Result, ConstEvalErr> { let tcx = self.ccx.tcx(); - if let Some(index) = self.mir.local_index(lvalue) { + if let mir::Lvalue::Local(index) = *lvalue { return Ok(self.locals[index].unwrap_or_else(|| { span_bug!(span, "{:?} not initialized", lvalue) }).as_lvalue()); } let lvalue = match *lvalue { - mir::Lvalue::Var(_) | - mir::Lvalue::Temp(_) | - mir::Lvalue::Arg(_) | - mir::Lvalue::ReturnPointer => bug!(), // handled above + mir::Lvalue::Local(_) => bug!(), // handled above mir::Lvalue::Static(def_id) => { ConstLvalue { base: Base::Static(consts::get_static(self.ccx, def_id)), @@ -436,8 +436,7 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { } } mir::ProjectionElem::Field(ref field, _) => { - let base_repr = adt::represent_type(self.ccx, tr_base.ty); - let llprojected = adt::const_get_field(&base_repr, base.llval, + let llprojected = adt::const_get_field(self.ccx, tr_base.ty, base.llval, Disr(0), field.index()); let llextra = if is_sized { ptr::null_mut() @@ -568,7 +567,7 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { } match *kind { - mir::AggregateKind::Vec => { + mir::AggregateKind::Array => { self.const_array(dest_ty, &fields) } mir::AggregateKind::Adt(..) 
| @@ -580,9 +579,8 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { } _ => Disr(0) }; - let repr = adt::represent_type(self.ccx, dest_ty); Const::new( - adt::trans_const(self.ccx, &repr, disr, &fields), + adt::trans_const(self.ccx, dest_ty, disr, &fields), dest_ty ) } @@ -653,8 +651,8 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { let ll_t_out = type_of::immediate_type_of(self.ccx, cast_ty); let llval = operand.llval; let signed = if let CastTy::Int(IntTy::CEnum) = r_t_in { - let repr = adt::represent_type(self.ccx, operand.ty); - adt::is_discr_signed(&repr) + let l = self.ccx.layout_of(operand.ty); + adt::is_discr_signed(&l) } else { operand.ty.is_signed() }; @@ -730,7 +728,12 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { let base = match tr_lvalue.base { Base::Value(llval) => { - let align = type_of::align_of(self.ccx, ty); + // FIXME: may be wrong for &*(&simd_vec as &fmt::Debug) + let align = if type_is_sized(self.ccx.tcx(), ty) { + type_of::align_of(self.ccx, ty) + } else { + self.ccx.tcx().data_layout.pointer_align.abi() as machine::llalign + }; if bk == mir::BorrowKind::Mut { consts::addr_of_mut(self.ccx, llval, align, "ref_mut") } else { @@ -768,7 +771,7 @@ impl<'a, 'tcx> MirConstContext<'a, 'tcx> { let rhs = self.const_operand(rhs, span)?; let ty = lhs.ty; let val_ty = op.ty(tcx, lhs.ty, rhs.ty); - let binop_ty = tcx.mk_tup(vec![val_ty, tcx.types.bool]); + let binop_ty = tcx.intern_tup(&[val_ty, tcx.types.bool]); let (lhs, rhs) = (lhs.llval, rhs.llval); assert!(!ty.is_fp()); diff --git a/src/librustc_trans/mir/lvalue.rs b/src/librustc_trans/mir/lvalue.rs index 5e180887a3604..d28c466e230ba 100644 --- a/src/librustc_trans/mir/lvalue.rs +++ b/src/librustc_trans/mir/lvalue.rs @@ -10,13 +10,11 @@ use llvm::ValueRef; use rustc::ty::{self, Ty, TypeFoldable}; -use rustc::mir::repr as mir; +use rustc::mir; use rustc::mir::tcx::LvalueTy; use rustc_data_structures::indexed_vec::Idx; -use abi; use adt; use base; -use builder::Builder; use common::{self, BlockAndBuilder, CrateContext, C_uint, C_undef}; use consts; use machine; @@ -69,18 +67,6 @@ impl<'tcx> LvalueRef<'tcx> { } } -pub fn get_meta(b: &Builder, fat_ptr: ValueRef) -> ValueRef { - b.struct_gep(fat_ptr, abi::FAT_PTR_EXTRA) -} - -pub fn get_dataptr(b: &Builder, fat_ptr: ValueRef) -> ValueRef { - b.struct_gep(fat_ptr, abi::FAT_PTR_ADDR) -} - -pub fn load_fat_ptr(b: &Builder, fat_ptr: ValueRef) -> (ValueRef, ValueRef) { - (b.load(get_dataptr(b, fat_ptr)), b.load(get_meta(b, fat_ptr))) -} - impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { pub fn trans_lvalue(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>, @@ -91,7 +77,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { let ccx = bcx.ccx(); let tcx = bcx.tcx(); - if let Some(index) = self.mir.local_index(lvalue) { + if let mir::Lvalue::Local(index) = *lvalue { match self.locals[index] { LocalRef::Lvalue(lvalue) => { return lvalue; @@ -103,10 +89,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } let result = match *lvalue { - mir::Lvalue::Var(_) | - mir::Lvalue::Temp(_) | - mir::Lvalue::Arg(_) | - mir::Lvalue::ReturnPointer => bug!(), // handled above + mir::Lvalue::Local(_) => bug!(), // handled above mir::Lvalue::Static(def_id) => { let const_ty = self.monomorphized_lvalue_ty(lvalue); LvalueRef::new_sized(consts::get_static(ccx, def_id), @@ -152,7 +135,6 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { mir::ProjectionElem::Deref => bug!(), mir::ProjectionElem::Field(ref field, _) => { let base_ty = tr_base.ty.to_ty(tcx); - let base_repr = adt::represent_type(ccx, base_ty); let discr = match tr_base.ty { 
LvalueTy::Ty { .. } => 0, LvalueTy::Downcast { adt_def: _, substs: _, variant_index: v } => v, @@ -164,7 +146,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } else { adt::MaybeSizedValue::unsized_(tr_base.llval, tr_base.llextra) }; - let llprojected = adt::trans_field_ptr_builder(bcx, &base_repr, base, + let llprojected = adt::trans_field_ptr_builder(bcx, base_ty, base, Disr(discr), field.index()); let llextra = if is_sized { ptr::null_mut() @@ -236,7 +218,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { lvalue: &mir::Lvalue<'tcx>, f: F) -> U where F: FnOnce(&mut Self, LvalueRef<'tcx>) -> U { - if let Some(index) = self.mir.local_index(lvalue) { + if let mir::Lvalue::Local(index) = *lvalue { match self.locals[index] { LocalRef::Lvalue(lvalue) => f(self, lvalue), LocalRef::Operand(None) => { diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs index 1934f7b870d18..d2adf88c91683 100644 --- a/src/librustc_trans/mir/mod.rs +++ b/src/librustc_trans/mir/mod.rs @@ -11,7 +11,7 @@ use libc::c_uint; use llvm::{self, ValueRef}; use rustc::ty; -use rustc::mir::repr as mir; +use rustc::mir; use rustc::mir::tcx::LvalueTy; use session::config::FullDebugInfo; use base; @@ -23,8 +23,7 @@ use type_of; use syntax_pos::{DUMMY_SP, NO_EXPANSION, COMMAND_LINE_EXPN, BytePos}; use syntax::parse::token::keywords; -use std::ops::Deref; -use std::rc::Rc; +use std::cell::Ref; use std::iter; use basic_block::BasicBlock; @@ -34,30 +33,14 @@ use rustc_data_structures::indexed_vec::{IndexVec, Idx}; pub use self::constant::trans_static_initializer; -use self::lvalue::{LvalueRef, get_dataptr, get_meta}; +use self::lvalue::{LvalueRef}; use rustc::mir::traversal; use self::operand::{OperandRef, OperandValue}; -#[derive(Clone)] -pub enum CachedMir<'mir, 'tcx: 'mir> { - Ref(&'mir mir::Mir<'tcx>), - Owned(Rc>) -} - -impl<'mir, 'tcx: 'mir> Deref for CachedMir<'mir, 'tcx> { - type Target = mir::Mir<'tcx>; - fn deref(&self) -> &mir::Mir<'tcx> { - match *self { - CachedMir::Ref(r) => r, - CachedMir::Owned(ref rc) => rc - } - } -} - /// Master context for translating MIR. 
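// Illustrative sketch (not part of the patch): this patch folds the old
// Var/Temp/Arg/ReturnPointer lvalues into one `Local` index space, laid out as
// the return pointer first, then the arguments, then user variables and
// temporaries. The toy index below mimics that ordering only; it is not the
// rustc `Local` type.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Local(usize);

const RETURN_POINTER: Local = Local(0);

fn arg_local(arg_index: usize) -> Local {
    Local(1 + arg_index) // locals after local 0 are the function arguments
}

fn var_or_temp_local(arg_count: usize, i: usize) -> Local {
    Local(1 + arg_count + i) // vars and temps follow the arguments
}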
pub struct MirContext<'bcx, 'tcx:'bcx> { - mir: CachedMir<'bcx, 'tcx>, + mir: Ref<'tcx, mir::Mir<'tcx>>, /// Function context fcx: &'bcx common::FunctionContext<'bcx, 'tcx>, @@ -223,7 +206,7 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) { let scopes = debuginfo::create_mir_scopes(fcx); let mut mircx = MirContext { - mir: mir.clone(), + mir: Ref::clone(&mir), fcx: fcx, llpersonalityslot: None, blocks: block_bcxs, @@ -237,51 +220,61 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) { // Allocate variable and temp allocas mircx.locals = { let args = arg_local_refs(&bcx, &mir, &mircx.scopes, &lvalue_locals); - let vars = mir.var_decls.iter().enumerate().map(|(i, decl)| { + + let mut allocate_local = |local| { + let decl = &mir.local_decls[local]; let ty = bcx.monomorphize(&decl.ty); - let debug_scope = mircx.scopes[decl.source_info.scope]; - let dbg = debug_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo; - let local = mir.local_index(&mir::Lvalue::Var(mir::Var::new(i))).unwrap(); - if !lvalue_locals.contains(local.index()) && !dbg { - return LocalRef::new_operand(bcx.ccx(), ty); - } + if let Some(name) = decl.name { + // User variable + let source_info = decl.source_info.unwrap(); + let debug_scope = mircx.scopes[source_info.scope]; + let dbg = debug_scope.is_valid() && bcx.sess().opts.debuginfo == FullDebugInfo; - let lvalue = LvalueRef::alloca(&bcx, ty, &decl.name.as_str()); - if dbg { - let dbg_loc = mircx.debug_loc(decl.source_info); - if let DebugLoc::ScopeAt(scope, span) = dbg_loc { - bcx.with_block(|bcx| { - declare_local(bcx, decl.name, ty, scope, - VariableAccess::DirectVariable { alloca: lvalue.llval }, - VariableKind::LocalVariable, span); - }); - } else { - panic!("Unexpected"); + if !lvalue_locals.contains(local.index()) && !dbg { + debug!("alloc: {:?} ({}) -> operand", local, name); + return LocalRef::new_operand(bcx.ccx(), ty); } - } - LocalRef::Lvalue(lvalue) - }); - - let locals = mir.temp_decls.iter().enumerate().map(|(i, decl)| { - (mir::Lvalue::Temp(mir::Temp::new(i)), decl.ty) - }).chain(iter::once((mir::Lvalue::ReturnPointer, mir.return_ty))); - - args.into_iter().chain(vars).chain(locals.map(|(lvalue, ty)| { - let ty = bcx.monomorphize(&ty); - let local = mir.local_index(&lvalue).unwrap(); - if lvalue == mir::Lvalue::ReturnPointer && fcx.fn_ty.ret.is_indirect() { - let llretptr = llvm::get_param(fcx.llfn, 0); - LocalRef::Lvalue(LvalueRef::new_sized(llretptr, LvalueTy::from_ty(ty))) - } else if lvalue_locals.contains(local.index()) { - LocalRef::Lvalue(LvalueRef::alloca(&bcx, ty, &format!("{:?}", lvalue))) + + debug!("alloc: {:?} ({}) -> lvalue", local, name); + let lvalue = LvalueRef::alloca(&bcx, ty, &name.as_str()); + if dbg { + let dbg_loc = mircx.debug_loc(source_info); + if let DebugLoc::ScopeAt(scope, span) = dbg_loc { + bcx.with_block(|bcx| { + declare_local(bcx, name, ty, scope, + VariableAccess::DirectVariable { alloca: lvalue.llval }, + VariableKind::LocalVariable, span); + }); + } else { + panic!("Unexpected"); + } + } + LocalRef::Lvalue(lvalue) } else { - // If this is an immediate local, we do not create an - // alloca in advance. Instead we wait until we see the - // definition and update the operand there. 
- LocalRef::new_operand(bcx.ccx(), ty) + // Temporary or return pointer + if local == mir::RETURN_POINTER && fcx.fn_ty.ret.is_indirect() { + debug!("alloc: {:?} (return pointer) -> lvalue", local); + let llretptr = llvm::get_param(fcx.llfn, 0); + LocalRef::Lvalue(LvalueRef::new_sized(llretptr, LvalueTy::from_ty(ty))) + } else if lvalue_locals.contains(local.index()) { + debug!("alloc: {:?} -> lvalue", local); + LocalRef::Lvalue(LvalueRef::alloca(&bcx, ty, &format!("{:?}", local))) + } else { + // If this is an immediate local, we do not create an + // alloca in advance. Instead we wait until we see the + // definition and update the operand there. + debug!("alloc: {:?} -> operand", local); + LocalRef::new_operand(bcx.ccx(), ty) + } } - })).collect() + }; + + let retptr = allocate_local(mir::RETURN_POINTER); + iter::once(retptr) + .chain(args.into_iter()) + .chain(mir.vars_and_temps_iter().map(allocate_local)) + .collect() }; // Branch to the START block @@ -346,10 +339,11 @@ fn arg_local_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>, None }; - mir.arg_decls.iter().enumerate().map(|(arg_index, arg_decl)| { + mir.args_iter().enumerate().map(|(arg_index, local)| { + let arg_decl = &mir.local_decls[local]; let arg_ty = bcx.monomorphize(&arg_decl.ty); - let local = mir.local_index(&mir::Lvalue::Arg(mir::Arg::new(arg_index))).unwrap(); - if arg_decl.spread { + + if Some(local) == mir.spread_arg { // This argument (e.g. the last argument in the "rust-call" ABI) // is a tuple that was spread at the ABI level and now we have // to reconstruct it into a tuple local variable, from multiple @@ -360,7 +354,6 @@ fn arg_local_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>, _ => bug!("spread argument isn't a tuple?!") }; - let lltuplety = type_of::type_of(bcx.ccx(), arg_ty); let lltemp = bcx.with_block(|bcx| { base::alloc_ty(bcx, arg_ty, &format!("arg{}", arg_index)) }); @@ -373,32 +366,27 @@ fn arg_local_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>, // they are the two sub-fields of a single aggregate field. let meta = &fcx.fn_ty.args[idx]; idx += 1; - arg.store_fn_arg(bcx, &mut llarg_idx, get_dataptr(bcx, dst)); - meta.store_fn_arg(bcx, &mut llarg_idx, get_meta(bcx, dst)); + arg.store_fn_arg(bcx, &mut llarg_idx, + base::get_dataptr_builder(bcx, dst)); + meta.store_fn_arg(bcx, &mut llarg_idx, + base::get_meta_builder(bcx, dst)); } else { arg.store_fn_arg(bcx, &mut llarg_idx, dst); } - - bcx.with_block(|bcx| arg_scope.map(|scope| { - let byte_offset_of_var_in_tuple = - machine::llelement_offset(bcx.ccx(), lltuplety, i); - - let ops = unsafe { - [llvm::LLVMRustDIBuilderCreateOpDeref(), - llvm::LLVMRustDIBuilderCreateOpPlus(), - byte_offset_of_var_in_tuple as i64] - }; - - let variable_access = VariableAccess::IndirectVariable { - alloca: lltemp, - address_operations: &ops - }; - declare_local(bcx, keywords::Invalid.name(), - tupled_arg_ty, scope, variable_access, - VariableKind::ArgumentVariable(arg_index + i + 1), - bcx.fcx().span.unwrap_or(DUMMY_SP)); - })); } + + // Now that we have one alloca that contains the aggregate value, + // we can create one debuginfo entry for the argument. 
+ bcx.with_block(|bcx| arg_scope.map(|scope| { + let variable_access = VariableAccess::DirectVariable { + alloca: lltemp + }; + declare_local(bcx, arg_decl.name.unwrap_or(keywords::Invalid.name()), + arg_ty, scope, variable_access, + VariableKind::ArgumentVariable(arg_index + 1), + bcx.fcx().span.unwrap_or(DUMMY_SP)); + })); + return LocalRef::Lvalue(LvalueRef::new_sized(lltemp, LvalueTy::from_ty(arg_ty))); } @@ -455,8 +443,10 @@ fn arg_local_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>, // so make an alloca to store them in. let meta = &fcx.fn_ty.args[idx]; idx += 1; - arg.store_fn_arg(bcx, &mut llarg_idx, get_dataptr(bcx, lltemp)); - meta.store_fn_arg(bcx, &mut llarg_idx, get_meta(bcx, lltemp)); + arg.store_fn_arg(bcx, &mut llarg_idx, + base::get_dataptr_builder(bcx, lltemp)); + meta.store_fn_arg(bcx, &mut llarg_idx, + base::get_meta_builder(bcx, lltemp)); } else { // otherwise, arg is passed by value, so make a // temporary and store it there @@ -467,8 +457,8 @@ fn arg_local_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>, bcx.with_block(|bcx| arg_scope.map(|scope| { // Is this a regular argument? if arg_index > 0 || mir.upvar_decls.is_empty() { - declare_local(bcx, arg_decl.debug_name, arg_ty, scope, - VariableAccess::DirectVariable { alloca: llval }, + declare_local(bcx, arg_decl.name.unwrap_or(keywords::Invalid.name()), arg_ty, + scope, VariableAccess::DirectVariable { alloca: llval }, VariableKind::ArgumentVariable(arg_index + 1), bcx.fcx().span.unwrap_or(DUMMY_SP)); return; diff --git a/src/librustc_trans/mir/operand.rs b/src/librustc_trans/mir/operand.rs index 270033be9375c..62eda56e2e1ba 100644 --- a/src/librustc_trans/mir/operand.rs +++ b/src/librustc_trans/mir/operand.rs @@ -10,7 +10,7 @@ use llvm::ValueRef; use rustc::ty::Ty; -use rustc::mir::repr as mir; +use rustc::mir; use rustc_data_structures::indexed_vec::Idx; use base; @@ -143,20 +143,18 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { { debug!("trans_load: {:?} @ {:?}", Value(llval), ty); - let val = if common::type_is_imm_pair(bcx.ccx(), ty) { + let val = if common::type_is_fat_ptr(bcx.tcx(), ty) { + let (lldata, llextra) = base::load_fat_ptr_builder(bcx, llval, ty); + OperandValue::Pair(lldata, llextra) + } else if common::type_is_imm_pair(bcx.ccx(), ty) { + let [a_ty, b_ty] = common::type_pair_fields(bcx.ccx(), ty).unwrap(); let a_ptr = bcx.struct_gep(llval, 0); let b_ptr = bcx.struct_gep(llval, 1); - // This is None only for fat pointers, which don't - // need any special load-time behavior anyway. - let pair_fields = common::type_pair_fields(bcx.ccx(), ty); - let (a, b) = if let Some([a_ty, b_ty]) = pair_fields { - (base::load_ty_builder(bcx, a_ptr, a_ty), - base::load_ty_builder(bcx, b_ptr, b_ty)) - } else { - (bcx.load(a_ptr), bcx.load(b_ptr)) - }; - OperandValue::Pair(a, b) + OperandValue::Pair( + base::load_ty_builder(bcx, a_ptr, a_ty), + base::load_ty_builder(bcx, b_ptr, b_ty) + ) } else if common::type_is_immediate(bcx.ccx(), ty) { OperandValue::Immediate(base::load_ty_builder(bcx, llval, ty)) } else { @@ -175,7 +173,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { // watch out for locals that do not have an // alloca; they are handled somewhat differently - if let Some(index) = self.mir.local_index(lvalue) { + if let mir::Lvalue::Local(index) = *lvalue { match self.locals[index] { LocalRef::Operand(Some(o)) => { return o; @@ -191,7 +189,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { // Moves out of pair fields are trivial. 
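// Illustrative sketch (not part of the patch): `load_fat_ptr_builder` above
// splits a fat pointer into the (data, extra) pair carried by
// `OperandValue::Pair`; the extra word is the slice length (or a vtable
// pointer), which is why a fat pointer is exactly two machine words. Std only:
use std::mem::size_of;

fn fat_pointer_demo() {
    assert_eq!(size_of::<&[u8]>(), 2 * size_of::<&u8>());  // data pointer + length
    assert_eq!(size_of::<&str>(), 2 * size_of::<usize>()); // data pointer + length

    let bytes: &[u8] = &[1, 2, 3];
    let (data, extra) = (bytes.as_ptr(), bytes.len()); // the two halves of the pair
    assert!(!data.is_null());
    assert_eq!(extra, 3);
}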
if let &mir::Lvalue::Projection(ref proj) = lvalue { - if let Some(index) = self.mir.local_index(&proj.base) { + if let mir::Lvalue::Local(index) = proj.base { if let LocalRef::Operand(Some(o)) = self.locals[index] { match (o.val, &proj.elem) { (OperandValue::Pair(a, b), diff --git a/src/librustc_trans/mir/rvalue.rs b/src/librustc_trans/mir/rvalue.rs index 21b019d7e24df..f25877b1de12d 100644 --- a/src/librustc_trans/mir/rvalue.rs +++ b/src/librustc_trans/mir/rvalue.rs @@ -11,15 +11,18 @@ use llvm::{self, ValueRef}; use rustc::ty::{self, Ty}; use rustc::ty::cast::{CastTy, IntTy}; -use rustc::mir::repr as mir; +use rustc::ty::layout::Layout; +use rustc::mir; use asm; use base; use callee::Callee; use common::{self, val_ty, C_bool, C_null, C_uint, BlockAndBuilder, Result}; +use common::{C_integral}; use debuginfo::DebugLoc; use adt; use machine; +use type_::Type; use type_of; use tvec; use value::Value; @@ -28,7 +31,7 @@ use Disr; use super::MirContext; use super::constant::const_scalar_checked_binop; use super::operand::{OperandRef, OperandValue}; -use super::lvalue::{LvalueRef, get_dataptr}; +use super::lvalue::{LvalueRef}; impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { pub fn trans_rvalue(&mut self, @@ -98,7 +101,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { let tr_elem = self.trans_operand(&bcx, elem); let size = count.value.as_u64(bcx.tcx().sess.target.uint_type); let size = C_uint(bcx.ccx(), size); - let base = get_dataptr(&bcx, dest.llval); + let base = base::get_dataptr_builder(&bcx, dest.llval); let bcx = bcx.map_block(|block| { tvec::slice_for_each(block, base, tr_elem.ty, size, |block, llslot| { self.store_operand_direct(block, llslot, tr_elem); @@ -111,10 +114,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { mir::Rvalue::Aggregate(ref kind, ref operands) => { match *kind { mir::AggregateKind::Adt(adt_def, variant_index, _, active_field_index) => { - let repr = adt::represent_type(bcx.ccx(), dest.ty.to_ty(bcx.tcx())); let disr = Disr::from(adt_def.variants[variant_index].disr_val); bcx.with_block(|bcx| { - adt::trans_set_discr(bcx, &repr, dest.llval, Disr::from(disr)); + adt::trans_set_discr(bcx, + dest.ty.to_ty(bcx.tcx()), dest.llval, Disr::from(disr)); }); for (i, operand) in operands.iter().enumerate() { let op = self.trans_operand(&bcx, operand); @@ -122,8 +125,9 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { if !common::type_is_zero_size(bcx.ccx(), op.ty) { let val = adt::MaybeSizedValue::sized(dest.llval); let field_index = active_field_index.unwrap_or(i); - let lldest_i = adt::trans_field_ptr_builder(&bcx, &repr, val, - disr, field_index); + let lldest_i = adt::trans_field_ptr_builder(&bcx, + dest.ty.to_ty(bcx.tcx()), + val, disr, field_index); self.store_operand(&bcx, lldest_i, op); } } @@ -270,17 +274,36 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { let ll_t_in = type_of::immediate_type_of(bcx.ccx(), operand.ty); let ll_t_out = type_of::immediate_type_of(bcx.ccx(), cast_ty); let (llval, signed) = if let CastTy::Int(IntTy::CEnum) = r_t_in { - let repr = adt::represent_type(bcx.ccx(), operand.ty); + let l = bcx.ccx().layout_of(operand.ty); let discr = match operand.val { OperandValue::Immediate(llval) => llval, OperandValue::Ref(llptr) => { bcx.with_block(|bcx| { - adt::trans_get_discr(bcx, &repr, llptr, None, true) + adt::trans_get_discr(bcx, operand.ty, llptr, None, true) }) } OperandValue::Pair(..) => bug!("Unexpected Pair operand") }; - (discr, adt::is_discr_signed(&repr)) + let (signed, min, max) = match l { + &Layout::CEnum { signed, min, max, .. 
} => { + (signed, min, max) + } + _ => bug!("CEnum {:?} is not an enum", operand) + }; + + if max > min { + // We want `table[e as usize]` to not + // have bound checks, and this is the most + // convenient place to put the `assume`. + + base::call_assume(&bcx, bcx.icmp( + llvm::IntULE, + discr, + C_integral(common::val_ty(discr), max, false) + )) + } + + (discr, signed) } else { (operand.immediate(), operand.ty.is_signed()) }; @@ -381,13 +404,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { match (lhs.val, rhs.val) { (OperandValue::Pair(lhs_addr, lhs_extra), OperandValue::Pair(rhs_addr, rhs_extra)) => { - bcx.with_block(|bcx| { - base::compare_fat_ptrs(bcx, - lhs_addr, lhs_extra, - rhs_addr, rhs_extra, - lhs.ty, op.to_hir_binop(), - debug_loc) - }) + self.trans_fat_ptr_binop(&bcx, op, + lhs_addr, lhs_extra, + rhs_addr, rhs_extra, + lhs.ty) } _ => bug!() } @@ -410,7 +430,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { lhs.immediate(), rhs.immediate(), lhs.ty); let val_ty = op.ty(bcx.tcx(), lhs.ty, rhs.ty); - let operand_ty = bcx.tcx().mk_tup(vec![val_ty, bcx.tcx().types.bool]); + let operand_ty = bcx.tcx().intern_tup(&[val_ty, bcx.tcx().types.bool]); let operand = OperandRef { val: result, ty: operand_ty @@ -484,6 +504,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { input_ty: Ty<'tcx>) -> ValueRef { let is_float = input_ty.is_fp(); let is_signed = input_ty.is_signed(); + let is_nil = input_ty.is_nil(); + let is_bool = input_ty.is_bool(); match op { mir::BinOp::Add => if is_float { bcx.fadd(lhs, rhs) @@ -534,12 +556,79 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { DebugLoc::None) }) } - mir::BinOp::Eq | mir::BinOp::Lt | mir::BinOp::Gt | - mir::BinOp::Ne | mir::BinOp::Le | mir::BinOp::Ge => { - bcx.with_block(|bcx| { - base::compare_scalar_types(bcx, lhs, rhs, input_ty, - op.to_hir_binop(), DebugLoc::None) + mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt | + mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => if is_nil { + C_bool(bcx.ccx(), match op { + mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt => false, + mir::BinOp::Eq | mir::BinOp::Le | mir::BinOp::Ge => true, + _ => unreachable!() }) + } else if is_float { + bcx.fcmp( + base::bin_op_to_fcmp_predicate(op.to_hir_binop()), + lhs, rhs + ) + } else { + let (lhs, rhs) = if is_bool { + // FIXME(#36856) -- extend the bools into `i8` because + // LLVM's i1 comparisons are broken. 
+ (bcx.zext(lhs, Type::i8(bcx.ccx())), + bcx.zext(rhs, Type::i8(bcx.ccx()))) + } else { + (lhs, rhs) + }; + + bcx.icmp( + base::bin_op_to_icmp_predicate(op.to_hir_binop(), is_signed), + lhs, rhs + ) + } + } + } + + pub fn trans_fat_ptr_binop(&mut self, + bcx: &BlockAndBuilder<'bcx, 'tcx>, + op: mir::BinOp, + lhs_addr: ValueRef, + lhs_extra: ValueRef, + rhs_addr: ValueRef, + rhs_extra: ValueRef, + _input_ty: Ty<'tcx>) + -> ValueRef { + match op { + mir::BinOp::Eq => { + bcx.and( + bcx.icmp(llvm::IntEQ, lhs_addr, rhs_addr), + bcx.icmp(llvm::IntEQ, lhs_extra, rhs_extra) + ) + } + mir::BinOp::Ne => { + bcx.or( + bcx.icmp(llvm::IntNE, lhs_addr, rhs_addr), + bcx.icmp(llvm::IntNE, lhs_extra, rhs_extra) + ) + } + mir::BinOp::Le | mir::BinOp::Lt | + mir::BinOp::Ge | mir::BinOp::Gt => { + // a OP b ~ a.0 STRICT(OP) b.0 | (a.0 == b.0 && a.1 OP a.1) + let (op, strict_op) = match op { + mir::BinOp::Lt => (llvm::IntULT, llvm::IntULT), + mir::BinOp::Le => (llvm::IntULE, llvm::IntULT), + mir::BinOp::Gt => (llvm::IntUGT, llvm::IntUGT), + mir::BinOp::Ge => (llvm::IntUGE, llvm::IntUGT), + _ => bug!(), + }; + + bcx.or( + bcx.icmp(strict_op, lhs_addr, rhs_addr), + bcx.and( + bcx.icmp(llvm::IntEQ, lhs_addr, rhs_addr), + bcx.icmp(op, lhs_extra, rhs_extra) + ) + ) + } + _ => { + bug!("unexpected fat ptr binop"); } } } diff --git a/src/librustc_trans/mir/statement.rs b/src/librustc_trans/mir/statement.rs index 1167208955368..296a0e8049e08 100644 --- a/src/librustc_trans/mir/statement.rs +++ b/src/librustc_trans/mir/statement.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use rustc::mir::repr as mir; +use rustc::mir; use base; use common::{self, BlockAndBuilder}; @@ -30,7 +30,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { debug_loc.apply(bcx.fcx()); match statement.kind { mir::StatementKind::Assign(ref lvalue, ref rvalue) => { - if let Some(index) = self.mir.local_index(lvalue) { + if let mir::Lvalue::Local(index) = *lvalue { match self.locals[index] { LocalRef::Lvalue(tr_dest) => { self.trans_rvalue(bcx, tr_dest, rvalue, debug_loc) @@ -62,11 +62,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } mir::StatementKind::SetDiscriminant{ref lvalue, variant_index} => { let ty = self.monomorphized_lvalue_ty(lvalue); - let repr = adt::represent_type(bcx.ccx(), ty); let lvalue_transed = self.trans_lvalue(&bcx, lvalue); bcx.with_block(|bcx| adt::trans_set_discr(bcx, - &repr, + ty, lvalue_transed.llval, Disr::from(variant_index)) ); @@ -78,6 +77,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { mir::StatementKind::StorageDead(ref lvalue) => { self.trans_storage_liveness(bcx, lvalue, base::Lifetime::End) } + mir::StatementKind::Nop => bcx, } } @@ -86,7 +86,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { lvalue: &mir::Lvalue<'tcx>, intrinsic: base::Lifetime) -> BlockAndBuilder<'bcx, 'tcx> { - if let Some(index) = self.mir.local_index(lvalue) { + if let mir::Lvalue::Local(index) = *lvalue { if let LocalRef::Lvalue(tr_lval) = self.locals[index] { intrinsic.call(&bcx, tr_lval.llval); } diff --git a/src/librustc_trans/monomorphize.rs b/src/librustc_trans/monomorphize.rs index ab2a39864336f..270ce79620f8b 100644 --- a/src/librustc_trans/monomorphize.rs +++ b/src/librustc_trans/monomorphize.rs @@ -26,7 +26,7 @@ pub struct Instance<'tcx> { impl<'tcx> fmt::Display for Instance<'tcx> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - ppaux::parameterized(f, &self.substs, self.def, ppaux::Ns::Value, &[]) + ppaux::parameterized(f, &self.substs, self.def, &[]) } } 
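// Illustrative sketch, not part of the patch: the new `trans_fat_ptr_binop`
// above lowers comparisons between fat pointers (slices, trait objects) into
// component-wise unsigned integer comparisons on the (data pointer, extra word)
// pair -- equality checks both halves, ordering is lexicographic with a strict
// comparison on the data pointer first. The helpers below are hypothetical
// names showing that boolean logic at the value level; the patch itself emits
// the same shape as LLVM `icmp`/`and`/`or` instructions rather than calling
// the old `base::compare_fat_ptrs` helper.
fn fat_ptr_eq(a: (usize, usize), b: (usize, usize)) -> bool {
    // Eq: both the data pointer and the extra word must match
    // (Ne is the dual: either half differs).
    a.0 == b.0 && a.1 == b.1
}

fn fat_ptr_lt(a: (usize, usize), b: (usize, usize)) -> bool {
    // Lt: strict on the data pointer, falling back to the extra word on a tie,
    // i.e. `a OP b  ~  a.0 STRICT(OP) b.0 || (a.0 == b.0 && a.1 OP b.1)`.
    a.0 < b.0 || (a.0 == b.0 && a.1 < b.1)
}

fn fat_ptr_le(a: (usize, usize), b: (usize, usize)) -> bool {
    // Le: strict `<` on the data pointer, `<=` on the extra word when tied.
    // The emitted IR uses plain `and`/`or` of the `icmp` results instead of
    // short-circuiting branches, since both comparisons are cheap and
    // branch-free.
    a.0 < b.0 || (a.0 == b.0 && a.1 <= b.1)
}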
diff --git a/src/librustc_trans/partitioning.rs b/src/librustc_trans/partitioning.rs index a161bd199b1f6..625b43c7d1792 100644 --- a/src/librustc_trans/partitioning.rs +++ b/src/librustc_trans/partitioning.rs @@ -127,13 +127,14 @@ use rustc::session::config::NUMBERED_CODEGEN_UNIT_MARKER; use rustc::ty::TyCtxt; use rustc::ty::item_path::characteristic_def_id_of_type; use std::cmp::Ordering; -use std::hash::{Hash, Hasher, SipHasher}; +use std::hash::{Hash, Hasher}; use std::sync::Arc; +use std::collections::hash_map::DefaultHasher; use symbol_map::SymbolMap; use syntax::ast::NodeId; use syntax::parse::token::{self, InternedString}; use trans_item::TransItem; -use util::nodemap::{FnvHashMap, FnvHashSet, NodeSet}; +use util::nodemap::{FnvHashMap, FnvHashSet}; pub enum PartitioningStrategy { /// Generate one codegen unit per source-level module. @@ -188,7 +189,7 @@ impl<'tcx> CodegenUnit<'tcx> { } pub fn compute_symbol_name_hash(&self, tcx: TyCtxt, symbol_map: &SymbolMap) -> u64 { - let mut state = SipHasher::new(); + let mut state = DefaultHasher::new(); let all_items = self.items_in_deterministic_order(tcx, symbol_map); for (item, _) in all_items { let symbol_name = symbol_map.get(item).unwrap(); @@ -254,34 +255,26 @@ const FALLBACK_CODEGEN_UNIT: &'static str = "__rustc_fallback_codegen_unit"; pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>, trans_items: I, strategy: PartitioningStrategy, - inlining_map: &InliningMap<'tcx>, - reachable: &NodeSet) + inlining_map: &InliningMap<'tcx>) -> Vec> where I: Iterator> { let tcx = scx.tcx(); - if let PartitioningStrategy::FixedUnitCount(1) = strategy { - // If there is only a single codegen-unit, we can use a very simple - // scheme and don't have to bother with doing much analysis. - return vec![single_codegen_unit(tcx, trans_items, reachable)]; - } - // In the first step, we place all regular translation items into their // respective 'home' codegen unit. Regular translation items are all // functions and statics defined in the local crate. let mut initial_partitioning = place_root_translation_items(scx, - trans_items, - reachable); + trans_items); - debug_dump(tcx, "INITIAL PARTITONING:", initial_partitioning.codegen_units.iter()); + debug_dump(scx, "INITIAL PARTITONING:", initial_partitioning.codegen_units.iter()); // If the partitioning should produce a fixed count of codegen units, merge // until that count is reached. 
if let PartitioningStrategy::FixedUnitCount(count) = strategy { merge_codegen_units(&mut initial_partitioning, count, &tcx.crate_name[..]); - debug_dump(tcx, "POST MERGING:", initial_partitioning.codegen_units.iter()); + debug_dump(scx, "POST MERGING:", initial_partitioning.codegen_units.iter()); } // In the next step, we use the inlining map to determine which addtional @@ -291,7 +284,7 @@ pub fn partition<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>, let post_inlining = place_inlined_translation_items(initial_partitioning, inlining_map); - debug_dump(tcx, "POST INLINING:", post_inlining.0.iter()); + debug_dump(scx, "POST INLINING:", post_inlining.0.iter()); // Finally, sort by codegen unit name, so that we get deterministic results let mut result = post_inlining.0; @@ -310,8 +303,7 @@ struct PreInliningPartitioning<'tcx> { struct PostInliningPartitioning<'tcx>(Vec>); fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>, - trans_items: I, - _reachable: &NodeSet) + trans_items: I) -> PreInliningPartitioning<'tcx> where I: Iterator> { @@ -320,7 +312,7 @@ fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>, let mut codegen_units = FnvHashMap(); for trans_item in trans_items { - let is_root = !trans_item.is_instantiated_only_on_demand(); + let is_root = !trans_item.is_instantiated_only_on_demand(tcx); if is_root { let characteristic_def_id = characteristic_def_id_of_trans_item(scx, trans_item); @@ -350,6 +342,10 @@ fn place_root_translation_items<'a, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>, // This is a non-generic functions, we always // make it visible externally on the chance that // it might be used in another codegen unit. + // Later on base::internalize_symbols() will + // assign "internal" linkage to those symbols + // that are not referenced from other codegen + // units (and are not publicly visible). llvm::ExternalLinkage } else { // In the current setup, generic functions cannot @@ -454,7 +450,6 @@ fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartit // reliably in that case. new_codegen_unit.items.insert(trans_item, llvm::InternalLinkage); } else { - assert!(trans_item.is_instantiated_only_on_demand()); // We can't be sure if this will also be instantiated // somewhere else, so we add an instance here with // InternalLinkage so we don't get any conflicts. @@ -550,68 +545,6 @@ fn compute_codegen_unit_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, return token::intern_and_get_ident(&mod_path[..]); } -fn single_codegen_unit<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - trans_items: I, - reachable: &NodeSet) - -> CodegenUnit<'tcx> - where I: Iterator> -{ - let mut items = FnvHashMap(); - - for trans_item in trans_items { - let linkage = trans_item.explicit_linkage(tcx).unwrap_or_else(|| { - match trans_item { - TransItem::Static(node_id) => { - if reachable.contains(&node_id) { - llvm::ExternalLinkage - } else { - llvm::PrivateLinkage - } - } - TransItem::DropGlue(_) => { - llvm::InternalLinkage - } - TransItem::Fn(instance) => { - if trans_item.is_generic_fn() { - // FIXME(mw): Assigning internal linkage to all - // monomorphizations is potentially a waste of space - // since monomorphizations could be shared between - // crates. 
The main reason for making them internal is - // a limitation in MingW's binutils that cannot deal - // with COFF object that have more than 2^15 sections, - // which is something that can happen for large programs - // when every function gets put into its own COMDAT - // section. - llvm::InternalLinkage - } else if trans_item.is_from_extern_crate() { - // FIXME(mw): It would be nice if we could mark these as - // `AvailableExternallyLinkage`, since they should have - // been instantiated in the extern crate. But this - // sometimes leads to crashes on Windows because LLVM - // does not handle exception handling table instantiation - // reliably in that case. - llvm::InternalLinkage - } else if reachable.contains(&tcx.map - .as_local_node_id(instance.def) - .unwrap()) { - llvm::ExternalLinkage - } else { - // Functions that are not visible outside this crate can - // be marked as internal. - llvm::InternalLinkage - } - } - } - }); - - items.insert(trans_item, linkage); - } - - CodegenUnit::new( - numbered_codegen_unit_name(&tcx.crate_name[..], 0), - items) -} - fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString { token::intern_and_get_ident(&format!("{}{}{}", crate_name, @@ -619,7 +552,7 @@ fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString index)[..]) } -fn debug_dump<'a, 'b, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, +fn debug_dump<'a, 'b, 'tcx, I>(scx: &SharedCrateContext<'a, 'tcx>, label: &str, cgus: I) where I: Iterator>, @@ -628,10 +561,21 @@ fn debug_dump<'a, 'b, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, if cfg!(debug_assertions) { debug!("{}", label); for cgu in cgus { + let symbol_map = SymbolMap::build(scx, cgu.items + .iter() + .map(|(&trans_item, _)| trans_item)); debug!("CodegenUnit {}:", cgu.name); for (trans_item, linkage) in &cgu.items { - debug!(" - {} [{:?}]", trans_item.to_string(tcx), linkage); + let symbol_name = symbol_map.get_or_compute(scx, *trans_item); + let symbol_hash_start = symbol_name.rfind('h'); + let symbol_hash = symbol_hash_start.map(|i| &symbol_name[i ..]) + .unwrap_or(""); + + debug!(" - {} [{:?}] [{}]", + trans_item.to_string(scx.tcx()), + linkage, + symbol_hash); } debug!(""); diff --git a/src/librustc_trans/trans_item.rs b/src/librustc_trans/trans_item.rs index 44e613c4c2b04..8930387c046e2 100644 --- a/src/librustc_trans/trans_item.rs +++ b/src/librustc_trans/trans_item.rs @@ -28,7 +28,6 @@ use rustc::hir::def_id::DefId; use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; use rustc::ty::subst::Substs; use rustc_const_eval::fatal_const_eval_err; -use std::hash::{Hash, Hasher}; use syntax::ast::{self, NodeId}; use syntax::attr; use type_of; @@ -36,33 +35,13 @@ use glue; use abi::{Abi, FnType}; use back::symbol_names; -#[derive(PartialEq, Eq, Clone, Copy, Debug)] +#[derive(PartialEq, Eq, Clone, Copy, Debug, Hash)] pub enum TransItem<'tcx> { DropGlue(DropGlueKind<'tcx>), Fn(Instance<'tcx>), Static(NodeId) } -impl<'tcx> Hash for TransItem<'tcx> { - fn hash(&self, s: &mut H) { - match *self { - TransItem::DropGlue(t) => { - 0u8.hash(s); - t.hash(s); - }, - TransItem::Fn(instance) => { - 1u8.hash(s); - instance.def.hash(s); - (instance.substs as *const _ as usize).hash(s); - } - TransItem::Static(node_id) => { - 2u8.hash(s); - node_id.hash(s); - } - }; - } -} - impl<'a, 'tcx> TransItem<'tcx> { pub fn define(&self, ccx: &CrateContext<'a, 'tcx>) { @@ -241,19 +220,6 @@ impl<'a, 'tcx> TransItem<'tcx> { } } - pub fn requests_inline(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool { - match *self { - TransItem::Fn(ref 
instance) => { - instance.substs.types().next().is_some() || { - let attributes = tcx.get_attrs(instance.def); - attr::requests_inline(&attributes[..]) - } - } - TransItem::DropGlue(..) => true, - TransItem::Static(..) => false, - } - } - pub fn is_from_extern_crate(&self) -> bool { match *self { TransItem::Fn(ref instance) => !instance.def.is_local(), @@ -262,10 +228,18 @@ impl<'a, 'tcx> TransItem<'tcx> { } } - pub fn is_instantiated_only_on_demand(&self) -> bool { + /// True if the translation item should only be translated to LLVM IR if + /// it is referenced somewhere (like inline functions, for example). + pub fn is_instantiated_only_on_demand(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool { + if self.explicit_linkage(tcx).is_some() { + return false; + } + match *self { TransItem::Fn(ref instance) => { - !instance.def.is_local() || instance.substs.types().next().is_some() + !instance.def.is_local() || + instance.substs.types().next().is_some() || + attr::requests_inline(&tcx.get_attrs(instance.def)[..]) } TransItem::DropGlue(..) => true, TransItem::Static(..) => false, @@ -282,6 +256,18 @@ impl<'a, 'tcx> TransItem<'tcx> { } } + /// Returns true if there has to be a local copy of this TransItem in every + /// codegen unit that references it (as with inlined functions, for example) + pub fn needs_local_copy(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool { + // Currently everything that is instantiated only on demand is done so + // with "internal" linkage, so we need a copy to be present in every + // codegen unit. + // This is coincidental: We could also instantiate something only if it + // is referenced (e.g. a regular, private function) but place it in its + // own codegen unit with "external" linkage. + self.is_instantiated_only_on_demand(tcx) + } + pub fn explicit_linkage(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option { let def_id = match *self { TransItem::Fn(ref instance) => instance.def, @@ -324,7 +310,7 @@ impl<'a, 'tcx> TransItem<'tcx> { }, TransItem::Static(node_id) => { let def_id = hir_map.local_def_id(node_id); - let instance = Instance::new(def_id, Substs::empty(tcx)); + let instance = Instance::new(def_id, tcx.intern_substs(&[])); to_string_internal(tcx, "static ", instance) }, }; @@ -352,7 +338,7 @@ impl<'a, 'tcx> TransItem<'tcx> { TransItem::Fn(instance) => { format!("Fn({:?}, {})", instance.def, - instance.substs as *const _ as usize) + instance.substs.as_ptr() as usize) } TransItem::Static(id) => { format!("Static({:?})", id) @@ -466,7 +452,7 @@ pub fn push_unique_type_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, output.push_str("fn("); - let sig = tcx.erase_late_bound_regions(sig); + let sig = tcx.erase_late_bound_regions_and_normalize(sig); if !sig.inputs.is_empty() { for ¶meter_type in &sig.inputs { push_unique_type_name(tcx, parameter_type, output); diff --git a/src/librustc_trans/type_.rs b/src/librustc_trans/type_.rs index d191591e082ad..03a71827b473b 100644 --- a/src/librustc_trans/type_.rs +++ b/src/librustc_trans/type_.rs @@ -18,6 +18,7 @@ use context::CrateContext; use util::nodemap::FnvHashMap; use syntax::ast; +use rustc::ty::layout; use std::ffi::CString; use std::fmt; @@ -299,6 +300,26 @@ impl Type { llvm::LLVMGetIntTypeWidth(self.to_ref()) as u64 } } + + pub fn from_integer(cx: &CrateContext, i: layout::Integer) -> Type { + use rustc::ty::layout::Integer::*; + match i { + I1 => Type::i1(cx), + I8 => Type::i8(cx), + I16 => Type::i16(cx), + I32 => Type::i32(cx), + I64 => Type::i64(cx), + } + } + + pub fn from_primitive(ccx: &CrateContext, p: layout::Primitive) 
-> Type { + match p { + layout::Int(i) => Type::from_integer(ccx, i), + layout::F32 => Type::f32(ccx), + layout::F64 => Type::f64(ccx), + layout::Pointer => bug!("It is not possible to convert Pointer directly to Type.") + } + } } /* Memory-managed object interface to type handles. */ diff --git a/src/librustc_trans/type_of.rs b/src/librustc_trans/type_of.rs index 3873c24a9f676..132b0a910b9c1 100644 --- a/src/librustc_trans/type_of.rs +++ b/src/librustc_trans/type_of.rs @@ -22,17 +22,6 @@ use type_::Type; use syntax::ast; -// LLVM doesn't like objects that are too big. Issue #17913 -fn ensure_array_fits_in_address_space<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - llet: Type, - size: machine::llsize, - scapegoat: Ty<'tcx>) { - let esz = machine::llsize_of_alloc(ccx, llet); - match esz.checked_mul(size) { - Some(n) if n < ccx.obj_size_bound() => {} - _ => { ccx.report_overbig_object(scapegoat) } - } -} // A "sizing type" is an LLVM type, the size and alignment of which are // guaranteed to be equivalent to what you would get out of `type_of()`. It's @@ -81,7 +70,6 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ ty::TyArray(ty, size) => { let llty = sizing_type_of(cx, ty); let size = size as u64; - ensure_array_fits_in_address_space(cx, llty, size, t); Type::array(&llty, size) } @@ -98,13 +86,11 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ } let llet = type_of(cx, e); let n = t.simd_size(cx.tcx()) as u64; - ensure_array_fits_in_address_space(cx, llet, n, t); Type::vector(&llet, n) } ty::TyTuple(..) | ty::TyAdt(..) | ty::TyClosure(..) => { - let repr = adt::represent_type(cx, t); - adt::sizing_type_of(cx, &repr, false) + adt::sizing_type_of(cx, t, false) } ty::TyProjection(..) | ty::TyInfer(..) | ty::TyParam(..) | @@ -242,8 +228,7 @@ pub fn in_memory_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ty::TyClosure(..) => { // Only create the named struct, but don't fill it in. We // fill it in *after* placing it into the type cache. - let repr = adt::represent_type(cx, t); - adt::incomplete_type_of(cx, &repr, "closure") + adt::incomplete_type_of(cx, t, "closure") } ty::TyBox(ty) | @@ -266,11 +251,6 @@ pub fn in_memory_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ty::TyArray(ty, size) => { let size = size as u64; - // we must use `sizing_type_of` here as the type may - // not be fully initialized. - let szty = sizing_type_of(cx, ty); - ensure_array_fits_in_address_space(cx, szty, size, t); - let llty = in_memory_type_of(cx, ty); Type::array(&llty, size) } @@ -284,14 +264,12 @@ pub fn in_memory_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ty::TyFnDef(..) => Type::nil(cx), ty::TyFnPtr(f) => { - let sig = cx.tcx().erase_late_bound_regions(&f.sig); - let sig = cx.tcx().normalize_associated_type(&sig); + let sig = cx.tcx().erase_late_bound_regions_and_normalize(&f.sig); FnType::new(cx, f.abi, &sig, &[]).llvm_type(cx).ptr_to() } ty::TyTuple(ref tys) if tys.is_empty() => Type::nil(cx), ty::TyTuple(..) => { - let repr = adt::represent_type(cx, t); - adt::type_of(cx, &repr) + adt::type_of(cx, t) } ty::TyAdt(..) 
if t.is_simd() => { let e = t.simd_type(cx.tcx()); @@ -302,7 +280,6 @@ pub fn in_memory_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> } let llet = in_memory_type_of(cx, e); let n = t.simd_size(cx.tcx()) as u64; - ensure_array_fits_in_address_space(cx, llet, n, t); Type::vector(&llet, n) } ty::TyAdt(def, substs) => { @@ -310,9 +287,8 @@ pub fn in_memory_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> // fill it in *after* placing it into the type cache. This // avoids creating more than one copy of the enum when one // of the enum's variants refers to the enum itself. - let repr = adt::represent_type(cx, t); let name = llvm_type_name(cx, def.did, substs); - adt::incomplete_type_of(cx, &repr, &name[..]) + adt::incomplete_type_of(cx, t, &name[..]) } ty::TyInfer(..) | @@ -329,8 +305,7 @@ pub fn in_memory_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> // If this was an enum or struct, fill in the type now. match t.sty { ty::TyAdt(..) | ty::TyClosure(..) if !t.is_simd() => { - let repr = adt::represent_type(cx, t); - adt::finish_type_of(cx, &repr, &mut llty); + adt::finish_type_of(cx, t, &mut llty); } _ => () } @@ -340,8 +315,8 @@ pub fn in_memory_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> pub fn align_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> machine::llalign { - let llty = sizing_type_of(cx, t); - machine::llalign_of_min(cx, llty) + let layout = cx.layout_of(t); + layout.align(&cx.tcx().data_layout).abi() as machine::llalign } fn llvm_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, @@ -356,7 +331,7 @@ fn llvm_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, format!("{}<{}>", base, strings.join(", ")) }; - if did.krate == 0 { + if did.is_local() { tstr } else { format!("{}.{}", did.krate, tstr) diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 334b7a5063a3a..8799050b1b999 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -55,7 +55,7 @@ use hir::def_id::DefId; use hir::print as pprust; use middle::resolve_lifetime as rl; use rustc::lint; -use rustc::ty::subst::{Subst, Substs}; +use rustc::ty::subst::{Kind, Subst, Substs}; use rustc::traits; use rustc::ty::{self, Ty, TyCtxt, ToPredicate, TypeFoldable}; use rustc::ty::wf::object_region_bounds; @@ -124,7 +124,7 @@ pub trait AstConv<'gcx, 'tcx> { /// Same as ty_infer, but with a known type parameter definition. fn ty_infer_for_def(&self, _def: &ty::TypeParameterDef<'tcx>, - _substs: &Substs<'tcx>, + _substs: &[Kind<'tcx>], span: Span) -> Ty<'tcx> { self.ty_infer(span) } @@ -561,15 +561,16 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { /// Returns the appropriate lifetime to use for any output lifetimes /// (if one exists) and a vector of the (pattern, number of lifetimes) /// corresponding to each input type/pattern. 
- fn find_implied_output_region(&self, - input_tys: &[Ty<'tcx>], - input_pats: Vec) -> ElidedLifetime + fn find_implied_output_region(&self, + input_tys: &[Ty<'tcx>], + input_pats: F) -> ElidedLifetime + where F: FnOnce() -> Vec { let tcx = self.tcx(); let mut lifetimes_for_params = Vec::new(); let mut possible_implied_output_region = None; - for (input_type, input_pat) in input_tys.iter().zip(input_pats) { + for input_type in input_tys.iter() { let mut regions = FnvHashSet(); let have_bound_regions = tcx.collect_regions(input_type, &mut regions); @@ -583,8 +584,11 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { possible_implied_output_region = regions.iter().cloned().next(); } + // Use a placeholder for `name` because computing it can be + // expensive and we don't want to do it until we know it's + // necessary. lifetimes_for_params.push(ElisionFailureInfo { - name: input_pat, + name: String::new(), lifetime_count: regions.len(), have_bound_regions: have_bound_regions }); @@ -593,6 +597,11 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { if lifetimes_for_params.iter().map(|e| e.lifetime_count).sum::() == 1 { Ok(*possible_implied_output_region.unwrap()) } else { + // Fill in the expensive `name` fields now that we know they're + // needed. + for (info, input_pat) in lifetimes_for_params.iter_mut().zip(input_pats()) { + info.name = input_pat; + } Err(Some(lifetimes_for_params)) } } @@ -620,16 +629,17 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { fn convert_parenthesized_parameters(&self, rscope: &RegionScope, - region_substs: &Substs<'tcx>, + region_substs: &[Kind<'tcx>], data: &hir::ParenthesizedParameterData) -> (Ty<'tcx>, ConvertedBinding<'tcx>) { let anon_scope = rscope.anon_type_scope(); let binding_rscope = MaybeWithAnonTypes::new(BindingRscope::new(), anon_scope); - let inputs: Vec<_> = data.inputs.iter().map(|a_t| { + let inputs = self.tcx().mk_type_list(data.inputs.iter().map(|a_t| { self.ast_ty_arg_to_ty(&binding_rscope, None, region_substs, a_t) - }).collect(); - let input_params = vec![String::new(); inputs.len()]; + })); + let inputs_len = inputs.len(); + let input_params = || vec![String::new(); inputs_len]; let implied_output_region = self.find_implied_output_region(&inputs, input_params); let (output, output_span) = match data.output { @@ -650,7 +660,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { span: output_span }; - (self.tcx().mk_tup(inputs), output_binding) + (self.tcx().mk_ty(ty::TyTuple(inputs)), output_binding) } pub fn instantiate_poly_trait_ref(&self, @@ -795,7 +805,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { // For now, require that parenthetical notation be used // only with `Fn()` etc. if !self.tcx().sess.features.borrow().unboxed_closures && trait_def.paren_sugar { - emit_feature_err(&self.tcx().sess.parse_sess.span_diagnostic, + emit_feature_err(&self.tcx().sess.parse_sess, "unboxed_closures", span, GateIssue::Language, "\ the precise format of `Fn`-family traits' \ @@ -807,7 +817,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { // For now, require that parenthetical notation be used // only with `Fn()` etc. 
if !self.tcx().sess.features.borrow().unboxed_closures && !trait_def.paren_sugar { - emit_feature_err(&self.tcx().sess.parse_sess.span_diagnostic, + emit_feature_err(&self.tcx().sess.parse_sess, "unboxed_closures", span, GateIssue::Language, "\ parenthetical notation is only stable when used with `Fn`-family traits"); @@ -1134,16 +1144,23 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { return tcx.types.err; } - let mut associated_types: FnvHashSet<(DefId, ast::Name)> = - traits::supertraits(tcx, principal) - .flat_map(|tr| { - let trait_def = tcx.lookup_trait_def(tr.def_id()); - trait_def.associated_type_names - .clone() - .into_iter() - .map(move |associated_type_name| (tr.def_id(), associated_type_name)) - }) - .collect(); + let mut associated_types = FnvHashSet::default(); + for tr in traits::supertraits(tcx, principal) { + if let Some(trait_id) = tcx.map.as_local_node_id(tr.def_id()) { + use collect::trait_associated_type_names; + + associated_types.extend(trait_associated_type_names(tcx, trait_id) + .map(|name| (tr.def_id(), name))) + } else { + let trait_items = tcx.impl_or_trait_items(tr.def_id()); + associated_types.extend(trait_items.iter().filter_map(|&def_id| { + match tcx.impl_or_trait_item(def_id) { + ty::TypeTraitItem(ref item) => Some(item.name), + _ => None + } + }).map(|name| (tr.def_id(), name))); + } + } for projection_bound in &projection_bounds { let pair = (projection_bound.0.projection_ty.trait_ref.def_id, @@ -1234,14 +1251,28 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { -> Result, ErrorReported> { if bounds.is_empty() { - span_err!(self.tcx().sess, span, E0220, + struct_span_err!(self.tcx().sess, span, E0220, "associated type `{}` not found for `{}`", assoc_name, - ty_param_name); + ty_param_name) + .span_label(span, &format!("associated type `{}` not found", assoc_name)) + .emit(); return Err(ErrorReported); } if bounds.len() > 1 { + let spans = bounds.iter().map(|b| { + self.tcx().impl_or_trait_items(b.def_id()).iter() + .find(|&&def_id| { + match self.tcx().impl_or_trait_item(def_id) { + ty::TypeTraitItem(ref item) => item.name.as_str() == assoc_name, + _ => false + } + }) + .and_then(|&def_id| self.tcx().map.as_local_node_id(def_id)) + .and_then(|node_id| self.tcx().map.opt_span(node_id)) + }); + let mut err = struct_span_err!( self.tcx().sess, span, E0221, "ambiguous associated type `{}` in bounds of `{}`", @@ -1249,11 +1280,17 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { ty_param_name); err.span_label(span, &format!("ambiguous associated type `{}`", assoc_name)); - for bound in &bounds { - span_note!(&mut err, span, - "associated type `{}` could derive from `{}`", - ty_param_name, - bound); + for span_and_bound in spans.zip(&bounds) { + if let Some(span) = span_and_bound.0 { + err.span_label(span, &format!("ambiguous `{}` from `{}`", + assoc_name, + span_and_bound.1)); + } else { + span_note!(&mut err, span, + "associated type `{}` could derive from `{}`", + ty_param_name, + span_and_bound.1); + } } err.emit(); } @@ -1284,23 +1321,17 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { // Find the type of the associated item, and the trait where the associated // item is declared. let bound = match (&ty.sty, ty_path_def) { - (_, Def::SelfTy(Some(trait_did), Some(impl_id))) => { - // For Def::SelfTy() values inlined from another crate, the - // impl_id will be DUMMY_NODE_ID, which would cause problems - // here. But we should never run into an impl from another crate - // in this pass. 
- assert!(impl_id != ast::DUMMY_NODE_ID); - + (_, Def::SelfTy(Some(_), Some(impl_def_id))) => { // `Self` in an impl of a trait - we have a concrete self type and a // trait reference. - let trait_ref = tcx.impl_trait_ref(tcx.map.local_def_id(impl_id)).unwrap(); + let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); let trait_ref = if let Some(free_substs) = self.get_free_substs() { trait_ref.subst(tcx, free_substs) } else { trait_ref }; - if self.ensure_super_predicates(span, trait_did).is_err() { + if self.ensure_super_predicates(span, trait_ref.def_id).is_err() { return (tcx.types.err, Def::Err); } @@ -1372,7 +1403,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { item.expect("missing associated type").def_id() }; - (ty, Def::AssociatedTy(trait_did, item_did)) + (ty, Def::AssociatedTy(item_did)) } fn qpath_to_ty(&self, @@ -1428,7 +1459,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { fn ast_ty_arg_to_ty(&self, rscope: &RegionScope, def: Option<&ty::TypeParameterDef<'tcx>>, - region_substs: &Substs<'tcx>, + region_substs: &[Kind<'tcx>], ast_ty: &hir::Ty) -> Ty<'tcx> { @@ -1453,7 +1484,8 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { def: Def, opt_self_ty: Option>, base_path_ref_id: ast::NodeId, - base_segments: &[hir::PathSegment]) + base_segments: &[hir::PathSegment], + permit_variants: bool) -> Ty<'tcx> { let tcx = self.tcx(); @@ -1484,6 +1516,16 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { did, base_segments.last().unwrap()) } + Def::Variant(did) if permit_variants => { + // Convert "variant type" as if it were a real type. + // The resulting `Ty` is type of the variant's enum for now. + tcx.prohibit_type_params(base_segments.split_last().unwrap().1); + self.ast_path_to_ty(rscope, + span, + param_mode, + tcx.parent_def_id(did).unwrap(), + base_segments.last().unwrap()) + } Def::TyParam(did) => { tcx.prohibit_type_params(base_segments); @@ -1504,16 +1546,11 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { tcx.types.err } } - Def::SelfTy(_, Some(impl_id)) => { + Def::SelfTy(_, Some(def_id)) => { // Self in impl (we know the concrete type). - // For Def::SelfTy() values inlined from another crate, the - // impl_id will be DUMMY_NODE_ID, which would cause problems - // here. But we should never run into an impl from another crate - // in this pass. - assert!(impl_id != ast::DUMMY_NODE_ID); - tcx.prohibit_type_params(base_segments); + let impl_id = tcx.map.as_local_node_id(def_id).unwrap(); let ty = tcx.node_id_to_type(impl_id); if let Some(free_substs) = self.get_free_substs() { ty.subst(tcx, free_substs) @@ -1526,8 +1563,9 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { tcx.prohibit_type_params(base_segments); tcx.mk_self_type() } - Def::AssociatedTy(trait_did, _) => { + Def::AssociatedTy(def_id) => { tcx.prohibit_type_params(&base_segments[..base_segments.len()-2]); + let trait_did = tcx.parent_def_id(def_id).unwrap(); self.qpath_to_ty(rscope, span, param_mode, @@ -1577,7 +1615,8 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { opt_self_ty: Option>, base_path_ref_id: ast::NodeId, base_segments: &[hir::PathSegment], - assoc_segments: &[hir::PathSegment]) + assoc_segments: &[hir::PathSegment], + permit_variants: bool) -> (Ty<'tcx>, Def) { // Convert the base type. 
debug!("finish_resolving_def_to_ty(base_def={:?}, \ @@ -1592,7 +1631,8 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { base_def, opt_self_ty, base_path_ref_id, - base_segments); + base_segments, + permit_variants); debug!("finish_resolving_def_to_ty: base_def_to_ty returned {:?}", base_ty); // If any associated type segments remain, attempt to resolve them. @@ -1620,13 +1660,12 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { let tcx = self.tcx(); let cache = self.ast_ty_to_ty_cache(); - match cache.borrow().get(&ast_ty.id) { - Some(ty) => { return ty; } - None => { } + if let Some(ty) = cache.borrow().get(&ast_ty.id) { + return ty; } let result_ty = match ast_ty.node { - hir::TyVec(ref ty) => { + hir::TySlice(ref ty) => { tcx.mk_slice(self.ast_ty_to_ty(rscope, &ty)) } hir::TyObjectSum(ref ty, ref bounds) => { @@ -1652,10 +1691,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { tcx.types.never }, hir::TyTup(ref fields) => { - let flds = fields.iter() - .map(|t| self.ast_ty_to_ty(rscope, &t)) - .collect(); - tcx.mk_tup(flds) + tcx.mk_tup(fields.iter().map(|t| self.ast_ty_to_ty(rscope, &t))) } hir::TyBareFn(ref bf) => { require_c_abi_if_variadic(tcx, &bf.decl, bf.abi, ast_ty.span); @@ -1752,7 +1788,8 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { opt_self_ty, ast_ty.id, &path.segments[..base_ty_end], - &path.segments[base_ty_end..]); + &path.segments[base_ty_end..], + false); // Write back the new resolution. if path_res.depth != 0 { @@ -1761,7 +1798,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { ty } - hir::TyFixedLengthVec(ref ty, ref e) => { + hir::TyArray(ref ty, ref e) => { if let Ok(length) = eval_length(tcx.global_tcx(), &e, "array length") { tcx.mk_array(self.ast_ty_to_ty(rscope, &ty), length) } else { @@ -1862,15 +1899,22 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { }; let arg_tys: Vec = arg_params.iter().map(|a| self.ty_of_arg(&rb, a, None)).collect(); - let arg_pats: Vec = - arg_params.iter().map(|a| pprust::pat_to_string(&a.pat)).collect(); // Second, if there was exactly one lifetime (either a substitution or a // reference) in the arguments, then any anonymous regions in the output // have that lifetime. let implied_output_region = match explicit_self_category { ty::ExplicitSelfCategory::ByReference(region, _) => Ok(*region), - _ => self.find_implied_output_region(&arg_tys, arg_pats) + _ => { + // `pat_to_string` is expensive and + // `find_implied_output_region` only needs its result when + // there's an error. So we wrap it in a closure to avoid + // calling it until necessary. 
+ let arg_pats = || { + arg_params.iter().map(|a| pprust::pat_to_string(&a.pat)).collect() + }; + self.find_implied_output_region(&arg_tys, arg_pats) + } }; let output_ty = match decl.output { @@ -2202,10 +2246,12 @@ fn check_type_argument_count(tcx: TyCtxt, span: Span, supplied: usize, } else { "expected" }; + let arguments_plural = if required == 1 { "" } else { "s" }; struct_span_err!(tcx.sess, span, E0243, "wrong number of type arguments") .span_label( span, - &format!("{} {} type arguments, found {}", expected, required, supplied) + &format!("{} {} type argument{}, found {}", + expected, required, arguments_plural, supplied) ) .emit(); } else if supplied > accepted { @@ -2216,11 +2262,12 @@ fn check_type_argument_count(tcx: TyCtxt, span: Span, supplied: usize, } else { format!("expected {}", accepted) }; + let arguments_plural = if accepted == 1 { "" } else { "s" }; struct_span_err!(tcx.sess, span, E0244, "wrong number of type arguments") .span_label( span, - &format!("{} type arguments, found {}", expected, supplied) + &format!("{} type argument{}, found {}", expected, arguments_plural, supplied) ) .emit(); } diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index dd3ac6ff2d457..c842514227ca0 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -8,12 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use hir::def::Def; +use rustc::hir::{self, PatKind}; +use rustc::hir::def::{Def, CtorKind}; +use rustc::hir::pat_util::EnumerateAndAdjustIterator; use rustc::infer::{self, InferOk, TypeOrigin}; -use hir::pat_util::EnumerateAndAdjustIterator; -use rustc::ty::{self, Ty, TypeFoldable, LvaluePreference, VariantKind}; +use rustc::ty::{self, Ty, TypeFoldable, LvaluePreference}; use check::{FnCtxt, Expectation}; -use lint; use util::nodemap::FnvHashMap; use std::collections::hash_map::Entry::{Occupied, Vacant}; @@ -23,9 +23,6 @@ use syntax::codemap::Spanned; use syntax::ptr::P; use syntax_pos::Span; -use rustc::hir::{self, PatKind}; -use rustc::hir::print as pprust; - impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { pub fn check_pat(&self, pat: &'gcx hir::Pat, expected: Ty<'tcx>) { let tcx = self.tcx; @@ -134,7 +131,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // what the type of the binding `x` ought to be match tcx.expect_def(pat.id) { Def::Err => {} - Def::Local(_, var_id) => { + Def::Local(def_id) => { + let var_id = tcx.map.as_local_node_id(def_id).unwrap(); if var_id != pat.id { let vt = self.local_ty(pat.span, var_id); self.demand_eqtype(pat.span, vt, typ); @@ -170,8 +168,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } let max_len = cmp::max(expected_len, elements.len()); - let element_tys: Vec<_> = (0 .. 
max_len).map(|_| self.next_ty_var()).collect(); - let pat_ty = tcx.mk_tup(element_tys.clone()); + let element_tys_iter = (0..max_len).map(|_| self.next_ty_var()); + let element_tys = tcx.mk_type_list(element_tys_iter); + let pat_ty = tcx.mk_ty(ty::TyTuple(element_tys)); self.demand_eqtype(pat.span, expected, pat_ty); for (i, elem) in elements.iter().enumerate_and_adjust(max_len, ddpos) { self.check_pat(elem, &element_tys[i]); @@ -226,7 +225,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { tcx.types.err } } - PatKind::Vec(ref before, ref slice, ref after) => { + PatKind::Slice(ref before, ref slice, ref after) => { let expected_ty = self.structurally_resolved_type(pat.span, expected); let (inner_ty, slice_ty) = match expected_ty.sty { ty::TyArray(inner_ty, size) => { @@ -490,8 +489,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { expected: Ty<'tcx>) -> Ty<'tcx> { // Resolve the path and check the definition for errors. - let (variant, pat_ty) = if let Some(variant_ty) = self.check_struct_path(path, pat.id, - pat.span) { + let (variant, pat_ty) = if let Some(variant_ty) = self.check_struct_path(path, pat.id) { variant_ty } else { for field in fields { @@ -515,10 +513,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { expected: Ty<'tcx>) -> Ty<'tcx> { let tcx = self.tcx; - let report_unexpected_def = || { + let report_unexpected_def = |def: Def| { span_err!(tcx.sess, pat.span, E0533, - "`{}` does not name a unit variant, unit struct or a constant", - pprust::path_to_string(path)); + "expected unit struct/variant or constant, found {} `{}`", + def.kind_name(), path); }; // Resolve the path and check the definition for errors. @@ -530,18 +528,13 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { return tcx.types.err; } Def::Method(..) => { - report_unexpected_def(); + report_unexpected_def(def); return tcx.types.err; } - Def::Variant(..) | Def::Struct(..) => { - let variant = tcx.expect_variant_def(def); - if variant.kind != VariantKind::Unit { - report_unexpected_def(); - return tcx.types.err; - } - } + Def::VariantCtor(_, CtorKind::Const) | + Def::StructCtor(_, CtorKind::Const) | Def::Const(..) | Def::AssociatedConst(..) => {} // OK - _ => bug!("unexpected pattern definition {:?}", def) + _ => bug!("unexpected pattern definition: {:?}", def) } // Type check the path. @@ -563,17 +556,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.check_pat(&pat, tcx.types.err); } }; - let report_unexpected_def = |is_lint| { - let msg = format!("`{}` does not name a tuple variant or a tuple struct", - pprust::path_to_string(path)); - if is_lint { - tcx.sess.add_lint(lint::builtin::MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT, - pat.id, pat.span, msg); - } else { - struct_span_err!(tcx.sess, pat.span, E0164, "{}", msg) - .span_label(pat.span, &format!("not a tuple variant or struct")).emit(); - on_error(); - } + let report_unexpected_def = |def: Def| { + let msg = format!("expected tuple struct/variant, found {} `{}`", + def.kind_name(), path); + struct_span_err!(tcx.sess, pat.span, E0164, "{}", msg) + .span_label(pat.span, &format!("not a tuple variant or struct")).emit(); + on_error(); }; // Resolve the path and check the definition for errors. @@ -584,33 +572,21 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { on_error(); return tcx.types.err; } - Def::Const(..) | Def::AssociatedConst(..) | Def::Method(..) => { - report_unexpected_def(false); + Def::AssociatedConst(..) | Def::Method(..) => { + report_unexpected_def(def); return tcx.types.err; } - Def::Variant(..) | Def::Struct(..) 
=> { + Def::VariantCtor(_, CtorKind::Fn) | + Def::StructCtor(_, CtorKind::Fn) => { tcx.expect_variant_def(def) } - _ => bug!("unexpected pattern definition {:?}", def) + _ => bug!("unexpected pattern definition: {:?}", def) }; - if variant.kind == VariantKind::Unit && subpats.is_empty() && ddpos.is_some() { - // Matching unit structs with tuple variant patterns (`UnitVariant(..)`) - // is allowed for backward compatibility. - report_unexpected_def(true); - } else if variant.kind != VariantKind::Tuple { - report_unexpected_def(false); - return tcx.types.err; - } // Type check the path. let pat_ty = self.instantiate_value_path(segments, opt_ty, def, pat.span, pat.id); - let pat_ty = if pat_ty.is_fn() { - // Replace constructor type with constructed type for tuple struct patterns. - tcx.no_late_bound_regions(&pat_ty.fn_ret()).unwrap() - } else { - // Leave the type as is for unit structs (backward compatibility). - pat_ty - }; + // Replace constructor type with constructed type for tuple struct patterns. + let pat_ty = tcx.no_late_bound_regions(&pat_ty.fn_ret()).expect("expected fn type"); self.demand_eqtype(pat.span, expected, pat_ty); // Type check subpatterns. @@ -625,16 +601,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.check_pat(&subpat, field_ty); } } else { - let subpats_ending = if subpats.len() == 1 { - "" - } else { - "s" - }; - let fields_ending = if variant.fields.len() == 1 { - "" - } else { - "s" - }; + let subpats_ending = if subpats.len() == 1 { "" } else { "s" }; + let fields_ending = if variant.fields.len() == 1 { "" } else { "s" }; struct_span_err!(tcx.sess, pat.span, E0023, "this pattern has {} field{}, but the corresponding {} has {} field{}", subpats.len(), subpats_ending, def.kind_name(), @@ -674,14 +642,14 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { for &Spanned { node: ref field, span } in fields { let field_ty = match used_fields.entry(field.name) { Occupied(occupied) => { - let mut err = struct_span_err!(tcx.sess, span, E0025, - "field `{}` bound multiple times \ - in the pattern", - field.name); - span_note!(&mut err, *occupied.get(), - "field `{}` previously bound here", - field.name); - err.emit(); + struct_span_err!(tcx.sess, span, E0025, + "field `{}` bound multiple times \ + in the pattern", + field.name) + .span_label(span, + &format!("multiple uses of `{}` in pattern", field.name)) + .span_label(*occupied.get(), &format!("first use of `{}`", field.name)) + .emit(); tcx.types.err } Vacant(vacant) => { diff --git a/src/librustc_typeck/check/assoc.rs b/src/librustc_typeck/check/assoc.rs index 41f34b9040e83..9610477d8fd91 100644 --- a/src/librustc_typeck/check/assoc.rs +++ b/src/librustc_typeck/check/assoc.rs @@ -9,13 +9,13 @@ // except according to those terms. use rustc::infer::InferCtxt; -use rustc::traits::{self, FulfillmentContext, Normalized, MiscObligation, - SelectionContext, ObligationCause}; +use rustc::traits::{self, FulfillmentContext, Normalized, MiscObligation, SelectionContext, + ObligationCause}; use rustc::ty::fold::TypeFoldable; use syntax::ast; use syntax_pos::Span; -//FIXME(@jroesch): Ideally we should be able to drop the fulfillment_cx argument. +// FIXME(@jroesch): Ideally we should be able to drop the fulfillment_cx argument. 
pub fn normalize_associated_types_in<'a, 'gcx, 'tcx, T>( infcx: &InferCtxt<'a, 'gcx, 'tcx>, fulfillment_cx: &mut FulfillmentContext<'tcx>, diff --git a/src/librustc_typeck/check/autoderef.rs b/src/librustc_typeck/check/autoderef.rs index 19261a2447f91..900c22a8176ed 100644 --- a/src/librustc_typeck/check/autoderef.rs +++ b/src/librustc_typeck/check/autoderef.rs @@ -16,7 +16,6 @@ use rustc::traits; use rustc::ty::{self, Ty, TraitRef}; use rustc::ty::{ToPredicate, TypeFoldable}; use rustc::ty::{MethodCall, MethodCallee}; -use rustc::ty::subst::Substs; use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue}; use rustc::hir; @@ -26,7 +25,7 @@ use syntax::parse::token; #[derive(Copy, Clone, Debug)] enum AutoderefKind { Builtin, - Overloaded + Overloaded, } pub struct Autoderef<'a, 'gcx: 'tcx, 'tcx: 'a> { @@ -35,7 +34,7 @@ pub struct Autoderef<'a, 'gcx: 'tcx, 'tcx: 'a> { cur_ty: Ty<'tcx>, obligations: Vec>, at_start: bool, - span: Span + span: Span, } impl<'a, 'gcx, 'tcx> Iterator for Autoderef<'a, 'gcx, 'tcx> { @@ -45,7 +44,8 @@ impl<'a, 'gcx, 'tcx> Iterator for Autoderef<'a, 'gcx, 'tcx> { let tcx = self.fcx.tcx; debug!("autoderef: steps={:?}, cur_ty={:?}", - self.steps, self.cur_ty); + self.steps, + self.cur_ty); if self.at_start { self.at_start = false; debug!("autoderef stage #0 is {:?}", self.cur_ty); @@ -54,11 +54,13 @@ impl<'a, 'gcx, 'tcx> Iterator for Autoderef<'a, 'gcx, 'tcx> { if self.steps.len() == tcx.sess.recursion_limit.get() { // We've reached the recursion limit, error gracefully. - struct_span_err!(tcx.sess, self.span, E0055, - "reached the recursion limit while auto-dereferencing {:?}", - self.cur_ty) - .span_label(self.span, &format!("deref recursion limit reached")) - .emit(); + struct_span_err!(tcx.sess, + self.span, + E0055, + "reached the recursion limit while auto-dereferencing {:?}", + self.cur_ty) + .span_label(self.span, &format!("deref recursion limit reached")) + .emit(); return None; } @@ -72,7 +74,7 @@ impl<'a, 'gcx, 'tcx> Iterator for Autoderef<'a, 'gcx, 'tcx> { } else { match self.overloaded_deref_ty(self.cur_ty) { Some(ty) => (AutoderefKind::Overloaded, ty), - _ => return None + _ => return None, } }; @@ -81,8 +83,10 @@ impl<'a, 'gcx, 'tcx> Iterator for Autoderef<'a, 'gcx, 'tcx> { } self.steps.push((self.cur_ty, kind)); - debug!("autoderef stage #{:?} is {:?} from {:?}", self.steps.len(), - new_ty, (self.cur_ty, kind)); + debug!("autoderef stage #{:?} is {:?} from {:?}", + self.steps.len(), + new_ty, + (self.cur_ty, kind)); self.cur_ty = new_ty; Some((self.cur_ty, self.steps.len())) @@ -99,9 +103,9 @@ impl<'a, 'gcx, 'tcx> Autoderef<'a, 'gcx, 'tcx> { let trait_ref = TraitRef { def_id: match tcx.lang_items.deref_trait() { Some(f) => f, - None => return None + None => return None, }, - substs: Substs::new_trait(tcx, self.cur_ty, &[]) + substs: tcx.mk_substs_trait(self.cur_ty, &[]), }; let cause = traits::ObligationCause::misc(self.span, self.fcx.body_id); @@ -113,15 +117,13 @@ impl<'a, 'gcx, 'tcx> Autoderef<'a, 'gcx, 'tcx> { return None; } - let normalized = traits::normalize_projection_type( - &mut selcx, - ty::ProjectionTy { - trait_ref: trait_ref, - item_name: token::intern("Target") - }, - cause, - 0 - ); + let normalized = traits::normalize_projection_type(&mut selcx, + ty::ProjectionTy { + trait_ref: trait_ref, + item_name: token::intern("Target"), + }, + cause, + 0); debug!("overloaded_deref_ty({:?}) = {:?}", ty, normalized); self.obligations.extend(normalized.obligations); @@ -134,17 +136,23 @@ impl<'a, 'gcx, 'tcx> Autoderef<'a, 'gcx, 'tcx> { } pub 
fn finalize<'b, I>(self, pref: LvaluePreference, exprs: I) - where I: IntoIterator + where I: IntoIterator { - let methods : Vec<_> = self.steps.iter().map(|&(ty, kind)| { - if let AutoderefKind::Overloaded = kind { - self.fcx.try_overloaded_deref(self.span, None, ty, pref) - } else { - None - } - }).collect(); + let methods: Vec<_> = self.steps + .iter() + .map(|&(ty, kind)| { + if let AutoderefKind::Overloaded = kind { + self.fcx.try_overloaded_deref(self.span, None, ty, pref) + } else { + None + } + }) + .collect(); - debug!("finalize({:?}) - {:?},{:?}", pref, methods, self.obligations); + debug!("finalize({:?}) - {:?},{:?}", + pref, + methods, + self.obligations); for expr in exprs { debug!("finalize - finalizing #{} - {:?}", expr.id, expr); @@ -163,18 +171,14 @@ impl<'a, 'gcx, 'tcx> Autoderef<'a, 'gcx, 'tcx> { } impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { - pub fn autoderef(&'a self, - span: Span, - base_ty: Ty<'tcx>) - -> Autoderef<'a, 'gcx, 'tcx> - { + pub fn autoderef(&'a self, span: Span, base_ty: Ty<'tcx>) -> Autoderef<'a, 'gcx, 'tcx> { Autoderef { fcx: self, steps: vec![], cur_ty: self.resolve_type_vars_if_possible(&base_ty), obligations: vec![], at_start: true, - span: span + span: span, } } @@ -183,28 +187,36 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { base_expr: Option<&hir::Expr>, base_ty: Ty<'tcx>, lvalue_pref: LvaluePreference) - -> Option> - { + -> Option> { debug!("try_overloaded_deref({:?},{:?},{:?},{:?})", - span, base_expr, base_ty, lvalue_pref); + span, + base_expr, + base_ty, + lvalue_pref); // Try DerefMut first, if preferred. let method = match (lvalue_pref, self.tcx.lang_items.deref_mut_trait()) { (PreferMutLvalue, Some(trait_did)) => { - self.lookup_method_in_trait(span, base_expr, - token::intern("deref_mut"), trait_did, - base_ty, None) + self.lookup_method_in_trait(span, + base_expr, + token::intern("deref_mut"), + trait_did, + base_ty, + None) } - _ => None + _ => None, }; // Otherwise, fall back to Deref. let method = match (method, self.tcx.lang_items.deref_trait()) { (None, Some(trait_did)) => { - self.lookup_method_in_trait(span, base_expr, - token::intern("deref"), trait_did, - base_ty, None) + self.lookup_method_in_trait(span, + base_expr, + token::intern("deref"), + trait_did, + base_ty, + None) } - (method, _) => method + (method, _) => method, }; method diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index 5bd4f13a1119c..c5a21d7dd91ce 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -8,13 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use super::{DeferredCallResolution, Expectation, FnCtxt, - TupleArgumentsFlag}; +use super::{DeferredCallResolution, Expectation, FnCtxt, TupleArgumentsFlag}; use CrateCtxt; -use middle::cstore::LOCAL_CRATE; use hir::def::Def; -use hir::def_id::DefId; +use hir::def_id::{DefId, LOCAL_CRATE}; use rustc::{infer, traits}; use rustc::ty::{self, LvaluePreference, Ty}; use syntax::parse::token; @@ -28,7 +26,10 @@ use rustc::hir; /// method that is called) pub fn check_legal_trait_for_method_call(ccx: &CrateCtxt, span: Span, trait_id: DefId) { if ccx.tcx.lang_items.drop_trait() == Some(trait_id) { - struct_span_err!(ccx.tcx.sess, span, E0040, "explicit use of destructor method") + struct_span_err!(ccx.tcx.sess, + span, + E0040, + "explicit use of destructor method") .span_label(span, &format!("explicit destructor calls not allowed")) .emit(); } @@ -37,7 +38,7 @@ pub fn check_legal_trait_for_method_call(ccx: &CrateCtxt, span: Span, trait_id: enum CallStep<'tcx> { Builtin, DeferredClosure(ty::FnSig<'tcx>), - Overloaded(ty::MethodCallee<'tcx>) + Overloaded(ty::MethodCallee<'tcx>), } impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { @@ -45,14 +46,17 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { call_expr: &'gcx hir::Expr, callee_expr: &'gcx hir::Expr, arg_exprs: &'gcx [P], - expected: Expectation<'tcx>) -> Ty<'tcx> - { + expected: Expectation<'tcx>) + -> Ty<'tcx> { let original_callee_ty = self.check_expr(callee_expr); - - let mut autoderef = self.autoderef(callee_expr.span, original_callee_ty); - let result = autoderef.by_ref().flat_map(|(adj_ty, idx)| { - self.try_overloaded_call_step(call_expr, callee_expr, adj_ty, idx) - }).next(); + let expr_ty = self.structurally_resolved_type(call_expr.span, original_callee_ty); + + let mut autoderef = self.autoderef(callee_expr.span, expr_ty); + let result = autoderef.by_ref() + .flat_map(|(adj_ty, idx)| { + self.try_overloaded_call_step(call_expr, callee_expr, adj_ty, idx) + }) + .next(); let callee_ty = autoderef.unambiguous_final_ty(); autoderef.finalize(LvaluePreference::NoPreference, Some(callee_expr)); @@ -71,8 +75,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } Some(CallStep::Overloaded(method_callee)) => { - self.confirm_overloaded_call(call_expr, callee_expr, - arg_exprs, expected, method_callee) + self.confirm_overloaded_call(call_expr, + callee_expr, + arg_exprs, + expected, + method_callee) } }; @@ -87,8 +94,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { callee_expr: &'gcx hir::Expr, adjusted_ty: Ty<'tcx>, autoderefs: usize) - -> Option> - { + -> Option> { debug!("try_overloaded_call_step(call_expr={:?}, adjusted_ty={:?}, autoderefs={})", call_expr, adjusted_ty, @@ -108,20 +114,20 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // haven't yet decided on whether the closure is fn vs // fnmut vs fnonce. If so, we have to defer further processing. 
if self.closure_kind(def_id).is_none() { - let closure_ty = - self.closure_type(def_id, substs); - let fn_sig = - self.replace_late_bound_regions_with_fresh_var(call_expr.span, - infer::FnCall, - &closure_ty.sig).0; - self.record_deferred_call_resolution(def_id, Box::new(CallResolution { - call_expr: call_expr, - callee_expr: callee_expr, - adjusted_ty: adjusted_ty, - autoderefs: autoderefs, - fn_sig: fn_sig.clone(), - closure_def_id: def_id - })); + let closure_ty = self.closure_type(def_id, substs); + let fn_sig = self.replace_late_bound_regions_with_fresh_var(call_expr.span, + infer::FnCall, + &closure_ty.sig) + .0; + self.record_deferred_call_resolution(def_id, + Box::new(CallResolution { + call_expr: call_expr, + callee_expr: callee_expr, + adjusted_ty: adjusted_ty, + autoderefs: autoderefs, + fn_sig: fn_sig.clone(), + closure_def_id: def_id, + })); return Some(CallStep::DeferredClosure(fn_sig)); } } @@ -150,14 +156,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { callee_expr: &hir::Expr, adjusted_ty: Ty<'tcx>, autoderefs: usize) - -> Option> - { + -> Option> { // Try the options that are least restrictive on the caller first. - for &(opt_trait_def_id, method_name) in &[ - (self.tcx.lang_items.fn_trait(), token::intern("call")), - (self.tcx.lang_items.fn_mut_trait(), token::intern("call_mut")), - (self.tcx.lang_items.fn_once_trait(), token::intern("call_once")), - ] { + for &(opt_trait_def_id, method_name) in + &[(self.tcx.lang_items.fn_trait(), token::intern("call")), + (self.tcx.lang_items.fn_mut_trait(), token::intern("call_mut")), + (self.tcx.lang_items.fn_once_trait(), token::intern("call_once"))] { let trait_def_id = match opt_trait_def_id { Some(def_id) => def_id, None => continue, @@ -185,19 +189,20 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { call_expr: &hir::Expr, callee_ty: Ty<'tcx>, arg_exprs: &'gcx [P], - expected: Expectation<'tcx>) -> Ty<'tcx> - { + expected: Expectation<'tcx>) + -> Ty<'tcx> { let error_fn_sig; let fn_sig = match callee_ty.sty { - ty::TyFnDef(.., &ty::BareFnTy {ref sig, ..}) | - ty::TyFnPtr(&ty::BareFnTy {ref sig, ..}) => { - sig - } + ty::TyFnDef(.., &ty::BareFnTy { ref sig, .. }) | + ty::TyFnPtr(&ty::BareFnTy { ref sig, .. }) => sig, _ => { - let mut err = self.type_error_struct(call_expr.span, |actual| { - format!("expected function, found `{}`", actual) - }, callee_ty); + let mut err = self.type_error_struct(call_expr.span, + |actual| { + format!("expected function, found `{}`", + actual) + }, + callee_ty); if let hir::ExprCall(ref expr, _) = call_expr.node { let tcx = self.tcx; @@ -218,7 +223,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { error_fn_sig = ty::Binder(ty::FnSig { inputs: self.err_args(arg_exprs.len()), output: self.tcx.types.err, - variadic: false + variadic: false, }); &error_fn_sig @@ -231,17 +236,16 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // previously appeared within a `Binder<>` and hence would not // have been normalized before. let fn_sig = - self.replace_late_bound_regions_with_fresh_var(call_expr.span, - infer::FnCall, - fn_sig).0; - let fn_sig = - self.normalize_associated_types_in(call_expr.span, &fn_sig); + self.replace_late_bound_regions_with_fresh_var(call_expr.span, infer::FnCall, fn_sig) + .0; + let fn_sig = self.normalize_associated_types_in(call_expr.span, &fn_sig); // Call the generic checker. 
- let expected_arg_tys = self.expected_types_for_fn_args(call_expr.span, - expected, - fn_sig.output, - &fn_sig.inputs); + let expected_arg_tys = + self.expected_types_for_fn_args(call_expr.span, + expected, + fn_sig.output, + &fn_sig.inputs); self.check_argument_types(call_expr.span, &fn_sig.inputs, &expected_arg_tys[..], @@ -256,18 +260,17 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { call_expr: &hir::Expr, arg_exprs: &'gcx [P], expected: Expectation<'tcx>, - fn_sig: ty::FnSig<'tcx>) -> Ty<'tcx> - { + fn_sig: ty::FnSig<'tcx>) + -> Ty<'tcx> { // `fn_sig` is the *signature* of the cosure being called. We // don't know the full details yet (`Fn` vs `FnMut` etc), but we // do know the types expected for each argument and the return // type. - let expected_arg_tys = - self.expected_types_for_fn_args(call_expr.span, - expected, - fn_sig.output.clone(), - &fn_sig.inputs); + let expected_arg_tys = self.expected_types_for_fn_args(call_expr.span, + expected, + fn_sig.output.clone(), + &fn_sig.inputs); self.check_argument_types(call_expr.span, &fn_sig.inputs, @@ -284,15 +287,14 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { callee_expr: &'gcx hir::Expr, arg_exprs: &'gcx [P], expected: Expectation<'tcx>, - method_callee: ty::MethodCallee<'tcx>) -> Ty<'tcx> - { - let output_type = - self.check_method_argument_types(call_expr.span, - method_callee.ty, - callee_expr, - arg_exprs, - TupleArgumentsFlag::TupleArguments, - expected); + method_callee: ty::MethodCallee<'tcx>) + -> Ty<'tcx> { + let output_type = self.check_method_argument_types(call_expr.span, + method_callee.ty, + callee_expr, + arg_exprs, + TupleArgumentsFlag::TupleArguments, + expected); self.write_overloaded_call_method_map(call_expr, method_callee); output_type @@ -318,16 +320,17 @@ struct CallResolution<'gcx: 'tcx, 'tcx> { impl<'gcx, 'tcx> DeferredCallResolution<'gcx, 'tcx> for CallResolution<'gcx, 'tcx> { fn resolve<'a>(&mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) { - debug!("DeferredCallResolution::resolve() {:?}", - self); + debug!("DeferredCallResolution::resolve() {:?}", self); // we should not be invoked until the closure kind has been // determined by upvar inference assert!(fcx.closure_kind(self.closure_def_id).is_some()); // We may now know enough to figure out fn vs fnmut etc. - match fcx.try_overloaded_call_traits(self.call_expr, self.callee_expr, - self.adjusted_ty, self.autoderefs) { + match fcx.try_overloaded_call_traits(self.call_expr, + self.callee_expr, + self.adjusted_ty, + self.autoderefs) { Some(method_callee) => { // One problem is that when we get here, we are going // to have a newly instantiated function signature @@ -337,28 +340,24 @@ impl<'gcx, 'tcx> DeferredCallResolution<'gcx, 'tcx> for CallResolution<'gcx, 'tc // can't because of the annoying need for a TypeTrace. // (This always bites me, should find a way to // refactor it.) 
- let method_sig = fcx.tcx.no_late_bound_regions(method_callee.ty.fn_sig()) - .unwrap(); + let method_sig = fcx.tcx + .no_late_bound_regions(method_callee.ty.fn_sig()) + .unwrap(); - debug!("attempt_resolution: method_callee={:?}", - method_callee); + debug!("attempt_resolution: method_callee={:?}", method_callee); for (&method_arg_ty, &self_arg_ty) in - method_sig.inputs[1..].iter().zip(&self.fn_sig.inputs) - { + method_sig.inputs[1..].iter().zip(&self.fn_sig.inputs) { fcx.demand_eqtype(self.call_expr.span, self_arg_ty, method_arg_ty); } - fcx.demand_eqtype(self.call_expr.span, - method_sig.output, - self.fn_sig.output); + fcx.demand_eqtype(self.call_expr.span, method_sig.output, self.fn_sig.output); fcx.write_overloaded_call_method_map(self.call_expr, method_callee); } None => { - span_bug!( - self.call_expr.span, - "failed to find an overloaded call trait for closure call"); + span_bug!(self.call_expr.span, + "failed to find an overloaded call trait for closure call"); } } } diff --git a/src/librustc_typeck/check/cast.rs b/src/librustc_typeck/check/cast.rs index 51a9b18392dcf..c456b9358b3e2 100644 --- a/src/librustc_typeck/check/cast.rs +++ b/src/librustc_typeck/check/cast.rs @@ -69,7 +69,7 @@ enum UnsizeKind<'tcx> { /// The unsize info of this projection OfProjection(&'tcx ty::ProjectionTy<'tcx>), /// The unsize info of this parameter - OfParam(&'tcx ty::ParamTy) + OfParam(&'tcx ty::ParamTy), } impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { @@ -83,13 +83,13 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // FIXME(arielb1): do some kind of normalization match def.struct_variant().fields.last() { None => None, - Some(f) => self.unsize_kind(f.ty(self.tcx, substs)) + Some(f) => self.unsize_kind(f.ty(self.tcx, substs)), } } // We should really try to normalize here. 
ty::TyProjection(ref pi) => Some(UnsizeKind::OfProjection(pi)), ty::TyParam(ref p) => Some(UnsizeKind::OfParam(p)), - _ => None + _ => None, } } } @@ -133,9 +133,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { check.report_cast_to_unsized_type(fcx); Err(ErrorReported) } - _ => { - Ok(check) - } + _ => Ok(check), } } @@ -145,18 +143,21 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { CastError::NeedViaThinPtr | CastError::NeedViaInt | CastError::NeedViaUsize => { - fcx.type_error_struct(self.span, |actual| { - format!("casting `{}` as `{}` is invalid", - actual, - fcx.ty_to_string(self.cast_ty)) - }, self.expr_ty) - .help(&format!("cast through {} first", match e { - CastError::NeedViaPtr => "a raw pointer", - CastError::NeedViaThinPtr => "a thin pointer", - CastError::NeedViaInt => "an integer", - CastError::NeedViaUsize => "a usize", - _ => bug!() - })) + fcx.type_error_struct(self.span, + |actual| { + format!("casting `{}` as `{}` is invalid", + actual, + fcx.ty_to_string(self.cast_ty)) + }, + self.expr_ty) + .help(&format!("cast through {} first", + match e { + CastError::NeedViaPtr => "a raw pointer", + CastError::NeedViaThinPtr => "a thin pointer", + CastError::NeedViaInt => "an integer", + CastError::NeedViaUsize => "a usize", + _ => bug!(), + })) .emit(); } CastError::CastToBool => { @@ -166,37 +167,49 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { .emit(); } CastError::CastToChar => { - fcx.type_error_message(self.span, |actual| { - format!("only `u8` can be cast as `char`, not `{}`", actual) - }, self.expr_ty); + fcx.type_error_message(self.span, + |actual| { + format!("only `u8` can be cast as `char`, not `{}`", + actual) + }, + self.expr_ty); } CastError::NonScalar => { - fcx.type_error_message(self.span, |actual| { - format!("non-scalar cast: `{}` as `{}`", - actual, - fcx.ty_to_string(self.cast_ty)) - }, self.expr_ty); + fcx.type_error_message(self.span, + |actual| { + format!("non-scalar cast: `{}` as `{}`", + actual, + fcx.ty_to_string(self.cast_ty)) + }, + self.expr_ty); } CastError::IllegalCast => { - fcx.type_error_message(self.span, |actual| { - format!("casting `{}` as `{}` is invalid", - actual, - fcx.ty_to_string(self.cast_ty)) - }, self.expr_ty); + fcx.type_error_message(self.span, + |actual| { + format!("casting `{}` as `{}` is invalid", + actual, + fcx.ty_to_string(self.cast_ty)) + }, + self.expr_ty); } CastError::SizedUnsizedCast => { - fcx.type_error_message(self.span, |actual| { - format!("cannot cast thin pointer `{}` to fat pointer `{}`", - actual, - fcx.ty_to_string(self.cast_ty)) - }, self.expr_ty) + fcx.type_error_message(self.span, + |actual| { + format!("cannot cast thin pointer `{}` to fat pointer \ + `{}`", + actual, + fcx.ty_to_string(self.cast_ty)) + }, + self.expr_ty) } CastError::DifferingKinds => { - fcx.type_error_struct(self.span, |actual| { - format!("casting `{}` as `{}` is invalid", - actual, - fcx.ty_to_string(self.cast_ty)) - }, self.expr_ty) + fcx.type_error_struct(self.span, + |actual| { + format!("casting `{}` as `{}` is invalid", + actual, + fcx.ty_to_string(self.cast_ty)) + }, + self.expr_ty) .note("vtable kinds may not match") .emit(); } @@ -204,22 +217,22 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { } fn report_cast_to_unsized_type(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) { - if - self.cast_ty.references_error() || - self.expr_ty.references_error() - { + if self.cast_ty.references_error() || self.expr_ty.references_error() { return; } let tstr = fcx.ty_to_string(self.cast_ty); - let mut err = fcx.type_error_struct(self.span, |actual| { - format!("cast to 
unsized type: `{}` as `{}`", actual, tstr) - }, self.expr_ty); + let mut err = + fcx.type_error_struct(self.span, + |actual| { + format!("cast to unsized type: `{}` as `{}`", actual, tstr) + }, + self.expr_ty); match self.expr_ty.sty { ty::TyRef(_, ty::TypeAndMut { mutbl: mt, .. }) => { let mtstr = match mt { hir::MutMutable => "mut ", - hir::MutImmutable => "" + hir::MutImmutable => "", }; if self.cast_ty.is_trait() { match fcx.tcx.sess.codemap().span_to_snippet(self.cast_span) { @@ -227,15 +240,17 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { err.span_suggestion(self.cast_span, "try casting to a reference instead:", format!("&{}{}", mtstr, s)); - }, - Err(_) => - span_help!(err, self.cast_span, - "did you mean `&{}{}`?", mtstr, tstr), + } + Err(_) => { + span_help!(err, self.cast_span, "did you mean `&{}{}`?", mtstr, tstr) + } } } else { - span_help!(err, self.span, + span_help!(err, + self.span, "consider using an implicit coercion to `&{}{}` instead", - mtstr, tstr); + mtstr, + tstr); } } ty::TyBox(..) => { @@ -244,13 +259,13 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { err.span_suggestion(self.cast_span, "try casting to a `Box` instead:", format!("Box<{}>", s)); - }, - Err(_) => - span_help!(err, self.cast_span, "did you mean `Box<{}>`?", tstr), + } + Err(_) => span_help!(err, self.cast_span, "did you mean `Box<{}>`?", tstr), } } _ => { - span_help!(err, self.expr.span, + span_help!(err, + self.expr.span, "consider using a box or reference as appropriate"); } } @@ -286,7 +301,9 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { self.expr_ty = fcx.structurally_resolved_type(self.span, self.expr_ty); self.cast_ty = fcx.structurally_resolved_type(self.span, self.cast_ty); - debug!("check_cast({}, {:?} as {:?})", self.expr.id, self.expr_ty, + debug!("check_cast({}, {:?} as {:?})", + self.expr.id, + self.expr_ty, self.cast_ty); if !fcx.type_is_known_to_be_sized(self.cast_ty, self.span) { @@ -296,15 +313,16 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { } else if self.try_coercion_cast(fcx) { self.trivial_cast_lint(fcx); debug!(" -> CoercionCast"); - fcx.tcx.cast_kinds.borrow_mut().insert(self.expr.id, - CastKind::CoercionCast); - } else { match self.do_check(fcx) { - Ok(k) => { - debug!(" -> {:?}", k); - fcx.tcx.cast_kinds.borrow_mut().insert(self.expr.id, k); - } - Err(e) => self.report_cast_error(fcx, e) - };} + fcx.tcx.cast_kinds.borrow_mut().insert(self.expr.id, CastKind::CoercionCast); + } else { + match self.do_check(fcx) { + Ok(k) => { + debug!(" -> {:?}", k); + fcx.tcx.cast_kinds.borrow_mut().insert(self.expr.id, k); + } + Err(e) => self.report_cast_error(fcx, e), + }; + } } /// Check a cast, and report an error if one exists. 
In some cases, this @@ -330,9 +348,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { return Err(CastError::NonScalar); } } - _ => { - return Err(CastError::NonScalar) - } + _ => return Err(CastError::NonScalar), }; match (t_from, t_cast) { @@ -347,17 +363,20 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { (_, Int(Char)) => Err(CastError::CastToChar), // prim -> float,ptr - (Int(Bool), Float) | (Int(CEnum), Float) | (Int(Char), Float) - => Err(CastError::NeedViaInt), - (Int(Bool), Ptr(_)) | (Int(CEnum), Ptr(_)) | (Int(Char), Ptr(_)) - => Err(CastError::NeedViaUsize), + (Int(Bool), Float) | + (Int(CEnum), Float) | + (Int(Char), Float) => Err(CastError::NeedViaInt), + (Int(Bool), Ptr(_)) | + (Int(CEnum), Ptr(_)) | + (Int(Char), Ptr(_)) => Err(CastError::NeedViaUsize), // ptr -> * (Ptr(m_e), Ptr(m_c)) => self.check_ptr_ptr_cast(fcx, m_e, m_c), // ptr-ptr-cast (Ptr(m_expr), Int(_)) => self.check_ptr_addr_cast(fcx, m_expr), // ptr-addr-cast (Ptr(_), Float) | (FnPtr, Float) => Err(CastError::NeedViaUsize), (FnPtr, Int(_)) => Ok(CastKind::FnPtrAddrCast), - (RPtr(_), Int(_)) | (RPtr(_), Float) => Err(CastError::NeedViaPtr), + (RPtr(_), Int(_)) | + (RPtr(_), Float) => Err(CastError::NeedViaPtr), // * -> ptr (Int(_), Ptr(mt)) => self.check_addr_ptr_cast(fcx, mt), // addr-ptr-cast (FnPtr, Ptr(mt)) => self.check_fptr_ptr_cast(fcx, mt), @@ -366,12 +385,12 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { // prim -> prim (Int(CEnum), Int(_)) => Ok(CastKind::EnumCast), - (Int(Char), Int(_)) | (Int(Bool), Int(_)) => Ok(CastKind::PrimIntCast), + (Int(Char), Int(_)) | + (Int(Bool), Int(_)) => Ok(CastKind::PrimIntCast), - (Int(_), Int(_)) | - (Int(_), Float) | - (Float, Int(_)) | - (Float, Float) => Ok(CastKind::NumericCast), + (Int(_), Int(_)) | (Int(_), Float) | (Float, Int(_)) | (Float, Float) => { + Ok(CastKind::NumericCast) + } } } @@ -380,10 +399,8 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { fcx: &FnCtxt<'a, 'gcx, 'tcx>, m_expr: &'tcx ty::TypeAndMut<'tcx>, m_cast: &'tcx ty::TypeAndMut<'tcx>) - -> Result - { - debug!("check_ptr_ptr_cast m_expr={:?} m_cast={:?}", - m_expr, m_cast); + -> Result { + debug!("check_ptr_ptr_cast m_expr={:?} m_cast={:?}", m_expr, m_cast); // ptr-ptr cast. vtables must match. // Cast to sized is OK @@ -399,15 +416,14 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { // vtable kinds must match match (fcx.unsize_kind(m_cast.ty), fcx.unsize_kind(m_expr.ty)) { (Some(a), Some(b)) if a == b => Ok(CastKind::PtrPtrCast), - _ => Err(CastError::DifferingKinds) + _ => Err(CastError::DifferingKinds), } } fn check_fptr_ptr_cast(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>, m_cast: &'tcx ty::TypeAndMut<'tcx>) - -> Result - { + -> Result { // fptr-ptr cast. must be to sized ptr if fcx.type_is_known_to_be_sized(m_cast.ty, self.span) { @@ -420,8 +436,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { fn check_ptr_addr_cast(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>, m_expr: &'tcx ty::TypeAndMut<'tcx>) - -> Result - { + -> Result { // ptr-addr cast. must be from sized ptr if fcx.type_is_known_to_be_sized(m_expr.ty, self.span) { @@ -435,8 +450,7 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { fcx: &FnCtxt<'a, 'gcx, 'tcx>, m_expr: &'tcx ty::TypeAndMut<'tcx>, m_cast: &'tcx ty::TypeAndMut<'tcx>) - -> Result - { + -> Result { // array-ptr-cast. if m_expr.mutbl == hir::MutImmutable && m_cast.mutbl == hir::MutImmutable { @@ -460,28 +474,22 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { fn check_addr_ptr_cast(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>, m_cast: &'tcx ty::TypeAndMut<'tcx>) - -> Result - { + -> Result { // ptr-addr cast. pointer must be thin. 
if fcx.type_is_known_to_be_sized(m_cast.ty, self.span) { - Ok(CastKind::AddrPtrCast) + Ok(CastKind::AddrPtrCast) } else { - Err(CastError::IllegalCast) + Err(CastError::IllegalCast) } } fn try_coercion_cast(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> bool { fcx.try_coerce(self.expr, self.expr_ty, self.cast_ty).is_ok() } - } impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { - fn type_is_known_to_be_sized(&self, - ty: Ty<'tcx>, - span: Span) - -> bool - { + fn type_is_known_to_be_sized(&self, ty: Ty<'tcx>, span: Span) -> bool { traits::type_known_to_meet_builtin_bound(self, ty, ty::BoundSized, span) } } diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs index 9e41d1b5676e4..d478f1092bd87 100644 --- a/src/librustc_typeck/check/closure.rs +++ b/src/librustc_typeck/check/closure.rs @@ -24,7 +24,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { _capture: hir::CaptureClause, decl: &'gcx hir::FnDecl, body: &'gcx hir::Block, - expected: Expectation<'tcx>) -> Ty<'tcx> { + expected: Expectation<'tcx>) + -> Ty<'tcx> { debug!("check_expr_closure(expr={:?},expected={:?})", expr, expected); @@ -32,9 +33,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // It's always helpful for inference if we know the kind of // closure sooner rather than later, so first examine the expected // type, and see if can glean a closure kind from there. - let (expected_sig,expected_kind) = match expected.to_option(self) { + let (expected_sig, expected_kind) = match expected.to_option(self) { Some(ty) => self.deduce_expectations_from_expected_type(ty), - None => (None, None) + None => (None, None), }; self.check_closure(expr, expected_kind, decl, body, expected_sig) } @@ -44,7 +45,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { opt_kind: Option, decl: &'gcx hir::FnDecl, body: &'gcx hir::Block, - expected_sig: Option>) -> Ty<'tcx> { + expected_sig: Option>) + -> Ty<'tcx> { let expr_def_id = self.tcx.map.local_def_id(expr.id); debug!("check_closure opt_kind={:?} expected_sig={:?}", @@ -64,22 +66,29 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let upvar_tys = self.next_ty_vars(num_upvars); debug!("check_closure: expr.id={:?} upvar_tys={:?}", - expr.id, upvar_tys); + expr.id, + upvar_tys); let closure_type = self.tcx.mk_closure(expr_def_id, - self.parameter_environment.free_substs, - upvar_tys); - - let fn_sig = self.tcx.liberate_late_bound_regions( - self.tcx.region_maps.call_site_extent(expr.id, body.id), &fn_ty.sig); - let fn_sig = - (**self).normalize_associated_types_in(body.span, body.id, &fn_sig); - - check_fn(self, hir::Unsafety::Normal, expr.id, &fn_sig, decl, expr.id, &body); + self.parameter_environment.free_substs, + &upvar_tys); + + let fn_sig = self.tcx + .liberate_late_bound_regions(self.tcx.region_maps.call_site_extent(expr.id, body.id), + &fn_ty.sig); + let fn_sig = (**self).normalize_associated_types_in(body.span, body.id, &fn_sig); + + check_fn(self, + hir::Unsafety::Normal, + expr.id, + &fn_sig, + decl, + expr.id, + &body); // Tuple up the arguments and insert the resulting function type into // the `closures` table. 
- fn_ty.sig.0.inputs = vec![self.tcx.mk_tup(fn_ty.sig.0.inputs)]; + fn_ty.sig.0.inputs = vec![self.tcx.intern_tup(&fn_ty.sig.0.inputs[..])]; debug!("closure for {:?} --> sig={:?} opt_kind={:?}", expr_def_id, @@ -88,46 +97,47 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.tables.borrow_mut().closure_tys.insert(expr_def_id, fn_ty); match opt_kind { - Some(kind) => { self.tables.borrow_mut().closure_kinds.insert(expr_def_id, kind); } - None => { } + Some(kind) => { + self.tables.borrow_mut().closure_kinds.insert(expr_def_id, kind); + } + None => {} } closure_type } - fn deduce_expectations_from_expected_type(&self, expected_ty: Ty<'tcx>) - -> (Option>,Option) - { + fn deduce_expectations_from_expected_type + (&self, + expected_ty: Ty<'tcx>) + -> (Option>, Option) { debug!("deduce_expectations_from_expected_type(expected_ty={:?})", expected_ty); match expected_ty.sty { ty::TyTrait(ref object_type) => { - let sig = object_type.projection_bounds.iter().filter_map(|pb| { - let pb = pb.with_self_ty(self.tcx, self.tcx.types.err); - self.deduce_sig_from_projection(&pb) - }).next(); + let sig = object_type.projection_bounds + .iter() + .filter_map(|pb| { + let pb = pb.with_self_ty(self.tcx, self.tcx.types.err); + self.deduce_sig_from_projection(&pb) + }) + .next(); let kind = self.tcx.lang_items.fn_trait_kind(object_type.principal.def_id()); (sig, kind) } - ty::TyInfer(ty::TyVar(vid)) => { - self.deduce_expectations_from_obligations(vid) - } - _ => { - (None, None) - } + ty::TyInfer(ty::TyVar(vid)) => self.deduce_expectations_from_obligations(vid), + _ => (None, None), } } - fn deduce_expectations_from_obligations(&self, expected_vid: ty::TyVid) - -> (Option>, Option) - { + fn deduce_expectations_from_obligations + (&self, + expected_vid: ty::TyVid) + -> (Option>, Option) { let fulfillment_cx = self.fulfillment_cx.borrow(); // Here `expected_ty` is known to be a type inference variable. - let expected_sig = - fulfillment_cx - .pending_obligations() + let expected_sig = fulfillment_cx.pending_obligations() .iter() .map(|obligation| &obligation.obligation) .filter_map(|obligation| { @@ -142,9 +152,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.self_type_matches_expected_vid(trait_ref, expected_vid) .and_then(|_| self.deduce_sig_from_projection(proj_predicate)) } - _ => { - None - } + _ => None, } }) .next(); @@ -153,9 +161,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // infer the kind. This can occur if there is a trait-reference // like `F : Fn
`. Note that due to subtyping we could encounter // many viable options, so pick the most restrictive. - let expected_kind = - fulfillment_cx - .pending_obligations() + let expected_kind = fulfillment_cx.pending_obligations() .iter() .map(|obligation| &obligation.obligation) .filter_map(|obligation| { @@ -178,11 +184,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // inference variable. ty::Predicate::ClosureKind(..) => None, }; - opt_trait_ref - .and_then(|tr| self.self_type_matches_expected_vid(tr, expected_vid)) + opt_trait_ref.and_then(|tr| self.self_type_matches_expected_vid(tr, expected_vid)) .and_then(|tr| self.tcx.lang_items.fn_trait_kind(tr.def_id())) }) - .fold(None, |best, cur| Some(best.map_or(cur, |best| cmp::min(best, cur)))); + .fold(None, + |best, cur| Some(best.map_or(cur, |best| cmp::min(best, cur)))); (expected_sig, expected_kind) } @@ -190,13 +196,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { /// Given a projection like "::Result == Y", we can deduce /// everything we need to know about a closure. fn deduce_sig_from_projection(&self, - projection: &ty::PolyProjectionPredicate<'tcx>) - -> Option> - { + projection: &ty::PolyProjectionPredicate<'tcx>) + -> Option> { let tcx = self.tcx; - debug!("deduce_sig_from_projection({:?})", - projection); + debug!("deduce_sig_from_projection({:?})", projection); let trait_ref = projection.to_poly_trait_ref(); @@ -206,22 +210,26 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let arg_param_ty = trait_ref.substs().type_at(1); let arg_param_ty = self.resolve_type_vars_if_possible(&arg_param_ty); - debug!("deduce_sig_from_projection: arg_param_ty {:?}", arg_param_ty); + debug!("deduce_sig_from_projection: arg_param_ty {:?}", + arg_param_ty); let input_tys = match arg_param_ty.sty { ty::TyTuple(tys) => tys.to_vec(), - _ => { return None; } + _ => { + return None; + } }; debug!("deduce_sig_from_projection: input_tys {:?}", input_tys); let ret_param_ty = projection.0.ty; let ret_param_ty = self.resolve_type_vars_if_possible(&ret_param_ty); - debug!("deduce_sig_from_projection: ret_param_ty {:?}", ret_param_ty); + debug!("deduce_sig_from_projection: ret_param_ty {:?}", + ret_param_ty); let fn_sig = ty::FnSig { inputs: input_tys, output: ret_param_ty, - variadic: false + variadic: false, }; debug!("deduce_sig_from_projection: fn_sig {:?}", fn_sig); @@ -229,10 +237,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } fn self_type_matches_expected_vid(&self, - trait_ref: ty::PolyTraitRef<'tcx>, - expected_vid: ty::TyVid) - -> Option> - { + trait_ref: ty::PolyTraitRef<'tcx>, + expected_vid: ty::TyVid) + -> Option> { let self_ty = self.shallow_resolve(trait_ref.self_ty()); debug!("self_type_matches_expected_vid(trait_ref={:?}, self_ty={:?})", trait_ref, diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs index 98a05989b140d..ccc944813ff18 100644 --- a/src/librustc_typeck/check/coercion.rs +++ b/src/librustc_typeck/check/coercion.rs @@ -60,7 +60,7 @@ //! sort of a minor point so I've opted to leave it for later---after all //! we may want to adjust precisely when coercions occur. 
-use check::{FnCtxt}; +use check::FnCtxt; use rustc::hir; use rustc::infer::{Coercion, InferOk, TypeOrigin, TypeTrace}; @@ -79,7 +79,7 @@ use std::cell::RefCell; use std::collections::VecDeque; use std::ops::Deref; -struct Coerce<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +struct Coerce<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, origin: TypeOrigin, use_lub: bool, @@ -102,7 +102,7 @@ fn coerce_mutbls<'tcx>(from_mutbl: hir::Mutability, (hir::MutMutable, hir::MutMutable) | (hir::MutImmutable, hir::MutImmutable) | (hir::MutMutable, hir::MutImmutable) => Ok(()), - (hir::MutImmutable, hir::MutMutable) => Err(TypeError::Mutability) + (hir::MutImmutable, hir::MutMutable) => Err(TypeError::Mutability), } } @@ -112,7 +112,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { fcx: fcx, origin: origin, use_lub: false, - unsizing_obligations: RefCell::new(vec![]) + unsizing_obligations: RefCell::new(vec![]), } } @@ -144,21 +144,18 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { /// Synthesize an identity adjustment. fn identity(&self, ty: Ty<'tcx>) -> CoerceResult<'tcx> { - Ok((ty, AdjustDerefRef(AutoDerefRef { - autoderefs: 0, - autoref: None, - unsize: None - }))) + Ok((ty, + AdjustDerefRef(AutoDerefRef { + autoderefs: 0, + autoref: None, + unsize: None, + }))) } - fn coerce<'a, E, I>(&self, - exprs: &E, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> CoerceResult<'tcx> - // FIXME(eddyb) use copyable iterators when that becomes ergonomic. + fn coerce<'a, E, I>(&self, exprs: &E, a: Ty<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> where E: Fn() -> I, - I: IntoIterator { + I: IntoIterator + { let a = self.shallow_resolve(a); debug!("Coerce.tys({:?} => {:?})", a, b); @@ -199,6 +196,8 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { // Function items are coercible to any closure // type; function pointers are not (that would // require double indirection). + // Additionally, we permit coercion of function + // items to drop the unsafe qualifier. self.coerce_from_fn_item(a, a_f, b) } ty::TyFnPtr(a_f) => { @@ -223,9 +222,8 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { r_b: &'tcx ty::Region, mt_b: TypeAndMut<'tcx>) -> CoerceResult<'tcx> - // FIXME(eddyb) use copyable iterators when that becomes ergonomic. where E: Fn() -> I, - I: IntoIterator + I: IntoIterator { debug!("coerce_borrowed_pointer(a={:?}, b={:?})", a, b); @@ -241,7 +239,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?; (r_a, mt_a) } - _ => return self.unify_and_identity(a, b) + _ => return self.unify_and_identity(a, b), }; let span = self.origin.span(); @@ -255,7 +253,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { if autoderefs == 0 { // Don't let this pass, otherwise it would cause // &T to autoref to &&T. - continue + continue; } // At this point, we have deref'd `a` to `referent_ty`. 
So @@ -333,19 +331,24 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { } else if autoderefs == 1 { r_a // [3] above } else { - if r_borrow_var.is_none() { // create var lazilly, at most once + if r_borrow_var.is_none() { + // create var lazilly, at most once let coercion = Coercion(span); let r = self.next_region_var(coercion); r_borrow_var = Some(r); // [4] above } r_borrow_var.unwrap() }; - let derefd_ty_a = self.tcx.mk_ref(r, TypeAndMut { - ty: referent_ty, - mutbl: mt_b.mutbl // [1] above - }); + let derefd_ty_a = self.tcx.mk_ref(r, + TypeAndMut { + ty: referent_ty, + mutbl: mt_b.mutbl, // [1] above + }); match self.unify(derefd_ty_a, b) { - Ok(ty) => { success = Some((ty, autoderefs)); break }, + Ok(ty) => { + success = Some((ty, autoderefs)); + break; + } Err(err) => { if first_error.is_none() { first_error = Some(err); @@ -391,29 +394,27 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { } let r_borrow = match ty.sty { ty::TyRef(r_borrow, _) => r_borrow, - _ => span_bug!(span, "expected a ref type, got {:?}", ty) + _ => span_bug!(span, "expected a ref type, got {:?}", ty), }; let autoref = Some(AutoPtr(r_borrow, mt_b.mutbl)); debug!("coerce_borrowed_pointer: succeeded ty={:?} autoderefs={:?} autoref={:?}", - ty, autoderefs, autoref); - Ok((ty, AdjustDerefRef(AutoDerefRef { - autoderefs: autoderefs, - autoref: autoref, - unsize: None - }))) + ty, + autoderefs, + autoref); + Ok((ty, + AdjustDerefRef(AutoDerefRef { + autoderefs: autoderefs, + autoref: autoref, + unsize: None, + }))) } // &[T; n] or &mut [T; n] -> &[T] // or &mut [T; n] -> &mut [T] // or &Concrete -> &Trait, etc. - fn coerce_unsized(&self, - source: Ty<'tcx>, - target: Ty<'tcx>) - -> CoerceResult<'tcx> { - debug!("coerce_unsized(source={:?}, target={:?})", - source, - target); + fn coerce_unsized(&self, source: Ty<'tcx>, target: Ty<'tcx>) -> CoerceResult<'tcx> { + debug!("coerce_unsized(source={:?}, target={:?})", source, target); let traits = (self.tcx.lang_items.unsize_trait(), self.tcx.lang_items.coerce_unsized_trait()); @@ -442,7 +443,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?; (mt_a.ty, Some(AutoUnsafe(mt_b.mutbl))) } - _ => (source, None) + _ => (source, None), }; let source = source.adjust_for_autoref(self.tcx, reborrow); @@ -454,11 +455,8 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { // Create an obligation for `Source: CoerceUnsized`. let cause = ObligationCause::misc(self.origin.span(), self.body_id); - queue.push_back(self.tcx.predicate_for_trait_def(cause, - coerce_unsized_did, - 0, - source, - &[target])); + queue.push_back(self.tcx + .predicate_for_trait_def(cause, coerce_unsized_did, 0, source, &[target])); // Keep resolving `CoerceUnsized` and `Unsize` predicates to avoid // emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where @@ -466,10 +464,8 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { let traits = [coerce_unsized_did, unsize_did]; while let Some(obligation) = queue.pop_front() { debug!("coerce_unsized resolve step: {:?}", obligation); - let trait_ref = match obligation.predicate { - ty::Predicate::Trait(ref tr) if traits.contains(&tr.def_id()) => { - tr.clone() - } + let trait_ref = match obligation.predicate { + ty::Predicate::Trait(ref tr) if traits.contains(&tr.def_id()) => tr.clone(), _ => { leftover_predicates.push(obligation); continue; @@ -477,7 +473,8 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { }; match selcx.select(&obligation.with(trait_ref)) { // Uncertain or unimplemented. 
- Ok(None) | Err(traits::Unimplemented) => { + Ok(None) | + Err(traits::Unimplemented) => { debug!("coerce_unsized: early return - can't prove obligation"); return Err(TypeError::Mismatch); } @@ -503,33 +500,23 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { let adjustment = AutoDerefRef { autoderefs: if reborrow.is_some() { 1 } else { 0 }, autoref: reborrow, - unsize: Some(target) + unsize: Some(target), }; debug!("Success, coerced with {:?}", adjustment); Ok((target, AdjustDerefRef(adjustment))) } - fn coerce_from_fn_pointer(&self, + fn coerce_from_safe_fn(&self, a: Ty<'tcx>, fn_ty_a: &'tcx ty::BareFnTy<'tcx>, b: Ty<'tcx>) - -> CoerceResult<'tcx> - { - /*! - * Attempts to coerce from the type of a Rust function item - * into a closure or a `proc`. - */ - - let b = self.shallow_resolve(b); - debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b); - + -> CoerceResult<'tcx> { if let ty::TyFnPtr(fn_ty_b) = b.sty { match (fn_ty_a.unsafety, fn_ty_b.unsafety) { (hir::Unsafety::Normal, hir::Unsafety::Unsafe) => { let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a); - return self.unify_and_identity(unsafe_a, b).map(|(ty, _)| { - (ty, AdjustUnsafeFnPointer) - }); + return self.unify_and_identity(unsafe_a, b) + .map(|(ty, _)| (ty, AdjustUnsafeFnPointer)); } _ => {} } @@ -537,15 +524,29 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { self.unify_and_identity(a, b) } + fn coerce_from_fn_pointer(&self, + a: Ty<'tcx>, + fn_ty_a: &'tcx ty::BareFnTy<'tcx>, + b: Ty<'tcx>) + -> CoerceResult<'tcx> { + //! Attempts to coerce from the type of a Rust function item + //! into a closure or a `proc`. + //! + + let b = self.shallow_resolve(b); + debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b); + + self.coerce_from_safe_fn(a, fn_ty_a, b) + } + fn coerce_from_fn_item(&self, a: Ty<'tcx>, fn_ty_a: &'tcx ty::BareFnTy<'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> { - /*! - * Attempts to coerce from the type of a Rust function item - * into a closure or a `proc`. - */ + //! Attempts to coerce from the type of a Rust function item + //! into a closure or a `proc`. + //! let b = self.shallow_resolve(b); debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b); @@ -553,11 +554,10 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { match b.sty { ty::TyFnPtr(_) => { let a_fn_pointer = self.tcx.mk_fn_ptr(fn_ty_a); - self.unify_and_identity(a_fn_pointer, b).map(|(ty, _)| { - (ty, AdjustReifyFnPointer) - }) + self.coerce_from_safe_fn(a_fn_pointer, fn_ty_a, b) + .map(|(ty, _)| (ty, AdjustReifyFnPointer)) } - _ => self.unify_and_identity(a, b) + _ => self.unify_and_identity(a, b), } } @@ -566,9 +566,7 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { b: Ty<'tcx>, mutbl_b: hir::Mutability) -> CoerceResult<'tcx> { - debug!("coerce_unsafe_ptr(a={:?}, b={:?})", - a, - b); + debug!("coerce_unsafe_ptr(a={:?}, b={:?})", a, b); let (is_ref, mt_a) = match a.sty { ty::TyRef(_, mt) => (true, mt), @@ -579,24 +577,28 @@ impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { }; // Check that the types which they point at are compatible. - let a_unsafe = self.tcx.mk_ptr(ty::TypeAndMut{ mutbl: mutbl_b, ty: mt_a.ty }); + let a_unsafe = self.tcx.mk_ptr(ty::TypeAndMut { + mutbl: mutbl_b, + ty: mt_a.ty, + }); let (ty, noop) = self.unify_and_identity(a_unsafe, b)?; coerce_mutbls(mt_a.mutbl, mutbl_b)?; // Although references and unsafe ptrs have the same // representation, we still register an AutoDerefRef so that // regionck knows that the region for `a` must be valid here. 
- Ok((ty, if is_ref { - AdjustDerefRef(AutoDerefRef { - autoderefs: 1, - autoref: Some(AutoUnsafe(mutbl_b)), - unsize: None - }) - } else if mt_a.mutbl != mutbl_b { - AdjustMutToConstPointer - } else { - noop - })) + Ok((ty, + if is_ref { + AdjustDerefRef(AutoDerefRef { + autoderefs: 1, + autoref: Some(AutoUnsafe(mutbl_b)), + unsize: None, + }) + } else if mt_a.mutbl != mutbl_b { + AdjustMutToConstPointer + } else { + noop + })) } } @@ -606,7 +608,8 @@ fn apply<'a, 'b, 'gcx, 'tcx, E, I>(coerce: &mut Coerce<'a, 'gcx, 'tcx>, b: Ty<'tcx>) -> CoerceResult<'tcx> where E: Fn() -> I, - I: IntoIterator { + I: IntoIterator +{ let (ty, adjustment) = indent(|| coerce.coerce(exprs, a, b))?; @@ -638,12 +641,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let mut coerce = Coerce::new(self, TypeOrigin::ExprAssignable(expr.span)); self.commit_if_ok(|_| { - let (ty, adjustment) = - apply(&mut coerce, &|| Some(expr), source, target)?; + let (ty, adjustment) = apply(&mut coerce, &|| Some(expr), source, target)?; if !adjustment.is_identity() { debug!("Success, coerced with {:?}", adjustment); match self.tables.borrow().adjustments.get(&expr.id) { - None | Some(&AdjustNeverToAny(..)) => (), + None | + Some(&AdjustNeverToAny(..)) => (), _ => bug!("expr already has an adjustment on it!"), }; self.write_adjustment(expr.id, adjustment); @@ -662,9 +665,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { new: &'b hir::Expr, new_ty: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> - // FIXME(eddyb) use copyable iterators when that becomes ergonomic. where E: Fn() -> I, - I: IntoIterator { + I: IntoIterator + { let prev_ty = self.resolve_type_vars_with_obligations(prev_ty); let new_ty = self.resolve_type_vars_with_obligations(new_ty); @@ -675,8 +678,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Special-case that coercion alone cannot handle: // Two function item types of differing IDs or Substs. match (&prev_ty.sty, &new_ty.sty) { - (&ty::TyFnDef(a_def_id, a_substs, a_fty), - &ty::TyFnDef(b_def_id, b_substs, b_fty)) => { + (&ty::TyFnDef(a_def_id, a_substs, a_fty), &ty::TyFnDef(b_def_id, b_substs, b_fty)) => { // The signature must always match. let fty = self.lub(true, trace.clone(), &a_fty, &b_fty) .map(|InferOk { value, obligations }| { @@ -720,9 +722,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // but only if the new expression has no coercion already applied to it. let mut first_error = None; if !self.tables.borrow().adjustments.contains_key(&new.id) { - let result = self.commit_if_ok(|_| { - apply(&mut coerce, &|| Some(new), new_ty, prev_ty) - }); + let result = self.commit_if_ok(|_| apply(&mut coerce, &|| Some(new), new_ty, prev_ty)); match result { Ok((ty, adjustment)) => { if !adjustment.is_identity() { @@ -730,7 +730,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } return Ok(ty); } - Err(e) => first_error = Some(e) + Err(e) => first_error = Some(e), } } @@ -739,20 +739,20 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // previous expressions, other than noop reborrows (ignoring lifetimes). for expr in exprs() { let noop = match self.tables.borrow().adjustments.get(&expr.id) { - Some(&AdjustDerefRef(AutoDerefRef { - autoderefs: 1, - autoref: Some(AutoPtr(_, mutbl_adj)), - unsize: None - })) => match self.node_ty(expr.id).sty { - ty::TyRef(_, mt_orig) => { - // Reborrow that we can safely ignore. 
- mutbl_adj == mt_orig.mutbl + Some(&AdjustDerefRef(AutoDerefRef { autoderefs: 1, + autoref: Some(AutoPtr(_, mutbl_adj)), + unsize: None })) => { + match self.node_ty(expr.id).sty { + ty::TyRef(_, mt_orig) => { + // Reborrow that we can safely ignore. + mutbl_adj == mt_orig.mutbl + } + _ => false, } - _ => false - }, + } Some(&AdjustNeverToAny(_)) => true, Some(_) => false, - None => true + None => true, }; if !noop { diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs index 1604f34d57552..25e9f1f522c96 100644 --- a/src/librustc_typeck/check/compare_method.rs +++ b/src/librustc_typeck/check/compare_method.rs @@ -38,9 +38,9 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, impl_m_span: Span, impl_m_body_id: ast::NodeId, trait_m: &ty::Method<'tcx>, - impl_trait_ref: &ty::TraitRef<'tcx>) { - debug!("compare_impl_method(impl_trait_ref={:?})", - impl_trait_ref); + impl_trait_ref: &ty::TraitRef<'tcx>, + trait_item_span: Option) { + debug!("compare_impl_method(impl_trait_ref={:?})", impl_trait_ref); debug!("compare_impl_method: impl_trait_ref (liberated) = {:?}", impl_trait_ref); @@ -57,34 +57,36 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, // inscrutable, particularly for cases where one method has no // self. match (&trait_m.explicit_self, &impl_m.explicit_self) { - (&ty::ExplicitSelfCategory::Static, - &ty::ExplicitSelfCategory::Static) => {} + (&ty::ExplicitSelfCategory::Static, &ty::ExplicitSelfCategory::Static) => {} (&ty::ExplicitSelfCategory::Static, _) => { - let mut err = struct_span_err!(tcx.sess, impl_m_span, E0185, - "method `{}` has a `{}` declaration in the impl, \ - but not in the trait", - trait_m.name, - impl_m.explicit_self); - err.span_label(impl_m_span, &format!("`{}` used in impl", - impl_m.explicit_self)); + let mut err = struct_span_err!(tcx.sess, + impl_m_span, + E0185, + "method `{}` has a `{}` declaration in the impl, but \ + not in the trait", + trait_m.name, + impl_m.explicit_self); + err.span_label(impl_m_span, + &format!("`{}` used in impl", impl_m.explicit_self)); if let Some(span) = tcx.map.span_if_local(trait_m.def_id) { - err.span_label(span, &format!("trait declared without `{}`", - impl_m.explicit_self)); + err.span_label(span, + &format!("trait declared without `{}`", impl_m.explicit_self)); } err.emit(); return; } (_, &ty::ExplicitSelfCategory::Static) => { - let mut err = struct_span_err!(tcx.sess, impl_m_span, E0186, - "method `{}` has a `{}` declaration in the trait, \ - but not in the impl", - trait_m.name, - trait_m.explicit_self); - err.span_label(impl_m_span, &format!("expected `{}` in impl", - trait_m.explicit_self)); + let mut err = struct_span_err!(tcx.sess, + impl_m_span, + E0186, + "method `{}` has a `{}` declaration in the trait, but \ + not in the impl", + trait_m.name, + trait_m.explicit_self); + err.span_label(impl_m_span, + &format!("expected `{}` in impl", trait_m.explicit_self)); if let Some(span) = tcx.map.span_if_local(trait_m.def_id) { - err.span_label(span, & format!("`{}` used in trait", - trait_m.explicit_self)); + err.span_label(span, &format!("`{}` used in trait", trait_m.explicit_self)); } err.emit(); return; @@ -97,26 +99,126 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let num_impl_m_type_params = impl_m.generics.types.len(); let num_trait_m_type_params = trait_m.generics.types.len(); if num_impl_m_type_params != num_trait_m_type_params { - span_err!(tcx.sess, impl_m_span, E0049, - "method `{}` has {} type parameter{} 
\ - but its trait declaration has {} type parameter{}", - trait_m.name, - num_impl_m_type_params, - if num_impl_m_type_params == 1 {""} else {"s"}, - num_trait_m_type_params, - if num_trait_m_type_params == 1 {""} else {"s"}); + let impl_m_node_id = tcx.map.as_local_node_id(impl_m.def_id).unwrap(); + let span = match tcx.map.expect_impl_item(impl_m_node_id).node { + ImplItemKind::Method(ref impl_m_sig, _) => { + if impl_m_sig.generics.is_parameterized() { + impl_m_sig.generics.span + } else { + impl_m_span + } + } + _ => bug!("{:?} is not a method", impl_m), + }; + + let mut err = struct_span_err!(tcx.sess, + span, + E0049, + "method `{}` has {} type parameter{} but its trait \ + declaration has {} type parameter{}", + trait_m.name, + num_impl_m_type_params, + if num_impl_m_type_params == 1 { "" } else { "s" }, + num_trait_m_type_params, + if num_trait_m_type_params == 1 { + "" + } else { + "s" + }); + + let mut suffix = None; + + if let Some(span) = trait_item_span { + err.span_label(span, + &format!("expected {}", + &if num_trait_m_type_params != 1 { + format!("{} type parameters", num_trait_m_type_params) + } else { + format!("{} type parameter", num_trait_m_type_params) + })); + } else { + suffix = Some(format!(", expected {}", num_trait_m_type_params)); + } + + err.span_label(span, + &format!("found {}{}", + &if num_impl_m_type_params != 1 { + format!("{} type parameters", num_impl_m_type_params) + } else { + format!("1 type parameter") + }, + suffix.as_ref().map(|s| &s[..]).unwrap_or(""))); + + err.emit(); + return; } if impl_m.fty.sig.0.inputs.len() != trait_m.fty.sig.0.inputs.len() { - span_err!(tcx.sess, impl_m_span, E0050, - "method `{}` has {} parameter{} \ - but the declaration in trait `{}` has {}", - trait_m.name, - impl_m.fty.sig.0.inputs.len(), - if impl_m.fty.sig.0.inputs.len() == 1 {""} else {"s"}, - tcx.item_path_str(trait_m.def_id), - trait_m.fty.sig.0.inputs.len()); + let trait_number_args = trait_m.fty.sig.0.inputs.len(); + let impl_number_args = impl_m.fty.sig.0.inputs.len(); + let trait_m_node_id = tcx.map.as_local_node_id(trait_m.def_id); + let trait_span = if let Some(trait_id) = trait_m_node_id { + match tcx.map.expect_trait_item(trait_id).node { + TraitItem_::MethodTraitItem(ref trait_m_sig, _) => { + if let Some(arg) = trait_m_sig.decl.inputs.get(if trait_number_args > 0 { + trait_number_args - 1 + } else { + 0 + }) { + Some(arg.pat.span) + } else { + trait_item_span + } + } + _ => bug!("{:?} is not a method", impl_m), + } + } else { + trait_item_span + }; + let impl_m_node_id = tcx.map.as_local_node_id(impl_m.def_id).unwrap(); + let impl_span = match tcx.map.expect_impl_item(impl_m_node_id).node { + ImplItemKind::Method(ref impl_m_sig, _) => { + if let Some(arg) = impl_m_sig.decl.inputs.get(if impl_number_args > 0 { + impl_number_args - 1 + } else { + 0 + }) { + arg.pat.span + } else { + impl_m_span + } + } + _ => bug!("{:?} is not a method", impl_m), + }; + let mut err = struct_span_err!(tcx.sess, + impl_span, + E0050, + "method `{}` has {} parameter{} but the declaration in \ + trait `{}` has {}", + trait_m.name, + impl_number_args, + if impl_number_args == 1 { "" } else { "s" }, + tcx.item_path_str(trait_m.def_id), + trait_number_args); + if let Some(trait_span) = trait_span { + err.span_label(trait_span, + &format!("trait requires {}", + &if trait_number_args != 1 { + format!("{} parameters", trait_number_args) + } else { + format!("{} parameter", trait_number_args) + })); + } + err.span_label(impl_span, + &format!("expected {}, found {}", + &if 
trait_number_args != 1 { + format!("{} parameters", trait_number_args) + } else { + format!("{} parameter", trait_number_args) + }, + impl_number_args)); + err.emit(); return; } @@ -192,9 +294,10 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let impl_to_skol_substs = &impl_param_env.free_substs; // Create mapping from trait to skolemized. - let trait_to_skol_substs = - impl_to_skol_substs.rebase_onto(tcx, impl_m.container_id(), - trait_to_impl_substs.subst(tcx, impl_to_skol_substs)); + let trait_to_skol_substs = impl_to_skol_substs.rebase_onto(tcx, + impl_m.container_id(), + trait_to_impl_substs.subst(tcx, + impl_to_skol_substs)); debug!("compare_impl_method: trait_to_skol_substs={:?}", trait_to_skol_substs); @@ -230,31 +333,28 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, // // We then register the obligations from the impl_m and check to see // if all constraints hold. - hybrid_preds.predicates.extend( - trait_m.predicates.instantiate_own(tcx, trait_to_skol_substs).predicates); + hybrid_preds.predicates + .extend(trait_m.predicates.instantiate_own(tcx, trait_to_skol_substs).predicates); // Construct trait parameter environment and then shift it into the skolemized viewpoint. // The key step here is to update the caller_bounds's predicates to be // the new hybrid bounds we computed. let normalize_cause = traits::ObligationCause::misc(impl_m_span, impl_m_body_id); let trait_param_env = impl_param_env.with_caller_bounds(hybrid_preds.predicates); - let trait_param_env = traits::normalize_param_env_or_error(tcx, - trait_param_env, - normalize_cause.clone()); + let trait_param_env = + traits::normalize_param_env_or_error(tcx, trait_param_env, normalize_cause.clone()); // FIXME(@jroesch) this seems ugly, but is a temporary change infcx.parameter_environment = trait_param_env; debug!("compare_impl_method: caller_bounds={:?}", - infcx.parameter_environment.caller_bounds); + infcx.parameter_environment.caller_bounds); let mut selcx = traits::SelectionContext::new(&infcx); let impl_m_own_bounds = impl_m.predicates.instantiate_own(tcx, impl_to_skol_substs); - let (impl_m_own_bounds, _) = - infcx.replace_late_bound_regions_with_fresh_var( - impl_m_span, - infer::HigherRankedType, - &ty::Binder(impl_m_own_bounds.predicates)); + let (impl_m_own_bounds, _) = infcx.replace_late_bound_regions_with_fresh_var(impl_m_span, + infer::HigherRankedType, + &ty::Binder(impl_m_own_bounds.predicates)); for predicate in impl_m_own_bounds { let traits::Normalized { value: predicate, .. 
} = traits::normalize(&mut selcx, normalize_cause.clone(), &predicate); @@ -262,7 +362,7 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let cause = traits::ObligationCause { span: impl_m_span, body_id: impl_m_body_id, - code: traits::ObligationCauseCode::CompareImplMethodObligation + code: traits::ObligationCauseCode::CompareImplMethodObligation, }; fulfillment_cx.register_predicate_obligation( @@ -287,40 +387,34 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let tcx = infcx.tcx; let origin = TypeOrigin::MethodCompatCheck(impl_m_span); - let (impl_sig, _) = - infcx.replace_late_bound_regions_with_fresh_var(impl_m_span, - infer::HigherRankedType, - &impl_m.fty.sig); - let impl_sig = - impl_sig.subst(tcx, impl_to_skol_substs); - let impl_sig = - assoc::normalize_associated_types_in(&infcx, - &mut fulfillment_cx, - impl_m_span, - impl_m_body_id, - &impl_sig); + let (impl_sig, _) = infcx.replace_late_bound_regions_with_fresh_var(impl_m_span, + infer::HigherRankedType, + &impl_m.fty.sig); + let impl_sig = impl_sig.subst(tcx, impl_to_skol_substs); + let impl_sig = assoc::normalize_associated_types_in(&infcx, + &mut fulfillment_cx, + impl_m_span, + impl_m_body_id, + &impl_sig); let impl_fty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { unsafety: impl_m.fty.unsafety, abi: impl_m.fty.abi, - sig: ty::Binder(impl_sig.clone()) + sig: ty::Binder(impl_sig.clone()), })); debug!("compare_impl_method: impl_fty={:?}", impl_fty); - let trait_sig = tcx.liberate_late_bound_regions( - infcx.parameter_environment.free_id_outlive, - &trait_m.fty.sig); - let trait_sig = - trait_sig.subst(tcx, trait_to_skol_substs); - let trait_sig = - assoc::normalize_associated_types_in(&infcx, - &mut fulfillment_cx, - impl_m_span, - impl_m_body_id, - &trait_sig); + let trait_sig = tcx.liberate_late_bound_regions(infcx.parameter_environment.free_id_outlive, + &trait_m.fty.sig); + let trait_sig = trait_sig.subst(tcx, trait_to_skol_substs); + let trait_sig = assoc::normalize_associated_types_in(&infcx, + &mut fulfillment_cx, + impl_m_span, + impl_m_body_id, + &trait_sig); let trait_fty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { unsafety: trait_m.fty.unsafety, abi: trait_m.fty.abi, - sig: ty::Binder(trait_sig.clone()) + sig: ty::Binder(trait_sig.clone()), })); debug!("compare_impl_method: trait_fty={:?}", trait_fty); @@ -330,36 +424,39 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, impl_fty, trait_fty); - let (impl_err_span, trait_err_span) = - extract_spans_for_error_reporting(&infcx, &terr, origin, impl_m, - impl_sig, trait_m, trait_sig); + let (impl_err_span, trait_err_span) = extract_spans_for_error_reporting(&infcx, + &terr, + origin, + impl_m, + impl_sig, + trait_m, + trait_sig); let origin = TypeOrigin::MethodCompatCheck(impl_err_span); - let mut diag = struct_span_err!( - tcx.sess, origin.span(), E0053, - "method `{}` has an incompatible type for trait", trait_m.name - ); - - infcx.note_type_err( - &mut diag, - origin, - trait_err_span.map(|sp| (sp, format!("type in trait"))), - Some(infer::ValuePairs::Types(ExpectedFound { - expected: trait_fty, - found: impl_fty - })), - &terr - ); + let mut diag = struct_span_err!(tcx.sess, + origin.span(), + E0053, + "method `{}` has an incompatible type for trait", + trait_m.name); + + infcx.note_type_err(&mut diag, + origin, + trait_err_span.map(|sp| (sp, format!("type in trait"))), + Some(infer::ValuePairs::Types(ExpectedFound { + expected: trait_fty, + found: impl_fty, + })), + &terr); diag.emit(); - return + return; } // 
Check that all obligations are satisfied by the implementation's // version. if let Err(ref errors) = fulfillment_cx.select_all_or_error(&infcx) { infcx.report_fulfillment_errors(errors); - return + return; } // Finally, resolve all regions. This catches wily misuses of @@ -385,8 +482,7 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, impl_generics: &ty::Generics<'tcx>, trait_to_skol_substs: &Substs<'tcx>, impl_to_skol_substs: &Substs<'tcx>) - -> bool - { + -> bool { let trait_params = &trait_generics.regions[..]; let impl_params = &impl_generics.regions[..]; @@ -411,9 +507,12 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, // are zero. Since I don't quite know how to phrase things at // the moment, give a kind of vague error message. if trait_params.len() != impl_params.len() { - struct_span_err!(ccx.tcx.sess, span, E0195, - "lifetime parameters or bounds on method `{}` do \ - not match the trait declaration",impl_m.name) + struct_span_err!(ccx.tcx.sess, + span, + E0195, + "lifetime parameters or bounds on method `{}` do not match the \ + trait declaration", + impl_m.name) .span_label(span, &format!("lifetimes do not match trait")) .emit(); return false; @@ -429,40 +528,51 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, impl_sig: ty::FnSig<'tcx>, trait_m: &ty::Method, trait_sig: ty::FnSig<'tcx>) - -> (Span, Option) { + -> (Span, Option) { let tcx = infcx.tcx; let impl_m_node_id = tcx.map.as_local_node_id(impl_m.def_id).unwrap(); let (impl_m_output, impl_m_iter) = match tcx.map.expect_impl_item(impl_m_node_id).node { - ImplItemKind::Method(ref impl_m_sig, _) => - (&impl_m_sig.decl.output, impl_m_sig.decl.inputs.iter()), - _ => bug!("{:?} is not a method", impl_m) + ImplItemKind::Method(ref impl_m_sig, _) => { + (&impl_m_sig.decl.output, impl_m_sig.decl.inputs.iter()) + } + _ => bug!("{:?} is not a method", impl_m), }; match *terr { TypeError::Mutability => { if let Some(trait_m_node_id) = tcx.map.as_local_node_id(trait_m.def_id) { let trait_m_iter = match tcx.map.expect_trait_item(trait_m_node_id).node { - TraitItem_::MethodTraitItem(ref trait_m_sig, _) => - trait_m_sig.decl.inputs.iter(), - _ => bug!("{:?} is not a MethodTraitItem", trait_m) + TraitItem_::MethodTraitItem(ref trait_m_sig, _) => { + trait_m_sig.decl.inputs.iter() + } + _ => bug!("{:?} is not a MethodTraitItem", trait_m), }; - impl_m_iter.zip(trait_m_iter).find(|&(ref impl_arg, ref trait_arg)| { - match (&impl_arg.ty.node, &trait_arg.ty.node) { - (&Ty_::TyRptr(_, ref impl_mt), &Ty_::TyRptr(_, ref trait_mt)) | - (&Ty_::TyPtr(ref impl_mt), &Ty_::TyPtr(ref trait_mt)) => - impl_mt.mutbl != trait_mt.mutbl, - _ => false - } - }).map(|(ref impl_arg, ref trait_arg)| { - match (impl_arg.to_self(), trait_arg.to_self()) { - (Some(impl_self), Some(trait_self)) => - (impl_self.span, Some(trait_self.span)), - (None, None) => (impl_arg.ty.span, Some(trait_arg.ty.span)), - _ => bug!("impl and trait fns have different first args, \ - impl: {:?}, trait: {:?}", impl_arg, trait_arg) - } - }).unwrap_or((origin.span(), tcx.map.span_if_local(trait_m.def_id))) + impl_m_iter.zip(trait_m_iter) + .find(|&(ref impl_arg, ref trait_arg)| { + match (&impl_arg.ty.node, &trait_arg.ty.node) { + (&Ty_::TyRptr(_, ref impl_mt), &Ty_::TyRptr(_, ref trait_mt)) | + (&Ty_::TyPtr(ref impl_mt), &Ty_::TyPtr(ref trait_mt)) => { + impl_mt.mutbl != trait_mt.mutbl + } + _ => false, + } + }) + .map(|(ref impl_arg, ref trait_arg)| { + match (impl_arg.to_self(), trait_arg.to_self()) { + (Some(impl_self), 
Some(trait_self)) => { + (impl_self.span, Some(trait_self.span)) + } + (None, None) => (impl_arg.ty.span, Some(trait_arg.ty.span)), + _ => { + bug!("impl and trait fns have different first args, impl: \ + {:?}, trait: {:?}", + impl_arg, + trait_arg) + } + } + }) + .unwrap_or((origin.span(), tcx.map.span_if_local(trait_m.def_id))) } else { (origin.span(), tcx.map.span_if_local(trait_m.def_id)) } @@ -470,25 +580,28 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, TypeError::Sorts(ExpectedFound { .. }) => { if let Some(trait_m_node_id) = tcx.map.as_local_node_id(trait_m.def_id) { let (trait_m_output, trait_m_iter) = - match tcx.map.expect_trait_item(trait_m_node_id).node { - TraitItem_::MethodTraitItem(ref trait_m_sig, _) => - (&trait_m_sig.decl.output, trait_m_sig.decl.inputs.iter()), - _ => bug!("{:?} is not a MethodTraitItem", trait_m) - }; + match tcx.map.expect_trait_item(trait_m_node_id).node { + TraitItem_::MethodTraitItem(ref trait_m_sig, _) => { + (&trait_m_sig.decl.output, trait_m_sig.decl.inputs.iter()) + } + _ => bug!("{:?} is not a MethodTraitItem", trait_m), + }; let impl_iter = impl_sig.inputs.iter(); let trait_iter = trait_sig.inputs.iter(); - impl_iter.zip(trait_iter).zip(impl_m_iter).zip(trait_m_iter) + impl_iter.zip(trait_iter) + .zip(impl_m_iter) + .zip(trait_m_iter) .filter_map(|(((impl_arg_ty, trait_arg_ty), impl_arg), trait_arg)| { match infcx.sub_types(true, origin, trait_arg_ty, impl_arg_ty) { Ok(_) => None, - Err(_) => Some((impl_arg.ty.span, Some(trait_arg.ty.span))) + Err(_) => Some((impl_arg.ty.span, Some(trait_arg.ty.span))), } }) .next() .unwrap_or_else(|| { - if infcx.sub_types(false, origin, impl_sig.output, - trait_sig.output).is_err() { + if infcx.sub_types(false, origin, impl_sig.output, trait_sig.output) + .is_err() { (impl_m_output.span(), Some(trait_m_output.span())) } else { (origin.span(), tcx.map.span_if_local(trait_m.def_id)) @@ -498,7 +611,7 @@ pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, (origin.span(), tcx.map.span_if_local(trait_m.def_id)) } } - _ => (origin.span(), tcx.map.span_if_local(trait_m.def_id)) + _ => (origin.span(), tcx.map.span_if_local(trait_m.def_id)), } } } @@ -508,8 +621,7 @@ pub fn compare_const_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, impl_c_span: Span, trait_c: &ty::AssociatedConst<'tcx>, impl_trait_ref: &ty::TraitRef<'tcx>) { - debug!("compare_const_impl(impl_trait_ref={:?})", - impl_trait_ref); + debug!("compare_const_impl(impl_trait_ref={:?})", impl_trait_ref); let tcx = ccx.tcx; tcx.infer_ctxt(None, None, Reveal::NotSpecializable).enter(|infcx| { @@ -531,11 +643,12 @@ pub fn compare_const_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let impl_to_skol_substs = &impl_param_env.free_substs; // Create mapping from trait to skolemized. - let trait_to_skol_substs = - impl_to_skol_substs.rebase_onto(tcx, impl_c.container.id(), - trait_to_impl_substs.subst(tcx, impl_to_skol_substs)); + let trait_to_skol_substs = impl_to_skol_substs.rebase_onto(tcx, + impl_c.container.id(), + trait_to_impl_substs.subst(tcx, + impl_to_skol_substs)); debug!("compare_const_impl: trait_to_skol_substs={:?}", - trait_to_skol_substs); + trait_to_skol_substs); // Compute skolemized form of impl and trait const tys. let impl_ty = impl_c.ty.subst(tcx, impl_to_skol_substs); @@ -544,31 +657,27 @@ pub fn compare_const_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let err = infcx.commit_if_ok(|_| { // There is no "body" here, so just pass dummy id. 
- let impl_ty = - assoc::normalize_associated_types_in(&infcx, - &mut fulfillment_cx, - impl_c_span, - 0, - &impl_ty); - - debug!("compare_const_impl: impl_ty={:?}", - impl_ty); - - let trait_ty = - assoc::normalize_associated_types_in(&infcx, - &mut fulfillment_cx, - impl_c_span, - 0, - &trait_ty); - - debug!("compare_const_impl: trait_ty={:?}", - trait_ty); + let impl_ty = assoc::normalize_associated_types_in(&infcx, + &mut fulfillment_cx, + impl_c_span, + ast::CRATE_NODE_ID, + &impl_ty); + + debug!("compare_const_impl: impl_ty={:?}", impl_ty); + + let trait_ty = assoc::normalize_associated_types_in(&infcx, + &mut fulfillment_cx, + impl_c_span, + ast::CRATE_NODE_ID, + &trait_ty); + + debug!("compare_const_impl: trait_ty={:?}", trait_ty); infcx.sub_types(false, origin, impl_ty, trait_ty) - .map(|InferOk { obligations, .. }| { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()) - }) + .map(|InferOk { obligations, .. }| { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()) + }) }); if let Err(terr) = err { @@ -579,31 +688,31 @@ pub fn compare_const_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, // Locate the Span containing just the type of the offending impl match tcx.map.expect_impl_item(impl_c_node_id).node { ImplItemKind::Const(ref ty, _) => origin = TypeOrigin::Misc(ty.span), - _ => bug!("{:?} is not a impl const", impl_c) + _ => bug!("{:?} is not a impl const", impl_c), } - let mut diag = struct_span_err!( - tcx.sess, origin.span(), E0326, - "implemented const `{}` has an incompatible type for trait", - trait_c.name - ); + let mut diag = struct_span_err!(tcx.sess, + origin.span(), + E0326, + "implemented const `{}` has an incompatible type for \ + trait", + trait_c.name); // Add a label to the Span containing just the type of the item let trait_c_node_id = tcx.map.as_local_node_id(trait_c.def_id).unwrap(); let trait_c_span = match tcx.map.expect_trait_item(trait_c_node_id).node { TraitItem_::ConstTraitItem(ref ty, _) => ty.span, - _ => bug!("{:?} is not a trait const", trait_c) + _ => bug!("{:?} is not a trait const", trait_c), }; - infcx.note_type_err( - &mut diag, - origin, - Some((trait_c_span, format!("type in trait"))), - Some(infer::ValuePairs::Types(ExpectedFound { - expected: trait_ty, - found: impl_ty - })), &terr - ); + infcx.note_type_err(&mut diag, + origin, + Some((trait_c_span, format!("type in trait"))), + Some(infer::ValuePairs::Types(ExpectedFound { + expected: trait_ty, + found: impl_ty, + })), + &terr); diag.emit(); } }); diff --git a/src/librustc_typeck/check/dropck.rs b/src/librustc_typeck/check/dropck.rs index cc958fb3b2343..5e2b49bac1b2f 100644 --- a/src/librustc_typeck/check/dropck.rs +++ b/src/librustc_typeck/check/dropck.rs @@ -71,7 +71,8 @@ fn ensure_drop_params_and_item_params_correspond<'a, 'tcx>( ccx: &CrateCtxt<'a, 'tcx>, drop_impl_did: DefId, drop_impl_ty: Ty<'tcx>, - self_type_did: DefId) -> Result<(), ()> + self_type_did: DefId) + -> Result<(), ()> { let tcx = ccx.tcx; let drop_impl_node_id = tcx.map.as_local_node_id(drop_impl_did).unwrap(); @@ -123,7 +124,9 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'a, 'tcx>( drop_impl_did: DefId, dtor_predicates: &ty::GenericPredicates<'tcx>, self_type_did: DefId, - self_to_impl_substs: &Substs<'tcx>) -> Result<(), ()> { + self_to_impl_substs: &Substs<'tcx>) + -> Result<(), ()> +{ // Here is an example, analogous to that from // `compare_impl_method`. 
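(Aside, not part of the patch: the `compare_const_impl` hunk just above rewraps the E0326 diagnostic, "implemented const `{}` has an incompatible type for trait". A minimal, hypothetical snippet that would trigger it is sketched below; only the error code and message quoted from the hunk are taken as given, the types and names are made up.)

```rust
// Hypothetical example (not from this patch): the associated const's type in
// the impl disagrees with the trait's declaration, which is what E0326 reports.
trait Counter {
    const START: u32;
}

struct Meters;

impl Counter for Meters {
    // error[E0326]: implemented const `START` has an incompatible type for trait
    const START: i64 = 0; // trait expects `u32`
}

fn main() {}
```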
@@ -350,7 +353,8 @@ fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'gcx, 'tcx>( cx: &mut DropckContext<'a, 'b, 'gcx, 'tcx>, context: TypeContext, ty: Ty<'tcx>, - depth: usize) -> Result<(), Error<'tcx>> + depth: usize) + -> Result<(), Error<'tcx>> { let tcx = cx.rcx.tcx; // Issue #22443: Watch out for overflow. While we are careful to @@ -402,16 +406,27 @@ fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'gcx, 'tcx>( // unbounded type parameter `T`, we must resume the recursive // analysis on `T` (since it would be ignored by // type_must_outlive). - if has_dtor_of_interest(tcx, ty) { - debug!("iterate_over_potentially_unsafe_regions_in_type \ - {}ty: {} - is a dtorck type!", - (0..depth).map(|_| ' ').collect::(), - ty); - - cx.rcx.type_must_outlive(infer::SubregionOrigin::SafeDestructor(cx.span), - ty, tcx.mk_region(ty::ReScope(cx.parent_scope))); - - return Ok(()); + let dropck_kind = has_dtor_of_interest(tcx, ty); + debug!("iterate_over_potentially_unsafe_regions_in_type \ + ty: {:?} dropck_kind: {:?}", ty, dropck_kind); + match dropck_kind { + DropckKind::NoBorrowedDataAccessedInMyDtor => { + // The maximally blind attribute. + } + DropckKind::BorrowedDataMustStrictlyOutliveSelf => { + cx.rcx.type_must_outlive(infer::SubregionOrigin::SafeDestructor(cx.span), + ty, tcx.mk_region(ty::ReScope(cx.parent_scope))); + return Ok(()); + } + DropckKind::RevisedSelf(revised_ty) => { + cx.rcx.type_must_outlive(infer::SubregionOrigin::SafeDestructor(cx.span), + revised_ty, tcx.mk_region(ty::ReScope(cx.parent_scope))); + // Do not return early from this case; we want + // to recursively process the internal structure of Self + // (because even though the Drop for Self has been asserted + // safe, the types instantiated for the generics of Self + // may themselves carry dropck constraints.) + } } debug!("iterate_over_potentially_unsafe_regions_in_type \ @@ -492,16 +507,140 @@ fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'gcx, 'tcx>( } } +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +enum DropckKind<'tcx> { + /// The "safe" kind; i.e. conservatively assume any borrow + /// accessed by dtor, and therefore such data must strictly + /// outlive self. + /// + /// Equivalent to RevisedTy with no change to the self type. + BorrowedDataMustStrictlyOutliveSelf, + + /// The nearly completely-unsafe kind. + /// + /// Equivalent to RevisedSelf with *all* parameters remapped to () + /// (maybe...?) + NoBorrowedDataAccessedInMyDtor, + + /// Assume all borrowed data access by dtor occurs as if Self has the + /// type carried by this variant. In practice this means that some + /// of the type parameters are remapped to `()` (and some lifetime + /// parameters remapped to `'static`), because the developer has asserted + /// that the destructor will not access their contents. + RevisedSelf(Ty<'tcx>), +} + +/// Returns the classification of what kind of check should be applied +/// to `ty`, which may include a revised type where some of the type +/// parameters are re-mapped to `()` to reflect the destructor's +/// "purity" with respect to their actual contents. fn has_dtor_of_interest<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, - ty: Ty<'tcx>) -> bool { + ty: Ty<'tcx>) + -> DropckKind<'tcx> { match ty.sty { - ty::TyAdt(def, _) => { - def.is_dtorck(tcx) + ty::TyAdt(adt_def, substs) => { + if !adt_def.is_dtorck(tcx) { + return DropckKind::NoBorrowedDataAccessedInMyDtor; + } + + // Find the `impl<..> Drop for _` to inspect any + // attributes attached to the impl's generics. 
+ let dtor_method = adt_def.destructor() + .expect("dtorck type without destructor impossible"); + let method = tcx.impl_or_trait_item(dtor_method); + let impl_id: DefId = method.container().id(); + let revised_ty = revise_self_ty(tcx, adt_def, impl_id, substs); + return DropckKind::RevisedSelf(revised_ty); } ty::TyTrait(..) | ty::TyProjection(..) | ty::TyAnon(..) => { debug!("ty: {:?} isn't known, and therefore is a dropck type", ty); - true + return DropckKind::BorrowedDataMustStrictlyOutliveSelf; }, - _ => false + _ => { + return DropckKind::NoBorrowedDataAccessedInMyDtor; + } } } + +// Constructs new Ty just like the type defined by `adt_def` coupled +// with `substs`, except each type and lifetime parameter marked as +// `#[may_dangle]` in the Drop impl (identified by `impl_id`) is +// respectively mapped to `()` or `'static`. +// +// For example: If the `adt_def` maps to: +// +// enum Foo<'a, X, Y> { ... } +// +// and the `impl_id` maps to: +// +// impl<#[may_dangle] 'a, X, #[may_dangle] Y> Drop for Foo<'a, X, Y> { ... } +// +// then revises input: `Foo<'r,i64,&'r i64>` to: `Foo<'static,i64,()>` +fn revise_self_ty<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + adt_def: ty::AdtDef<'tcx>, + impl_id: DefId, + substs: &Substs<'tcx>) + -> Ty<'tcx> { + // Get generics for `impl Drop` to query for `#[may_dangle]` attr. + let impl_bindings = tcx.lookup_generics(impl_id); + + // Get Substs attached to Self on `impl Drop`; process in parallel + // with `substs`, replacing dangling entries as appropriate. + let self_substs = { + let impl_self_ty: Ty<'tcx> = tcx.lookup_item_type(impl_id).ty; + if let ty::TyAdt(self_adt_def, self_substs) = impl_self_ty.sty { + assert_eq!(adt_def, self_adt_def); + self_substs + } else { + bug!("Self in `impl Drop for _` must be an Adt."); + } + }; + + // Walk `substs` + `self_substs`, build new substs appropriate for + // `adt_def`; each non-dangling param reuses entry from `substs`. + // + // Note: The manner we map from a right-hand side (i.e. Region or + // Ty) for a given `def` to generic parameter associated with that + // right-hand side is tightly coupled to `Drop` impl constraints. + // + // E.g. we know such a Ty must be `TyParam`, because a destructor + // for `struct Foo` is defined via `impl Drop for Foo`, + // and never by (for example) `impl Drop for Foo>`. 
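(Aside, illustration only: the comments above describe how a `#[may_dangle]` parameter on a `Drop` impl is remapped when dropck revises the self type, `()` for type parameters and `'static` for lifetimes. A hedged sketch of such an impl follows. The feature-gate names `dropck_eyepatch` and `generic_param_attrs`, and the requirement to write the impl as `unsafe impl`, are assumptions about the nightly toolchain of this era and are not taken from this hunk.)

```rust
// Sketch under the assumptions stated above; requires a nightly compiler.
#![feature(dropck_eyepatch, generic_param_attrs)]

struct Inspector<'a, T: 'a>(&'a T);

// By marking `'a` and `T` as `#[may_dangle]`, the author asserts the destructor
// never reads the borrowed data or `T`'s contents. Per `revise_self_ty` above,
// dropck would then check `Inspector<'static, ()>` instead of `Inspector<'a, T>`.
unsafe impl<#[may_dangle] 'a, #[may_dangle] T> Drop for Inspector<'a, T> {
    fn drop(&mut self) {
        // Must not dereference `self.0` here.
    }
}

fn main() {
    let x = 0u8;
    let _i = Inspector(&x);
}
```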
+ let substs = Substs::for_item( + tcx, + adt_def.did, + |def, _| { + let r_orig = substs.region_for_def(def); + let impl_self_orig = self_substs.region_for_def(def); + let r = if let ty::Region::ReEarlyBound(ref ebr) = *impl_self_orig { + if impl_bindings.region_param(ebr).pure_wrt_drop { + tcx.mk_region(ty::ReStatic) + } else { + r_orig + } + } else { + bug!("substs for an impl must map regions to ReEarlyBound"); + }; + debug!("has_dtor_of_interest mapping def {:?} orig {:?} to {:?}", + def, r_orig, r); + r + }, + |def, _| { + let t_orig = substs.type_for_def(def); + let impl_self_orig = self_substs.type_for_def(def); + let t = if let ty::TypeVariants::TyParam(ref pt) = impl_self_orig.sty { + if impl_bindings.type_param(pt).pure_wrt_drop { + tcx.mk_nil() + } else { + t_orig + } + } else { + bug!("substs for an impl must map types to TyParam"); + }; + debug!("has_dtor_of_interest mapping def {:?} orig {:?} {:?} to {:?} {:?}", + def, t_orig, t_orig.sty, t, t.sty); + t + }); + + return tcx.mk_adt(adt_def, &substs); +} diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs index 563b645910e41..7d2547ec17f3a 100644 --- a/src/librustc_typeck/check/intrinsic.rs +++ b/src/librustc_typeck/check/intrinsic.rs @@ -86,18 +86,18 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { //We only care about the operation here let (n_tps, inputs, output) = match split[1] { - "cxchg" | "cxchgweak" => (1, vec!(tcx.mk_mut_ptr(param(ccx, 0)), + "cxchg" | "cxchgweak" => (1, vec![tcx.mk_mut_ptr(param(ccx, 0)), param(ccx, 0), - param(ccx, 0)), - tcx.mk_tup(vec!(param(ccx, 0), tcx.types.bool))), - "load" => (1, vec!(tcx.mk_imm_ptr(param(ccx, 0))), + param(ccx, 0)], + tcx.intern_tup(&[param(ccx, 0), tcx.types.bool])), + "load" => (1, vec![tcx.mk_imm_ptr(param(ccx, 0))], param(ccx, 0)), - "store" => (1, vec!(tcx.mk_mut_ptr(param(ccx, 0)), param(ccx, 0)), + "store" => (1, vec![tcx.mk_mut_ptr(param(ccx, 0)), param(ccx, 0)], tcx.mk_nil()), "xchg" | "xadd" | "xsub" | "and" | "nand" | "or" | "xor" | "max" | "min" | "umax" | "umin" => { - (1, vec!(tcx.mk_mut_ptr(param(ccx, 0)), param(ccx, 0)), + (1, vec![tcx.mk_mut_ptr(param(ccx, 0)), param(ccx, 0)], param(ccx, 0)) } "fence" | "singlethreadfence" => { @@ -129,14 +129,14 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { "rustc_peek" => (1, vec![param(ccx, 0)], param(ccx, 0)), "init" => (1, Vec::new(), param(ccx, 0)), "uninit" => (1, Vec::new(), param(ccx, 0)), - "forget" => (1, vec!( param(ccx, 0) ), tcx.mk_nil()), - "transmute" => (2, vec!( param(ccx, 0) ), param(ccx, 1)), + "forget" => (1, vec![ param(ccx, 0) ], tcx.mk_nil()), + "transmute" => (2, vec![ param(ccx, 0) ], param(ccx, 1)), "move_val_init" => { (1, - vec!( + vec![ tcx.mk_mut_ptr(param(ccx, 0)), param(ccx, 0) - ), + ], tcx.mk_nil()) } "drop_in_place" => { @@ -148,13 +148,13 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { "type_id" => (1, Vec::new(), ccx.tcx.types.u64), "offset" | "arith_offset" => { (1, - vec!( + vec![ tcx.mk_ptr(ty::TypeAndMut { ty: param(ccx, 0), mutbl: hir::MutImmutable }), ccx.tcx.types.isize - ), + ], tcx.mk_ptr(ty::TypeAndMut { ty: param(ccx, 0), mutbl: hir::MutImmutable @@ -162,7 +162,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { } "copy" | "copy_nonoverlapping" => { (1, - vec!( + vec![ tcx.mk_ptr(ty::TypeAndMut { ty: param(ccx, 0), mutbl: hir::MutImmutable @@ -172,12 +172,12 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { mutbl: 
hir::MutMutable }), tcx.types.usize, - ), + ], tcx.mk_nil()) } "volatile_copy_memory" | "volatile_copy_nonoverlapping_memory" => { (1, - vec!( + vec![ tcx.mk_ptr(ty::TypeAndMut { ty: param(ccx, 0), mutbl: hir::MutMutable @@ -187,94 +187,94 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { mutbl: hir::MutImmutable }), tcx.types.usize, - ), + ], tcx.mk_nil()) } "write_bytes" | "volatile_set_memory" => { (1, - vec!( + vec![ tcx.mk_ptr(ty::TypeAndMut { ty: param(ccx, 0), mutbl: hir::MutMutable }), tcx.types.u8, tcx.types.usize, - ), + ], tcx.mk_nil()) } - "sqrtf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "sqrtf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), + "sqrtf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "sqrtf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), "powif32" => { (0, - vec!( tcx.types.f32, tcx.types.i32 ), + vec![ tcx.types.f32, tcx.types.i32 ], tcx.types.f32) } "powif64" => { (0, - vec!( tcx.types.f64, tcx.types.i32 ), + vec![ tcx.types.f64, tcx.types.i32 ], tcx.types.f64) } - "sinf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "sinf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), - "cosf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "cosf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), + "sinf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "sinf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "cosf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "cosf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), "powf32" => { (0, - vec!( tcx.types.f32, tcx.types.f32 ), + vec![ tcx.types.f32, tcx.types.f32 ], tcx.types.f32) } "powf64" => { (0, - vec!( tcx.types.f64, tcx.types.f64 ), + vec![ tcx.types.f64, tcx.types.f64 ], tcx.types.f64) } - "expf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "expf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), - "exp2f32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "exp2f64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), - "logf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "logf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), - "log10f32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "log10f64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), - "log2f32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "log2f64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), + "expf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "expf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "exp2f32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "exp2f64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "logf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "logf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "log10f32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "log10f64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "log2f32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "log2f64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), "fmaf32" => { (0, - vec!( tcx.types.f32, tcx.types.f32, tcx.types.f32 ), + vec![ tcx.types.f32, tcx.types.f32, tcx.types.f32 ], tcx.types.f32) } "fmaf64" => { (0, - vec!( tcx.types.f64, tcx.types.f64, tcx.types.f64 ), + vec![ tcx.types.f64, tcx.types.f64, tcx.types.f64 ], tcx.types.f64) } - "fabsf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "fabsf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), - "copysignf32" => (0, vec!( tcx.types.f32, tcx.types.f32 ), tcx.types.f32), - "copysignf64" => (0, vec!( tcx.types.f64, tcx.types.f64 ), tcx.types.f64), - "floorf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "floorf64" => (0, vec!( tcx.types.f64 ), 
tcx.types.f64), - "ceilf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "ceilf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), - "truncf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "truncf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), - "rintf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "rintf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), - "nearbyintf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "nearbyintf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), - "roundf32" => (0, vec!( tcx.types.f32 ), tcx.types.f32), - "roundf64" => (0, vec!( tcx.types.f64 ), tcx.types.f64), + "fabsf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "fabsf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "copysignf32" => (0, vec![ tcx.types.f32, tcx.types.f32 ], tcx.types.f32), + "copysignf64" => (0, vec![ tcx.types.f64, tcx.types.f64 ], tcx.types.f64), + "floorf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "floorf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "ceilf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "ceilf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "truncf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "truncf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "rintf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "rintf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "nearbyintf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "nearbyintf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), + "roundf32" => (0, vec![ tcx.types.f32 ], tcx.types.f32), + "roundf64" => (0, vec![ tcx.types.f64 ], tcx.types.f64), "volatile_load" => - (1, vec!( tcx.mk_imm_ptr(param(ccx, 0)) ), param(ccx, 0)), + (1, vec![ tcx.mk_imm_ptr(param(ccx, 0)) ], param(ccx, 0)), "volatile_store" => - (1, vec!( tcx.mk_mut_ptr(param(ccx, 0)), param(ccx, 0) ), tcx.mk_nil()), + (1, vec![ tcx.mk_mut_ptr(param(ccx, 0)), param(ccx, 0) ], tcx.mk_nil()), - "ctpop" | "ctlz" | "cttz" | "bswap" => (1, vec!(param(ccx, 0)), param(ccx, 0)), + "ctpop" | "ctlz" | "cttz" | "bswap" => (1, vec![param(ccx, 0)], param(ccx, 0)), "add_with_overflow" | "sub_with_overflow" | "mul_with_overflow" => - (1, vec!(param(ccx, 0), param(ccx, 0)), - tcx.mk_tup(vec!(param(ccx, 0), tcx.types.bool))), + (1, vec![param(ccx, 0), param(ccx, 0)], + tcx.intern_tup(&[param(ccx, 0), tcx.types.bool])), "unchecked_div" | "unchecked_rem" => (1, vec![param(ccx, 0), param(ccx, 0)], param(ccx, 0)), @@ -379,7 +379,7 @@ pub fn check_platform_intrinsic_type(ccx: &CrateCtxt, span_err!(tcx.sess, it.span, E0444, "platform-specific intrinsic has invalid number of \ arguments: found {}, expected {}", - intr.inputs.len(), sig.inputs.len()); + sig.inputs.len(), intr.inputs.len()); return } let input_pairs = intr.inputs.iter().zip(&sig.inputs); diff --git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index ab59fafb65209..722089cd50c0c 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -23,7 +23,7 @@ use rustc::hir; use std::ops::Deref; -struct ConfirmContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a>{ +struct ConfirmContext<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, span: Span, self_expr: &'gcx hir::Expr, @@ -55,8 +55,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { unadjusted_self_ty: Ty<'tcx>, pick: probe::Pick<'tcx>, supplied_method_types: Vec>) - -> ty::MethodCallee<'tcx> - { + -> ty::MethodCallee<'tcx> { debug!("confirm(unadjusted_self_ty={:?}, pick={:?}, supplied_method_types={:?})", unadjusted_self_ty, 
pick, @@ -72,17 +71,20 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { span: Span, self_expr: &'gcx hir::Expr, call_expr: &'gcx hir::Expr) - -> ConfirmContext<'a, 'gcx, 'tcx> - { - ConfirmContext { fcx: fcx, span: span, self_expr: self_expr, call_expr: call_expr } + -> ConfirmContext<'a, 'gcx, 'tcx> { + ConfirmContext { + fcx: fcx, + span: span, + self_expr: self_expr, + call_expr: call_expr, + } } fn confirm(&mut self, unadjusted_self_ty: Ty<'tcx>, pick: probe::Pick<'tcx>, supplied_method_types: Vec>) - -> ty::MethodCallee<'tcx> - { + -> ty::MethodCallee<'tcx> { // Adjust the self expression the user provided and obtain the adjusted type. let self_ty = self.adjust_self_ty(unadjusted_self_ty, &pick); @@ -91,18 +93,13 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { // Create substitutions for the method's type parameters. let rcvr_substs = self.fresh_receiver_substs(self_ty, &pick); - let all_substs = - self.instantiate_method_substs( - &pick, - supplied_method_types, - rcvr_substs); + let all_substs = self.instantiate_method_substs(&pick, supplied_method_types, rcvr_substs); debug!("all_substs={:?}", all_substs); // Create the final signature for the method, replacing late-bound regions. - let InstantiatedMethodSig { - method_sig, method_predicates - } = self.instantiate_method_sig(&pick, all_substs); + let InstantiatedMethodSig { method_sig, method_predicates } = + self.instantiate_method_sig(&pick, all_substs); let method_self_ty = method_sig.inputs[0]; // Unify the (adjusted) self type with what the method expects. @@ -111,12 +108,13 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { // Create the method type let def_id = pick.item.def_id(); let method_ty = pick.item.as_opt_method().unwrap(); - let fty = self.tcx.mk_fn_def(def_id, all_substs, + let fty = self.tcx.mk_fn_def(def_id, + all_substs, self.tcx.mk_bare_fn(ty::BareFnTy { - sig: ty::Binder(method_sig), - unsafety: method_ty.fty.unsafety, - abi: method_ty.fty.abi.clone(), - })); + sig: ty::Binder(method_sig), + unsafety: method_ty.fty.unsafety, + abi: method_ty.fty.abi.clone(), + })); // Add any trait/regions obligations specified on the method's type parameters. self.add_obligations(fty, all_substs, &method_predicates); @@ -125,7 +123,7 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { let callee = ty::MethodCallee { def_id: def_id, ty: fty, - substs: all_substs + substs: all_substs, }; if let Some(hir::MutMutable) = pick.autoref { @@ -141,14 +139,12 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { fn adjust_self_ty(&mut self, unadjusted_self_ty: Ty<'tcx>, pick: &probe::Pick<'tcx>) - -> Ty<'tcx> - { + -> Ty<'tcx> { let (autoref, unsize) = if let Some(mutbl) = pick.autoref { let region = self.next_region_var(infer::Autoref(self.span)); let autoref = AutoPtr(region, mutbl); - (Some(autoref), pick.unsize.map(|target| { - target.adjust_for_autoref(self.tcx, Some(autoref)) - })) + (Some(autoref), + pick.unsize.map(|target| target.adjust_for_autoref(self.tcx, Some(autoref)))) } else { // No unsizing should be performed without autoref (at // least during method dispach). This is because we @@ -168,11 +164,12 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { autoderef.finalize(LvaluePreference::NoPreference, Some(self.self_expr)); // Write out the final adjustment. 
- self.write_adjustment(self.self_expr.id, AdjustDerefRef(AutoDerefRef { - autoderefs: pick.autoderefs, - autoref: autoref, - unsize: unsize - })); + self.write_adjustment(self.self_expr.id, + AdjustDerefRef(AutoDerefRef { + autoderefs: pick.autoderefs, + autoref: autoref, + unsize: unsize, + })); if let Some(target) = unsize { target @@ -193,13 +190,13 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { fn fresh_receiver_substs(&mut self, self_ty: Ty<'tcx>, pick: &probe::Pick<'tcx>) - -> &'tcx Substs<'tcx> - { + -> &'tcx Substs<'tcx> { match pick.kind { probe::InherentImplPick => { let impl_def_id = pick.item.container().id(); assert!(self.tcx.impl_trait_ref(impl_def_id).is_none(), - "impl {:?} is not an inherent impl", impl_def_id); + "impl {:?} is not an inherent impl", + impl_def_id); self.impl_self_ty(self.span, impl_def_id).substs } @@ -216,10 +213,8 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { // argument type), but those cases have already // been ruled out when we deemed the trait to be // "object safe". - let original_poly_trait_ref = - principal.with_self_ty(this.tcx, object_ty); - let upcast_poly_trait_ref = - this.upcast(original_poly_trait_ref, trait_def_id); + let original_poly_trait_ref = principal.with_self_ty(this.tcx, object_ty); + let upcast_poly_trait_ref = this.upcast(original_poly_trait_ref, trait_def_id); let upcast_trait_ref = this.replace_late_bound_regions_with_fresh_var(&upcast_poly_trait_ref); debug!("original_poly_trait_ref={:?} upcast_trait_ref={:?} target_trait={:?}", @@ -242,10 +237,9 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { // the impl ([$A,$B,$C]) not the receiver type ([$C]). let impl_polytype = self.impl_self_ty(self.span, impl_def_id); let impl_trait_ref = - self.instantiate_type_scheme( - self.span, - impl_polytype.substs, - &self.tcx.impl_trait_ref(impl_def_id).unwrap()); + self.instantiate_type_scheme(self.span, + impl_polytype.substs, + &self.tcx.impl_trait_ref(impl_def_id).unwrap()); impl_trait_ref.substs } @@ -268,12 +262,11 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { } } - fn extract_existential_trait_ref(&mut self, - self_ty: Ty<'tcx>, - mut closure: F) -> R + fn extract_existential_trait_ref(&mut self, self_ty: Ty<'tcx>, mut closure: F) -> R where F: FnMut(&mut ConfirmContext<'a, 'gcx, 'tcx>, Ty<'tcx>, - ty::PolyExistentialTraitRef<'tcx>) -> R, + ty::PolyExistentialTraitRef<'tcx>) + -> R { // If we specified that this is an object method, then the // self-type ought to be something that can be dereferenced to @@ -281,7 +274,8 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { // etc). // FIXME: this feels, like, super dubious - self.fcx.autoderef(self.span, self_ty) + self.fcx + .autoderef(self.span, self_ty) .filter_map(|(ty, _)| { match ty.sty { ty::TyTrait(ref data) => Some(closure(self, ty, data.principal)), @@ -290,10 +284,9 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { }) .next() .unwrap_or_else(|| { - span_bug!( - self.span, - "self-type `{}` for ObjectPick never dereferenced to an object", - self_ty) + span_bug!(self.span, + "self-type `{}` for ObjectPick never dereferenced to an object", + self_ty) }) } @@ -301,8 +294,7 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { pick: &probe::Pick<'tcx>, mut supplied_method_types: Vec>, substs: &Substs<'tcx>) - -> &'tcx Substs<'tcx> - { + -> &'tcx Substs<'tcx> { // Determine the values for the generic parameters of the method. // If they were not explicitly supplied, just construct fresh // variables. 
@@ -312,13 +304,26 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { if num_supplied_types > 0 && num_supplied_types != num_method_types { if num_method_types == 0 { - span_err!(self.tcx.sess, self.span, E0035, - "does not take type parameters"); + struct_span_err!(self.tcx.sess, + self.span, + E0035, + "does not take type parameters") + .span_label(self.span, &"called with unneeded type parameters") + .emit(); } else { - span_err!(self.tcx.sess, self.span, E0036, - "incorrect number of type parameters given for this method: \ - expected {}, found {}", - num_method_types, num_supplied_types); + struct_span_err!(self.tcx.sess, + self.span, + E0036, + "incorrect number of type parameters given for this method: \ + expected {}, found {}", + num_method_types, + num_supplied_types) + .span_label(self.span, + &format!("Passed {} type argument{}, expected {}", + num_supplied_types, + if num_supplied_types != 1 { "s" } else { "" }, + num_method_types)) + .emit(); } supplied_method_types = vec![self.tcx.types.err; num_method_types]; } @@ -328,14 +333,17 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { // // FIXME -- permit users to manually specify lifetimes let supplied_start = substs.params().len() + method.generics.regions.len(); - Substs::for_item(self.tcx, method.def_id, |def, _| { + Substs::for_item(self.tcx, + method.def_id, + |def, _| { let i = def.index as usize; if i < substs.params().len() { substs.region_at(i) } else { self.region_var_for_def(self.span, def) } - }, |def, cur_substs| { + }, + |def, cur_substs| { let i = def.index as usize; if i < substs.params().len() { substs.type_at(i) @@ -347,21 +355,17 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { }) } - fn unify_receivers(&mut self, - self_ty: Ty<'tcx>, - method_self_ty: Ty<'tcx>) - { - match self.sub_types(false, TypeOrigin::Misc(self.span), - self_ty, method_self_ty) { + fn unify_receivers(&mut self, self_ty: Ty<'tcx>, method_self_ty: Ty<'tcx>) { + match self.sub_types(false, TypeOrigin::Misc(self.span), self_ty, method_self_ty) { Ok(InferOk { obligations, .. }) => { // FIXME(#32730) propagate obligations assert!(obligations.is_empty()); } Err(_) => { - span_bug!( - self.span, - "{} was a subtype of {} but now is not?", - self_ty, method_self_ty); + span_bug!(self.span, + "{} was a subtype of {} but now is not?", + self_ty, + method_self_ty); } } } @@ -372,8 +376,7 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { fn instantiate_method_sig(&mut self, pick: &probe::Pick<'tcx>, all_substs: &'tcx Substs<'tcx>) - -> InstantiatedMethodSig<'tcx> - { + -> InstantiatedMethodSig<'tcx> { debug!("instantiate_method_sig(pick={:?}, all_substs={:?})", pick, all_substs); @@ -381,13 +384,14 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { // Instantiate the bounds on the method with the // type/early-bound-regions substitutions performed. There can // be no late-bound regions appearing here. 
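(Aside: the hunk above upgrades the plain E0035/E0036 errors to `struct_span_err!` calls with span labels, e.g. "Passed {} type argument{}, expected {}". A hypothetical snippet that would hit E0036 in the compiler version this patch targets is sketched below; the struct and method are invented for illustration.)

```rust
// Illustration only: the method takes one type parameter but is called with two.
struct Wrapper(u32);

impl Wrapper {
    fn convert<T: From<u32>>(&self) -> T {
        T::from(self.0)
    }
}

fn main() {
    let w = Wrapper(5);
    // error[E0036]: incorrect number of type parameters given for this method:
    //               expected 1, found 2
    let _x = w.convert::<u64, u64>();
}
```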
- let method_predicates = pick.item.as_opt_method().unwrap() - .predicates.instantiate(self.tcx, all_substs); - let method_predicates = self.normalize_associated_types_in(self.span, - &method_predicates); + let method_predicates = pick.item + .as_opt_method() + .unwrap() + .predicates + .instantiate(self.tcx, all_substs); + let method_predicates = self.normalize_associated_types_in(self.span, &method_predicates); - debug!("method_predicates after subst = {:?}", - method_predicates); + debug!("method_predicates after subst = {:?}", method_predicates); // Instantiate late-bound regions and substitute the trait // parameters into the method type to get the actual method type. @@ -395,14 +399,16 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { // NB: Instantiate late-bound regions first so that // `instantiate_type_scheme` can normalize associated types that // may reference those regions. - let method_sig = self.replace_late_bound_regions_with_fresh_var( - &pick.item.as_opt_method().unwrap().fty.sig); + let method_sig = self.replace_late_bound_regions_with_fresh_var(&pick.item + .as_opt_method() + .unwrap() + .fty + .sig); debug!("late-bound lifetimes from method instantiated, method_sig={:?}", method_sig); let method_sig = self.instantiate_type_scheme(self.span, all_substs, &method_sig); - debug!("type scheme substituted, method_sig={:?}", - method_sig); + debug!("type scheme substituted, method_sig={:?}", method_sig); InstantiatedMethodSig { method_sig: method_sig, @@ -419,9 +425,8 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { all_substs, method_predicates); - self.add_obligations_for_parameters( - traits::ObligationCause::misc(self.span, self.body_id), - method_predicates); + self.add_obligations_for_parameters(traits::ObligationCause::misc(self.span, self.body_id), + method_predicates); // this is a projection from a trait reference, so we have to // make sure that the trait reference inputs are well-formed. @@ -460,21 +465,24 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { for (i, &expr) in exprs.iter().rev().enumerate() { // Count autoderefs. let autoderef_count = match self.tables - .borrow() - .adjustments - .get(&expr.id) { + .borrow() + .adjustments + .get(&expr.id) { Some(&AdjustDerefRef(ref adj)) => adj.autoderefs, Some(_) | None => 0, }; debug!("convert_lvalue_derefs_to_mutable: i={} expr={:?} \ autoderef_count={}", - i, expr, autoderef_count); + i, + expr, + autoderef_count); if autoderef_count > 0 { let mut autoderef = self.autoderef(expr.span, self.node_ty(expr.id)); autoderef.nth(autoderef_count).unwrap_or_else(|| { - span_bug!(expr.span, "expr was deref-able {} times but now isn't?", + span_bug!(expr.span, + "expr was deref-able {} times but now isn't?", autoderef_count); }); autoderef.finalize(PreferMutLvalue, Some(expr)); @@ -496,29 +504,30 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { // (ab)use the normal type checking paths. 
let adj = self.tables.borrow().adjustments.get(&base_expr.id).cloned(); let (autoderefs, unsize) = match adj { - Some(AdjustDerefRef(adr)) => match adr.autoref { - None => { - assert!(adr.unsize.is_none()); - (adr.autoderefs, None) - } - Some(AutoPtr(..)) => { - (adr.autoderefs, adr.unsize.map(|target| { - target.builtin_deref(false, NoPreference) - .expect("fixup: AutoPtr is not &T").ty - })) + Some(AdjustDerefRef(adr)) => { + match adr.autoref { + None => { + assert!(adr.unsize.is_none()); + (adr.autoderefs, None) + } + Some(AutoPtr(..)) => { + (adr.autoderefs, + adr.unsize.map(|target| { + target.builtin_deref(false, NoPreference) + .expect("fixup: AutoPtr is not &T") + .ty + })) + } + Some(_) => { + span_bug!(base_expr.span, + "unexpected adjustment autoref {:?}", + adr); + } } - Some(_) => { - span_bug!( - base_expr.span, - "unexpected adjustment autoref {:?}", - adr); - } - }, + } None => (0, None), Some(_) => { - span_bug!( - base_expr.span, - "unexpected adjustment type"); + span_bug!(base_expr.span, "unexpected adjustment type"); } }; @@ -526,23 +535,23 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { (target, true) } else { (self.adjust_expr_ty(base_expr, - Some(&AdjustDerefRef(AutoDerefRef { - autoderefs: autoderefs, - autoref: None, - unsize: None - }))), false) + Some(&AdjustDerefRef(AutoDerefRef { + autoderefs: autoderefs, + autoref: None, + unsize: None, + }))), + false) }; let index_expr_ty = self.node_ty(index_expr.id); - let result = self.try_index_step( - ty::MethodCall::expr(expr.id), - expr, - &base_expr, - adjusted_base_ty, - autoderefs, - unsize, - PreferMutLvalue, - index_expr_ty); + let result = self.try_index_step(ty::MethodCall::expr(expr.id), + expr, + &base_expr, + adjusted_base_ty, + autoderefs, + unsize, + PreferMutLvalue, + index_expr_ty); if let Some((input_ty, return_ty)) = result { self.demand_suptype(index_expr.span, input_ty, index_expr_ty); @@ -557,9 +566,9 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { let method_call = ty::MethodCall::expr(expr.id); if self.tables.borrow().method_map.contains_key(&method_call) { let method = self.try_overloaded_deref(expr.span, - Some(&base_expr), - self.node_ty(base_expr.id), - PreferMutLvalue); + Some(&base_expr), + self.node_ty(base_expr.id), + PreferMutLvalue); let method = method.expect("re-trying deref failed"); self.tables.borrow_mut().method_map.insert(method_call, method); } @@ -585,28 +594,27 @@ impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { fn upcast(&mut self, source_trait_ref: ty::PolyTraitRef<'tcx>, target_trait_def_id: DefId) - -> ty::PolyTraitRef<'tcx> - { - let upcast_trait_refs = self.tcx.upcast_choices(source_trait_ref.clone(), - target_trait_def_id); + -> ty::PolyTraitRef<'tcx> { + let upcast_trait_refs = self.tcx + .upcast_choices(source_trait_ref.clone(), target_trait_def_id); // must be exactly one trait ref or we'd get an ambig error etc if upcast_trait_refs.len() != 1 { - span_bug!( - self.span, - "cannot uniquely upcast `{:?}` to `{:?}`: `{:?}`", - source_trait_ref, - target_trait_def_id, - upcast_trait_refs); + span_bug!(self.span, + "cannot uniquely upcast `{:?}` to `{:?}`: `{:?}`", + source_trait_ref, + target_trait_def_id, + upcast_trait_refs); } upcast_trait_refs.into_iter().next().unwrap() } fn replace_late_bound_regions_with_fresh_var(&self, value: &ty::Binder) -> T - where T : TypeFoldable<'tcx> + where T: TypeFoldable<'tcx> { - self.fcx.replace_late_bound_regions_with_fresh_var( - self.span, infer::FnCall, value).0 + self.fcx + 
.replace_late_bound_regions_with_fresh_var(self.span, infer::FnCall, value) + .0 } } diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index bcb410e1b8d01..f084b85a45f81 100644 --- a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -41,7 +41,8 @@ pub enum MethodError<'tcx> { Ambiguity(Vec), // Using a `Fn`/`FnMut`/etc method on a raw closure type before we have inferred its kind. - ClosureAmbiguity(/* DefId of fn trait */ DefId), + ClosureAmbiguity(// DefId of fn trait + DefId), // Found an applicable method, but it is not visible. PrivateMatch(Def), @@ -53,19 +54,20 @@ pub struct NoMatchData<'tcx> { pub static_candidates: Vec, pub unsatisfied_predicates: Vec>, pub out_of_scope_traits: Vec, - pub mode: probe::Mode + pub mode: probe::Mode, } impl<'tcx> NoMatchData<'tcx> { pub fn new(static_candidates: Vec, unsatisfied_predicates: Vec>, out_of_scope_traits: Vec, - mode: probe::Mode) -> Self { + mode: probe::Mode) + -> Self { NoMatchData { static_candidates: static_candidates, unsatisfied_predicates: unsatisfied_predicates, out_of_scope_traits: out_of_scope_traits, - mode: mode + mode: mode, } } } @@ -75,7 +77,8 @@ impl<'tcx> NoMatchData<'tcx> { #[derive(Copy, Clone, Debug, Eq, Ord, PartialEq, PartialOrd)] pub enum CandidateSource { ImplSource(DefId), - TraitSource(/* trait id */ DefId), + TraitSource(// trait id + DefId), } impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { @@ -86,8 +89,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self_ty: ty::Ty<'tcx>, call_expr_id: ast::NodeId, allow_private: bool) - -> bool - { + -> bool { let mode = probe::Mode::MethodCall; match self.probe_method(span, mode, method_name, self_ty, call_expr_id) { Ok(..) => true, @@ -119,8 +121,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { supplied_method_types: Vec>, call_expr: &'gcx hir::Expr, self_expr: &'gcx hir::Expr) - -> Result, MethodError<'tcx>> - { + -> Result, MethodError<'tcx>> { debug!("lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})", method_name, self_ty, @@ -135,7 +136,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.tcx.used_trait_imports.borrow_mut().insert(import_id); } - Ok(self.confirm_method(span, self_expr, call_expr, self_ty, pick, supplied_method_types)) + Ok(self.confirm_method(span, + self_expr, + call_expr, + self_ty, + pick, + supplied_method_types)) } pub fn lookup_method_in_trait(&self, @@ -145,10 +151,15 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { trait_def_id: DefId, self_ty: ty::Ty<'tcx>, opt_input_types: Option>>) - -> Option> - { - self.lookup_method_in_trait_adjusted(span, self_expr, m_name, trait_def_id, - 0, false, self_ty, opt_input_types) + -> Option> { + self.lookup_method_in_trait_adjusted(span, + self_expr, + m_name, + trait_def_id, + 0, + false, + self_ty, + opt_input_types) } /// `lookup_in_trait_adjusted` is used for overloaded operators. 
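(Aside: the doc comment above notes that `lookup_in_trait_adjusted` serves overloaded operators. For context, an operator expression desugars to a call to a known trait's method, which is exactly the shape of lookup this path handles. The snippet below is ordinary user code, not compiler internals, and the `Meters` type is made up for illustration.)

```rust
use std::ops::Add;

#[derive(Copy, Clone, Debug, PartialEq)]
struct Meters(f64);

impl Add for Meters {
    type Output = Meters;
    fn add(self, rhs: Meters) -> Meters {
        Meters(self.0 + rhs.0)
    }
}

fn main() {
    // `a + b` is type-checked as a call to `<Meters as Add>::add(a, b)`,
    // i.e. a method looked up on a known trait rather than by probing.
    let total = Meters(1.5) + Meters(2.5);
    assert_eq!(total, Meters(4.0));
}
```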
@@ -171,8 +182,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { unsize: bool, self_ty: ty::Ty<'tcx>, opt_input_types: Option>>) - -> Option> - { + -> Option> { debug!("lookup_in_trait_adjusted(self_ty={:?}, self_expr={:?}, \ m_name={}, trait_def_id={:?})", self_ty, @@ -188,9 +198,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { assert!(trait_def.generics.regions.is_empty()); // Construct a trait-reference `self_ty : Trait` - let substs = Substs::for_item(self.tcx, trait_def_id, |def, _| { - self.region_var_for_def(span, def) - }, |def, substs| { + let substs = Substs::for_item(self.tcx, + trait_def_id, + |def, _| self.region_var_for_def(span, def), + |def, substs| { if def.index == 0 { self_ty } else if let Some(ref input_types) = opt_input_types { @@ -204,9 +215,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Construct an obligation let poly_trait_ref = trait_ref.to_poly_trait_ref(); - let obligation = traits::Obligation::misc(span, - self.body_id, - poly_trait_ref.to_predicate()); + let obligation = + traits::Obligation::misc(span, self.body_id, poly_trait_ref.to_predicate()); // Now we want to know if this can be matched let mut selcx = traits::SelectionContext::new(self); @@ -218,13 +228,14 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Trait must have a method named `m_name` and it should not have // type parameters or early-bound regions. let tcx = self.tcx; - let method_item = self.trait_item(trait_def_id, m_name).unwrap(); + let method_item = self.impl_or_trait_item(trait_def_id, m_name).unwrap(); let method_ty = method_item.as_opt_method().unwrap(); assert_eq!(method_ty.generics.types.len(), 0); assert_eq!(method_ty.generics.regions.len(), 0); debug!("lookup_in_trait_adjusted: method_item={:?} method_ty={:?}", - method_item, method_ty); + method_item, + method_ty); // Instantiate late-bound regions and substitute the trait // parameters into the method type to get the actual method type. @@ -232,18 +243,19 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // NB: Instantiate late-bound regions first so that // `instantiate_type_scheme` can normalize associated types that // may reference those regions. - let fn_sig = self.replace_late_bound_regions_with_fresh_var(span, - infer::FnCall, - &method_ty.fty.sig).0; + let fn_sig = + self.replace_late_bound_regions_with_fresh_var(span, infer::FnCall, &method_ty.fty.sig) + .0; let fn_sig = self.instantiate_type_scheme(span, trait_ref.substs, &fn_sig); let transformed_self_ty = fn_sig.inputs[0]; let def_id = method_item.def_id(); - let fty = tcx.mk_fn_def(def_id, trait_ref.substs, + let fty = tcx.mk_fn_def(def_id, + trait_ref.substs, tcx.mk_bare_fn(ty::BareFnTy { - sig: ty::Binder(fn_sig), - unsafety: method_ty.fty.unsafety, - abi: method_ty.fty.abi.clone(), - })); + sig: ty::Binder(fn_sig), + unsafety: method_ty.fty.unsafety, + abi: method_ty.fty.abi.clone(), + })); debug!("lookup_in_trait_adjusted: matched method fty={:?} obligation={:?}", fty, @@ -259,9 +271,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // any late-bound regions appearing in its bounds. let method_bounds = self.instantiate_bounds(span, trait_ref.substs, &method_ty.predicates); assert!(!method_bounds.has_escaping_regions()); - self.add_obligations_for_parameters( - traits::ObligationCause::misc(span, self.body_id), - &method_bounds); + self.add_obligations_for_parameters(traits::ObligationCause::misc(span, self.body_id), + &method_bounds); // Also register an obligation for the method type being well-formed. 
self.register_wf_obligation(fty, span, traits::MiscObligation); @@ -273,12 +284,14 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Insert any adjustments needed (always an autoref of some mutability). match self_expr { - None => { } + None => {} Some(self_expr) => { debug!("lookup_in_trait_adjusted: inserting adjustment if needed \ (self-id={}, autoderefs={}, unsize={}, explicit_self={:?})", - self_expr.id, autoderefs, unsize, + self_expr.id, + autoderefs, + unsize, method_ty.explicit_self); match method_ty.explicit_self { @@ -294,31 +307,29 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { match transformed_self_ty.sty { ty::TyRef(region, ty::TypeAndMut { mutbl, ty: _ }) => { self.write_adjustment(self_expr.id, - AdjustDerefRef(AutoDerefRef { - autoderefs: autoderefs, - autoref: Some(AutoPtr(region, mutbl)), - unsize: if unsize { - Some(transformed_self_ty) - } else { - None - } - })); + AdjustDerefRef(AutoDerefRef { + autoderefs: autoderefs, + autoref: Some(AutoPtr(region, mutbl)), + unsize: if unsize { + Some(transformed_self_ty) + } else { + None + }, + })); } _ => { - span_bug!( - span, - "trait method is &self but first arg is: {}", - transformed_self_ty); + span_bug!(span, + "trait method is &self but first arg is: {}", + transformed_self_ty); } } } _ => { - span_bug!( - span, - "unexpected explicit self type in operator method: {:?}", - method_ty.explicit_self); + span_bug!(span, + "unexpected explicit self type in operator method: {:?}", + method_ty.explicit_self); } } } @@ -327,7 +338,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let callee = ty::MethodCallee { def_id: def_id, ty: fty, - substs: trait_ref.substs + substs: trait_ref.substs, }; debug!("callee = {:?}", callee); @@ -340,8 +351,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { method_name: ast::Name, self_ty: ty::Ty<'tcx>, expr_id: ast::NodeId) - -> Result> - { + -> Result> { let mode = probe::Mode::Path; let pick = self.probe_method(span, mode, method_name, self_ty, expr_id)?; @@ -359,29 +369,16 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { Ok(def) } - /// Find item with name `item_name` defined in `trait_def_id` - /// and return it, or `None`, if no such item. - pub fn trait_item(&self, - trait_def_id: DefId, - item_name: ast::Name) - -> Option> - { - let trait_items = self.tcx.trait_items(trait_def_id); - trait_items.iter() - .find(|item| item.name() == item_name) - .cloned() - } - - pub fn impl_item(&self, - impl_def_id: DefId, - item_name: ast::Name) - -> Option> - { - let impl_items = self.tcx.impl_items.borrow(); - let impl_items = impl_items.get(&impl_def_id).unwrap(); - impl_items + /// Find item with name `item_name` defined in impl/trait `def_id` + /// and return it, or `None`, if no such item was defined there. 
+ pub fn impl_or_trait_item(&self, + def_id: DefId, + item_name: ast::Name) + -> Option> { + self.tcx + .impl_or_trait_items(def_id) .iter() - .map(|&did| self.tcx.impl_or_trait_item(did.def_id())) + .map(|&did| self.tcx.impl_or_trait_item(did)) .find(|m| m.name() == item_name) } } diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index 81e95c91e7ff9..43837de2f345d 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -13,7 +13,7 @@ use super::NoMatchData; use super::{CandidateSource, ImplSource, TraitSource}; use super::suggest; -use check::{FnCtxt}; +use check::FnCtxt; use hir::def_id::DefId; use hir::def::Def; use rustc::ty::subst::{Subst, Substs}; @@ -31,7 +31,7 @@ use std::rc::Rc; use self::CandidateKind::*; pub use self::PickKind::*; -struct ProbeContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +struct ProbeContext<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, span: Span, mode: Mode, @@ -52,7 +52,7 @@ struct ProbeContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { /// Collects near misses when trait bounds for type parameters are unsatisfied and is only used /// for error reporting - unsatisfied_predicates: Vec> + unsatisfied_predicates: Vec>, } impl<'a, 'gcx, 'tcx> Deref for ProbeContext<'a, 'gcx, 'tcx> { @@ -66,7 +66,7 @@ impl<'a, 'gcx, 'tcx> Deref for ProbeContext<'a, 'gcx, 'tcx> { struct CandidateStep<'tcx> { self_ty: Ty<'tcx>, autoderefs: usize, - unsize: bool + unsize: bool, } #[derive(Debug)] @@ -80,12 +80,17 @@ struct Candidate<'tcx> { #[derive(Debug)] enum CandidateKind<'tcx> { InherentImplCandidate(&'tcx Substs<'tcx>, - /* Normalize obligations */ Vec>), - ExtensionImplCandidate(/* Impl */ DefId, &'tcx Substs<'tcx>, - /* Normalize obligations */ Vec>), + // Normalize obligations + Vec>), + ExtensionImplCandidate(// Impl + DefId, + &'tcx Substs<'tcx>, + // Normalize obligations + Vec>), ObjectCandidate, TraitCandidate, - WhereClauseCandidate(/* Trait */ ty::PolyTraitRef<'tcx>), + WhereClauseCandidate(// Trait + ty::PolyTraitRef<'tcx>), } #[derive(Debug)] @@ -115,10 +120,12 @@ pub struct Pick<'tcx> { #[derive(Clone,Debug)] pub enum PickKind<'tcx> { InherentImplPick, - ExtensionImplPick(/* Impl */ DefId), + ExtensionImplPick(// Impl + DefId), ObjectPick, TraitPick, - WhereClausePick(/* Trait */ ty::PolyTraitRef<'tcx>), + WhereClausePick(// Trait + ty::PolyTraitRef<'tcx>), } pub type PickResult<'tcx> = Result, MethodError<'tcx>>; @@ -132,7 +139,7 @@ pub enum Mode { // An expression of the form `Type::item` or `::item`. // No autoderefs are performed, lookup is done based on the type each // implementation is for, and static methods are included. 
- Path + Path, } impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { @@ -142,8 +149,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { item_name: ast::Name, self_ty: Ty<'tcx>, scope_expr_id: ast::NodeId) - -> PickResult<'tcx> - { + -> PickResult<'tcx> { debug!("probe(self_ty={:?}, item_name={}, scope_expr_id={})", self_ty, item_name, @@ -159,31 +165,38 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let steps = if mode == Mode::MethodCall { match self.create_steps(span, self_ty) { Some(steps) => steps, - None =>return Err(MethodError::NoMatch(NoMatchData::new(Vec::new(), Vec::new(), - Vec::new(), mode))), + None => { + return Err(MethodError::NoMatch(NoMatchData::new(Vec::new(), + Vec::new(), + Vec::new(), + mode))) + } } } else { vec![CandidateStep { - self_ty: self_ty, - autoderefs: 0, - unsize: false - }] + self_ty: self_ty, + autoderefs: 0, + unsize: false, + }] }; // Create a list of simplified self types, if we can. let mut simplified_steps = Vec::new(); for step in &steps { match ty::fast_reject::simplify_type(self.tcx, step.self_ty, true) { - None => { break; } - Some(simplified_type) => { simplified_steps.push(simplified_type); } + None => { + break; + } + Some(simplified_type) => { + simplified_steps.push(simplified_type); + } } } - let opt_simplified_steps = - if simplified_steps.len() < steps.len() { - None // failed to convert at least one of the steps - } else { - Some(simplified_steps) - }; + let opt_simplified_steps = if simplified_steps.len() < steps.len() { + None // failed to convert at least one of the steps + } else { + Some(simplified_steps) + }; debug!("ProbeContext: steps for self_ty={:?} are {:?}", self_ty, @@ -192,31 +205,27 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // this creates one big transaction so that all type variables etc // that we create during the probe process are removed later self.probe(|_| { - let mut probe_cx = ProbeContext::new(self, - span, - mode, - item_name, - steps, - opt_simplified_steps); + let mut probe_cx = + ProbeContext::new(self, span, mode, item_name, steps, opt_simplified_steps); probe_cx.assemble_inherent_candidates(); probe_cx.assemble_extension_candidates_for_traits_in_scope(scope_expr_id)?; probe_cx.pick() }) } - fn create_steps(&self, - span: Span, - self_ty: Ty<'tcx>) - -> Option>> - { + fn create_steps(&self, span: Span, self_ty: Ty<'tcx>) -> Option>> { // FIXME: we don't need to create the entire steps in one pass let mut autoderef = self.autoderef(span, self_ty); - let mut steps: Vec<_> = autoderef.by_ref().map(|(ty, d)| CandidateStep { - self_ty: ty, - autoderefs: d, - unsize: false - }).collect(); + let mut steps: Vec<_> = autoderef.by_ref() + .map(|(ty, d)| { + CandidateStep { + self_ty: ty, + autoderefs: d, + unsize: false, + } + }) + .collect(); let final_ty = autoderef.unambiguous_final_ty(); match final_ty.sty { @@ -226,7 +235,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { steps.push(CandidateStep { self_ty: self.tcx.mk_slice(elem_ty), autoderefs: dereferences, - unsize: true + unsize: true, }); } ty::TyError => return None, @@ -246,8 +255,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { item_name: ast::Name, steps: Vec>, opt_simplified_steps: Option>) - -> ProbeContext<'a, 'gcx, 'tcx> - { + -> ProbeContext<'a, 'gcx, 'tcx> { ProbeContext { fcx: fcx, span: span, @@ -284,8 +292,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { } fn assemble_probe(&mut self, self_ty: Ty<'tcx>) { - debug!("assemble_probe: self_ty={:?}", - self_ty); + debug!("assemble_probe: self_ty={:?}", self_ty); match self_ty.sty 
{ ty::TyTrait(box ref data) => { @@ -371,8 +378,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { let lang_def_id = self.tcx.lang_items.f64_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } - _ => { - } + _ => {} } } @@ -403,9 +409,11 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { debug!("assemble_inherent_impl_probe {:?}", impl_def_id); - let item = match self.impl_item(impl_def_id) { + let item = match self.impl_or_trait_item(impl_def_id) { Some(m) => m, - None => { return; } // No method with correct name on this impl + None => { + return; + } // No method with correct name on this impl }; if !self.has_applicable_self(&item) { @@ -415,7 +423,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { if !item.vis().is_accessible_from(self.body_id, &self.tcx.map) { self.private_candidate = Some(item.def()); - return + return; } let (impl_ty, impl_substs) = self.impl_ty_and_substs(impl_def_id); @@ -458,9 +466,8 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { self.elaborate_bounds(&[trait_ref], |this, new_trait_ref, item| { let new_trait_ref = this.erase_late_bound_regions(&new_trait_ref); - let xform_self_ty = this.xform_self_ty(&item, - new_trait_ref.self_ty(), - new_trait_ref.substs); + let xform_self_ty = + this.xform_self_ty(&item, new_trait_ref.self_ty(), new_trait_ref.substs); this.inherent_candidates.push(Candidate { xform_self_ty: xform_self_ty, @@ -476,8 +483,8 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { param_ty: ty::ParamTy) { // FIXME -- Do we want to commit to this behavior for param bounds? - let bounds: Vec<_> = - self.parameter_environment.caller_bounds + let bounds: Vec<_> = self.parameter_environment + .caller_bounds .iter() .filter_map(|predicate| { match *predicate { @@ -486,7 +493,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { ty::TyParam(ref p) if *p == param_ty => { Some(trait_predicate.to_poly_trait_ref()) } - _ => None + _ => None, } } ty::Predicate::Equate(..) | @@ -495,21 +502,15 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { ty::Predicate::WellFormed(..) | ty::Predicate::ObjectSafe(..) | ty::Predicate::ClosureKind(..) | - ty::Predicate::TypeOutlives(..) => { - None - } + ty::Predicate::TypeOutlives(..) => None, } }) .collect(); self.elaborate_bounds(&bounds, |this, poly_trait_ref, item| { - let trait_ref = - this.erase_late_bound_regions(&poly_trait_ref); + let trait_ref = this.erase_late_bound_regions(&poly_trait_ref); - let xform_self_ty = - this.xform_self_ty(&item, - trait_ref.self_ty(), - trait_ref.substs); + let xform_self_ty = this.xform_self_ty(&item, trait_ref.self_ty(), trait_ref.substs); if let Some(ref m) = item.as_opt_method() { debug!("found match: trait_ref={:?} substs={:?} m={:?}", @@ -540,24 +541,20 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { // Do a search through a list of bounds, using a callback to actually // create the candidates. 
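(Aside: earlier in this probe.rs hunk, `create_steps` collects one candidate step per autoderef of the receiver and appends a final unsized `[T]` step for array receivers. The user-visible effect, sketched with plain std calls that are not part of this patch, is that slice methods resolve both through `Vec<T>`'s deref step and through the array-to-slice step:)

```rust
fn main() {
    let v: Vec<u8> = vec![3, 1, 2];
    // `first` is defined on `[u8]`; the probe reaches it via the autoderef
    // step Vec<u8> -> [u8].
    let _head = v.first();

    let a = [3u8, 1, 2];
    // For a fixed-size array, the extra unsized step [u8; 3] -> [u8] added by
    // `create_steps` is what makes slice methods callable.
    let _head2 = a.first();
}
```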
- fn elaborate_bounds( - &mut self, - bounds: &[ty::PolyTraitRef<'tcx>], - mut mk_cand: F, - ) where - F: for<'b> FnMut( - &mut ProbeContext<'b, 'gcx, 'tcx>, - ty::PolyTraitRef<'tcx>, - ty::ImplOrTraitItem<'tcx>, - ), + fn elaborate_bounds(&mut self, bounds: &[ty::PolyTraitRef<'tcx>], mut mk_cand: F) + where F: for<'b> FnMut(&mut ProbeContext<'b, 'gcx, 'tcx>, + ty::PolyTraitRef<'tcx>, + ty::ImplOrTraitItem<'tcx>) { debug!("elaborate_bounds(bounds={:?})", bounds); let tcx = self.tcx; for bound_trait_ref in traits::transitive_bounds(tcx, bounds) { - let item = match self.trait_item(bound_trait_ref.def_id()) { + let item = match self.impl_or_trait_item(bound_trait_ref.def_id()) { Some(v) => v, - None => { continue; } + None => { + continue; + } }; if !self.has_applicable_self(&item) { @@ -570,8 +567,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { fn assemble_extension_candidates_for_traits_in_scope(&mut self, expr_id: ast::NodeId) - -> Result<(), MethodError<'tcx>> - { + -> Result<(), MethodError<'tcx>> { let mut duplicates = FnvHashSet(); let opt_applicable_traits = self.tcx.trait_map.get(&expr_id); if let Some(applicable_traits) = opt_applicable_traits { @@ -600,20 +596,19 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { fn assemble_extension_candidates_for_trait(&mut self, trait_def_id: DefId) - -> Result<(), MethodError<'tcx>> - { + -> Result<(), MethodError<'tcx>> { debug!("assemble_extension_candidates_for_trait(trait_def_id={:?})", trait_def_id); // Check whether `trait_def_id` defines a method with suitable name: - let trait_items = - self.tcx.trait_items(trait_def_id); - let maybe_item = - trait_items.iter() - .find(|item| item.name() == self.item_name); + let trait_items = self.tcx.trait_items(trait_def_id); + let maybe_item = trait_items.iter() + .find(|item| item.name() == self.item_name); let item = match maybe_item { Some(i) => i, - None => { return Ok(()); } + None => { + return Ok(()); + } }; // Check whether `trait_def_id` defines a method with suitable name: @@ -636,8 +631,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { fn assemble_extension_candidates_for_trait_impls(&mut self, trait_def_id: DefId, - item: ty::ImplOrTraitItem<'tcx>) - { + item: ty::ImplOrTraitItem<'tcx>) { let trait_def = self.tcx.lookup_trait_def(trait_def_id); // FIXME(arielb1): can we use for_each_relevant_impl here? @@ -655,8 +649,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { debug!("impl_substs={:?}", impl_substs); - let impl_trait_ref = - self.tcx.impl_trait_ref(impl_def_id) + let impl_trait_ref = self.tcx.impl_trait_ref(impl_def_id) .unwrap() // we know this is a trait impl .subst(self.tcx, impl_substs); @@ -664,9 +657,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { // Determine the receiver type that the method itself expects. let xform_self_ty = - self.xform_self_ty(&item, - impl_trait_ref.self_ty(), - impl_trait_ref.substs); + self.xform_self_ty(&item, impl_trait_ref.self_ty(), impl_trait_ref.substs); // Normalize the receiver. 
We can't use normalize_associated_types_in // as it will pollute the fcx's fulfillment context after this probe @@ -690,14 +681,18 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { fn impl_can_possibly_match(&self, impl_def_id: DefId) -> bool { let simplified_steps = match self.opt_simplified_steps { Some(ref simplified_steps) => simplified_steps, - None => { return true; } + None => { + return true; + } }; let impl_type = self.tcx.lookup_item_type(impl_def_id); let impl_simplified_type = match ty::fast_reject::simplify_type(self.tcx, impl_type.ty, false) { Some(simplified_type) => simplified_type, - None => { return true; } + None => { + return true; + } }; simplified_steps.contains(&impl_simplified_type) @@ -706,8 +701,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { fn assemble_closure_candidates(&mut self, trait_def_id: DefId, item: ty::ImplOrTraitItem<'tcx>) - -> Result<(), MethodError<'tcx>> - { + -> Result<(), MethodError<'tcx>> { // Check if this is one of the Fn,FnMut,FnOnce traits. let tcx = self.tcx; let kind = if Some(trait_def_id) == tcx.lang_items.fn_trait() { @@ -746,9 +740,10 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { // for the purposes of our method lookup, we only take // receiver type into account, so we can just substitute // fresh types here to use during substitution and subtyping. - let substs = Substs::for_item(self.tcx, trait_def_id, |def, _| { - self.region_var_for_def(self.span, def) - }, |def, substs| { + let substs = Substs::for_item(self.tcx, + trait_def_id, + |def, _| self.region_var_for_def(self.span, def), + |def, substs| { if def.index == 0 { step.self_ty } else { @@ -756,9 +751,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { } }); - let xform_self_ty = self.xform_self_ty(&item, - step.self_ty, - substs); + let xform_self_ty = self.xform_self_ty(&item, step.self_ty, substs); self.inherent_candidates.push(Candidate { xform_self_ty: xform_self_ty, item: item.clone(), @@ -772,8 +765,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { fn assemble_projection_candidates(&mut self, trait_def_id: DefId, - item: ty::ImplOrTraitItem<'tcx>) - { + item: ty::ImplOrTraitItem<'tcx>) { debug!("assemble_projection_candidates(\ trait_def_id={:?}, \ item={:?})", @@ -781,39 +773,35 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { item); for step in self.steps.iter() { - debug!("assemble_projection_candidates: step={:?}", - step); + debug!("assemble_projection_candidates: step={:?}", step); let (def_id, substs) = match step.self_ty.sty { - ty::TyProjection(ref data) => { - (data.trait_ref.def_id, data.trait_ref.substs) - } + ty::TyProjection(ref data) => (data.trait_ref.def_id, data.trait_ref.substs), ty::TyAnon(def_id, substs) => (def_id, substs), _ => continue, }; debug!("assemble_projection_candidates: def_id={:?} substs={:?}", - def_id, substs); + def_id, + substs); let trait_predicates = self.tcx.lookup_predicates(def_id); let bounds = trait_predicates.instantiate(self.tcx, substs); let predicates = bounds.predicates; debug!("assemble_projection_candidates: predicates={:?}", predicates); - for poly_bound in - traits::elaborate_predicates(self.tcx, predicates) + for poly_bound in traits::elaborate_predicates(self.tcx, predicates) .filter_map(|p| p.to_opt_poly_trait_ref()) - .filter(|b| b.def_id() == trait_def_id) - { + .filter(|b| b.def_id() == trait_def_id) { let bound = self.erase_late_bound_regions(&poly_bound); debug!("assemble_projection_candidates: def_id={:?} substs={:?} bound={:?}", - def_id, substs, bound); + def_id, + 
substs, + bound); if self.can_equate(&step.self_ty, &bound.self_ty()).is_ok() { - let xform_self_ty = self.xform_self_ty(&item, - bound.self_ty(), - bound.substs); + let xform_self_ty = self.xform_self_ty(&item, bound.self_ty(), bound.substs); debug!("assemble_projection_candidates: bound={:?} xform_self_ty={:?}", bound, @@ -832,20 +820,16 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { fn assemble_where_clause_candidates(&mut self, trait_def_id: DefId, - item: ty::ImplOrTraitItem<'tcx>) - { + item: ty::ImplOrTraitItem<'tcx>) { debug!("assemble_where_clause_candidates(trait_def_id={:?})", trait_def_id); let caller_predicates = self.parameter_environment.caller_bounds.clone(); for poly_bound in traits::elaborate_predicates(self.tcx, caller_predicates) - .filter_map(|p| p.to_opt_poly_trait_ref()) - .filter(|b| b.def_id() == trait_def_id) - { + .filter_map(|p| p.to_opt_poly_trait_ref()) + .filter(|b| b.def_id() == trait_def_id) { let bound = self.erase_late_bound_regions(&poly_bound); - let xform_self_ty = self.xform_self_ty(&item, - bound.self_ty(), - bound.substs); + let xform_self_ty = self.xform_self_ty(&item, bound.self_ty(), bound.substs); debug!("assemble_where_clause_candidates: bound={:?} xform_self_ty={:?}", bound, @@ -882,19 +866,24 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { let out_of_scope_traits = match self.pick_core() { Some(Ok(p)) => vec![p.item.container().id()], - Some(Err(MethodError::Ambiguity(v))) => v.into_iter().map(|source| { - match source { - TraitSource(id) => id, - ImplSource(impl_id) => { - match tcx.trait_id_of_impl(impl_id) { - Some(id) => id, - None => - span_bug!(span, - "found inherent method when looking at traits") + Some(Err(MethodError::Ambiguity(v))) => { + v.into_iter() + .map(|source| { + match source { + TraitSource(id) => id, + ImplSource(impl_id) => { + match tcx.trait_id_of_impl(impl_id) { + Some(id) => id, + None => { + span_bug!(span, + "found inherent method when looking at traits") + } + } + } } - } - } - }).collect(), + }) + .collect() + } Some(Err(MethodError::NoMatch(NoMatchData { out_of_scope_traits: others, .. }))) => { assert!(others.is_empty()); vec![] @@ -910,8 +899,10 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { return Err(MethodError::PrivateMatch(def)); } - Err(MethodError::NoMatch(NoMatchData::new(static_candidates, unsatisfied_predicates, - out_of_scope_traits, self.mode))) + Err(MethodError::NoMatch(NoMatchData::new(static_candidates, + unsatisfied_predicates, + out_of_scope_traits, + self.mode))) } fn pick_core(&mut self) -> Option> { @@ -935,41 +926,35 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { self.pick_autorefd_method(step) } - fn pick_by_value_method(&mut self, - step: &CandidateStep<'tcx>) - -> Option> - { - /*! - * For each type `T` in the step list, this attempts to find a - * method where the (transformed) self type is exactly `T`. We - * do however do one transformation on the adjustment: if we - * are passing a region pointer in, we will potentially - * *reborrow* it to a shorter lifetime. This allows us to - * transparently pass `&mut` pointers, in particular, without - * consuming them for their entire lifetime. - */ + fn pick_by_value_method(&mut self, step: &CandidateStep<'tcx>) -> Option> { + //! For each type `T` in the step list, this attempts to find a + //! method where the (transformed) self type is exactly `T`. We + //! do however do one transformation on the adjustment: if we + //! are passing a region pointer in, we will potentially + //! 
*reborrow* it to a shorter lifetime. This allows us to + //! transparently pass `&mut` pointers, in particular, without + //! consuming them for their entire lifetime. if step.unsize { return None; } - self.pick_method(step.self_ty).map(|r| r.map(|mut pick| { - pick.autoderefs = step.autoderefs; + self.pick_method(step.self_ty).map(|r| { + r.map(|mut pick| { + pick.autoderefs = step.autoderefs; - // Insert a `&*` or `&mut *` if this is a reference type: - if let ty::TyRef(_, mt) = step.self_ty.sty { - pick.autoderefs += 1; - pick.autoref = Some(mt.mutbl); - } + // Insert a `&*` or `&mut *` if this is a reference type: + if let ty::TyRef(_, mt) = step.self_ty.sty { + pick.autoderefs += 1; + pick.autoref = Some(mt.mutbl); + } - pick - })) + pick + }) + }) } - fn pick_autorefd_method(&mut self, - step: &CandidateStep<'tcx>) - -> Option> - { + fn pick_autorefd_method(&mut self, step: &CandidateStep<'tcx>) -> Option> { let tcx = self.tcx; // In general, during probing we erase regions. See @@ -977,22 +962,28 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { let region = tcx.mk_region(ty::ReErased); // Search through mutabilities in order to find one where pick works: - [hir::MutImmutable, hir::MutMutable].iter().filter_map(|&m| { - let autoref_ty = tcx.mk_ref(region, ty::TypeAndMut { - ty: step.self_ty, - mutbl: m - }); - self.pick_method(autoref_ty).map(|r| r.map(|mut pick| { - pick.autoderefs = step.autoderefs; - pick.autoref = Some(m); - pick.unsize = if step.unsize { - Some(step.self_ty) - } else { - None - }; - pick - })) - }).nth(0) + [hir::MutImmutable, hir::MutMutable] + .iter() + .filter_map(|&m| { + let autoref_ty = tcx.mk_ref(region, + ty::TypeAndMut { + ty: step.self_ty, + mutbl: m, + }); + self.pick_method(autoref_ty).map(|r| { + r.map(|mut pick| { + pick.autoderefs = step.autoderefs; + pick.autoref = Some(m); + pick.unsize = if step.unsize { + Some(step.self_ty) + } else { + None + }; + pick + }) + }) + }) + .nth(0) } fn pick_method(&mut self, self_ty: Ty<'tcx>) -> Option> { @@ -1008,7 +999,8 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { } debug!("searching extension candidates"); - let res = self.consider_candidates(self_ty, &self.extension_candidates, + let res = self.consider_candidates(self_ty, + &self.extension_candidates, &mut possibly_unsatisfied_predicates); if let None = res { self.unsatisfied_predicates.extend(possibly_unsatisfied_predicates); @@ -1021,18 +1013,18 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { probes: &[Candidate<'tcx>], possibly_unsatisfied_predicates: &mut Vec>) -> Option> { - let mut applicable_candidates: Vec<_> = - probes.iter() - .filter(|&probe| self.consider_probe(self_ty, - probe,possibly_unsatisfied_predicates)) - .collect(); + let mut applicable_candidates: Vec<_> = probes.iter() + .filter(|&probe| self.consider_probe(self_ty, probe, possibly_unsatisfied_predicates)) + .collect(); debug!("applicable_candidates: {:?}", applicable_candidates); if applicable_candidates.len() > 1 { match self.collapse_candidates_to_trait_pick(&applicable_candidates[..]) { - Some(pick) => { return Some(Ok(pick)); } - None => { } + Some(pick) => { + return Some(Ok(pick)); + } + None => {} } } @@ -1041,21 +1033,22 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { return Some(Err(MethodError::Ambiguity(sources))); } - applicable_candidates.pop().map(|probe| { - Ok(probe.to_unadjusted_pick()) - }) + applicable_candidates.pop().map(|probe| Ok(probe.to_unadjusted_pick())) } - fn consider_probe(&self, self_ty: Ty<'tcx>, probe: &Candidate<'tcx>, 
- possibly_unsatisfied_predicates: &mut Vec>) -> bool { - debug!("consider_probe: self_ty={:?} probe={:?}", - self_ty, - probe); + fn consider_probe(&self, + self_ty: Ty<'tcx>, + probe: &Candidate<'tcx>, + possibly_unsatisfied_predicates: &mut Vec>) + -> bool { + debug!("consider_probe: self_ty={:?} probe={:?}", self_ty, probe); self.probe(|_| { // First check that the self type can be related. - match self.sub_types(false, TypeOrigin::Misc(DUMMY_SP), - self_ty, probe.xform_self_ty) { + match self.sub_types(false, + TypeOrigin::Misc(DUMMY_SP), + self_ty, + probe.xform_self_ty) { Ok(InferOk { obligations, .. }) => { // FIXME(#32730) propagate obligations assert!(obligations.is_empty()) @@ -1093,14 +1086,11 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { // Check whether the impl imposes obligations we have to worry about. let impl_bounds = self.tcx.lookup_predicates(impl_def_id); let impl_bounds = impl_bounds.instantiate(self.tcx, substs); - let traits::Normalized { value: impl_bounds, - obligations: norm_obligations } = + let traits::Normalized { value: impl_bounds, obligations: norm_obligations } = traits::normalize(selcx, cause.clone(), &impl_bounds); // Convert the bounds into obligations. - let obligations = - traits::predicates_for_generics(cause.clone(), - &impl_bounds); + let obligations = traits::predicates_for_generics(cause.clone(), &impl_bounds); debug!("impl_obligations={:?}", obligations); // Evaluate those obligations to see if they might possibly hold. @@ -1136,14 +1126,12 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { /// /// Now imagine the receiver is `Vec<_>`. It doesn't really matter at this time which impl we /// use, so it's ok to just commit to "using the method from the trait Foo". - fn collapse_candidates_to_trait_pick(&self, - probes: &[&Candidate<'tcx>]) - -> Option> { + fn collapse_candidates_to_trait_pick(&self, probes: &[&Candidate<'tcx>]) -> Option> { // Do all probes correspond to the same trait? let container = probes[0].item.container(); match container { ty::TraitContainer(_) => {} - ty::ImplContainer(_) => return None + ty::ImplContainer(_) => return None, } if probes[1..].iter().any(|p| p.item.container() != container) { return None; @@ -1156,7 +1144,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { import_id: probes[0].import_id, autoderefs: 0, autoref: None, - unsize: None + unsize: None, }) } @@ -1165,13 +1153,14 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { fn has_applicable_self(&self, item: &ty::ImplOrTraitItem) -> bool { // "fast track" -- check for usage of sugar match *item { - ty::ImplOrTraitItem::MethodTraitItem(ref method) => + ty::ImplOrTraitItem::MethodTraitItem(ref method) => { match method.explicit_self { ty::ExplicitSelfCategory::Static => self.mode == Mode::Path, ty::ExplicitSelfCategory::ByValue | ty::ExplicitSelfCategory::ByReference(..) | ty::ExplicitSelfCategory::ByBox => true, - }, + } + } ty::ImplOrTraitItem::ConstTraitItem(..) 
=> self.mode == Mode::Path, _ => false, } @@ -1191,11 +1180,9 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { item: &ty::ImplOrTraitItem<'tcx>, impl_ty: Ty<'tcx>, substs: &Substs<'tcx>) - -> Ty<'tcx> - { + -> Ty<'tcx> { match item.as_opt_method() { - Some(ref method) => self.xform_method_self_ty(method, impl_ty, - substs), + Some(ref method) => self.xform_method_self_ty(method, impl_ty, substs), None => impl_ty, } } @@ -1204,8 +1191,7 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { method: &Rc>, impl_ty: Ty<'tcx>, substs: &Substs<'tcx>) - -> Ty<'tcx> - { + -> Ty<'tcx> { debug!("xform_self_ty(impl_ty={:?}, self_ty={:?}, substs={:?})", impl_ty, method.fty.sig.0.inputs.get(0), @@ -1218,8 +1204,10 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { // are given do not include type/lifetime parameters for the // method yet. So create fresh variables here for those too, // if there are any. - assert_eq!(substs.types().count(), method.generics.parent_types as usize); - assert_eq!(substs.regions().count(), method.generics.parent_regions as usize); + assert_eq!(substs.types().count(), + method.generics.parent_types as usize); + assert_eq!(substs.regions().count(), + method.generics.parent_regions as usize); if self.mode == Mode::Path { return impl_ty; @@ -1233,7 +1221,9 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { if method.generics.types.is_empty() && method.generics.regions.is_empty() { xform_self_ty.subst(self.tcx, substs) } else { - let substs = Substs::for_item(self.tcx, method.def_id, |def, _| { + let substs = Substs::for_item(self.tcx, + method.def_id, + |def, _| { let i = def.index as usize; if i < substs.params().len() { substs.region_at(i) @@ -1242,7 +1232,8 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { // `impl_self_ty()` for an explanation. self.tcx.mk_region(ty::ReErased) } - }, |def, cur_substs| { + }, + |def, cur_substs| { let i = def.index as usize; if i < substs.params().len() { substs.type_at(i) @@ -1255,13 +1246,11 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { } /// Get the type of an impl and generate substitutions with placeholders. - fn impl_ty_and_substs(&self, - impl_def_id: DefId) - -> (Ty<'tcx>, &'tcx Substs<'tcx>) - { + fn impl_ty_and_substs(&self, impl_def_id: DefId) -> (Ty<'tcx>, &'tcx Substs<'tcx>) { let impl_ty = self.tcx.lookup_item_type(impl_def_id).ty; - let substs = Substs::for_item(self.tcx, impl_def_id, + let substs = Substs::for_item(self.tcx, + impl_def_id, |_, _| self.tcx.mk_region(ty::ReErased), |_, _| self.next_ty_var()); @@ -1287,23 +1276,15 @@ impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { /// and/or tracking the substitution and /// so forth. fn erase_late_bound_regions(&self, value: &ty::Binder) -> T - where T : TypeFoldable<'tcx> + where T: TypeFoldable<'tcx> { self.tcx.erase_late_bound_regions(value) } - fn impl_item(&self, impl_def_id: DefId) - -> Option> - { - self.fcx.impl_item(impl_def_id, self.item_name) - } - - /// Find item with name `item_name` defined in `trait_def_id` - /// and return it, or `None`, if no such item. - fn trait_item(&self, trait_def_id: DefId) - -> Option> - { - self.fcx.trait_item(trait_def_id, self.item_name) + /// Find item with name `item_name` defined in impl/trait `def_id` + /// and return it, or `None`, if no such item was defined there. 
+ fn impl_or_trait_item(&self, def_id: DefId) -> Option> { + self.fcx.impl_or_trait_item(def_id, self.item_name) } } @@ -1313,9 +1294,7 @@ impl<'tcx> Candidate<'tcx> { item: self.item.clone(), kind: match self.kind { InherentImplCandidate(..) => InherentImplPick, - ExtensionImplCandidate(def_id, ..) => { - ExtensionImplPick(def_id) - } + ExtensionImplCandidate(def_id, ..) => ExtensionImplPick(def_id), ObjectCandidate => ObjectPick, TraitCandidate => TraitPick, WhereClauseCandidate(ref trait_ref) => { @@ -1332,15 +1311,13 @@ impl<'tcx> Candidate<'tcx> { import_id: self.import_id, autoderefs: 0, autoref: None, - unsize: None + unsize: None, } } fn to_source(&self) -> CandidateSource { match self.kind { - InherentImplCandidate(..) => { - ImplSource(self.item.container().id()) - } + InherentImplCandidate(..) => ImplSource(self.item.container().id()), ExtensionImplCandidate(def_id, ..) => ImplSource(def_id), ObjectCandidate | TraitCandidate | diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index 3692d6fbf73d6..32bf839a4ed4e 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -13,16 +13,14 @@ use CrateCtxt; -use check::{FnCtxt}; +use check::FnCtxt; use rustc::hir::map as hir_map; use rustc::ty::{self, Ty, ToPolyTraitRef, ToPredicate, TypeFoldable}; -use middle::cstore; use hir::def::Def; -use hir::def_id::DefId; +use hir::def_id::{CRATE_DEF_INDEX, DefId}; use middle::lang_items::FnOnceTraitLangItem; -use rustc::ty::subst::Substs; use rustc::traits::{Obligation, SelectionContext}; -use util::nodemap::{FnvHashSet}; +use util::nodemap::FnvHashSet; use syntax::ast; use errors::DiagnosticBuilder; @@ -44,25 +42,26 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { match ty.sty { // Not all of these (e.g. unsafe fns) implement FnOnce // so we look for these beforehand - ty::TyClosure(..) | ty::TyFnDef(..) | ty::TyFnPtr(_) => true, + ty::TyClosure(..) | + ty::TyFnDef(..) | + ty::TyFnPtr(_) => true, // If it's not a simple function, look for things which implement FnOnce _ => { let fn_once = match tcx.lang_items.require(FnOnceTraitLangItem) { Ok(fn_once) => fn_once, - Err(..) => return false + Err(..) => return false, }; - self.autoderef(span, ty).any(|(ty, _)| self.probe(|_| { - let fn_once_substs = - Substs::new_trait(tcx, ty, &[self.next_ty_var()]); - let trait_ref = ty::TraitRef::new(fn_once, fn_once_substs); - let poly_trait_ref = trait_ref.to_poly_trait_ref(); - let obligation = Obligation::misc(span, - self.body_id, - poly_trait_ref - .to_predicate()); - SelectionContext::new(self).evaluate_obligation(&obligation) - })) + self.autoderef(span, ty).any(|(ty, _)| { + self.probe(|_| { + let fn_once_substs = tcx.mk_substs_trait(ty, &[self.next_ty_var()]); + let trait_ref = ty::TraitRef::new(fn_once, fn_once_substs); + let poly_trait_ref = trait_ref.to_poly_trait_ref(); + let obligation = + Obligation::misc(span, self.body_id, poly_trait_ref.to_predicate()); + SelectionContext::new(self).evaluate_obligation(&obligation) + }) + }) } } } @@ -72,15 +71,13 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { rcvr_ty: Ty<'tcx>, item_name: ast::Name, rcvr_expr: Option<&hir::Expr>, - error: MethodError<'tcx>) - { + error: MethodError<'tcx>) { // avoid suggestions when we don't know what's going on. 
if rcvr_ty.references_error() { - return + return; } - let report_candidates = |err: &mut DiagnosticBuilder, - mut sources: Vec| { + let report_candidates = |err: &mut DiagnosticBuilder, mut sources: Vec| { sources.sort(); sources.dedup(); @@ -92,17 +89,20 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { CandidateSource::ImplSource(impl_did) => { // Provide the best span we can. Use the item, if local to crate, else // the impl, if local to crate (item may be defaulted), else nothing. - let item = self.impl_item(impl_did, item_name) + let item = self.impl_or_trait_item(impl_did, item_name) .or_else(|| { - self.trait_item( - self.tcx.impl_trait_ref(impl_did).unwrap().def_id, - - item_name - ) - }).unwrap(); - let note_span = self.tcx.map.span_if_local(item.def_id()).or_else(|| { - self.tcx.map.span_if_local(impl_did) - }); + self.impl_or_trait_item(self.tcx + .impl_trait_ref(impl_did) + .unwrap() + .def_id, + + item_name) + }) + .unwrap(); + let note_span = self.tcx + .map + .span_if_local(item.def_id()) + .or_else(|| self.tcx.map.span_if_local(impl_did)); let impl_ty = self.impl_self_ty(span, impl_did).ty; @@ -127,9 +127,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } CandidateSource::TraitSource(trait_did) => { - let item = self.trait_item(trait_did, item_name).unwrap(); + let item = self.impl_or_trait_item(trait_did, item_name).unwrap(); let item_span = self.tcx.map.def_id_span(item.def_id(), span); - span_note!(err, item_span, + span_note!(err, + item_span, "candidate #{} is defined in the trait `{}`", idx + 1, self.tcx.item_path_str(trait_did)); @@ -145,20 +146,22 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { MethodError::NoMatch(NoMatchData { static_candidates: static_sources, unsatisfied_predicates, out_of_scope_traits, - mode, .. }) => { + mode, + .. }) => { let tcx = self.tcx; - let mut err = self.type_error_struct( - span, - |actual| { - format!("no {} named `{}` found for type `{}` \ - in the current scope", - if mode == Mode::MethodCall { "method" } - else { "associated item" }, - item_name, - actual) - }, - rcvr_ty); + let mut err = self.type_error_struct(span, + |actual| { + format!("no {} named `{}` found for type `{}` in the current scope", + if mode == Mode::MethodCall { + "method" + } else { + "associated item" + }, + item_name, + actual) + }, + rcvr_ty); // If the method name is the name of a field with a function or closure type, // give a helping note that it has to be called as (x.f)(...). @@ -166,27 +169,30 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { for (ty, _) in self.autoderef(span, rcvr_ty) { match ty.sty { ty::TyAdt(def, substs) if !def.is_enum() => { - if let Some(field) = def.struct_variant(). 
- find_field_named(item_name) { + if let Some(field) = def.struct_variant() + .find_field_named(item_name) { let snippet = tcx.sess.codemap().span_to_snippet(expr.span); let expr_string = match snippet { Ok(expr_string) => expr_string, - _ => "s".into() // Default to a generic placeholder for the - // expression when we can't generate a - // string snippet + _ => "s".into(), // Default to a generic placeholder for the + // expression when we can't generate a + // string snippet }; let field_ty = field.ty(tcx, substs); if self.is_fn_ty(&field_ty, span) { - err.span_note(span, &format!( - "use `({0}.{1})(...)` if you meant to call the \ - function stored in the `{1}` field", - expr_string, item_name)); + err.span_note(span, + &format!("use `({0}.{1})(...)` if you \ + meant to call the function \ + stored in the `{1}` field", + expr_string, + item_name)); } else { - err.span_note(span, &format!( - "did you mean to write `{0}.{1}`?", - expr_string, item_name)); + err.span_note(span, + &format!("did you mean to write `{0}.{1}`?", + expr_string, + item_name)); } break; } @@ -205,10 +211,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } if let Some(expr) = rcvr_expr { - if let Ok (expr_string) = tcx.sess.codemap().span_to_snippet(expr.span) { + if let Ok(expr_string) = tcx.sess.codemap().span_to_snippet(expr.span) { report_function!(expr.span, expr_string); - } - else if let Expr_::ExprPath(_, path) = expr.node.clone() { + } else if let Expr_::ExprPath(_, path) = expr.node.clone() { if let Some(segment) = path.segments.last() { report_function!(expr.span, segment.name); } @@ -217,34 +222,36 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } if !static_sources.is_empty() { - err.note( - "found the following associated functions; to be used as \ - methods, functions must have a `self` parameter"); + err.note("found the following associated functions; to be used as methods, \ + functions must have a `self` parameter"); report_candidates(&mut err, static_sources); } if !unsatisfied_predicates.is_empty() { let bound_list = unsatisfied_predicates.iter() - .map(|p| format!("`{} : {}`", - p.self_ty(), - p)) + .map(|p| format!("`{} : {}`", p.self_ty(), p)) .collect::>() .join(", "); - err.note( - &format!("the method `{}` exists but the \ - following trait bounds were not satisfied: {}", - item_name, - bound_list)); + err.note(&format!("the method `{}` exists but the following trait bounds \ + were not satisfied: {}", + item_name, + bound_list)); } - self.suggest_traits_to_import(&mut err, span, rcvr_ty, item_name, - rcvr_expr, out_of_scope_traits); + self.suggest_traits_to_import(&mut err, + span, + rcvr_ty, + item_name, + rcvr_expr, + out_of_scope_traits); err.emit(); } MethodError::Ambiguity(sources) => { - let mut err = struct_span_err!(self.sess(), span, E0034, + let mut err = struct_span_err!(self.sess(), + span, + E0034, "multiple applicable items in scope"); err.span_label(span, &format!("multiple `{}` found", item_name)); @@ -256,11 +263,12 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let msg = format!("the `{}` method from the `{}` trait cannot be explicitly \ invoked on this closure as we have not yet inferred what \ kind of closure it is", - item_name, - self.tcx.item_path_str(trait_def_id)); + item_name, + self.tcx.item_path_str(trait_def_id)); let msg = if let Some(callee) = rcvr_expr { format!("{}; use overloaded call notation instead (e.g., `{}()`)", - msg, pprust::expr_to_string(callee)) + msg, + pprust::expr_to_string(callee)) } else { msg }; @@ -280,18 +288,24 @@ impl<'a, 'gcx, 
'tcx> FnCtxt<'a, 'gcx, 'tcx> { rcvr_ty: Ty<'tcx>, item_name: ast::Name, rcvr_expr: Option<&hir::Expr>, - valid_out_of_scope_traits: Vec) - { + valid_out_of_scope_traits: Vec) { if !valid_out_of_scope_traits.is_empty() { let mut candidates = valid_out_of_scope_traits; candidates.sort(); candidates.dedup(); - let msg = format!( - "items from traits can only be used if the trait is in scope; \ - the following {traits_are} implemented but not in scope, \ - perhaps add a `use` for {one_of_them}:", - traits_are = if candidates.len() == 1 {"trait is"} else {"traits are"}, - one_of_them = if candidates.len() == 1 {"it"} else {"one of them"}); + let msg = format!("items from traits can only be used if the trait is in scope; the \ + following {traits_are} implemented but not in scope, perhaps add \ + a `use` for {one_of_them}:", + traits_are = if candidates.len() == 1 { + "trait is" + } else { + "traits are" + }, + one_of_them = if candidates.len() == 1 { + "it" + } else { + "one of them" + }); err.help(&msg[..]); @@ -304,7 +318,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if candidates.len() > limit { err.note(&format!("and {} others", candidates.len() - limit)); } - return + return; } let type_is_local = self.type_derefs_to_local(span, rcvr_ty, rcvr_expr); @@ -320,8 +334,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // this isn't perfect (that is, there are cases when // implementing a trait would be legal but is rejected // here). - (type_is_local || info.def_id.is_local()) - && self.trait_item(info.def_id, item_name).is_some() + (type_is_local || info.def_id.is_local()) && + self.impl_or_trait_item(info.def_id, item_name).is_some() }) .collect::>(); @@ -333,13 +347,20 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // FIXME #21673 this help message could be tuned to the case // of a type parameter: suggest adding a trait bound rather // than implementing. - let msg = format!( - "items from traits can only be used if the trait is implemented and in scope; \ - the following {traits_define} an item `{name}`, \ - perhaps you need to implement {one_of_them}:", - traits_define = if candidates.len() == 1 {"trait defines"} else {"traits define"}, - one_of_them = if candidates.len() == 1 {"it"} else {"one of them"}, - name = item_name); + let msg = format!("items from traits can only be used if the trait is implemented \ + and in scope; the following {traits_define} an item `{name}`, \ + perhaps you need to implement {one_of_them}:", + traits_define = if candidates.len() == 1 { + "trait defines" + } else { + "traits define" + }, + one_of_them = if candidates.len() == 1 { + "it" + } else { + "one of them" + }, + name = item_name); err.help(&msg[..]); @@ -356,7 +377,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { fn type_derefs_to_local(&self, span: Span, rcvr_ty: Ty<'tcx>, - rcvr_expr: Option<&hir::Expr>) -> bool { + rcvr_expr: Option<&hir::Expr>) + -> bool { fn is_local(ty: Ty) -> bool { match ty.sty { ty::TyAdt(def, _) => def.did.is_local(), @@ -369,7 +391,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // non-local (there are "edge" cases, e.g. (LocalType,), but // the noise from these sort of types is usually just really // annoying, rather than any sort of help). 
- _ => false + _ => false, } } @@ -392,9 +414,7 @@ pub struct TraitInfo { impl TraitInfo { fn new(def_id: DefId) -> TraitInfo { - TraitInfo { - def_id: def_id, - } + TraitInfo { def_id: def_id } } } impl PartialEq for TraitInfo { @@ -404,7 +424,9 @@ impl PartialEq for TraitInfo { } impl Eq for TraitInfo {} impl PartialOrd for TraitInfo { - fn partial_cmp(&self, other: &TraitInfo) -> Option { Some(self.cmp(other)) } + fn partial_cmp(&self, other: &TraitInfo) -> Option { + Some(self.cmp(other)) + } } impl Ord for TraitInfo { fn cmp(&self, other: &TraitInfo) -> Ordering { @@ -427,7 +449,7 @@ pub fn all_traits<'a>(ccx: &'a CrateCtxt) -> AllTraits<'a> { // Crate-local: // // meh. - struct Visitor<'a, 'tcx:'a> { + struct Visitor<'a, 'tcx: 'a> { map: &'a hir_map::Map<'tcx>, traits: &'a mut AllTraitsVec, } @@ -444,39 +466,37 @@ pub fn all_traits<'a>(ccx: &'a CrateCtxt) -> AllTraits<'a> { } ccx.tcx.map.krate().visit_all_items(&mut Visitor { map: &ccx.tcx.map, - traits: &mut traits + traits: &mut traits, }); // Cross-crate: let mut external_mods = FnvHashSet(); - fn handle_external_def(traits: &mut AllTraitsVec, + fn handle_external_def(ccx: &CrateCtxt, + traits: &mut AllTraitsVec, external_mods: &mut FnvHashSet, - ccx: &CrateCtxt, - cstore: &for<'a> cstore::CrateStore<'a>, - dl: cstore::DefLike) { - match dl { - cstore::DlDef(Def::Trait(did)) => { - traits.push(TraitInfo::new(did)); + def: Def) { + let def_id = def.def_id(); + match def { + Def::Trait(..) => { + traits.push(TraitInfo::new(def_id)); } - cstore::DlDef(Def::Mod(did)) => { - if !external_mods.insert(did) { + Def::Mod(..) => { + if !external_mods.insert(def_id) { return; } - for child in cstore.item_children(did) { - handle_external_def(traits, external_mods, - ccx, cstore, child.def) + for child in ccx.tcx.sess.cstore.item_children(def_id) { + handle_external_def(ccx, traits, external_mods, child.def) } } _ => {} } } - let cstore = &*ccx.tcx.sess.cstore; - for cnum in ccx.tcx.sess.cstore.crates() { - for child in cstore.crate_top_level_items(cnum) { - handle_external_def(&mut traits, &mut external_mods, - ccx, cstore, child.def) - } + let def_id = DefId { + krate: cnum, + index: CRATE_DEF_INDEX, + }; + handle_external_def(ccx, &mut traits, &mut external_mods, Def::Mod(def_id)); } *ccx.all_traits.borrow_mut() = Some(traits); @@ -486,13 +506,13 @@ pub fn all_traits<'a>(ccx: &'a CrateCtxt) -> AllTraits<'a> { assert!(borrow.is_some()); AllTraits { borrow: borrow, - idx: 0 + idx: 0, } } pub struct AllTraits<'a> { borrow: cell::Ref<'a, Option>, - idx: usize + idx: usize, } impl<'a> Iterator for AllTraits<'a> { diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 8598c561fc2e3..75a14bb3db922 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -83,12 +83,11 @@ use self::TupleArgumentsFlag::*; use astconv::{AstConv, ast_region_to_region, PathParamMode}; use dep_graph::DepNode; use fmt_macros::{Parser, Piece, Position}; -use middle::cstore::LOCAL_CRATE; -use hir::def::{Def, PathResolution}; -use hir::def_id::DefId; +use hir::def::{Def, CtorKind, PathResolution}; +use hir::def_id::{DefId, LOCAL_CRATE}; use hir::pat_util; use rustc::infer::{self, InferCtxt, InferOk, TypeOrigin, TypeTrace, type_variable}; -use rustc::ty::subst::{Subst, Substs}; +use rustc::ty::subst::{Kind, Subst, Substs}; use rustc::traits::{self, Reveal}; use rustc::ty::{ParamTy, ParameterEnvironment}; use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue}; @@ -451,7 +450,7 @@ impl<'a, 'tcx> 
Visitor<'tcx> for CheckItemTypesVisitor<'a, 'tcx> { fn visit_ty(&mut self, t: &'tcx hir::Ty) { match t.node { - hir::TyFixedLengthVec(_, ref expr) => { + hir::TyArray(_, ref expr) => { check_const_with_type(self.ccx, &expr, self.ccx.tcx.types.usize, expr.id); } _ => {} @@ -532,13 +531,16 @@ pub fn check_drop_impls(ccx: &CrateCtxt) -> CompileResult { fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, decl: &'tcx hir::FnDecl, body: &'tcx hir::Block, - fn_id: ast::NodeId) { + fn_id: ast::NodeId, + span: Span) { let raw_fty = ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(fn_id)).ty; let fn_ty = match raw_fty.sty { ty::TyFnDef(.., f) => f, _ => span_bug!(body.span, "check_bare_fn: function type expected") }; + check_abi(ccx, span, fn_ty.abi); + ccx.inherited(fn_id).enter(|inh| { // Compute the fty from point of view of inside fn. let fn_scope = inh.tcx.region_maps.call_site_extent(fn_id, body.id); @@ -562,6 +564,13 @@ fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, }); } +fn check_abi<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, span: Span, abi: Abi) { + if !ccx.tcx.sess.target.target.is_abi_supported(abi) { + struct_span_err!(ccx.tcx.sess, span, E0570, + "The ABI `{}` is not supported for the current target", abi).emit() + } +} + struct GatherLocalsVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { fcx: &'a FnCtxt<'a, 'gcx, 'tcx> } @@ -627,7 +636,7 @@ impl<'a, 'gcx, 'tcx> Visitor<'gcx> for GatherLocalsVisitor<'a, 'gcx, 'tcx> { // need to record the type for that node fn visit_ty(&mut self, t: &'gcx hir::Ty) { match t.node { - hir::TyFixedLengthVec(ref ty, ref count_expr) => { + hir::TyArray(ref ty, ref count_expr) => { self.visit_ty(&ty); self.fcx.check_expr_with_hint(&count_expr, self.fcx.tcx.types.usize); } @@ -743,17 +752,14 @@ pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) { hir::ItemImpl(.., ref impl_items) => { debug!("ItemImpl {} with id {}", it.name, it.id); let impl_def_id = ccx.tcx.map.local_def_id(it.id); - match ccx.tcx.impl_trait_ref(impl_def_id) { - Some(impl_trait_ref) => { - check_impl_items_against_trait(ccx, - it.span, - impl_def_id, - &impl_trait_ref, - impl_items); - let trait_def_id = impl_trait_ref.def_id; - check_on_unimplemented(ccx, trait_def_id, it); - } - None => { } + if let Some(impl_trait_ref) = ccx.tcx.impl_trait_ref(impl_def_id) { + check_impl_items_against_trait(ccx, + it.span, + impl_def_id, + &impl_trait_ref, + impl_items); + let trait_def_id = impl_trait_ref.def_id; + check_on_unimplemented(ccx, trait_def_id, it); } } hir::ItemTrait(..) 
=> { @@ -771,6 +777,8 @@ pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) { check_bounds_are_used(ccx, generics, pty_ty); } hir::ItemForeignMod(ref m) => { + check_abi(ccx, it.span, m.abi); + if m.abi == Abi::RustIntrinsic { for item in &m.items { intrinsic::check_intrinsic_type(ccx, item); @@ -808,7 +816,7 @@ pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) { let _indenter = indenter(); match it.node { hir::ItemFn(ref decl, .., ref body) => { - check_bare_fn(ccx, &decl, &body, it.id); + check_bare_fn(ccx, &decl, &body, it.id, it.span); } hir::ItemImpl(.., ref impl_items) => { debug!("ItemImpl {} with id {}", it.name, it.id); @@ -819,7 +827,7 @@ pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) { check_const(ccx, &expr, impl_item.id) } hir::ImplItemKind::Method(ref sig, ref body) => { - check_bare_fn(ccx, &sig.decl, body, impl_item.id); + check_bare_fn(ccx, &sig.decl, body, impl_item.id, impl_item.span); } hir::ImplItemKind::Type(_) => { // Nothing to do here. @@ -834,7 +842,7 @@ pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) { check_const(ccx, &expr, trait_item.id) } hir::MethodTraitItem(ref sig, Some(ref body)) => { - check_bare_fn(ccx, &sig.decl, body, trait_item.id); + check_bare_fn(ccx, &sig.decl, body, trait_item.id, trait_item.span); } hir::MethodTraitItem(_, None) | hir::ConstTraitItem(_, None) | @@ -904,7 +912,7 @@ fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, let mut err = struct_span_err!( tcx.sess, impl_item.span, E0520, "`{}` specializes an item from a parent `impl`, but \ - neither that item nor the `impl` are marked `default`", + that item is not marked `default`", impl_item.name); err.span_label(impl_item.span, &format!("cannot specialize default item `{}`", impl_item.name)); @@ -912,8 +920,7 @@ fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, match tcx.span_of_impl(parent_impl) { Ok(span) => { err.span_label(span, &"parent `impl` is here"); - err.note(&format!("to specialize, either the parent `impl` or `{}` \ - in the parent `impl` must be marked `default`", + err.note(&format!("to specialize, `{}` in the parent `impl` must be marked `default`", impl_item.name)); } Err(cname) => { @@ -1015,13 +1022,15 @@ fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, _ => span_bug!(impl_item.span, "non-method impl-item for method") }; + let trait_span = tcx.map.span_if_local(ty_trait_item.def_id()); if let &ty::MethodTraitItem(ref trait_method) = ty_trait_item { compare_impl_method(ccx, &impl_method, impl_item.span, body.id, &trait_method, - &impl_trait_ref); + &impl_trait_ref, + trait_span); } else { let mut err = struct_span_err!(tcx.sess, impl_item.span, E0324, "item `{}` is an associated method, \ @@ -1347,8 +1356,12 @@ impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> { assoc_name: ast::Name) -> bool { - let trait_def = self.tcx().lookup_trait_def(trait_def_id); - trait_def.associated_type_names.contains(&assoc_name) + self.tcx().impl_or_trait_items(trait_def_id).iter().any(|&def_id| { + match self.tcx().impl_or_trait_item(def_id) { + ty::TypeTraitItem(ref item) => item.name == assoc_name, + _ => false + } + }) } fn ty_infer(&self, _span: Span) -> Ty<'tcx> { @@ -1357,7 +1370,7 @@ impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> { fn ty_infer_for_def(&self, ty_param_def: &ty::TypeParameterDef<'tcx>, - substs: &Substs<'tcx>, + substs: &[Kind<'tcx>], span: 
Span) -> Ty<'tcx> { self.type_var_for_def(span, ty_param_def, substs) } @@ -1448,12 +1461,13 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { writeback_errors: Cell::new(false), err_count_on_creation: inh.tcx.sess.err_count(), ret_ty: rty, - ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)), + ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, + ast::CRATE_NODE_ID)), inh: inh, } } - pub fn param_env(&self) -> &ty::ParameterEnvironment<'tcx> { + pub fn param_env(&self) -> &ty::ParameterEnvironment<'gcx> { &self.parameter_environment } @@ -1520,9 +1534,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { match self.locals.borrow().get(&nid) { Some(&t) => t, None => { - span_err!(self.tcx.sess, span, E0513, - "no type for local variable {}", - nid); + struct_span_err!(self.tcx.sess, span, E0513, + "no type for local variable {}", + self.tcx.map.node_to_string(nid)) + .span_label(span, &"no type for variable") + .emit(); self.tcx.types.err } } @@ -1670,41 +1686,6 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { cause) } - /// Instantiates the type in `did` with the generics in `path` and returns - /// it (registering the necessary trait obligations along the way). - /// - /// Note that this function is only intended to be used with type-paths, - /// not with value-paths. - pub fn instantiate_type_path(&self, - did: DefId, - path: &hir::Path, - node_id: ast::NodeId) - -> Ty<'tcx> { - debug!("instantiate_type_path(did={:?}, path={:?})", did, path); - let mut ty = self.tcx.lookup_item_type(did).ty; - if ty.is_fn() { - // Tuple variants have fn type even in type namespace, extract true variant type from it - ty = self.tcx.no_late_bound_regions(&ty.fn_ret()).unwrap(); - } - let type_predicates = self.tcx.lookup_predicates(did); - let substs = AstConv::ast_path_substs_for_ty(self, self, - path.span, - PathParamMode::Optional, - did, - path.segments.last().unwrap()); - debug!("instantiate_type_path: ty={:?} substs={:?}", ty, substs); - let bounds = self.instantiate_bounds(path.span, substs, &type_predicates); - let cause = traits::ObligationCause::new(path.span, self.body_id, - traits::ItemObligation(did)); - self.add_obligations_for_parameters(cause, &bounds); - - let ty_substituted = self.instantiate_type_scheme(path.span, substs, &ty); - self.write_substs(node_id, ty::ItemSubsts { - substs: substs - }); - ty_substituted - } - pub fn write_nil(&self, node_id: ast::NodeId) { self.write_ty(node_id, self.tcx.mk_nil()); } @@ -1805,9 +1786,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { f: F) where F: FnOnce(&ty::ItemSubsts<'tcx>), { - match self.tables.borrow().item_substs.get(&id) { - Some(s) => { f(s) } - None => { } + if let Some(s) = self.tables.borrow().item_substs.get(&id) { + f(s); } } @@ -2115,7 +2095,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { .unwrap_or(type_variable::Default { ty: self.next_ty_var(), origin_span: syntax_pos::DUMMY_SP, - def_id: self.tcx.map.local_def_id(0) // what do I put here? + // what do I put here? 
+ def_id: self.tcx.map.local_def_id(ast::CRATE_NODE_ID) }); // This is to ensure that we elimnate any non-determinism from the error @@ -2372,7 +2353,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let err_inputs = match tuple_arguments { DontTupleArguments => err_inputs, - TupleArguments => vec![self.tcx.mk_tup(err_inputs)], + TupleArguments => vec![self.tcx.intern_tup(&err_inputs[..])], }; self.check_argument_types(sp, &err_inputs[..], &[], args_no_rcvr, @@ -2424,6 +2405,21 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let mut expected_arg_tys = expected_arg_tys; let expected_arg_count = fn_inputs.len(); + let sp_args = if args.len() > 0 { + let (first, args) = args.split_at(1); + let mut sp_tmp = first[0].span; + for arg in args { + let sp_opt = self.sess().codemap().merge_spans(sp_tmp, arg.span); + if ! sp_opt.is_some() { + break; + } + sp_tmp = sp_opt.unwrap(); + }; + sp_tmp + } else { + sp + }; + fn parameter_count_error<'tcx>(sess: &Session, sp: Span, fn_inputs: &[Ty<'tcx>], expected_count: usize, arg_count: usize, error_code: &str, variadic: bool) { @@ -2456,7 +2452,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let tuple_type = self.structurally_resolved_type(sp, fn_inputs[0]); match tuple_type.sty { ty::TyTuple(arg_types) if arg_types.len() != args.len() => { - parameter_count_error(tcx.sess, sp, fn_inputs, arg_types.len(), args.len(), + parameter_count_error(tcx.sess, sp_args, fn_inputs, arg_types.len(), args.len(), "E0057", false); expected_arg_tys = &[]; self.err_args(args.len()) @@ -2485,14 +2481,14 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if supplied_arg_count >= expected_arg_count { fn_inputs.to_vec() } else { - parameter_count_error(tcx.sess, sp, fn_inputs, expected_arg_count, + parameter_count_error(tcx.sess, sp_args, fn_inputs, expected_arg_count, supplied_arg_count, "E0060", true); expected_arg_tys = &[]; self.err_args(supplied_arg_count) } } else { - parameter_count_error(tcx.sess, sp, fn_inputs, expected_arg_count, supplied_arg_count, - "E0061", false); + parameter_count_error(tcx.sess, sp_args, fn_inputs, expected_arg_count, + supplied_arg_count, "E0061", false); expected_arg_tys = &[]; self.err_args(supplied_arg_count) }; @@ -2951,18 +2947,20 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { .emit(); self.tcx().types.err } else { - let mut err = self.type_error_struct(expr.span, |actual| { - format!("attempted access of field `{}` on type `{}`, \ - but no field with that name was found", + let mut err = self.type_error_struct(field.span, |actual| { + format!("no field `{}` on type `{}`", field.node, actual) }, expr_t); match expr_t.sty { ty::TyAdt(def, _) if !def.is_enum() => { if let Some(suggested_field_name) = Self::suggest_field_name(def.struct_variant(), field, vec![]) { - err.span_help(field.span, - &format!("did you mean `{}`?", suggested_field_name)); - }; + err.span_label(field.span, + &format!("did you mean `{}`?", suggested_field_name)); + } else { + err.span_label(field.span, + &format!("unknown field")); + }; } ty::TyRawPtr(..) 
=> { err.note(&format!("`{0}` is a native pointer; perhaps you need to deref with \ @@ -3010,7 +3008,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { while let Some((base_t, autoderefs)) = autoderef.next() { let field = match base_t.sty { ty::TyAdt(base_def, substs) if base_def.is_struct() => { - tuple_like = base_def.struct_variant().kind == ty::VariantKind::Tuple; + tuple_like = base_def.struct_variant().ctor_kind == CtorKind::Fn; if !tuple_like { continue } debug!("tuple struct named {:?}", base_t); @@ -3092,7 +3090,18 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { if let Some(field_name) = Self::suggest_field_name(variant, &field.name, skip_fields.collect()) { - err.span_label(field.name.span,&format!("did you mean `{}`?",field_name)); + err.span_label(field.name.span, + &format!("field does not exist - did you mean `{}`?", field_name)); + } else { + match ty.sty { + ty::TyAdt(adt, ..) if adt.is_enum() => { + err.span_label(field.name.span, &format!("`{}::{}` does not have this field", + ty, variant.name.as_str())); + } + _ => { + err.span_label(field.name.span, &format!("`{}` does not have this field", ty)); + } + } }; err.emit(); } @@ -3207,43 +3216,55 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } pub fn check_struct_path(&self, - path: &hir::Path, - node_id: ast::NodeId, - span: Span) - -> Option<(ty::VariantDef<'tcx>, Ty<'tcx>)> { - let def = self.finish_resolving_struct_path(path, node_id, span); + path: &hir::Path, + node_id: ast::NodeId) + -> Option<(ty::VariantDef<'tcx>, Ty<'tcx>)> { + let (def, ty) = self.finish_resolving_struct_path(path, node_id); let variant = match def { Def::Err => { self.set_tainted_by_errors(); return None; } - Def::Variant(type_did, _) | Def::Struct(type_did) | Def::Union(type_did) => { - Some((type_did, self.tcx.expect_variant_def(def))) + Def::Variant(..) => { + match ty.sty { + ty::TyAdt(adt, substs) => { + Some((adt.variant_of_def(def), adt.did, substs)) + } + _ => bug!("unexpected type: {:?}", ty.sty) + } } - Def::TyAlias(did) => { - match self.tcx.opt_lookup_item_type(did).map(|scheme| &scheme.ty.sty) { - Some(&ty::TyAdt(adt, _)) if !adt.is_enum() => { - Some((did, adt.struct_variant())) + Def::Struct(..) | Def::Union(..) | Def::TyAlias(..) | + Def::AssociatedTy(..) | Def::SelfTy(..) => { + match ty.sty { + ty::TyAdt(adt, substs) if !adt.is_enum() => { + Some((adt.struct_variant(), adt.did, substs)) } _ => None, } } - _ => None + _ => bug!("unexpected definition: {:?}", def) }; - if let Some((def_id, variant)) = variant { - if variant.kind == ty::VariantKind::Tuple && + if let Some((variant, did, substs)) = variant { + if variant.ctor_kind == CtorKind::Fn && !self.tcx.sess.features.borrow().relaxed_adts { - emit_feature_err(&self.tcx.sess.parse_sess.span_diagnostic, - "relaxed_adts", span, GateIssue::Language, + emit_feature_err(&self.tcx.sess.parse_sess, + "relaxed_adts", path.span, GateIssue::Language, "tuple structs and variants in struct patterns are unstable"); } - let ty = self.instantiate_type_path(def_id, path, node_id); + + // Check bounds on type arguments used in the path. 
+ let type_predicates = self.tcx.lookup_predicates(did); + let bounds = self.instantiate_bounds(path.span, substs, &type_predicates); + let cause = traits::ObligationCause::new(path.span, self.body_id, + traits::ItemObligation(did)); + self.add_obligations_for_parameters(cause, &bounds); + Some((variant, ty)) } else { struct_span_err!(self.tcx.sess, path.span, E0071, - "`{}` does not name a struct or a struct variant", - pprust::path_to_string(path)) + "expected struct, variant or union type, found {}", + ty.sort_string(self.tcx)) .span_label(path.span, &format!("not a struct")) .emit(); None @@ -3257,12 +3278,11 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { base_expr: &'gcx Option>) -> Ty<'tcx> { // Find the relevant variant - let (variant, struct_ty) = if let Some(variant_ty) = self.check_struct_path(path, expr.id, - expr.span) { + let (variant, struct_ty) = if let Some(variant_ty) = self.check_struct_path(path, expr.id) { variant_ty } else { self.check_struct_fields_on_error(fields, base_expr); - return self.tcx().types.err; + return self.tcx.types.err; }; self.check_expr_struct_fields(struct_ty, path.span, variant, fields, @@ -3567,7 +3587,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.check_method_call(expr, name, &args[..], &tps[..], expected, lvalue_pref) } hir::ExprCast(ref e, ref t) => { - if let hir::TyFixedLengthVec(_, ref count_expr) = t.node { + if let hir::TyArray(_, ref count_expr) = t.node { self.check_expr_with_hint(&count_expr, tcx.types.usize); } @@ -3600,7 +3620,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.check_expr_eq_type(&e, typ); typ } - hir::ExprVec(ref args) => { + hir::ExprArray(ref args) => { let uty = expected.to_option(self).and_then(|uty| { match uty.sty { ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty), @@ -3678,9 +3698,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { _ => None } }); - let mut err_field = false; - let elt_ts = elts.iter().enumerate().map(|(i, e)| { + let elt_ts_iter = elts.iter().enumerate().map(|(i, e)| { let t = match flds { Some(ref fs) if i < fs.len() => { let ety = fs[i]; @@ -3691,13 +3710,13 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.check_expr_with_expectation(&e, NoExpectation) } }; - err_field = err_field || t.references_error(); t - }).collect(); - if err_field { + }); + let tuple = tcx.mk_tup(elt_ts_iter); + if tuple.references_error() { tcx.types.err } else { - tcx.mk_tup(elt_ts) + tuple } } hir::ExprStruct(ref path, ref fields, ref base_expr) => { @@ -3758,7 +3777,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { } } err.emit(); - self.tcx().types.err + self.tcx.types.err } } } @@ -3768,29 +3787,26 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // Finish resolving a path in a struct expression or pattern `S::A { .. }` if necessary. // The newly resolved definition is written into `def_map`. - pub fn finish_resolving_struct_path(&self, - path: &hir::Path, - node_id: ast::NodeId, - span: Span) - -> Def + fn finish_resolving_struct_path(&self, + path: &hir::Path, + node_id: ast::NodeId) + -> (Def, Ty<'tcx>) { - let path_res = self.tcx().expect_resolution(node_id); - if path_res.depth == 0 { - // If fully resolved already, we don't have to do anything. - path_res.base_def - } else { - let base_ty_end = path.segments.len() - path_res.depth; - let (_ty, def) = AstConv::finish_resolving_def_to_ty(self, self, span, - PathParamMode::Optional, - path_res.base_def, - None, - node_id, - &path.segments[..base_ty_end], - &path.segments[base_ty_end..]); - // Write back the new resolution. 
- self.tcx().def_map.borrow_mut().insert(node_id, PathResolution::new(def)); - def + let path_res = self.tcx.expect_resolution(node_id); + let base_ty_end = path.segments.len() - path_res.depth; + let (ty, def) = AstConv::finish_resolving_def_to_ty(self, self, path.span, + PathParamMode::Optional, + path_res.base_def, + None, + node_id, + &path.segments[..base_ty_end], + &path.segments[base_ty_end..], + true); + // Write back the new resolution. + if path_res.depth != 0 { + self.tcx.def_map.borrow_mut().insert(node_id, PathResolution::new(def)); } + (def, ty) } // Resolve associated value path into a base type and associated constant or method definition. @@ -3802,7 +3818,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { span: Span) -> (Def, Option>, &'b [hir::PathSegment]) { - let path_res = self.tcx().expect_resolution(node_id); + let path_res = self.tcx.expect_resolution(node_id); if path_res.depth == 0 { // If fully resolved already, we don't have to do anything. (path_res.base_def, opt_self_ty, &path.segments) @@ -3816,7 +3832,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { opt_self_ty, node_id, &ty_segments[..base_ty_end], - &ty_segments[base_ty_end..]); + &ty_segments[base_ty_end..], + false); // Resolve an associated constant or method on the previously resolved type. let item_segment = path.segments.last().unwrap(); @@ -3836,7 +3853,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { }; // Write back the new resolution. - self.tcx().def_map.borrow_mut().insert(node_id, PathResolution::new(def)); + self.tcx.def_map.borrow_mut().insert(node_id, PathResolution::new(def)); (def, Some(ty), slice::ref_slice(item_segment)) } } @@ -4039,34 +4056,15 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // // There are basically four cases to consider: // - // 1. Reference to a *type*, such as a struct or enum: - // - // mod a { struct Foo { ... } } + // 1. Reference to a constructor of enum variant or struct: // - // Because we don't allow types to be declared within one - // another, a path that leads to a type will always look like - // `a::b::Foo` where `a` and `b` are modules. This implies - // that only the final segment can have type parameters, and - // they are located in the TypeSpace. - // - // *Note:* Generally speaking, references to types don't - // actually pass through this function, but rather the - // `ast_ty_to_ty` function in `astconv`. However, in the case - // of struct patterns (and maybe literals) we do invoke - // `instantiate_value_path` to get the general type of an instance of - // a struct. (In these cases, there are actually no type - // parameters permitted at present, but perhaps we will allow - // them in the future.) - // - // 1b. Reference to an enum variant or tuple-like struct: - // - // struct foo(...) - // enum E { foo(...) } + // struct Foo(...) + // enum E { Foo(...) } // // In these cases, the parameters are declared in the type // space. // - // 2. Reference to a *fn item*: + // 2. Reference to a fn item or a free constant: // // fn foo() { } // @@ -4075,7 +4073,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // type parameters. However, in this case, those parameters are // declared on a value, and hence are in the `FnSpace`. // - // 3. Reference to a *method*: + // 3. Reference to a method or an associated constant: // // impl SomeStruct { // fn foo(...) @@ -4087,15 +4085,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // `SomeStruct::`, contains parameters in TypeSpace, and the // final segment, `foo::` contains parameters in fn space. 
// - // 4. Reference to an *associated const*: + // 4. Reference to a local variable // - // impl AnotherStruct { - // const FOO: B = BAR; - // } - // - // The path in this case will look like - // `a::b::AnotherStruct::::FOO`, so the penultimate segment - // only will have parameters in TypeSpace. + // Local variables can't have any type parameters. // // The first step then is to categorize the segments appropriately. @@ -4105,14 +4097,9 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { let mut type_segment = None; let mut fn_segment = None; match def { - // Case 1 and 1b. Reference to a *type* or *enum variant*. - Def::Struct(def_id) | - Def::Union(def_id) | - Def::Variant(_, def_id) | - Def::Enum(def_id) | - Def::TyAlias(def_id) | - Def::AssociatedTy(_, def_id) | - Def::Trait(def_id) => { + // Case 1. Reference to a struct/variant constructor. + Def::StructCtor(def_id, ..) | + Def::VariantCtor(def_id, ..) => { // Everything but the final segment should have no // parameters at all. let mut generics = self.tcx.lookup_generics(def_id); @@ -4155,18 +4142,10 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { fn_segment = Some((segments.last().unwrap(), generics)); } - // Other cases. Various nonsense that really shouldn't show up - // here. If they do, an error will have been reported - // elsewhere. (I hope) - Def::Mod(..) | - Def::ForeignMod(..) | - Def::PrimTy(..) | - Def::SelfTy(..) | - Def::TyParam(..) | - Def::Local(..) | - Def::Label(..) | - Def::Upvar(..) | - Def::Err => {} + // Case 4. Local variable, no generics. + Def::Local(..) | Def::Upvar(..) => {} + + _ => bug!("unexpected definition: {:?}", def), } // In `>::method`, `A` and `B` are mandatory, but @@ -4185,12 +4164,13 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { self.tcx.prohibit_type_params(&segments[..segments.len() - poly_segments]); match def { - Def::Local(_, nid) | Def::Upvar(_, nid, ..) => { + Def::Local(def_id) | Def::Upvar(def_id, ..) => { + let nid = self.tcx.map.as_local_node_id(def_id).unwrap(); let ty = self.local_ty(span, nid); let ty = self.normalize_associated_types_in(span, &ty); self.write_ty(node_id, ty); self.write_substs(node_id, ty::ItemSubsts { - substs: Substs::empty(self.tcx) + substs: self.tcx.intern_substs(&[]) }); return ty; } @@ -4298,7 +4278,6 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { // the referenced item. let ty_substituted = self.instantiate_type_scheme(span, &substs, &scheme.ty); - if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated { // In the case of `Foo::method` and `>::method`, if `method` // is inherent, there is no `Self` parameter, instead, the impl needs diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs index f4a0df4611d33..aa221c33b5ddb 100644 --- a/src/librustc_typeck/check/upvar.rs +++ b/src/librustc_typeck/check/upvar.rs @@ -120,7 +120,8 @@ impl<'a, 'gcx, 'tcx> SeedBorrowKind<'a, 'gcx, 'tcx> { self.fcx.tcx.with_freevars(expr.id, |freevars| { for freevar in freevars { - let var_node_id = freevar.def.var_id(); + let def_id = freevar.def.def_id(); + let var_node_id = self.fcx.tcx.map.as_local_node_id(def_id).unwrap(); let upvar_id = ty::UpvarId { var_id: var_node_id, closure_expr_id: expr.id }; debug!("seed upvar_id {:?}", upvar_id); @@ -236,31 +237,30 @@ impl<'a, 'gcx, 'tcx> AdjustBorrowKind<'a, 'gcx, 'tcx> { // implemented. 
let tcx = self.fcx.tcx; tcx.with_freevars(closure_id, |freevars| { - freevars.iter() - .map(|freevar| { - let freevar_node_id = freevar.def.var_id(); - let freevar_ty = self.fcx.node_ty(freevar_node_id); - let upvar_id = ty::UpvarId { - var_id: freevar_node_id, - closure_expr_id: closure_id - }; - let capture = self.fcx.upvar_capture(upvar_id).unwrap(); - - debug!("freevar_node_id={:?} freevar_ty={:?} capture={:?}", - freevar_node_id, freevar_ty, capture); - - match capture { - ty::UpvarCapture::ByValue => freevar_ty, - ty::UpvarCapture::ByRef(borrow) => - tcx.mk_ref(borrow.region, - ty::TypeAndMut { - ty: freevar_ty, - mutbl: borrow.kind.to_mutbl_lossy(), - }), - } - }) - .collect() - }) + freevars.iter().map(|freevar| { + let def_id = freevar.def.def_id(); + let var_id = tcx.map.as_local_node_id(def_id).unwrap(); + let freevar_ty = self.fcx.node_ty(var_id); + let upvar_id = ty::UpvarId { + var_id: var_id, + closure_expr_id: closure_id + }; + let capture = self.fcx.upvar_capture(upvar_id).unwrap(); + + debug!("var_id={:?} freevar_ty={:?} capture={:?}", + var_id, freevar_ty, capture); + + match capture { + ty::UpvarCapture::ByValue => freevar_ty, + ty::UpvarCapture::ByRef(borrow) => + tcx.mk_ref(borrow.region, + ty::TypeAndMut { + ty: freevar_ty, + mutbl: borrow.kind.to_mutbl_lossy(), + }), + } + }).collect() + }) } fn adjust_upvar_borrow_kind_for_consume(&mut self, diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs index bc5cb68995b2f..e3634cfe5f5e3 100644 --- a/src/librustc_typeck/check/wfcheck.rs +++ b/src/librustc_typeck/check/wfcheck.rs @@ -150,7 +150,7 @@ impl<'ccx, 'gcx> CheckTypeWellFormedVisitor<'ccx, 'gcx> { self.check_variances_for_type_defn(item, ast_generics); } hir::ItemEnum(ref enum_def, ref ast_generics) => { - self.check_type_defn(item, false, |fcx| { + self.check_type_defn(item, true, |fcx| { fcx.enum_variants(enum_def) }); diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs index 0b70d904c2654..8685f703a599c 100644 --- a/src/librustc_typeck/check/writeback.rs +++ b/src/librustc_typeck/check/writeback.rs @@ -247,7 +247,7 @@ impl<'cx, 'gcx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'gcx, 'tcx> { fn visit_ty(&mut self, t: &hir::Ty) { match t.node { - hir::TyFixedLengthVec(ref ty, ref count_expr) => { + hir::TyArray(ref ty, ref count_expr) => { self.visit_ty(&ty); write_ty_to_tcx(self.fcx.ccx, count_expr.id, self.tcx().types.usize); } diff --git a/src/librustc_typeck/check_unused.rs b/src/librustc_typeck/check_unused.rs index f66f15b238e73..7e41a672bf325 100644 --- a/src/librustc_typeck/check_unused.rs +++ b/src/librustc_typeck/check_unused.rs @@ -30,10 +30,13 @@ impl<'a, 'tcx> UnusedTraitImportVisitor<'a, 'tcx> { if self.tcx.used_trait_imports.borrow().contains(&id) { return; } - self.tcx.sess.add_lint(lint::builtin::UNUSED_IMPORTS, - id, - span, - "unused import".to_string()); + + let msg = if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) { + format!("unused import: `{}`", snippet) + } else { + "unused import".to_string() + }; + self.tcx.sess.add_lint(lint::builtin::UNUSED_IMPORTS, id, span, msg); } } diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index 26c33c00b9cf9..ca22faa2ec36a 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -20,8 +20,7 @@ use middle::lang_items::UnsizeTraitLangItem; use rustc::ty::subst::Subst; use rustc::ty::{self, TyCtxt, TypeFoldable}; use rustc::traits::{self, 
Reveal}; -use rustc::ty::{ImplOrTraitItemId, ConstTraitItemId}; -use rustc::ty::{MethodTraitItemId, TypeTraitItemId, ParameterEnvironment}; +use rustc::ty::ParameterEnvironment; use rustc::ty::{Ty, TyBool, TyChar, TyError}; use rustc::ty::{TyParam, TyRawPtr}; use rustc::ty::{TyRef, TyAdt, TyTrait, TyNever, TyTuple}; @@ -39,17 +38,19 @@ use rustc::hir::intravisit; use rustc::hir::{Item, ItemImpl}; use rustc::hir; +use std::rc::Rc; + mod orphan; mod overlap; mod unsafety; -struct CoherenceChecker<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { +struct CoherenceChecker<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { crate_context: &'a CrateCtxt<'a, 'gcx>, inference_context: InferCtxt<'a, 'gcx, 'tcx>, } -struct CoherenceCheckVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { - cc: &'a CoherenceChecker<'a, 'gcx, 'tcx> +struct CoherenceCheckVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> { + cc: &'a CoherenceChecker<'a, 'gcx, 'tcx>, } impl<'a, 'gcx, 'tcx, 'v> intravisit::Visitor<'v> for CoherenceCheckVisitor<'a, 'gcx, 'tcx> { @@ -61,36 +62,25 @@ impl<'a, 'gcx, 'tcx, 'v> intravisit::Visitor<'v> for CoherenceCheckVisitor<'a, ' } impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { - // Returns the def ID of the base type, if there is one. fn get_base_type_def_id(&self, span: Span, ty: Ty<'tcx>) -> Option { match ty.sty { - TyAdt(def, _) => { - Some(def.did) - } + TyAdt(def, _) => Some(def.did), - TyTrait(ref t) => { - Some(t.principal.def_id()) - } + TyTrait(ref t) => Some(t.principal.def_id()), - TyBox(_) => { - self.inference_context.tcx.lang_items.owned_box() - } + TyBox(_) => self.inference_context.tcx.lang_items.owned_box(), - TyBool | TyChar | TyInt(..) | TyUint(..) | TyFloat(..) | - TyStr | TyArray(..) | TySlice(..) | TyFnDef(..) | TyFnPtr(_) | - TyTuple(..) | TyParam(..) | TyError | TyNever | - TyRawPtr(_) | TyRef(..) | TyProjection(..) => { - None - } + TyBool | TyChar | TyInt(..) | TyUint(..) | TyFloat(..) | TyStr | TyArray(..) | + TySlice(..) | TyFnDef(..) | TyFnPtr(_) | TyTuple(..) | TyParam(..) | TyError | + TyNever | TyRawPtr(_) | TyRef(..) | TyProjection(..) => None, TyInfer(..) | TyClosure(..) | TyAnon(..) => { // `ty` comes from a user declaration so we should only expect types // that the user can type - span_bug!( - span, - "coherence encountered unexpected type searching for base type: {}", - ty); + span_bug!(span, + "coherence encountered unexpected type searching for base type: {}", + ty); } } } @@ -99,9 +89,8 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { // Check implementations and traits. This populates the tables // containing the inherent methods and extension methods. It also // builds up the trait inheritance table. - self.crate_context.tcx.visit_all_items_in_krate( - DepNode::CoherenceCheckImpl, - &mut CoherenceCheckVisitor { cc: self }); + self.crate_context.tcx.visit_all_items_in_krate(DepNode::CoherenceCheckImpl, + &mut CoherenceCheckVisitor { cc: self }); // Populate the table of destructors. 
It might seem a bit strange to // do this here, but it's actually the most convenient place, since @@ -156,7 +145,7 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { } } - tcx.impl_items.borrow_mut().insert(impl_did, impl_items); + tcx.impl_or_trait_item_def_ids.borrow_mut().insert(impl_did, Rc::new(impl_items)); } fn add_inherent_impl(&self, base_def_id: DefId, impl_def_id: DefId) { @@ -166,29 +155,19 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { fn add_trait_impl(&self, impl_trait_ref: ty::TraitRef<'gcx>, impl_def_id: DefId) { debug!("add_trait_impl: impl_trait_ref={:?} impl_def_id={:?}", - impl_trait_ref, impl_def_id); + impl_trait_ref, + impl_def_id); let trait_def = self.crate_context.tcx.lookup_trait_def(impl_trait_ref.def_id); trait_def.record_local_impl(self.crate_context.tcx, impl_def_id, impl_trait_ref); } // Converts an implementation in the AST to a vector of items. - fn create_impl_from_item(&self, item: &Item) -> Vec { + fn create_impl_from_item(&self, item: &Item) -> Vec { match item.node { ItemImpl(.., ref impl_items) => { - impl_items.iter().map(|impl_item| { - let impl_def_id = self.crate_context.tcx.map.local_def_id(impl_item.id); - match impl_item.node { - hir::ImplItemKind::Const(..) => { - ConstTraitItemId(impl_def_id) - } - hir::ImplItemKind::Method(..) => { - MethodTraitItemId(impl_def_id) - } - hir::ImplItemKind::Type(_) => { - TypeTraitItemId(impl_def_id) - } - } - }).collect() + impl_items.iter() + .map(|impl_item| self.crate_context.tcx.map.local_def_id(impl_item.id)) + .collect() } _ => { span_bug!(item.span, "can't convert a non-impl to an impl"); @@ -196,19 +175,19 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { } } - // // Destructors // fn populate_destructors(&self) { let tcx = self.crate_context.tcx; let drop_trait = match tcx.lang_items.drop_trait() { - Some(id) => id, None => { return } + Some(id) => id, + None => return, }; tcx.populate_implementations_for_trait_if_necessary(drop_trait); let drop_trait = tcx.lookup_trait_def(drop_trait); - let impl_items = tcx.impl_items.borrow(); + let impl_items = tcx.impl_or_trait_item_def_ids.borrow(); drop_trait.for_each_impl(tcx, |impl_did| { let items = impl_items.get(&impl_did).unwrap(); @@ -221,7 +200,7 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { let self_type = tcx.lookup_item_type(impl_did); match self_type.ty.sty { ty::TyAdt(type_def, _) => { - type_def.set_destructor(method_def_id.def_id()); + type_def.set_destructor(method_def_id); } _ => { // Destructors only work on nominal types. 
@@ -229,13 +208,14 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { match tcx.map.find(impl_node_id) { Some(hir_map::NodeItem(item)) => { let span = match item.node { - ItemImpl(.., ref ty, _) => { - ty.span - }, - _ => item.span + ItemImpl(.., ref ty, _) => ty.span, + _ => item.span, }; - struct_span_err!(tcx.sess, span, E0120, - "the Drop trait may only be implemented on structures") + struct_span_err!(tcx.sess, + span, + E0120, + "the Drop trait may only be implemented on \ + structures") .span_label(span, &format!("implementing Drop requires a struct")) .emit(); @@ -264,15 +244,14 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { let copy_trait = tcx.lookup_trait_def(copy_trait); copy_trait.for_each_impl(tcx, |impl_did| { - debug!("check_implementations_of_copy: impl_did={:?}", - impl_did); + debug!("check_implementations_of_copy: impl_did={:?}", impl_did); let impl_node_id = if let Some(n) = tcx.map.as_local_node_id(impl_did) { n } else { debug!("check_implementations_of_copy(): impl not in this \ crate"); - return + return; }; let self_type = tcx.lookup_item_type(impl_did); @@ -290,14 +269,12 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { match param_env.can_type_implement_copy(tcx, self_type, span) { Ok(()) => {} Err(CopyImplementationError::InfrigingField(name)) => { - struct_span_err!(tcx.sess, span, E0204, - "the trait `Copy` may not be implemented for \ - this type") - .span_label(span, &format!( - "field `{}` does not implement `Copy`", name) - ) - .emit() - + struct_span_err!(tcx.sess, + span, + E0204, + "the trait `Copy` may not be implemented for this type") + .span_label(span, &format!("field `{}` does not implement `Copy`", name)) + .emit() } Err(CopyImplementationError::InfrigingVariant(name)) => { let item = tcx.map.expect_item(impl_node_id); @@ -307,10 +284,12 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { span }; - struct_span_err!(tcx.sess, span, E0205, + struct_span_err!(tcx.sess, + span, + E0205, "the trait `Copy` may not be implemented for this type") - .span_label(span, &format!("variant `{}` does not implement `Copy`", - name)) + .span_label(span, + &format!("variant `{}` does not implement `Copy`", name)) .emit() } Err(CopyImplementationError::NotAnAdt) => { @@ -321,15 +300,19 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { span }; - struct_span_err!(tcx.sess, span, E0206, + struct_span_err!(tcx.sess, + span, + E0206, "the trait `Copy` may not be implemented for this type") .span_label(span, &format!("type is not a structure or enumeration")) .emit(); } Err(CopyImplementationError::HasDestructor) => { - struct_span_err!(tcx.sess, span, E0184, - "the trait `Copy` may not be implemented for this type; \ - the type has a destructor") + struct_span_err!(tcx.sess, + span, + E0184, + "the trait `Copy` may not be implemented for this type; the \ + type has a destructor") .span_label(span, &format!("Copy not allowed on types with destructors")) .emit(); } @@ -369,7 +352,8 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { let trait_ref = self.crate_context.tcx.impl_trait_ref(impl_did).unwrap(); let target = trait_ref.substs.type_at(1); debug!("check_implementations_of_coerce_unsized: {:?} -> {:?} (bound)", - source, target); + source, + target); let span = tcx.map.span(impl_node_id); let param_env = ParameterEnvironment::for_item(tcx, impl_node_id); @@ -378,15 +362,19 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { assert!(!source.has_escaping_regions()); 
debug!("check_implementations_of_coerce_unsized: {:?} -> {:?} (free)", - source, target); + source, + target); tcx.infer_ctxt(None, Some(param_env), Reveal::ExactMatch).enter(|infcx| { let origin = TypeOrigin::Misc(span); - let check_mutbl = |mt_a: ty::TypeAndMut<'gcx>, mt_b: ty::TypeAndMut<'gcx>, + let check_mutbl = |mt_a: ty::TypeAndMut<'gcx>, + mt_b: ty::TypeAndMut<'gcx>, mk_ptr: &Fn(Ty<'gcx>) -> Ty<'gcx>| { if (mt_a.mutbl, mt_b.mutbl) == (hir::MutImmutable, hir::MutMutable) { - infcx.report_mismatched_types(origin, mk_ptr(mt_b.ty), - target, ty::error::TypeError::Mutability); + infcx.report_mismatched_types(origin, + mk_ptr(mt_b.ty), + target, + ty::error::TypeError::Mutability); } (mt_a.ty, mt_b.ty, unsize_trait, None) }; @@ -404,37 +392,45 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { } (&ty::TyAdt(def_a, substs_a), &ty::TyAdt(def_b, substs_b)) - if def_a.is_struct() && def_b.is_struct() => { + if def_a.is_struct() && def_b.is_struct() => { if def_a != def_b { let source_path = tcx.item_path_str(def_a.did); let target_path = tcx.item_path_str(def_b.did); - span_err!(tcx.sess, span, E0377, + span_err!(tcx.sess, + span, + E0377, "the trait `CoerceUnsized` may only be implemented \ for a coercion between structures with the same \ definition; expected {}, found {}", - source_path, target_path); + source_path, + target_path); return; } let fields = &def_a.struct_variant().fields; - let diff_fields = fields.iter().enumerate().filter_map(|(i, f)| { - let (a, b) = (f.ty(tcx, substs_a), f.ty(tcx, substs_b)); - - if f.unsubst_ty().is_phantom_data() { - // Ignore PhantomData fields - None - } else if infcx.sub_types(false, origin, b, a).is_ok() { - // Ignore fields that aren't significantly changed - None - } else { - // Collect up all fields that were significantly changed - // i.e. those that contain T in coerce_unsized T -> U - Some((i, a, b)) - } - }).collect::>(); + let diff_fields = fields.iter() + .enumerate() + .filter_map(|(i, f)| { + let (a, b) = (f.ty(tcx, substs_a), f.ty(tcx, substs_b)); + + if f.unsubst_ty().is_phantom_data() { + // Ignore PhantomData fields + None + } else if infcx.sub_types(false, origin, b, a).is_ok() { + // Ignore fields that aren't significantly changed + None + } else { + // Collect up all fields that were significantly changed + // i.e. 
those that contain T in coerce_unsized T -> U + Some((i, a, b)) + } + }) + .collect::>(); if diff_fields.is_empty() { - span_err!(tcx.sess, span, E0374, + span_err!(tcx.sess, + span, + E0374, "the trait `CoerceUnsized` may only be implemented \ for a coercion between structures with one field \ being coerced, none found"); @@ -447,16 +443,22 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { tcx.map.span(impl_node_id) }; - let mut err = struct_span_err!(tcx.sess, span, E0375, - "implementing the trait `CoerceUnsized` \ - requires multiple coercions"); + let mut err = struct_span_err!(tcx.sess, + span, + E0375, + "implementing the trait \ + `CoerceUnsized` requires multiple \ + coercions"); err.note("`CoerceUnsized` may only be implemented for \ a coercion between structures with one field being coerced"); err.note(&format!("currently, {} fields need coercions: {}", - diff_fields.len(), - diff_fields.iter().map(|&(i, a, b)| { - format!("{} ({} to {})", fields[i].name, a, b) - }).collect::>().join(", ") )); + diff_fields.len(), + diff_fields.iter() + .map(|&(i, a, b)| { + format!("{} ({} to {})", fields[i].name, a, b) + }) + .collect::>() + .join(", "))); err.span_label(span, &format!("requires multiple coercions")); err.emit(); return; @@ -468,7 +470,9 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { } _ => { - span_err!(tcx.sess, span, E0376, + span_err!(tcx.sess, + span, + E0376, "the trait `CoerceUnsized` may only be implemented \ for a coercion between structures"); return; @@ -479,8 +483,8 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { // Register an obligation for `A: Trait`. let cause = traits::ObligationCause::misc(span, impl_node_id); - let predicate = tcx.predicate_for_trait_def(cause, trait_def_id, 0, - source, &[target]); + let predicate = + tcx.predicate_for_trait_def(cause, trait_def_id, 0, source, &[target]); fulfill_cx.register_predicate_obligation(&infcx, predicate); // Check that all transitive obligations are satisfied. @@ -490,8 +494,8 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { // Finally, resolve all regions. 
let mut free_regions = FreeRegionMap::new(); - free_regions.relate_free_regions_from_predicates( - &infcx.parameter_environment.caller_bounds); + free_regions.relate_free_regions_from_predicates(&infcx.parameter_environment + .caller_bounds); infcx.resolve_regions_and_report_errors(&free_regions, impl_node_id); if let Some(kind) = kind { @@ -505,7 +509,7 @@ impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { fn enforce_trait_manually_implementable(tcx: TyCtxt, sp: Span, trait_def_id: DefId) { if tcx.sess.features.borrow().unboxed_closures { // the feature gate allows all of them - return + return; } let did = Some(trait_def_id); let li = &tcx.lang_items; @@ -517,14 +521,15 @@ fn enforce_trait_manually_implementable(tcx: TyCtxt, sp: Span, trait_def_id: Def } else if did == li.fn_once_trait() { "FnOnce" } else { - return // everything OK + return; // everything OK }; let mut err = struct_span_err!(tcx.sess, sp, E0183, "manual implementations of `{}` are experimental", trait_name); - help!(&mut err, "add `#![feature(unboxed_closures)]` to the crate attributes to enable"); + help!(&mut err, + "add `#![feature(unboxed_closures)]` to the crate attributes to enable"); err.emit(); } @@ -532,9 +537,10 @@ pub fn check_coherence(ccx: &CrateCtxt) { let _task = ccx.tcx.dep_graph.in_task(DepNode::Coherence); ccx.tcx.infer_ctxt(None, None, Reveal::ExactMatch).enter(|infcx| { CoherenceChecker { - crate_context: ccx, - inference_context: infcx, - }.check(); + crate_context: ccx, + inference_context: infcx, + } + .check(); }); unsafety::check(ccx.tcx); orphan::check(ccx.tcx); diff --git a/src/librustc_typeck/coherence/orphan.rs b/src/librustc_typeck/coherence/orphan.rs index d1eb0f995de13..bff794364c098 100644 --- a/src/librustc_typeck/coherence/orphan.rs +++ b/src/librustc_typeck/coherence/orphan.rs @@ -11,8 +11,7 @@ //! Orphan checker: every impl either implements a trait defined in this //! crate or pertains to a type defined in this crate. 
-use middle::cstore::LOCAL_CRATE; -use hir::def_id::DefId; +use hir::def_id::{DefId, LOCAL_CRATE}; use rustc::traits; use rustc::ty::{self, TyCtxt}; use syntax::ast; @@ -26,17 +25,20 @@ pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { tcx.visit_all_items_in_krate(DepNode::CoherenceOrphanCheck, &mut orphan); } -struct OrphanChecker<'cx, 'tcx:'cx> { - tcx: TyCtxt<'cx, 'tcx, 'tcx> +struct OrphanChecker<'cx, 'tcx: 'cx> { + tcx: TyCtxt<'cx, 'tcx, 'tcx>, } impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { fn check_def_id(&self, item: &hir::Item, def_id: DefId) { if def_id.krate != LOCAL_CRATE { - struct_span_err!(self.tcx.sess, item.span, E0116, - "cannot define inherent `impl` for a type outside of the \ - crate where the type is defined") - .span_label(item.span, &format!("impl for type defined outside of crate.")) + struct_span_err!(self.tcx.sess, + item.span, + E0116, + "cannot define inherent `impl` for a type outside of the crate \ + where the type is defined") + .span_label(item.span, + &format!("impl for type defined outside of crate.")) .note("define and implement a trait or new type instead") .emit(); } @@ -49,11 +51,17 @@ impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { ty: &str, span: Span) { match lang_def_id { - Some(lang_def_id) if lang_def_id == impl_def_id => { /* OK */ }, + Some(lang_def_id) if lang_def_id == impl_def_id => { + // OK + } _ => { - struct_span_err!(self.tcx.sess, span, E0390, - "only a single inherent implementation marked with `#[lang = \"{}\"]` \ - is allowed for the `{}` primitive", lang, ty) + struct_span_err!(self.tcx.sess, + span, + E0390, + "only a single inherent implementation marked with `#[lang = \ + \"{}\"]` is allowed for the `{}` primitive", + lang, + ty) .span_help(span, "consider using a trait to implement these methods") .emit(); } @@ -210,12 +218,14 @@ impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { return; } _ => { - struct_span_err!(self.tcx.sess, ty.span, E0118, + struct_span_err!(self.tcx.sess, + ty.span, + E0118, "no base type found for inherent implementation") - .span_label(ty.span, &format!("impl requires a base type")) - .note(&format!("either implement a trait on it or create a newtype \ - to wrap it instead")) - .emit(); + .span_label(ty.span, &format!("impl requires a base type")) + .note(&format!("either implement a trait on it or create a newtype \ + to wrap it instead")) + .emit(); return; } } @@ -227,20 +237,23 @@ impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { let trait_ref = self.tcx.impl_trait_ref(def_id).unwrap(); let trait_def_id = trait_ref.def_id; match traits::orphan_check(self.tcx, def_id) { - Ok(()) => { } + Ok(()) => {} Err(traits::OrphanCheckErr::NoLocalInputType) => { - struct_span_err!( - self.tcx.sess, item.span, E0117, - "only traits defined in the current crate can be \ - implemented for arbitrary types") - .span_label(item.span, &format!("impl doesn't use types inside crate")) - .note(&format!("the impl does not reference any \ - types defined in this crate")) - .emit(); + struct_span_err!(self.tcx.sess, + item.span, + E0117, + "only traits defined in the current crate can be \ + implemented for arbitrary types") + .span_label(item.span, &format!("impl doesn't use types inside crate")) + .note(&format!("the impl does not reference any types defined in \ + this crate")) + .emit(); return; } Err(traits::OrphanCheckErr::UncoveredTy(param_ty)) => { - span_err!(self.tcx.sess, item.span, E0210, + span_err!(self.tcx.sess, + item.span, + E0210, "type parameter `{}` must be used as the type parameter for \ some local type (e.g. 
`MyStruct`); only traits defined in \ the current crate can be implemented for a type parameter", @@ -286,10 +299,8 @@ impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { trait_ref, trait_def_id, self.tcx.trait_has_default_impl(trait_def_id)); - if - self.tcx.trait_has_default_impl(trait_def_id) && - trait_def_id.krate != LOCAL_CRATE - { + if self.tcx.trait_has_default_impl(trait_def_id) && + trait_def_id.krate != LOCAL_CRATE { let self_ty = trait_ref.self_ty(); let opt_self_def_id = match self_ty.sty { ty::TyAdt(self_def, _) => Some(self_def.did), @@ -306,20 +317,17 @@ impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { if self_def_id.is_local() { None } else { - Some(format!( - "cross-crate traits with a default impl, like `{}`, \ - can only be implemented for a struct/enum type \ - defined in the current crate", - self.tcx.item_path_str(trait_def_id))) + Some(format!("cross-crate traits with a default impl, like `{}`, \ + can only be implemented for a struct/enum type \ + defined in the current crate", + self.tcx.item_path_str(trait_def_id))) } } _ => { - Some(format!( - "cross-crate traits with a default impl, like `{}`, \ - can only be implemented for a struct/enum type, \ - not `{}`", - self.tcx.item_path_str(trait_def_id), - self_ty)) + Some(format!("cross-crate traits with a default impl, like `{}`, can \ + only be implemented for a struct/enum type, not `{}`", + self.tcx.item_path_str(trait_def_id), + self_ty)) } }; @@ -331,14 +339,18 @@ impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { // Disallow *all* explicit impls of `Sized` and `Unsize` for now. if Some(trait_def_id) == self.tcx.lang_items.sized_trait() { - struct_span_err!(self.tcx.sess, item.span, E0322, - "explicit impls for the `Sized` trait are not permitted") + struct_span_err!(self.tcx.sess, + item.span, + E0322, + "explicit impls for the `Sized` trait are not permitted") .span_label(item.span, &format!("impl of 'Sized' not allowed")) .emit(); return; } if Some(trait_def_id) == self.tcx.lang_items.unsize_trait() { - span_err!(self.tcx.sess, item.span, E0328, + span_err!(self.tcx.sess, + item.span, + E0328, "explicit impls for the `Unsize` trait are not permitted"); return; } @@ -349,9 +361,11 @@ impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { self.tcx.map.node_to_string(item.id)); let trait_ref = self.tcx.impl_trait_ref(def_id).unwrap(); if trait_ref.def_id.krate != LOCAL_CRATE { - struct_span_err!(self.tcx.sess, item_trait_ref.path.span, E0318, - "cannot create default implementations for traits outside the \ - crate they're defined in; define a new trait instead") + struct_span_err!(self.tcx.sess, + item_trait_ref.path.span, + E0318, + "cannot create default implementations for traits outside \ + the crate they're defined in; define a new trait instead") .span_label(item_trait_ref.path.span, &format!("`{}` trait not defined in this crate", item_trait_ref.path)) @@ -366,7 +380,7 @@ impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { } } -impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OrphanChecker<'cx, 'tcx> { +impl<'cx, 'tcx, 'v> intravisit::Visitor<'v> for OrphanChecker<'cx, 'tcx> { fn visit_item(&mut self, item: &hir::Item) { self.check_item(item); } diff --git a/src/librustc_typeck/coherence/overlap.rs b/src/librustc_typeck/coherence/overlap.rs index 890b6c72e6fee..1bf140c21a5a5 100644 --- a/src/librustc_typeck/coherence/overlap.rs +++ b/src/librustc_typeck/coherence/overlap.rs @@ -23,15 +23,17 @@ use util::nodemap::DefIdMap; use lint; pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { - let mut overlap = OverlapChecker { tcx: tcx, - default_impls: 
DefIdMap() }; + let mut overlap = OverlapChecker { + tcx: tcx, + default_impls: DefIdMap(), + }; // this secondary walk specifically checks for some other cases, // like defaulted traits, for which additional overlap rules exist tcx.visit_all_items_in_krate(DepNode::CoherenceOverlapCheckSpecial, &mut overlap); } -struct OverlapChecker<'cx, 'tcx:'cx> { +struct OverlapChecker<'cx, 'tcx: 'cx> { tcx: TyCtxt<'cx, 'tcx, 'tcx>, // maps from a trait def-id to an impl id @@ -41,32 +43,35 @@ struct OverlapChecker<'cx, 'tcx:'cx> { impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> { fn check_for_common_items_in_impls(&self, impl1: DefId, impl2: DefId) { #[derive(Copy, Clone, PartialEq)] - enum Namespace { Type, Value } + enum Namespace { + Type, + Value, + } fn name_and_namespace<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, - item: &ty::ImplOrTraitItemId) - -> (ast::Name, Namespace) - { - let name = tcx.impl_or_trait_item(item.def_id()).name(); - (name, match *item { - ty::TypeTraitItemId(..) => Namespace::Type, - ty::ConstTraitItemId(..) => Namespace::Value, - ty::MethodTraitItemId(..) => Namespace::Value, - }) + def_id: DefId) + -> (ast::Name, Namespace) { + let item = tcx.impl_or_trait_item(def_id); + (item.name(), + match item { + ty::TypeTraitItem(..) => Namespace::Type, + ty::ConstTraitItem(..) => Namespace::Value, + ty::MethodTraitItem(..) => Namespace::Value, + }) } - let impl_items = self.tcx.impl_items.borrow(); + let impl_items = self.tcx.impl_or_trait_item_def_ids.borrow(); - for item1 in &impl_items[&impl1] { + for &item1 in &impl_items[&impl1][..] { let (name, namespace) = name_and_namespace(self.tcx, item1); - for item2 in &impl_items[&impl2] { + for &item2 in &impl_items[&impl2][..] { if (name, namespace) == name_and_namespace(self.tcx, item2) { let msg = format!("duplicate definitions with name `{}`", name); - let node_id = self.tcx.map.as_local_node_id(item1.def_id()).unwrap(); + let node_id = self.tcx.map.as_local_node_id(item1).unwrap(); self.tcx.sess.add_lint(lint::builtin::OVERLAPPING_INHERENT_IMPLS, node_id, - self.tcx.span_of_impl(item1.def_id()).unwrap(), + self.tcx.span_of_impl(item1).unwrap(), msg); } } @@ -79,11 +84,11 @@ impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> { let inherent_impls = self.tcx.inherent_impls.borrow(); let impls = match inherent_impls.get(&ty_def_id) { Some(impls) => impls, - None => return + None => return, }; for (i, &impl1_def_id) in impls.iter().enumerate() { - for &impl2_def_id in &impls[(i+1)..] { + for &impl2_def_id in &impls[(i + 1)..] { self.tcx.infer_ctxt(None, None, Reveal::ExactMatch).enter(|infcx| { if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() { self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id) @@ -94,10 +99,12 @@ impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> { } } -impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> { +impl<'cx, 'tcx, 'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> { fn visit_item(&mut self, item: &'v hir::Item) { match item.node { - hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemUnion(..) => { + hir::ItemEnum(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) 
=> { let type_def_id = self.tcx.map.local_def_id(item.id); self.check_for_overlapping_inherent_impls(type_def_id); } @@ -111,12 +118,14 @@ impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> { let prev_default_impl = self.default_impls.insert(trait_ref.def_id, item.id); if let Some(prev_id) = prev_default_impl { - let mut err = struct_span_err!( - self.tcx.sess, - self.tcx.span_of_impl(impl_def_id).unwrap(), E0521, - "redundant default implementations of trait `{}`:", - trait_ref); - err.span_note(self.tcx.span_of_impl(self.tcx.map.local_def_id(prev_id)) + let mut err = struct_span_err!(self.tcx.sess, + self.tcx.span_of_impl(impl_def_id).unwrap(), + E0521, + "redundant default implementations of trait \ + `{}`:", + trait_ref); + err.span_note(self.tcx + .span_of_impl(self.tcx.map.local_def_id(prev_id)) .unwrap(), "redundant implementation is here:"); err.emit(); @@ -127,8 +136,8 @@ impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> { let trait_ref = self.tcx.impl_trait_ref(impl_def_id).unwrap(); let trait_def_id = trait_ref.def_id; - let _task = self.tcx.dep_graph.in_task( - DepNode::CoherenceOverlapCheck(trait_def_id)); + let _task = + self.tcx.dep_graph.in_task(DepNode::CoherenceOverlapCheck(trait_def_id)); let def = self.tcx.lookup_trait_def(trait_def_id); @@ -137,17 +146,19 @@ impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> { // insertion failed due to overlap if let Err(overlap) = insert_result { - let mut err = struct_span_err!( - self.tcx.sess, self.tcx.span_of_impl(impl_def_id).unwrap(), E0119, - "conflicting implementations of trait `{}`{}:", - overlap.trait_desc, - overlap.self_desc.clone().map_or(String::new(), - |ty| format!(" for type `{}`", ty))); + let mut err = struct_span_err!(self.tcx.sess, + self.tcx.span_of_impl(impl_def_id).unwrap(), + E0119, + "conflicting implementations of trait `{}`{}:", + overlap.trait_desc, + overlap.self_desc.clone().map_or(String::new(), + |ty| { + format!(" for type `{}`", ty) + })); match self.tcx.span_of_impl(overlap.with_impl) { Ok(span) => { - err.span_label(span, - &format!("first implementation here")); + err.span_label(span, &format!("first implementation here")); err.span_label(self.tcx.span_of_impl(impl_def_id).unwrap(), &format!("conflicting implementation{}", overlap.self_desc @@ -155,8 +166,7 @@ impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> { |ty| format!(" for `{}`", ty)))); } Err(cname) => { - err.note(&format!("conflicting implementation in crate `{}`", - cname)); + err.note(&format!("conflicting implementation in crate `{}`", cname)); } } @@ -177,7 +187,9 @@ impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> { let mut supertrait_def_ids = traits::supertrait_def_ids(self.tcx, data.principal.def_id()); if supertrait_def_ids.any(|d| d == trait_def_id) { - span_err!(self.tcx.sess, item.span, E0371, + span_err!(self.tcx.sess, + item.span, + E0371, "the object type `{}` automatically \ implements the trait `{}`", trait_ref.self_ty(), diff --git a/src/librustc_typeck/coherence/unsafety.rs b/src/librustc_typeck/coherence/unsafety.rs index cdf5478e692b2..cca6c88430672 100644 --- a/src/librustc_typeck/coherence/unsafety.rs +++ b/src/librustc_typeck/coherence/unsafety.rs @@ -13,28 +13,34 @@ use rustc::ty::TyCtxt; use rustc::hir::intravisit; -use rustc::hir; +use rustc::hir::{self, Unsafety}; pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut orphan = UnsafetyChecker { tcx: tcx }; 
tcx.map.krate().visit_all_items(&mut orphan); } -struct UnsafetyChecker<'cx, 'tcx:'cx> { - tcx: TyCtxt<'cx, 'tcx, 'tcx> +struct UnsafetyChecker<'cx, 'tcx: 'cx> { + tcx: TyCtxt<'cx, 'tcx, 'tcx>, } impl<'cx, 'tcx, 'v> UnsafetyChecker<'cx, 'tcx> { - fn check_unsafety_coherence(&mut self, item: &'v hir::Item, + fn check_unsafety_coherence(&mut self, + item: &'v hir::Item, + impl_generics: Option<&hir::Generics>, unsafety: hir::Unsafety, polarity: hir::ImplPolarity) { match self.tcx.impl_trait_ref(self.tcx.map.local_def_id(item.id)) { None => { // Inherent impl. match unsafety { - hir::Unsafety::Normal => { /* OK */ } + hir::Unsafety::Normal => { + // OK + } hir::Unsafety::Unsafe => { - span_err!(self.tcx.sess, item.span, E0197, + span_err!(self.tcx.sess, + item.span, + E0197, "inherent impls cannot be declared as unsafe"); } } @@ -42,32 +48,45 @@ impl<'cx, 'tcx, 'v> UnsafetyChecker<'cx, 'tcx> { Some(trait_ref) => { let trait_def = self.tcx.lookup_trait_def(trait_ref.def_id); - match (trait_def.unsafety, unsafety, polarity) { - (hir::Unsafety::Unsafe, - hir::Unsafety::Unsafe, hir::ImplPolarity::Negative) => { - span_err!(self.tcx.sess, item.span, E0198, + let unsafe_attr = impl_generics.and_then(|g| g.carries_unsafe_attr()); + match (trait_def.unsafety, unsafe_attr, unsafety, polarity) { + (_, _, Unsafety::Unsafe, hir::ImplPolarity::Negative) => { + span_err!(self.tcx.sess, + item.span, + E0198, "negative implementations are not unsafe"); } - (hir::Unsafety::Normal, hir::Unsafety::Unsafe, _) => { - span_err!(self.tcx.sess, item.span, E0199, + (Unsafety::Normal, None, Unsafety::Unsafe, _) => { + span_err!(self.tcx.sess, + item.span, + E0199, "implementing the trait `{}` is not unsafe", trait_ref); } - (hir::Unsafety::Unsafe, - hir::Unsafety::Normal, hir::ImplPolarity::Positive) => { - span_err!(self.tcx.sess, item.span, E0200, + (Unsafety::Unsafe, _, Unsafety::Normal, hir::ImplPolarity::Positive) => { + span_err!(self.tcx.sess, + item.span, + E0200, "the trait `{}` requires an `unsafe impl` declaration", trait_ref); } - (hir::Unsafety::Unsafe, - hir::Unsafety::Normal, hir::ImplPolarity::Negative) | - (hir::Unsafety::Unsafe, - hir::Unsafety::Unsafe, hir::ImplPolarity::Positive) | - (hir::Unsafety::Normal, hir::Unsafety::Normal, _) => { - /* OK */ + (Unsafety::Normal, Some(g), Unsafety::Normal, hir::ImplPolarity::Positive) => + { + span_err!(self.tcx.sess, + item.span, + E0569, + "requires an `unsafe impl` declaration due to `#[{}]` attribute", + g.attr_name()); + } + + (_, _, Unsafety::Normal, hir::ImplPolarity::Negative) | + (Unsafety::Unsafe, _, Unsafety::Unsafe, hir::ImplPolarity::Positive) | + (Unsafety::Normal, Some(_), Unsafety::Unsafe, hir::ImplPolarity::Positive) | + (Unsafety::Normal, None, Unsafety::Normal, _) => { + // OK } } } @@ -75,16 +94,16 @@ impl<'cx, 'tcx, 'v> UnsafetyChecker<'cx, 'tcx> { } } -impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for UnsafetyChecker<'cx, 'tcx> { +impl<'cx, 'tcx, 'v> intravisit::Visitor<'v> for UnsafetyChecker<'cx, 'tcx> { fn visit_item(&mut self, item: &'v hir::Item) { match item.node { hir::ItemDefaultImpl(unsafety, _) => { - self.check_unsafety_coherence(item, unsafety, hir::ImplPolarity::Positive); + self.check_unsafety_coherence(item, None, unsafety, hir::ImplPolarity::Positive); } - hir::ItemImpl(unsafety, polarity, ..) => { - self.check_unsafety_coherence(item, unsafety, polarity); + hir::ItemImpl(unsafety, polarity, ref generics, ..) 
=> { + self.check_unsafety_coherence(item, Some(generics), unsafety, polarity); } - _ => { } + _ => {} } } } diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 04aca8c0947ca..202e176df0dbc 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -68,7 +68,6 @@ use rustc_const_eval::{eval_const_expr_partial, report_const_eval_err}; use rustc::ty::subst::Substs; use rustc::ty::{ToPredicate, ImplContainer, ImplOrTraitItemContainer, TraitContainer}; use rustc::ty::{self, AdtKind, ToPolyTraitRef, Ty, TyCtxt, TypeScheme}; -use rustc::ty::{VariantKind}; use rustc::ty::util::IntTypeExt; use rscope::*; use rustc::dep_graph::DepNode; @@ -87,7 +86,7 @@ use syntax::parse::token::keywords; use syntax_pos::Span; use rustc::hir::{self, intravisit, map as hir_map, print as pprust}; -use rustc::hir::def::Def; +use rustc::hir::def::{Def, CtorKind}; use rustc::hir::def_id::DefId; /////////////////////////////////////////////////////////////////////////// @@ -157,13 +156,10 @@ impl<'a,'tcx> CrateCtxt<'a,'tcx> { { { let mut stack = self.stack.borrow_mut(); - match stack.iter().enumerate().rev().find(|&(_, r)| *r == request) { - None => { } - Some((i, _)) => { - let cycle = &stack[i..]; - self.report_cycle(span, cycle); - return Err(ErrorReported); - } + if let Some((i, _)) = stack.iter().enumerate().rev().find(|&(_, r)| *r == request) { + let cycle = &stack[i..]; + self.report_cycle(span, cycle); + return Err(ErrorReported); } stack.push(request); } @@ -361,10 +357,15 @@ impl<'a, 'tcx> AstConv<'tcx, 'tcx> for ItemCtxt<'a, 'tcx> { -> bool { if let Some(trait_id) = self.tcx().map.as_local_node_id(trait_def_id) { - trait_defines_associated_type_named(self.ccx, trait_id, assoc_name) + trait_associated_type_names(self.tcx(), trait_id) + .any(|name| name == assoc_name) } else { - let trait_def = self.tcx().lookup_trait_def(trait_def_id); - trait_def.associated_type_names.contains(&assoc_name) + self.tcx().impl_or_trait_items(trait_def_id).iter().any(|&def_id| { + match self.tcx().impl_or_trait_item(def_id) { + ty::TypeTraitItem(ref item) => item.name == assoc_name, + _ => false + } + }) } } @@ -563,6 +564,7 @@ fn convert_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, vis: &hir::Visibility, sig: &hir::MethodSig, defaultness: hir::Defaultness, + has_body: bool, untransformed_rcvr_ty: Ty<'tcx>, rcvr_ty_predicates: &ty::GenericPredicates<'tcx>) { let def_id = ccx.tcx.map.local_def_id(id); @@ -580,15 +582,18 @@ fn convert_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, sig, untransformed_rcvr_ty, anon_scope) }; - let ty_method = ty::Method::new(name, - ty_generics, - ty_generic_predicates, - fty, - explicit_self_category, - ty::Visibility::from_hir(vis, id, ccx.tcx), - defaultness, - def_id, - container); + let ty_method = ty::Method { + name: name, + generics: ty_generics, + predicates: ty_generic_predicates, + fty: fty, + explicit_self: explicit_self_category, + vis: ty::Visibility::from_hir(vis, id, ccx.tcx), + defaultness: defaultness, + has_body: has_body, + def_id: def_id, + container: container, + }; let substs = mk_item_substs(&ccx.icx(&(rcvr_ty_predicates, &sig.generics)), ccx.tcx.map.span(id), def_id); @@ -665,6 +670,13 @@ fn convert_associated_type<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, defaultness: hir::Defaultness, ty: Option>) { + let predicates = ty::GenericPredicates { + parent: Some(container.id()), + predicates: vec![] + }; + ccx.tcx.predicates.borrow_mut().insert(ccx.tcx.map.local_def_id(id), + predicates); + let associated_type = 
Rc::new(ty::AssociatedType { name: name, vis: ty::Visibility::from_hir(vis, id, ccx.tcx), @@ -822,6 +834,9 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { // Convert all the associated types. for impl_item in impl_items { if let hir::ImplItemKind::Type(ref ty) = impl_item.node { + let type_def_id = ccx.tcx.map.local_def_id(impl_item.id); + generics_of_def_id(ccx, type_def_id); + if opt_trait_ref.is_none() { span_err!(tcx.sess, impl_item.span, E0202, "associated types are not allowed in inherent impls"); @@ -843,7 +858,7 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { convert_method(ccx, ImplContainer(def_id), impl_item.name, impl_item.id, method_vis, - sig, impl_item.defaultness, selfty, + sig, impl_item.defaultness, true, selfty, &ty_predicates); } } @@ -889,6 +904,9 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { // Convert all the associated types. for trait_item in trait_items { if let hir::TypeTraitItem(_, ref opt_ty) = trait_item.node { + let type_def_id = ccx.tcx.map.local_def_id(trait_item.id); + generics_of_def_id(ccx, type_def_id); + let typ = opt_ty.as_ref().map({ |ty| ccx.icx(&trait_predicates).to_ty(&ExplicitRscope, &ty) }); @@ -905,7 +923,7 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { // Convert all the methods for trait_item in trait_items { - if let hir::MethodTraitItem(ref sig, _) = trait_item.node { + if let hir::MethodTraitItem(ref sig, ref body) = trait_item.node { convert_method(ccx, container, trait_item.name, @@ -913,6 +931,7 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { &hir::Inherited, sig, hir::Defaultness::Default, + body.is_some(), tcx.mk_self_type(), &trait_predicates); @@ -921,15 +940,10 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { // Add an entry mapping let trait_item_def_ids = Rc::new(trait_items.iter().map(|trait_item| { - let def_id = ccx.tcx.map.local_def_id(trait_item.id); - match trait_item.node { - hir::ConstTraitItem(..) => ty::ConstTraitItemId(def_id), - hir::MethodTraitItem(..) => ty::MethodTraitItemId(def_id), - hir::TypeTraitItem(..) 
=> ty::TypeTraitItemId(def_id) - } + ccx.tcx.map.local_def_id(trait_item.id) }).collect()); - tcx.trait_item_def_ids.borrow_mut().insert(ccx.tcx.map.local_def_id(it.id), - trait_item_def_ids); + tcx.impl_or_trait_item_def_ids.borrow_mut().insert(ccx.tcx.map.local_def_id(it.id), + trait_item_def_ids); }, hir::ItemStruct(ref struct_def, _) | hir::ItemUnion(ref struct_def, _) => { @@ -969,9 +983,9 @@ fn convert_variant_ctor<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let tcx = ccx.tcx; let def_id = tcx.map.local_def_id(ctor_id); generics_of_def_id(ccx, def_id); - let ctor_ty = match variant.kind { - VariantKind::Unit | VariantKind::Struct => scheme.ty, - VariantKind::Tuple => { + let ctor_ty = match variant.ctor_kind { + CtorKind::Fictive | CtorKind::Const => scheme.ty, + CtorKind::Fn => { let inputs: Vec<_> = variant.fields .iter() @@ -1048,7 +1062,7 @@ fn convert_struct_variant<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, name: name, disr_val: disr_val, fields: fields, - kind: VariantKind::from_variant_data(def), + ctor_kind: CtorKind::from_hir(def), } } @@ -1146,10 +1160,12 @@ fn convert_enum_def<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, } else if let Some(disr) = repr_type.disr_incr(tcx, prev_disr) { Some(disr) } else { - span_err!(tcx.sess, v.span, E0370, - "enum discriminant overflowed on value after {}; \ - set explicitly via {} = {} if that is desired outcome", - prev_disr.unwrap(), v.node.name, wrapped_disr); + struct_span_err!(tcx.sess, v.span, E0370, + "enum discriminant overflowed") + .span_label(v.span, &format!("overflowed on value after {}", prev_disr.unwrap())) + .note(&format!("explicitly set `{} = {}` if that is desired outcome", + v.node.name, wrapped_disr)) + .emit(); None }.unwrap_or(wrapped_disr); prev_disr = Some(disr); @@ -1261,9 +1277,9 @@ fn trait_def_of_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, return def.clone(); } - let (unsafety, generics, items) = match it.node { - hir::ItemTrait(unsafety, ref generics, _, ref items) => { - (unsafety, generics, items) + let (unsafety, generics) = match it.node { + hir::ItemTrait(unsafety, ref generics, _, _) => { + (unsafety, generics) } _ => span_bug!(it.span, "trait_def_of_item invoked on non-trait"), }; @@ -1283,32 +1299,20 @@ fn trait_def_of_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let ty_generics = generics_of_def_id(ccx, def_id); let substs = mk_item_substs(&ccx.icx(generics), it.span, def_id); - let associated_type_names: Vec<_> = items.iter().filter_map(|trait_item| { - match trait_item.node { - hir::TypeTraitItem(..) 
=> Some(trait_item.name), - _ => None, - } - }).collect(); - let def_path_hash = tcx.def_path(def_id).deterministic_hash(tcx); let trait_ref = ty::TraitRef::new(def_id, substs); - let trait_def = ty::TraitDef::new(unsafety, - paren_sugar, - ty_generics, - trait_ref, - associated_type_names, + let trait_def = ty::TraitDef::new(unsafety, paren_sugar, ty_generics, trait_ref, def_path_hash); tcx.intern_trait_def(trait_def) } -fn trait_defines_associated_type_named(ccx: &CrateCtxt, - trait_node_id: ast::NodeId, - assoc_name: ast::Name) - -> bool +pub fn trait_associated_type_names<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_node_id: ast::NodeId) + -> impl Iterator + 'a { - let item = match ccx.tcx.map.get(trait_node_id) { + let item = match tcx.map.get(trait_node_id) { hir_map::NodeItem(item) => item, _ => bug!("trait_node_id {} is not an item", trait_node_id) }; @@ -1318,10 +1322,10 @@ fn trait_defines_associated_type_named(ccx: &CrateCtxt, _ => bug!("trait_node_id {} is not a trait", trait_node_id) }; - trait_items.iter().any(|trait_item| { + trait_items.iter().filter_map(|trait_item| { match trait_item.node { - hir::TypeTraitItem(..) => trait_item.name == assoc_name, - _ => false, + hir::TypeTraitItem(..) => Some(trait_item.name), + _ => None, } }) } @@ -1383,7 +1387,7 @@ fn convert_trait_predicates<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, it: &hir::Item) let bounds = match trait_item.node { hir::TypeTraitItem(ref bounds, _) => bounds, _ => { - return vec!().into_iter(); + return vec![].into_iter(); } }; @@ -1475,6 +1479,7 @@ fn generics_of_def_id<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, default_def_id: tcx.map.local_def_id(parent), default: None, object_lifetime_default: ty::ObjectLifetimeDefault::BaseDefault, + pure_wrt_drop: false, }; tcx.ty_param_defs.borrow_mut().insert(param_id, def.clone()); opt_self = Some(def); @@ -1519,7 +1524,8 @@ fn generics_of_def_id<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, def_id: tcx.map.local_def_id(l.lifetime.id), bounds: l.bounds.iter().map(|l| { ast_region_to_region(tcx, l) - }).collect() + }).collect(), + pure_wrt_drop: l.pure_wrt_drop, } }).collect::>(); @@ -1919,6 +1925,7 @@ fn get_or_create_type_parameter_def<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, default_def_id: ccx.tcx.map.local_def_id(parent), default: default, object_lifetime_default: object_lifetime_default, + pure_wrt_drop: param.pure_wrt_drop, }; if def.name == keywords::SelfType.name() { diff --git a/src/librustc_typeck/diagnostics.rs b/src/librustc_typeck/diagnostics.rs index e5c901f223ffb..7dd850180d442 100644 --- a/src/librustc_typeck/diagnostics.rs +++ b/src/librustc_typeck/diagnostics.rs @@ -895,17 +895,14 @@ fn some_func(x: &mut i32) { E0071: r##" You tried to use structure-literal syntax to create an item that is -not a struct-style structure or enum variant. +not a structure or enum variant. Example of erroneous code: ```compile_fail,E0071 -enum Foo { FirstValue(i32) }; - -let u = Foo::FirstValue { value: 0 }; // error: Foo::FirstValue - // isn't a structure! -// or even simpler, if the name doesn't refer to a structure at all. -let t = u32 { value: 4 }; // error: `u32` does not name a structure. +type U32 = u32; +let t = U32 { value: 4 }; // error: expected struct, variant or union type, + // found builtin type `u32` ``` To fix this, ensure that the name was correctly spelled, and that @@ -1915,6 +1912,45 @@ More details can be found in [RFC 438]. [RFC 438]: https://github.com/rust-lang/rfcs/pull/438 "##, +E0182: r##" +You bound an associated type in an expression path which is not +allowed. 
+ +Erroneous code example: + +```compile_fail,E0182 +trait Foo { + type A; + fn bar() -> isize; +} + +impl Foo for isize { + type A = usize; + fn bar() -> isize { 42 } +} + +// error: unexpected binding of associated item in expression path +let x: isize = Foo::<A = usize>::bar(); +``` + +To give a concrete type when using the Universal Function Call Syntax, +use "Type as Trait". Example: + +``` +trait Foo { + type A; + fn bar() -> isize; +} + +impl Foo for isize { + type A = usize; + fn bar() -> isize { 42 } +} + +let x: isize = <isize as Foo>::bar(); // ok! +``` +"##, + E0184: r##" Explicitly implementing both Drop and Copy for a type is currently disallowed. This feature can make some sense in theory, but the current implementation is @@ -2752,6 +2788,30 @@ fn main() { ``` "##, +E0230: r##" +The trait has more type parameters specified than appear in its definition. + +Erroneous example code: + +```compile_fail,E0230 +#![feature(on_unimplemented)] +#[rustc_on_unimplemented = "Trait error on `{Self}` with `<{A},{B},{C}>`"] +// error: there is no type parameter C on trait TraitWithThreeParams +trait TraitWithThreeParams<A, B> +{} +``` + +Include the correct number of type parameters and the compilation should +proceed: + +``` +#![feature(on_unimplemented)] +#[rustc_on_unimplemented = "Trait error on `{Self}` with `<{A},{B},{C}>`"] +trait TraitWithThreeParams<A, B, C> // ok! +{} +``` +"##, + E0232: r##" The attribute must have a value. Erroneous code example: @@ -2819,6 +2879,26 @@ not a distinct static type. Likewise, it's not legal to attempt to behavior for specific enum variants. "##, +E0569: r##" +If an impl has a generic parameter with the `#[may_dangle]` attribute, then +that impl must be declared as an `unsafe impl`. For example: + +```compile_fail,E0569 +#![feature(generic_param_attrs)] +#![feature(dropck_eyepatch)] + +struct Foo<X>(X); +impl<#[may_dangle] X> Drop for Foo<X> { + fn drop(&mut self) { } +} +``` + +In this example, we are asserting that the destructor for `Foo` will not +access any data of type `X`, and require this assertion to be true for +overall safety in our program. The compiler does not currently attempt to +verify this assertion; therefore we must tag this `impl` as unsafe. +"##, + E0318: r##" Default impls for a trait must be located in the same crate where the trait was defined. For more information see the [opt-in builtin traits RFC](https://github @@ -3567,6 +3647,44 @@ fn together_we_will_rule_the_galaxy(son: &A) {} // Ok! ``` "##, +E0399: r##" +You implemented a trait, overriding one or more of its associated types, but did +not reimplement its default methods. + +Example of erroneous code: + +```compile_fail,E0399 +#![feature(associated_type_defaults)] + +pub trait Foo { + type Assoc = u8; + fn bar(&self) {} +} + +impl Foo for i32 { + // error - the following trait items need to be reimplemented as + // `Assoc` was overridden: `bar` + type Assoc = i32; +} +``` + +To fix this, add an implementation for each default method from the trait: + +``` +#![feature(associated_type_defaults)] + +pub trait Foo { + type Assoc = u8; + fn bar(&self) {} +} + +impl Foo for i32 { + type Assoc = i32; + fn bar(&self) {} // ok! +} +``` +"##, + E0439: r##" The length of the platform-intrinsic function `simd_shuffle` wasn't specified. Erroneous code example: @@ -3766,6 +3884,45 @@ extern "platform-intrinsic" { ``` "##, +E0513: r##" +The type of the variable could not be determined.
+ +Erroneous code example: + +```compile_fail,E0513 +use std::mem; + +unsafe { + let size = mem::size_of::<u32>(); + mem::transmute_copy::<u32, [u8; size]>(&8_8); + // error: no type for local variable +} +``` + +To fix this error, please use a constant size instead of `size`. To make +this error more obvious, you could run: + +```compile_fail,E0080 +use std::mem; + +unsafe { + mem::transmute_copy::<u32, [u8; mem::size_of::<u32>()]>(&8_8); + // error: constant evaluation error +} +``` + +So now, you can fix your code by setting the size directly: + +``` +use std::mem; + +unsafe { + mem::transmute_copy::<u32, [u8; 4]>(&8_8); + // `u32` is 4 bytes so we replace the `mem::size_of` call with its size +} +``` +"##, + E0516: r##" The `typeof` keyword is currently reserved but unimplemented. Erroneous code example: @@ -3996,6 +4153,16 @@ let s = Simba { mother: 1, father: 0 }; // ok! ``` "##, +E0570: r##" +The requested ABI is unsupported by the current target. + +The Rust compiler maintains for each target a blacklist of ABIs unsupported on +that target. If an ABI is present in such a list, this usually means that the +target / ABI combination is currently unsupported by LLVM. + +If necessary, you can circumvent this check using custom target specifications. +"##, + } register_diagnostics! { @@ -4015,7 +4182,6 @@ register_diagnostics! { // E0168, // E0173, // manual implementations of unboxed closure traits are experimental // E0174, - E0182, E0183, // E0187, // can't infer the kind of the closure // E0188, // can not cast an immutable reference to a mutable pointer @@ -4039,7 +4205,6 @@ register_diagnostics! { E0226, // only a single explicit lifetime bound is permitted E0227, // ambiguous lifetime bound, explicit lifetime bound required E0228, // explicit lifetime bound required - E0230, // there is no type parameter on trait E0231, // only named substitution parameters are allowed // E0233, // E0234, @@ -4061,10 +4226,7 @@ register_diagnostics! { // E0372, // coherence not object safe E0377, // the trait `CoerceUnsized` may only be implemented for a coercion // between structures with the same definition - E0399, // trait items need to be implemented because the associated - // type `{}` was overridden E0436, // functional record update requires a struct - E0513, // no type for local variable .. E0521, // redundant default implementations of trait E0533, // `{}` does not name a unit variant, unit struct or a constant E0562, // `impl Trait` not allowed outside of function diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index d2e2d578fcedb..c16dd788c4ec8 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -76,12 +76,13 @@ This API is completely unstable and subject to change.
#![feature(box_patterns)] #![feature(box_syntax)] +#![feature(conservative_impl_trait)] #![feature(dotdot_in_tuple_patterns)] #![feature(quote)] #![feature(rustc_diagnostic_macros)] #![feature(rustc_private)] #![feature(staged_api)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #[macro_use] extern crate log; #[macro_use] extern crate syntax; @@ -232,7 +233,7 @@ fn check_main_fn_ty(ccx: &CrateCtxt, _ => () } let main_def_id = tcx.map.local_def_id(main_id); - let substs = Substs::empty(tcx); + let substs = tcx.intern_substs(&[]); let se_ty = tcx.mk_fn_def(main_def_id, substs, tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Normal, @@ -284,16 +285,16 @@ fn check_start_fn_ty(ccx: &CrateCtxt, } let start_def_id = ccx.tcx.map.local_def_id(start_id); - let substs = Substs::empty(tcx); + let substs = tcx.intern_substs(&[]); let se_ty = tcx.mk_fn_def(start_def_id, substs, tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Normal, abi: Abi::Rust, sig: ty::Binder(ty::FnSig { - inputs: vec!( + inputs: vec![ tcx.types.isize, tcx.mk_imm_ptr(tcx.mk_imm_ptr(tcx.types.u8)) - ), + ], output: tcx.types.isize, variadic: false, }), diff --git a/src/librustc_typeck/variance/README.md b/src/librustc_typeck/variance/README.md index 94d1ff91c37b5..ac785e4058bde 100644 --- a/src/librustc_typeck/variance/README.md +++ b/src/librustc_typeck/variance/README.md @@ -1,3 +1,5 @@ +## Variance of type and lifetime parameters + This file infers the variance of type and lifetime parameters. The algorithm is taken from Section 4 of the paper "Taming the Wildcards: Combining Definition- and Use-Site Variance" published in PLDI'11 and @@ -52,11 +54,11 @@ These indicate that (1) the variance of A must be at most covariant; variance of C must be at most covariant *and* contravariant. All of these results are based on a variance lattice defined as follows: - * Top (bivariant) - - + - o Bottom (invariant) + * Top (bivariant) + - + + o Bottom (invariant) -Based on this lattice, the solution V(A)=+, V(B)=-, V(C)=o is the +Based on this lattice, the solution `V(A)=+`, `V(B)=-`, `V(C)=o` is the optimal solution. Note that there is always a naive solution which just declares all variables to be invariant. @@ -68,11 +70,11 @@ take the form: V(X) <= Term Term := + | - | * | o | V(X) | Term x Term -Here the notation V(X) indicates the variance of a type/region +Here the notation `V(X)` indicates the variance of a type/region parameter `X` with respect to its defining class. `Term x Term` represents the "variance transform" as defined in the paper: - If the variance of a type variable `X` in type expression `E` is `V2` +> If the variance of a type variable `X` in type expression `E` is `V2` and the definition-site variance of the [corresponding] type parameter of a class `C` is `V1`, then the variance of `X` in the type expression `C` is `V3 = V1.xform(V2)`. @@ -267,7 +269,7 @@ expressions -- must be invariant with respect to all of their inputs. To see why this makes sense, consider what subtyping for a trait reference means: - <: + <: means that if I know that `T as Trait`, I also know that `U as Trait`. Moreover, if you think of it as dictionary passing style, @@ -291,9 +293,9 @@ impl Identity for T { type Out = T; ... 
} Now if I have `<&'static () as Identity>::Out`, this can be validly derived as `&'a ()` for any `'a`: - <&'a () as Identity> <: <&'static () as Identity> - if &'static () < : &'a () -- Identity is contravariant in Self - if 'static : 'a -- Subtyping rules for relations + <&'a () as Identity> <: <&'static () as Identity> + if &'static () < : &'a () -- Identity is contravariant in Self + if 'static : 'a -- Subtyping rules for relations This change otoh means that `<'static () as Identity>::Out` is always `&'static ()` (which might then be upcast to `'a ()`, diff --git a/src/librustc_typeck/variance/constraints.rs b/src/librustc_typeck/variance/constraints.rs index b18792d894287..c9e93a1a46d62 100644 --- a/src/librustc_typeck/variance/constraints.rs +++ b/src/librustc_typeck/variance/constraints.rs @@ -37,7 +37,7 @@ pub struct ConstraintContext<'a, 'tcx: 'a> { invariant: VarianceTermPtr<'a>, bivariant: VarianceTermPtr<'a>, - pub constraints: Vec> , + pub constraints: Vec>, } /// Declares that the variable `decl_id` appears in a location with @@ -49,8 +49,7 @@ pub struct Constraint<'a> { } pub fn add_constraints_from_crate<'a, 'tcx>(terms_cx: TermsContext<'a, 'tcx>) - -> ConstraintContext<'a, 'tcx> -{ + -> ConstraintContext<'a, 'tcx> { let tcx = terms_cx.tcx; let covariant = terms_cx.arena.alloc(ConstantTerm(ty::Covariant)); let contravariant = terms_cx.arena.alloc(ConstantTerm(ty::Contravariant)); @@ -80,7 +79,9 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> { debug!("visit_item item={}", tcx.map.node_to_string(item.id)); match item.node { - hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemUnion(..) => { + hir::ItemEnum(..) | + hir::ItemStruct(..) | + hir::ItemUnion(..) => { let scheme = tcx.lookup_item_type(did); // Not entirely obvious: constraints on structs/enums do not @@ -111,8 +112,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> { hir::ItemForeignMod(..) | hir::ItemTy(..) | hir::ItemImpl(..) | - hir::ItemDefaultImpl(..) => { - } + hir::ItemDefaultImpl(..) => {} } } } @@ -120,7 +120,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ConstraintContext<'a, 'tcx> { /// Is `param_id` a lifetime according to `map`? fn is_lifetime(map: &hir_map::Map, param_id: ast::NodeId) -> bool { match map.find(param_id) { - Some(hir_map::NodeLifetime(..)) => true, _ => false + Some(hir_map::NodeLifetime(..)) => true, + _ => false, } } @@ -143,13 +144,12 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { let tcx = self.terms_cx.tcx; assert!(is_lifetime(&tcx.map, param_id)); match tcx.named_region_map.defs.get(¶m_id) { - Some(&rl::DefEarlyBoundRegion(_, lifetime_decl_id)) - => lifetime_decl_id, + Some(&rl::DefEarlyBoundRegion(_, lifetime_decl_id)) => lifetime_decl_id, Some(_) => bug!("should not encounter non early-bound cases"), // The lookup should only fail when `param_id` is // itself a lifetime binding: use it as the decl_id. - None => param_id, + None => param_id, } } @@ -163,14 +163,15 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { // parameter (by inspecting parent of its binding declaration // to see if it is introduced by a type or by a fn/impl). - let check_result = |this:&ConstraintContext| -> bool { + let check_result = |this: &ConstraintContext| -> bool { let tcx = this.terms_cx.tcx; let decl_id = this.find_binding_for_lifetime(param_id); // Currently only called on lifetimes; double-checking that. 
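The meet operation on the variance lattice described in the README above is the `glb` function that appears later in this diff, in `variance/xform.rs`. As a self-contained illustration, the following sketch uses a simplified local `Variance` enum (a stand-in assumed for this example, not the compiler's `ty::Variance`):

```rust
// Sketch of the variance lattice:     * (bivariant, top)
//                                  -     +
//                                     o (invariant, bottom)
// `glb` computes the greatest lower bound (meet) of two variances,
// mirroring the match arms shown in variance/xform.rs below.
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum Variance {
    Covariant,     // +
    Invariant,     // o
    Contravariant, // -
    Bivariant,     // *
}

use self::Variance::*;

pub fn glb(v1: Variance, v2: Variance) -> Variance {
    match (v1, v2) {
        (Invariant, _) | (_, Invariant) => Invariant,
        (Covariant, Contravariant) => Invariant,
        (Contravariant, Covariant) => Invariant,
        (Covariant, Covariant) => Covariant,
        (Contravariant, Contravariant) => Contravariant,
        (x, Bivariant) | (Bivariant, x) => x,
    }
}

fn main() {
    // A parameter constrained to be both covariant and contravariant
    // collapses to invariant.
    assert_eq!(glb(Covariant, Contravariant), Invariant);
    // Bivariant is the top element and imposes no extra constraint.
    assert_eq!(glb(Bivariant, Contravariant), Contravariant);
}
```

Meeting `+` and `-` yields `o`, which is why the naive solution mentioned in the README, declaring every parameter invariant, is always sound, just overly conservative.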
assert!(is_lifetime(&tcx.map, param_id)); let parent_id = tcx.map.get_parent(decl_id); - let parent = tcx.map.find(parent_id).unwrap_or_else( - || bug!("tcx.map missing entry for id: {}", parent_id)); + let parent = tcx.map + .find(parent_id) + .unwrap_or_else(|| bug!("tcx.map missing entry for id: {}", parent_id)); let is_inferred; macro_rules! cannot_happen { () => { { @@ -186,14 +187,14 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemUnion(..) | - hir::ItemTrait(..) => is_inferred = true, - hir::ItemFn(..) => is_inferred = false, - _ => cannot_happen!(), + hir::ItemTrait(..) => is_inferred = true, + hir::ItemFn(..) => is_inferred = false, + _ => cannot_happen!(), } } - hir_map::NodeTraitItem(..) => is_inferred = false, - hir_map::NodeImplItem(..) => is_inferred = false, - _ => cannot_happen!(), + hir_map::NodeTraitItem(..) => is_inferred = false, + hir_map::NodeImplItem(..) => is_inferred = false, + _ => cannot_happen!(), } return is_inferred; @@ -230,21 +231,18 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { fn add_constraint(&mut self, InferredIndex(index): InferredIndex, variance: VarianceTermPtr<'a>) { - debug!("add_constraint(index={}, variance={:?})", - index, variance); - self.constraints.push(Constraint { inferred: InferredIndex(index), - variance: variance }); + debug!("add_constraint(index={}, variance={:?})", index, variance); + self.constraints.push(Constraint { + inferred: InferredIndex(index), + variance: variance, + }); } - fn contravariant(&mut self, - variance: VarianceTermPtr<'a>) - -> VarianceTermPtr<'a> { + fn contravariant(&mut self, variance: VarianceTermPtr<'a>) -> VarianceTermPtr<'a> { self.xform(variance, self.contravariant) } - fn invariant(&mut self, - variance: VarianceTermPtr<'a>) - -> VarianceTermPtr<'a> { + fn invariant(&mut self, variance: VarianceTermPtr<'a>) -> VarianceTermPtr<'a> { self.xform(variance, self.invariant) } @@ -257,23 +255,16 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { } } - fn xform(&mut self, - v1: VarianceTermPtr<'a>, - v2: VarianceTermPtr<'a>) - -> VarianceTermPtr<'a> { + fn xform(&mut self, v1: VarianceTermPtr<'a>, v2: VarianceTermPtr<'a>) -> VarianceTermPtr<'a> { match (*v1, *v2) { (_, ConstantTerm(ty::Covariant)) => { // Applying a "covariant" transform is always a no-op v1 } - (ConstantTerm(c1), ConstantTerm(c2)) => { - self.constant_term(c1.xform(c2)) - } + (ConstantTerm(c1), ConstantTerm(c2)) => self.constant_term(c1.xform(c2)), - _ => { - &*self.terms_cx.arena.alloc(TransformTerm(v1, v2)) - } + _ => &*self.terms_cx.arena.alloc(TransformTerm(v1, v2)), } } @@ -292,13 +283,12 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { // README.md for a discussion on dep-graph management. 
self.tcx().dep_graph.read(ItemVariances::to_dep_node(&trait_ref.def_id)); - self.add_constraints_from_substs( - generics, - trait_ref.def_id, - &trait_def.generics.types, - &trait_def.generics.regions, - trait_ref.substs, - variance); + self.add_constraints_from_substs(generics, + trait_ref.def_id, + &trait_def.generics.types, + &trait_def.generics.regions, + trait_ref.substs, + variance); } /// Adds constraints appropriate for an instance of `ty` appearing @@ -313,13 +303,13 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { variance); match ty.sty { - ty::TyBool | - ty::TyChar | ty::TyInt(_) | ty::TyUint(_) | - ty::TyFloat(_) | ty::TyStr | ty::TyNever => { - /* leaf type -- noop */ + ty::TyBool | ty::TyChar | ty::TyInt(_) | ty::TyUint(_) | ty::TyFloat(_) | + ty::TyStr | ty::TyNever => { + // leaf type -- noop } - ty::TyClosure(..) | ty::TyAnon(..) => { + ty::TyClosure(..) | + ty::TyAnon(..) => { bug!("Unexpected closure type in variance computation"); } @@ -329,11 +319,12 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.add_constraints_from_mt(generics, mt, variance); } - ty::TyBox(typ) | ty::TyArray(typ, _) | ty::TySlice(typ) => { + ty::TyBox(typ) | + ty::TyArray(typ, _) | + ty::TySlice(typ) => { self.add_constraints_from_ty(generics, typ, variance); } - ty::TyRawPtr(ref mt) => { self.add_constraints_from_mt(generics, mt, variance); } @@ -352,13 +343,12 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { // README.md for a discussion on dep-graph management. self.tcx().dep_graph.read(ItemVariances::to_dep_node(&def.did)); - self.add_constraints_from_substs( - generics, - def.did, - &item_type.generics.types, - &item_type.generics.regions, - substs, - variance); + self.add_constraints_from_substs(generics, + def.did, + &item_type.generics.types, + &item_type.generics.regions, + substs, + variance); } ty::TyProjection(ref data) => { @@ -370,13 +360,12 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { // README.md for a discussion on dep-graph management. self.tcx().dep_graph.read(ItemVariances::to_dep_node(&trait_ref.def_id)); - self.add_constraints_from_substs( - generics, - trait_ref.def_id, - &trait_def.generics.types, - &trait_def.generics.regions, - trait_ref.substs, - variance); + self.add_constraints_from_substs(generics, + trait_ref.def_id, + &trait_def.generics.types, + &trait_def.generics.regions, + trait_ref.substs, + variance); } ty::TyTrait(ref data) => { @@ -384,8 +373,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { let contra = self.contravariant(variance); self.add_constraints_from_region(generics, data.region_bound, contra); - let poly_trait_ref = - data.principal.with_self_ty(self.tcx(), self.tcx().types.err); + let poly_trait_ref = data.principal.with_self_ty(self.tcx(), self.tcx().types.err); self.add_constraints_from_trait_ref(generics, poly_trait_ref.0, variance); for projection in &data.projection_bounds { @@ -425,7 +413,8 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { ty::TyInfer(..) 
=> { bug!("unexpected type encountered in \ - variance inference: {}", ty); + variance inference: {}", + ty); } } } @@ -445,18 +434,17 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { variance); for p in type_param_defs { - let variance_decl = - self.declared_variance(p.def_id, def_id, p.index as usize); + let variance_decl = self.declared_variance(p.def_id, def_id, p.index as usize); let variance_i = self.xform(variance, variance_decl); let substs_ty = substs.type_for_def(p); debug!("add_constraints_from_substs: variance_decl={:?} variance_i={:?}", - variance_decl, variance_i); + variance_decl, + variance_i); self.add_constraints_from_ty(generics, substs_ty, variance_i); } for p in region_param_defs { - let variance_decl = - self.declared_variance(p.def_id, def_id, p.index as usize); + let variance_decl = self.declared_variance(p.def_id, def_id, p.index as usize); let variance_i = self.xform(variance, variance_decl); let substs_r = substs.region_for_def(p); self.add_constraints_from_region(generics, substs_r, variance_i); @@ -494,15 +482,19 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { } } - ty::ReStatic => { } + ty::ReStatic => {} ty::ReLateBound(..) => { // We do not infer variance for region parameters on // methods or in fn types. } - ty::ReFree(..) | ty::ReScope(..) | ty::ReVar(..) | - ty::ReSkolemized(..) | ty::ReEmpty | ty::ReErased => { + ty::ReFree(..) | + ty::ReScope(..) | + ty::ReVar(..) | + ty::ReSkolemized(..) | + ty::ReEmpty | + ty::ReErased => { // We don't expect to see anything but 'static or bound // regions when visiting member types or method types. bug!("unexpected region encountered in variance \ diff --git a/src/librustc_typeck/variance/mod.rs b/src/librustc_typeck/variance/mod.rs index 13ed6cf764140..cd0ab1cbb9543 100644 --- a/src/librustc_typeck/variance/mod.rs +++ b/src/librustc_typeck/variance/mod.rs @@ -34,4 +34,3 @@ pub fn infer_variance<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { solve::solve_constraints(constraints_cx); tcx.variance_computed.set(true); } - diff --git a/src/librustc_typeck/variance/solve.rs b/src/librustc_typeck/variance/solve.rs index 82b63d0cc0937..a5c53b4c6291c 100644 --- a/src/librustc_typeck/variance/solve.rs +++ b/src/librustc_typeck/variance/solve.rs @@ -25,24 +25,24 @@ use super::xform::*; struct SolveContext<'a, 'tcx: 'a> { terms_cx: TermsContext<'a, 'tcx>, - constraints: Vec> , + constraints: Vec>, // Maps from an InferredIndex to the inferred value for that variable. - solutions: Vec + solutions: Vec, } pub fn solve_constraints(constraints_cx: ConstraintContext) { let ConstraintContext { terms_cx, constraints, .. 
} = constraints_cx; - let solutions = - terms_cx.inferred_infos.iter() - .map(|ii| ii.initial_variance) - .collect(); + let solutions = terms_cx.inferred_infos + .iter() + .map(|ii| ii.initial_variance) + .collect(); let mut solutions_cx = SolveContext { terms_cx: terms_cx, constraints: constraints, - solutions: solutions + solutions: solutions, }; solutions_cx.solve(); solutions_cx.write(); @@ -68,13 +68,13 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { if old_value != new_value { debug!("Updating inferred {} (node {}) \ from {:?} to {:?} due to {:?}", - inferred, - self.terms_cx - .inferred_infos[inferred] - .param_id, - old_value, - new_value, - term); + inferred, + self.terms_cx + .inferred_infos[inferred] + .param_id, + old_value, + new_value, + term); self.solutions[inferred] = new_value; changed = true; @@ -114,36 +114,40 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { let info = &inferred_infos[index]; let variance = solutions[index]; debug!("Index {} Info {} Variance {:?}", - index, info.index, variance); + index, + info.index, + variance); assert_eq!(item_variances.len(), info.index); item_variances.push(variance); index += 1; } - debug!("item_id={} item_variances={:?}", - item_id, - item_variances); + debug!("item_id={} item_variances={:?}", item_id, item_variances); let item_def_id = tcx.map.local_def_id(item_id); // For unit testing: check for a special "rustc_variance" // attribute and report an error with various results if found. if tcx.has_attr(item_def_id, "rustc_variance") { - span_err!(tcx.sess, tcx.map.span(item_id), E0208, "{:?}", item_variances); + span_err!(tcx.sess, + tcx.map.span(item_id), + E0208, + "{:?}", + item_variances); } - let newly_added = tcx.item_variance_map.borrow_mut() - .insert(item_def_id, Rc::new(item_variances)).is_none(); + let newly_added = tcx.item_variance_map + .borrow_mut() + .insert(item_def_id, Rc::new(item_variances)) + .is_none(); assert!(newly_added); } } fn evaluate(&self, term: VarianceTermPtr<'a>) -> ty::Variance { match *term { - ConstantTerm(v) => { - v - } + ConstantTerm(v) => v, TransformTerm(t1, t2) => { let v1 = self.evaluate(t1); @@ -151,9 +155,7 @@ impl<'a, 'tcx> SolveContext<'a, 'tcx> { v1.xform(v2) } - InferredTerm(InferredIndex(index)) => { - self.solutions[index] - } + InferredTerm(InferredIndex(index)) => self.solutions[index], } } } diff --git a/src/librustc_typeck/variance/terms.rs b/src/librustc_typeck/variance/terms.rs index 577a47a35e125..f6732f36e355a 100644 --- a/src/librustc_typeck/variance/terms.rs +++ b/src/librustc_typeck/variance/terms.rs @@ -49,7 +49,12 @@ impl<'a> fmt::Debug for VarianceTerm<'a> { match *self { ConstantTerm(c1) => write!(f, "{:?}", c1), TransformTerm(v1, v2) => write!(f, "({:?} \u{00D7} {:?})", v1, v2), - InferredTerm(id) => write!(f, "[{}]", { let InferredIndex(i) = id; i }) + InferredTerm(id) => { + write!(f, "[{}]", { + let InferredIndex(i) = id; + i + }) + } } } } @@ -72,7 +77,7 @@ pub struct TermsContext<'a, 'tcx: 'a> { pub inferred_map: NodeMap, // Maps from an InferredIndex to the info for that variable. 
- pub inferred_infos: Vec> , + pub inferred_infos: Vec>, } pub struct InferredInfo<'a> { @@ -87,11 +92,9 @@ pub struct InferredInfo<'a> { pub initial_variance: ty::Variance, } -pub fn determine_parameters_to_be_inferred<'a, 'tcx>( - tcx: TyCtxt<'a, 'tcx, 'tcx>, - arena: &'a mut TypedArena>) - -> TermsContext<'a, 'tcx> -{ +pub fn determine_parameters_to_be_inferred<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + arena: &'a mut TypedArena>) + -> TermsContext<'a, 'tcx> { let mut terms_cx = TermsContext { tcx: tcx, arena: arena, @@ -102,17 +105,16 @@ pub fn determine_parameters_to_be_inferred<'a, 'tcx>( // cache and share the variance struct used for items with // no type/region parameters - empty_variances: Rc::new(vec![]) + empty_variances: Rc::new(vec![]), }; // See README.md for a discussion on dep-graph management. - tcx.visit_all_items_in_krate(|def_id| ItemVariances::to_dep_node(&def_id), - &mut terms_cx); + tcx.visit_all_items_in_krate(|def_id| ItemVariances::to_dep_node(&def_id), &mut terms_cx); terms_cx } -fn lang_items(tcx: TyCtxt) -> Vec<(ast::NodeId,Vec)> { +fn lang_items(tcx: TyCtxt) -> Vec<(ast::NodeId, Vec)> { let all = vec![ (tcx.lang_items.phantom_data(), vec![ty::Covariant]), (tcx.lang_items.unsafe_cell_type(), vec![ty::Invariant]), @@ -138,15 +140,13 @@ impl<'a, 'tcx> TermsContext<'a, 'tcx> { fn add_inferreds_for_item(&mut self, item_id: ast::NodeId, has_self: bool, - generics: &hir::Generics) - { - /*! - * Add "inferreds" for the generic parameters declared on this - * item. This has a lot of annoying parameters because we are - * trying to drive this from the AST, rather than the - * ty::Generics, so that we can get span info -- but this - * means we must accommodate syntactic distinctions. - */ + generics: &hir::Generics) { + //! Add "inferreds" for the generic parameters declared on this + //! item. This has a lot of annoying parameters because we are + //! trying to drive this from the AST, rather than the + //! ty::Generics, so that we can get span info -- but this + //! means we must accommodate syntactic distinctions. + //! // NB: In the code below for writing the results back into the // tcx, we rely on the fact that all inferreds for a particular @@ -178,26 +178,26 @@ impl<'a, 'tcx> TermsContext<'a, 'tcx> { // parameters". 
if self.num_inferred() == inferreds_on_entry { let item_def_id = self.tcx.map.local_def_id(item_id); - let newly_added = - self.tcx.item_variance_map.borrow_mut().insert( - item_def_id, - self.empty_variances.clone()).is_none(); + let newly_added = self.tcx + .item_variance_map + .borrow_mut() + .insert(item_def_id, self.empty_variances.clone()) + .is_none(); assert!(newly_added); } } - fn add_inferred(&mut self, - item_id: ast::NodeId, - index: usize, - param_id: ast::NodeId) { + fn add_inferred(&mut self, item_id: ast::NodeId, index: usize, param_id: ast::NodeId) { let inf_index = InferredIndex(self.inferred_infos.len()); let term = self.arena.alloc(InferredTerm(inf_index)); let initial_variance = self.pick_initial_variance(item_id, index); - self.inferred_infos.push(InferredInfo { item_id: item_id, - index: index, - param_id: param_id, - term: term, - initial_variance: initial_variance }); + self.inferred_infos.push(InferredInfo { + item_id: item_id, + index: index, + param_id: param_id, + term: term, + initial_variance: initial_variance, + }); let newly_added = self.inferred_map.insert(param_id, inf_index).is_none(); assert!(newly_added); @@ -208,18 +208,17 @@ impl<'a, 'tcx> TermsContext<'a, 'tcx> { inf_index={:?}, \ initial_variance={:?})", self.tcx.item_path_str(self.tcx.map.local_def_id(item_id)), - item_id, index, param_id, inf_index, + item_id, + index, + param_id, + inf_index, initial_variance); } - fn pick_initial_variance(&self, - item_id: ast::NodeId, - index: usize) - -> ty::Variance - { + fn pick_initial_variance(&self, item_id: ast::NodeId, index: usize) -> ty::Variance { match self.lang_items.iter().find(|&&(n, _)| n == item_id) { Some(&(_, ref variances)) => variances[index], - None => ty::Bivariant + None => ty::Bivariant, } } @@ -230,7 +229,8 @@ impl<'a, 'tcx> TermsContext<'a, 'tcx> { impl<'a, 'tcx, 'v> Visitor<'v> for TermsContext<'a, 'tcx> { fn visit_item(&mut self, item: &hir::Item) { - debug!("add_inferreds for item {}", self.tcx.map.node_to_string(item.id)); + debug!("add_inferreds for item {}", + self.tcx.map.node_to_string(item.id)); match item.node { hir::ItemEnum(_, ref generics) | @@ -254,9 +254,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for TermsContext<'a, 'tcx> { hir::ItemFn(..) | hir::ItemMod(..) | hir::ItemForeignMod(..) | - hir::ItemTy(..) => { - } + hir::ItemTy(..) 
=> {} } } } - diff --git a/src/librustc_typeck/variance/xform.rs b/src/librustc_typeck/variance/xform.rs index 02a2ceb360d7f..507734ce35e44 100644 --- a/src/librustc_typeck/variance/xform.rs +++ b/src/librustc_typeck/variance/xform.rs @@ -47,7 +47,8 @@ pub fn glb(v1: ty::Variance, v2: ty::Variance) -> ty::Variance { // - + // o match (v1, v2) { - (ty::Invariant, _) | (_, ty::Invariant) => ty::Invariant, + (ty::Invariant, _) | + (_, ty::Invariant) => ty::Invariant, (ty::Covariant, ty::Contravariant) => ty::Invariant, (ty::Contravariant, ty::Covariant) => ty::Invariant, @@ -56,6 +57,7 @@ pub fn glb(v1: ty::Variance, v2: ty::Variance) -> ty::Variance { (ty::Contravariant, ty::Contravariant) => ty::Contravariant, - (x, ty::Bivariant) | (ty::Bivariant, x) => x, + (x, ty::Bivariant) | + (ty::Bivariant, x) => x, } } diff --git a/src/librustc_unicode/char.rs b/src/librustc_unicode/char.rs index 5a0c27d9c609f..702d7d8b4b2ca 100644 --- a/src/librustc_unicode/char.rs +++ b/src/librustc_unicode/char.rs @@ -37,7 +37,7 @@ use tables::{conversions, derived_property, general_category, property}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::char::{MAX, from_digit, from_u32, from_u32_unchecked}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::char::{EncodeUtf16, EncodeUtf8, EscapeDebug, EscapeDefault, EscapeUnicode}; +pub use core::char::{EscapeDebug, EscapeDefault, EscapeUnicode}; // unstable reexports #[unstable(feature = "try_from", issue = "33417")] @@ -435,50 +435,96 @@ impl char { C::len_utf16(self) } - /// Returns an iterator over the bytes of this character as UTF-8. + /// Encodes this character as UTF-8 into the provided byte buffer, + /// and then returns the subslice of the buffer that contains the encoded character. /// - /// The returned iterator also has an `as_slice()` method to view the - /// encoded bytes as a byte slice. + /// # Panics + /// + /// Panics if the buffer is not large enough. + /// A buffer of length four is large enough to encode any `char`. /// /// # Examples /// + /// In both of these examples, 'ß' takes two bytes to encode. + /// /// ``` /// #![feature(unicode)] /// - /// let iterator = 'ß'.encode_utf8(); - /// assert_eq!(iterator.as_slice(), [0xc3, 0x9f]); + /// let mut b = [0; 2]; /// - /// for (i, byte) in iterator.enumerate() { - /// println!("byte {}: {:x}", i, byte); - /// } + /// let result = 'ß'.encode_utf8(&mut b); + /// + /// assert_eq!(result, "ß"); + /// + /// assert_eq!(result.len(), 2); + /// ``` + /// + /// A buffer that's too small: + /// + /// ``` + /// #![feature(unicode)] + /// use std::thread; + /// + /// let result = thread::spawn(|| { + /// let mut b = [0; 1]; + /// + /// // this panics + /// 'ß'.encode_utf8(&mut b); + /// }).join(); + /// + /// assert!(result.is_err()); /// ``` - #[unstable(feature = "unicode", issue = "27784")] + #[unstable(feature = "unicode", + reason = "pending decision about Iterator/Writer/Reader", + issue = "27784")] #[inline] - pub fn encode_utf8(self) -> EncodeUtf8 { - C::encode_utf8(self) + pub fn encode_utf8(self, dst: &mut [u8]) -> &mut str { + C::encode_utf8(self, dst) } - /// Returns an iterator over the `u16` entries of this character as UTF-16. + /// Encodes this character as UTF-16 into the provided `u16` buffer, + /// and then returns the subslice of the buffer that contains the encoded character. /// - /// The returned iterator also has an `as_slice()` method to view the - /// encoded form as a slice. + /// # Panics + /// + /// Panics if the buffer is not large enough. 
+ /// A buffer of length 2 is large enough to encode any `char`. /// /// # Examples /// + /// In both of these examples, '𝕊' takes two `u16`s to encode. + /// /// ``` /// #![feature(unicode)] /// - /// let iterator = '𝕊'.encode_utf16(); - /// assert_eq!(iterator.as_slice(), [0xd835, 0xdd4a]); + /// let mut b = [0; 2]; /// - /// for (i, val) in iterator.enumerate() { - /// println!("entry {}: {:x}", i, val); - /// } + /// let result = '𝕊'.encode_utf16(&mut b); + /// + /// assert_eq!(result.len(), 2); /// ``` - #[unstable(feature = "unicode", issue = "27784")] + /// + /// A buffer that's too small: + /// + /// ``` + /// #![feature(unicode)] + /// use std::thread; + /// + /// let result = thread::spawn(|| { + /// let mut b = [0; 1]; + /// + /// // this panics + /// '𝕊'.encode_utf16(&mut b); + /// }).join(); + /// + /// assert!(result.is_err()); + /// ``` + #[unstable(feature = "unicode", + reason = "pending decision about Iterator/Writer/Reader", + issue = "27784")] #[inline] - pub fn encode_utf16(self) -> EncodeUtf16 { - C::encode_utf16(self) + pub fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16] { + C::encode_utf16(self, dst) } /// Returns true if this `char` is an alphabetic code point, and false if not. diff --git a/src/librustc_unicode/u_str.rs b/src/librustc_unicode/u_str.rs index eb5b6feeb7ec4..1c7894794c9c8 100644 --- a/src/librustc_unicode/u_str.rs +++ b/src/librustc_unicode/u_str.rs @@ -157,13 +157,13 @@ impl Iterator for Utf16Encoder return Some(tmp); } + let mut buf = [0; 2]; self.chars.next().map(|ch| { - let n = CharExt::encode_utf16(ch); - let n = n.as_slice(); - if n.len() == 2 { - self.extra = n[1]; + let n = CharExt::encode_utf16(ch, &mut buf).len(); + if n == 2 { + self.extra = buf[1]; } - n[0] + buf[0] }) } @@ -181,6 +181,7 @@ impl Iterator for Utf16Encoder impl FusedIterator for Utf16Encoder where I: FusedIterator {} +#[stable(feature = "split_whitespace", since = "1.1.0")] impl<'a> Iterator for SplitWhitespace<'a> { type Item = &'a str; @@ -188,6 +189,8 @@ impl<'a> Iterator for SplitWhitespace<'a> { self.inner.next() } } + +#[stable(feature = "split_whitespace", since = "1.1.0")] impl<'a> DoubleEndedIterator for SplitWhitespace<'a> { fn next_back(&mut self) -> Option<&'a str> { self.inner.next_back() diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 709e36989244f..31497b6bd3352 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -15,11 +15,10 @@ use std::iter::once; use syntax::ast; use rustc::hir; -use rustc::middle::cstore; -use rustc::hir::def::Def; +use rustc::hir::def::{Def, CtorKind}; use rustc::hir::def_id::DefId; use rustc::hir::print as pprust; -use rustc::ty::{self, TyCtxt, VariantKind}; +use rustc::ty::{self, TyCtxt}; use rustc::util::nodemap::FnvHashSet; use rustc_const_eval::lookup_const_by_id; @@ -73,49 +72,50 @@ fn try_inline_def<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, let did = def.def_id(); let inner = match def { Def::Trait(did) => { - record_extern_fqn(cx, did, clean::TypeTrait); + record_extern_fqn(cx, did, clean::TypeKind::Trait); ret.extend(build_impls(cx, tcx, did)); clean::TraitItem(build_external_trait(cx, tcx, did)) } Def::Fn(did) => { - record_extern_fqn(cx, did, clean::TypeFunction); + record_extern_fqn(cx, did, clean::TypeKind::Function); clean::FunctionItem(build_external_function(cx, tcx, did)) } - Def::Struct(did) - // If this is a struct constructor, we skip it - if tcx.sess.cstore.tuple_struct_definition_if_ctor(did).is_none() => { - 
record_extern_fqn(cx, did, clean::TypeStruct); + Def::Struct(did) => { + record_extern_fqn(cx, did, clean::TypeKind::Struct); ret.extend(build_impls(cx, tcx, did)); clean::StructItem(build_struct(cx, tcx, did)) } Def::Union(did) => { - record_extern_fqn(cx, did, clean::TypeUnion); + record_extern_fqn(cx, did, clean::TypeKind::Union); ret.extend(build_impls(cx, tcx, did)); clean::UnionItem(build_union(cx, tcx, did)) } Def::TyAlias(did) => { - record_extern_fqn(cx, did, clean::TypeTypedef); + record_extern_fqn(cx, did, clean::TypeKind::Typedef); ret.extend(build_impls(cx, tcx, did)); - build_type(cx, tcx, did) + clean::TypedefItem(build_type_alias(cx, tcx, did), false) } Def::Enum(did) => { - record_extern_fqn(cx, did, clean::TypeEnum); + record_extern_fqn(cx, did, clean::TypeKind::Enum); ret.extend(build_impls(cx, tcx, did)); - build_type(cx, tcx, did) + clean::EnumItem(build_enum(cx, tcx, did)) } // Assume that the enum type is reexported next to the variant, and // variants don't show up in documentation specially. - Def::Variant(..) => return Some(Vec::new()), + // Similarly, consider that struct type is reexported next to its constructor. + Def::Variant(..) | + Def::VariantCtor(..) | + Def::StructCtor(..) => return Some(Vec::new()), Def::Mod(did) => { - record_extern_fqn(cx, did, clean::TypeModule); + record_extern_fqn(cx, did, clean::TypeKind::Module); clean::ModuleItem(build_module(cx, tcx, did)) } Def::Static(did, mtbl) => { - record_extern_fqn(cx, did, clean::TypeStatic); + record_extern_fqn(cx, did, clean::TypeKind::Static); clean::StaticItem(build_static(cx, tcx, did, mtbl)) } - Def::Const(did) | Def::AssociatedConst(did) => { - record_extern_fqn(cx, did, clean::TypeConst); + Def::Const(did) => { + record_extern_fqn(cx, did, clean::TypeKind::Const); clean::ConstantItem(build_const(cx, tcx, did)) } _ => return None, @@ -200,6 +200,18 @@ fn build_external_function<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx } } +fn build_enum<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId) -> clean::Enum { + let t = tcx.lookup_item_type(did); + let predicates = tcx.lookup_predicates(did); + + clean::Enum { + generics: (t.generics, &predicates).clean(cx), + variants_stripped: false, + variants: tcx.lookup_adt_def(did).variants.clean(cx), + } +} + fn build_struct<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId) -> clean::Struct { let t = tcx.lookup_item_type(did); @@ -207,10 +219,10 @@ fn build_struct<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, let variant = tcx.lookup_adt_def(did).struct_variant(); clean::Struct { - struct_type: match variant.kind { - VariantKind::Struct => doctree::Plain, - VariantKind::Tuple => doctree::Tuple, - VariantKind::Unit => doctree::Unit, + struct_type: match variant.ctor_kind { + CtorKind::Fictive => doctree::Plain, + CtorKind::Fn => doctree::Tuple, + CtorKind::Const => doctree::Unit, }, generics: (t.generics, &predicates).clean(cx), fields: variant.fields.clean(cx), @@ -232,25 +244,15 @@ fn build_union<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, } } -fn build_type<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, - did: DefId) -> clean::ItemEnum { +fn build_type_alias<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId) -> clean::Typedef { let t = tcx.lookup_item_type(did); let predicates = tcx.lookup_predicates(did); - match t.ty.sty { - ty::TyAdt(edef, _) if edef.is_enum() && !tcx.sess.cstore.is_typedef(did) => { - return clean::EnumItem(clean::Enum { - generics: 
(t.generics, &predicates).clean(cx), - variants_stripped: false, - variants: edef.variants.clean(cx), - }) - } - _ => {} - } - clean::TypedefItem(clean::Typedef { + clean::Typedef { type_: t.ty.clean(cx), generics: (t.generics, &predicates).clean(cx), - }, false) + } } pub fn build_impls<'a, 'tcx>(cx: &DocContext, @@ -264,32 +266,49 @@ pub fn build_impls<'a, 'tcx>(cx: &DocContext, build_impl(cx, tcx, did, &mut impls); } } - - // If this is the first time we've inlined something from this crate, then - // we inline *all* impls from the crate into this crate. Note that there's + // If this is the first time we've inlined something from another crate, then + // we inline *all* impls from all the crates into this crate. Note that there's // currently no way for us to filter this based on type, and we likely need // many impls for a variety of reasons. // // Primarily, the impls will be used to populate the documentation for this // type being inlined, but impls can also be used when generating // documentation for primitives (no way to find those specifically). - if cx.populated_crate_impls.borrow_mut().insert(did.krate) { - for item in tcx.sess.cstore.crate_top_level_items(did.krate) { - populate_impls(cx, tcx, item.def, &mut impls); - } + if cx.populated_all_crate_impls.get() { + return impls; + } - fn populate_impls<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, - def: cstore::DefLike, - impls: &mut Vec) { - match def { - cstore::DlImpl(did) => build_impl(cx, tcx, did, impls), - cstore::DlDef(Def::Mod(did)) => { - for item in tcx.sess.cstore.item_children(did) { - populate_impls(cx, tcx, item.def, impls) - } - } - _ => {} - } + cx.populated_all_crate_impls.set(true); + + for did in tcx.sess.cstore.implementations_of_trait(None) { + build_impl(cx, tcx, did, &mut impls); + } + + // Also try to inline primitive impls from other crates. + let primitive_impls = [ + tcx.lang_items.isize_impl(), + tcx.lang_items.i8_impl(), + tcx.lang_items.i16_impl(), + tcx.lang_items.i32_impl(), + tcx.lang_items.i64_impl(), + tcx.lang_items.usize_impl(), + tcx.lang_items.u8_impl(), + tcx.lang_items.u16_impl(), + tcx.lang_items.u32_impl(), + tcx.lang_items.u64_impl(), + tcx.lang_items.f32_impl(), + tcx.lang_items.f64_impl(), + tcx.lang_items.char_impl(), + tcx.lang_items.str_impl(), + tcx.lang_items.slice_impl(), + tcx.lang_items.const_ptr_impl(), + tcx.lang_items.mut_ptr_impl(), + ]; + + for def_id in primitive_impls.iter().filter_map(|&def_id| def_id) { + if !def_id.is_local() { + tcx.populate_implementations_for_primitive_if_necessary(def_id); + build_impl(cx, tcx, def_id, &mut impls); } } @@ -348,12 +367,10 @@ pub fn build_impl<'a, 'tcx>(cx: &DocContext, } let predicates = tcx.lookup_predicates(did); - let trait_items = tcx.sess.cstore.impl_items(did) + let trait_items = tcx.sess.cstore.impl_or_trait_items(did) .iter() - .filter_map(|did| { - let did = did.def_id(); - let impl_item = tcx.impl_or_trait_item(did); - match impl_item { + .filter_map(|&did| { + match tcx.impl_or_trait_item(did) { ty::ConstTraitItem(ref assoc_const) => { let did = assoc_const.def_id; let type_scheme = tcx.lookup_item_type(did); @@ -453,7 +470,7 @@ pub fn build_impl<'a, 'tcx>(cx: &DocContext, for_: for_, generics: (ty.generics, &predicates).clean(cx), items: trait_items, - polarity: polarity.map(|p| { p.clean(cx) }), + polarity: Some(polarity.clean(cx)), }), source: clean::Span::empty(), name: None, @@ -481,20 +498,12 @@ fn build_module<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, // visit each node at most once. 
let mut visited = FnvHashSet(); for item in tcx.sess.cstore.item_children(did) { - match item.def { - cstore::DlDef(Def::ForeignMod(did)) => { - fill_in(cx, tcx, did, items); - } - cstore::DlDef(def) if item.vis == ty::Visibility::Public => { - if !visited.insert(def) { continue } - if let Some(i) = try_inline_def(cx, tcx, def) { - items.extend(i) - } + let def_id = item.def.def_id(); + if tcx.sess.cstore.visibility(def_id) == ty::Visibility::Public { + if !visited.insert(def_id) { continue } + if let Some(i) = try_inline_def(cx, tcx, item.def) { + items.extend(i) } - cstore::DlDef(..) => {} - // All impls were inlined above - cstore::DlImpl(..) => {} - cstore::DlField => panic!("unimplemented field"), } } } @@ -567,7 +576,7 @@ fn filter_non_trait_generics(trait_did: DefId, mut g: clean::Generics) _ => true, } }); - return g; + g } /// Supertrait bounds for a trait are also listed in the generics coming from diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index b9dc75cdd9f12..265b66b01ea52 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -12,10 +12,7 @@ //! that clean them. pub use self::Type::*; -pub use self::TypeKind::*; -pub use self::VariantKind::*; pub use self::Mutability::*; -pub use self::Import::*; pub use self::ItemEnum::*; pub use self::Attribute::*; pub use self::TyParamBound::*; @@ -33,12 +30,10 @@ use syntax::print::pprust as syntax_pprust; use syntax_pos::{self, DUMMY_SP, Pos}; use rustc_trans::back::link; -use rustc::middle::cstore; use rustc::middle::privacy::AccessLevels; use rustc::middle::resolve_lifetime::DefRegion::*; -use rustc::hir::def::Def; -use rustc::hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; -use rustc::hir::fold::Folder; +use rustc::hir::def::{Def, CtorKind}; +use rustc::hir::def_id::{self, DefId, DefIndex, CRATE_DEF_INDEX}; use rustc::hir::print as pprust; use rustc::ty::subst::Substs; use rustc::ty::{self, AdtKind}; @@ -116,7 +111,7 @@ pub struct Crate { pub name: String, pub src: PathBuf, pub module: Option, - pub externs: Vec<(ast::CrateNum, ExternalCrate)>, + pub externs: Vec<(def_id::CrateNum, ExternalCrate)>, pub primitives: Vec, pub access_levels: Arc>, // These are later on moved into `CACHEKEY`, leaving the map empty. 
@@ -124,7 +119,7 @@ pub struct Crate { pub external_traits: FnvHashMap, } -struct CrateNum(ast::CrateNum); +struct CrateNum(def_id::CrateNum); impl<'a, 'tcx> Clean for visit_ast::RustdocVisitor<'a, 'tcx> { fn clean(&self, cx: &DocContext) -> Crate { @@ -239,19 +234,16 @@ pub struct ExternalCrate { impl Clean for CrateNum { fn clean(&self, cx: &DocContext) -> ExternalCrate { let mut primitives = Vec::new(); + let root = DefId { krate: self.0, index: CRATE_DEF_INDEX }; cx.tcx_opt().map(|tcx| { - for item in tcx.sess.cstore.crate_top_level_items(self.0) { - let did = match item.def { - cstore::DlDef(Def::Mod(did)) => did, - _ => continue - }; - let attrs = inline::load_attrs(cx, tcx, did); + for item in tcx.sess.cstore.item_children(root) { + let attrs = inline::load_attrs(cx, tcx, item.def.def_id()); PrimitiveType::find(&attrs).map(|prim| primitives.push(prim)); } }); ExternalCrate { name: (&cx.sess().cstore.crate_name(self.0)[..]).to_owned(), - attrs: cx.sess().cstore.crate_attrs(self.0).clean(cx), + attrs: cx.sess().cstore.item_attrs(root).clean(cx), primitives: primitives, } } @@ -288,34 +280,34 @@ impl Item { } } pub fn is_mod(&self) -> bool { - ItemType::from(self) == ItemType::Module + self.type_() == ItemType::Module } pub fn is_trait(&self) -> bool { - ItemType::from(self) == ItemType::Trait + self.type_() == ItemType::Trait } pub fn is_struct(&self) -> bool { - ItemType::from(self) == ItemType::Struct + self.type_() == ItemType::Struct } pub fn is_enum(&self) -> bool { - ItemType::from(self) == ItemType::Module + self.type_() == ItemType::Module } pub fn is_fn(&self) -> bool { - ItemType::from(self) == ItemType::Function + self.type_() == ItemType::Function } pub fn is_associated_type(&self) -> bool { - ItemType::from(self) == ItemType::AssociatedType + self.type_() == ItemType::AssociatedType } pub fn is_associated_const(&self) -> bool { - ItemType::from(self) == ItemType::AssociatedConst + self.type_() == ItemType::AssociatedConst } pub fn is_method(&self) -> bool { - ItemType::from(self) == ItemType::Method + self.type_() == ItemType::Method } pub fn is_ty_method(&self) -> bool { - ItemType::from(self) == ItemType::TyMethod + self.type_() == ItemType::TyMethod } pub fn is_primitive(&self) -> bool { - ItemType::from(self) == ItemType::Primitive + self.type_() == ItemType::Primitive } pub fn is_stripped(&self) -> bool { match self.inner { StrippedItem(..) => true, _ => false } @@ -324,7 +316,7 @@ impl Item { match self.inner { StructItem(ref _struct) => Some(_struct.fields_stripped), UnionItem(ref union) => Some(union.fields_stripped), - VariantItem(Variant { kind: StructVariant(ref vstruct)} ) => { + VariantItem(Variant { kind: VariantKind::Struct(ref vstruct)} ) => { Some(vstruct.fields_stripped) }, _ => None, @@ -347,6 +339,11 @@ impl Item { pub fn stable_since(&self) -> Option<&str> { self.stability.as_ref().map(|s| &s.since[..]) } + + /// Returns a documentation-level item type from the item. 
+ pub fn type_(&self) -> ItemType { + ItemType::from(self) + } } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] @@ -673,7 +670,7 @@ impl Clean for ty::BuiltinBound { Some(tcx) => tcx, None => return RegionBound(Lifetime::statik()) }; - let empty = Substs::empty(tcx); + let empty = tcx.intern_substs(&[]); let (did, path) = match *self { ty::BoundSend => (tcx.lang_items.send_trait().unwrap(), @@ -688,7 +685,7 @@ impl Clean for ty::BuiltinBound { (tcx.lang_items.sync_trait().unwrap(), external_path(cx, "Sync", None, false, vec![], empty)), }; - inline::record_extern_fqn(cx, did, TypeTrait); + inline::record_extern_fqn(cx, did, TypeKind::Trait); TraitBound(PolyTrait { trait_: ResolvedPath { path: path, @@ -707,7 +704,7 @@ impl<'tcx> Clean for ty::TraitRef<'tcx> { Some(tcx) => tcx, None => return RegionBound(Lifetime::statik()) }; - inline::record_extern_fqn(cx, self.def_id, TypeTrait); + inline::record_extern_fqn(cx, self.def_id, TypeKind::Trait); let path = external_path(cx, &tcx.item_name(self.def_id).as_str(), Some(self.def_id), true, vec![], self.substs); @@ -765,7 +762,7 @@ impl Lifetime { pub fn get_ref<'a>(&'a self) -> &'a str { let Lifetime(ref s) = *self; let s: &'a str = s; - return s; + s } pub fn statik() -> Lifetime { @@ -774,7 +771,20 @@ impl Lifetime { } impl Clean for hir::Lifetime { - fn clean(&self, _: &DocContext) -> Lifetime { + fn clean(&self, cx: &DocContext) -> Lifetime { + if let Some(tcx) = cx.tcx_opt() { + let def = tcx.named_region_map.defs.get(&self.id).cloned(); + match def { + Some(DefEarlyBoundRegion(_, node_id)) | + Some(DefLateBoundRegion(_, node_id)) | + Some(DefFreeRegion(_, node_id)) => { + if let Some(lt) = cx.lt_substs.borrow().get(&node_id).cloned() { + return lt; + } + } + _ => {} + } + } Lifetime(self.name.to_string()) } } @@ -1117,7 +1127,7 @@ pub struct FnDecl { impl FnDecl { pub fn has_self(&self) -> bool { - return self.inputs.values.len() > 0 && self.inputs.values[0].name == "self"; + self.inputs.values.len() > 0 && self.inputs.values[0].name == "self" } pub fn self_type(&self) -> Option { @@ -1149,7 +1159,7 @@ impl<'a, 'tcx> Clean for (DefId, &'a ty::PolyFnSig<'tcx>) { let mut names = if cx.map.as_local_node_id(did).is_some() { vec![].into_iter() } else { - cx.tcx().sess.cstore.method_arg_names(did).into_iter() + cx.tcx().sess.cstore.fn_arg_names(did).into_iter() }.peekable(); FnDecl { output: Return(sig.0.output.clean(cx)), @@ -1159,8 +1169,8 @@ impl<'a, 'tcx> Clean for (DefId, &'a ty::PolyFnSig<'tcx>) { values: sig.0.inputs.iter().map(|t| { Argument { type_: t.clean(cx), - id: 0, - name: names.next().unwrap_or("".to_string()), + id: ast::CRATE_NODE_ID, + name: names.next().map_or("".to_string(), |name| name.to_string()), } }).collect(), }, @@ -1467,16 +1477,16 @@ pub enum PrimitiveType { #[derive(Clone, RustcEncodable, RustcDecodable, Copy, Debug)] pub enum TypeKind { - TypeEnum, - TypeFunction, - TypeModule, - TypeConst, - TypeStatic, - TypeStruct, - TypeUnion, - TypeTrait, - TypeVariant, - TypeTypedef, + Enum, + Function, + Module, + Const, + Static, + Struct, + Union, + Trait, + Variant, + Typedef, } pub trait GetDefId { @@ -1559,7 +1569,7 @@ impl PrimitiveType { None } - pub fn to_string(&self) -> &'static str { + pub fn as_str(&self) -> &'static str { match *self { PrimitiveType::Isize => "isize", PrimitiveType::I8 => "i8", @@ -1584,7 +1594,7 @@ impl PrimitiveType { } pub fn to_url_str(&self) -> &'static str { - self.to_string() + self.as_str() } /// Creates a rustdoc-specific node id for primitive types. 
@@ -1629,42 +1639,6 @@ impl From for PrimitiveType { } } -// Poor man's type parameter substitution at HIR level. -// Used to replace private type aliases in public signatures with their aliased types. -struct SubstAlias<'a, 'tcx: 'a> { - tcx: &'a ty::TyCtxt<'a, 'tcx, 'tcx>, - // Table type parameter definition -> substituted type - ty_substs: FnvHashMap, - // Table node id of lifetime parameter definition -> substituted lifetime - lt_substs: FnvHashMap, -} - -impl<'a, 'tcx: 'a, 'b: 'tcx> Folder for SubstAlias<'a, 'tcx> { - fn fold_ty(&mut self, ty: P) -> P { - if let hir::TyPath(..) = ty.node { - let def = self.tcx.expect_def(ty.id); - if let Some(new_ty) = self.ty_substs.get(&def).cloned() { - return P(new_ty); - } - } - hir::fold::noop_fold_ty(ty, self) - } - fn fold_lifetime(&mut self, lt: hir::Lifetime) -> hir::Lifetime { - let def = self.tcx.named_region_map.defs.get(<.id).cloned(); - match def { - Some(DefEarlyBoundRegion(_, node_id)) | - Some(DefLateBoundRegion(_, node_id)) | - Some(DefFreeRegion(_, node_id)) => { - if let Some(lt) = self.lt_substs.get(&node_id).cloned() { - return lt; - } - } - _ => {} - } - hir::fold::noop_fold_lifetime(lt, self) - } -} - impl Clean for hir::Ty { fn clean(&self, cx: &DocContext) -> Type { use rustc::hir::*; @@ -1674,8 +1648,8 @@ impl Clean for hir::Ty { TyRptr(ref l, ref m) => BorrowedRef {lifetime: l.clean(cx), mutability: m.mutbl.clean(cx), type_: box m.ty.clean(cx)}, - TyVec(ref ty) => Vector(box ty.clean(cx)), - TyFixedLengthVec(ref ty, ref e) => { + TySlice(ref ty) => Vector(box ty.clean(cx)), + TyArray(ref ty, ref e) => { let n = if let Some(tcx) = cx.tcx_opt() { use rustc_const_math::{ConstInt, ConstUsize}; use rustc_const_eval::eval_const_expr; @@ -1696,43 +1670,47 @@ impl Clean for hir::Ty { }, TyTup(ref tys) => Tuple(tys.clean(cx)), TyPath(None, ref path) => { - if let Some(tcx) = cx.tcx_opt() { - // Substitute private type aliases - let def = tcx.expect_def(self.id); + let tcx_and_def = cx.tcx_opt().map(|tcx| (tcx, tcx.expect_def(self.id))); + if let Some((_, def)) = tcx_and_def { + if let Some(new_ty) = cx.ty_substs.borrow().get(&def).cloned() { + return new_ty; + } + } + + let tcx_and_alias = tcx_and_def.and_then(|(tcx, def)| { if let Def::TyAlias(def_id) = def { - if let Some(node_id) = tcx.map.as_local_node_id(def_id) { + // Substitute private type aliases + tcx.map.as_local_node_id(def_id).and_then(|node_id| { if !cx.access_levels.borrow().is_exported(def_id) { - let item = tcx.map.expect_item(node_id); - if let hir::ItemTy(ref ty, ref generics) = item.node { - let provided_params = &path.segments.last().unwrap().parameters; - let mut ty_substs = FnvHashMap(); - let mut lt_substs = FnvHashMap(); - for (i, ty_param) in generics.ty_params.iter().enumerate() { - let ty_param_def = tcx.expect_def(ty_param.id); - if let Some(ty) = provided_params.types().get(i).cloned() - .cloned() { - ty_substs.insert(ty_param_def, ty.unwrap()); - } else if let Some(default) = ty_param.default.clone() { - ty_substs.insert(ty_param_def, default.unwrap()); - } - } - for (i, lt_param) in generics.lifetimes.iter().enumerate() { - if let Some(lt) = provided_params.lifetimes().get(i) - .cloned() - .cloned() { - lt_substs.insert(lt_param.lifetime.id, lt); - } - } - let mut subst_alias = SubstAlias { - tcx: &tcx, - ty_substs: ty_substs, - lt_substs: lt_substs - }; - return subst_alias.fold_ty(ty.clone()).clean(cx); - } + Some((tcx, &tcx.map.expect_item(node_id).node)) + } else { + None } + }) + } else { + None + } + }); + if let Some((tcx, &hir::ItemTy(ref 
ty, ref generics))) = tcx_and_alias { + let provided_params = &path.segments.last().unwrap().parameters; + let mut ty_substs = FnvHashMap(); + let mut lt_substs = FnvHashMap(); + for (i, ty_param) in generics.ty_params.iter().enumerate() { + let ty_param_def = tcx.expect_def(ty_param.id); + if let Some(ty) = provided_params.types().get(i).cloned() + .cloned() { + ty_substs.insert(ty_param_def, ty.unwrap().clean(cx)); + } else if let Some(default) = ty_param.default.clone() { + ty_substs.insert(ty_param_def, default.unwrap().clean(cx)); } } + for (i, lt_param) in generics.lifetimes.iter().enumerate() { + if let Some(lt) = provided_params.lifetimes().get(i).cloned() + .cloned() { + lt_substs.insert(lt_param.lifetime.id, lt.clean(cx)); + } + } + return cx.enter_alias(ty_substs, lt_substs, || ty.clean(cx)); } resolve_type(cx, path.clean(cx), self.id) } @@ -1808,15 +1786,15 @@ impl<'tcx> Clean for ty::Ty<'tcx> { type_params: Vec::new(), where_predicates: Vec::new() }, - decl: (cx.map.local_def_id(0), &fty.sig).clean(cx), + decl: (cx.map.local_def_id(ast::CRATE_NODE_ID), &fty.sig).clean(cx), abi: fty.abi, }), ty::TyAdt(def, substs) => { let did = def.did; let kind = match def.adt_kind() { - AdtKind::Struct => TypeStruct, - AdtKind::Union => TypeUnion, - AdtKind::Enum => TypeEnum, + AdtKind::Struct => TypeKind::Struct, + AdtKind::Union => TypeKind::Union, + AdtKind::Enum => TypeKind::Enum, }; inline::record_extern_fqn(cx, did, kind); let path = external_path(cx, &cx.tcx().item_name(did).as_str(), @@ -1830,7 +1808,7 @@ impl<'tcx> Clean for ty::Ty<'tcx> { } ty::TyTrait(ref obj) => { let did = obj.principal.def_id(); - inline::record_extern_fqn(cx, did, TypeTrait); + inline::record_extern_fqn(cx, did, TypeKind::Trait); let mut typarams = vec![]; obj.region_bound.clean(cx).map(|b| typarams.push(RegionBound(b))); @@ -1897,11 +1875,9 @@ impl Clean for hir::StructField { impl<'tcx> Clean for ty::FieldDefData<'tcx, 'static> { fn clean(&self, cx: &DocContext) -> Item { - // FIXME: possible O(n^2)-ness! Not my fault. 
- let attr_map = cx.tcx().sess.cstore.crate_struct_field_attrs(self.did.krate); Item { name: Some(self.name).clean(cx), - attrs: attr_map.get(&self.did).unwrap_or(&Vec::new()).clean(cx), + attrs: cx.tcx().get_attrs(self.did).clean(cx), source: Span::empty(), visibility: self.vis.clean(cx), stability: get_stability(cx, self.did), @@ -2048,7 +2024,7 @@ impl Clean for doctree::Variant { deprecation: self.depr.clean(cx), def_id: cx.map.local_def_id(self.def.id()), inner: VariantItem(Variant { - kind: struct_def_to_variant_kind(&self.def, cx), + kind: self.def.clean(cx), }), } } @@ -2056,15 +2032,15 @@ impl Clean for doctree::Variant { impl<'tcx> Clean for ty::VariantDefData<'tcx, 'static> { fn clean(&self, cx: &DocContext) -> Item { - let kind = match self.kind { - ty::VariantKind::Unit => CLikeVariant, - ty::VariantKind::Tuple => { - TupleVariant( + let kind = match self.ctor_kind { + CtorKind::Const => VariantKind::CLike, + CtorKind::Fn => { + VariantKind::Tuple( self.fields.iter().map(|f| f.unsubst_ty().clean(cx)).collect() ) } - ty::VariantKind::Struct => { - StructVariant(VariantStruct { + CtorKind::Fictive => { + VariantKind::Struct(VariantStruct { struct_type: doctree::Plain, fields_stripped: false, fields: self.fields.iter().map(|field| { @@ -2097,18 +2073,20 @@ impl<'tcx> Clean for ty::VariantDefData<'tcx, 'static> { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum VariantKind { - CLikeVariant, - TupleVariant(Vec), - StructVariant(VariantStruct), + CLike, + Tuple(Vec), + Struct(VariantStruct), } -fn struct_def_to_variant_kind(struct_def: &hir::VariantData, cx: &DocContext) -> VariantKind { - if struct_def.is_struct() { - StructVariant(struct_def.clean(cx)) - } else if struct_def.is_unit() { - CLikeVariant - } else { - TupleVariant(struct_def.fields().iter().map(|x| x.ty.clean(cx)).collect()) +impl Clean for hir::VariantData { + fn clean(&self, cx: &DocContext) -> VariantKind { + if self.is_struct() { + VariantKind::Struct(self.clean(cx)) + } else if self.is_unit() { + VariantKind::CLike + } else { + VariantKind::Tuple(self.fields().iter().map(|x| x.ty.clean(cx)).collect()) + } } } @@ -2547,7 +2525,7 @@ impl Clean> for doctree::Import { }); let (mut ret, inner) = match self.node { hir::ViewPathGlob(ref p) => { - (vec![], GlobImport(resolve_use_source(cx, p.clean(cx), self.id))) + (vec![], Import::Glob(resolve_use_source(cx, p.clean(cx), self.id))) } hir::ViewPathList(ref p, ref list) => { // Attempt to inline all reexported items, but be sure @@ -2573,8 +2551,7 @@ impl Clean> for doctree::Import { if remaining.is_empty() { return ret; } - (ret, ImportList(resolve_use_source(cx, p.clean(cx), self.id), - remaining)) + (ret, Import::List(resolve_use_source(cx, p.clean(cx), self.id), remaining)) } hir::ViewPathSimple(name, ref p) => { if !denied { @@ -2582,15 +2559,15 @@ impl Clean> for doctree::Import { return items; } } - (vec![], SimpleImport(name.clean(cx), - resolve_use_source(cx, p.clean(cx), self.id))) + (vec![], Import::Simple(name.clean(cx), + resolve_use_source(cx, p.clean(cx), self.id))) } }; ret.push(Item { name: None, attrs: self.attrs.clean(cx), source: self.whence.clean(cx), - def_id: cx.map.local_def_id(0), + def_id: cx.map.local_def_id(ast::CRATE_NODE_ID), visibility: self.vis.clean(cx), stability: None, deprecation: None, @@ -2603,11 +2580,11 @@ impl Clean> for doctree::Import { #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub enum Import { // use source as str; - SimpleImport(String, ImportSource), + Simple(String, ImportSource), // use 
source::*; - GlobImport(ImportSource), + Glob(ImportSource), // use source::{a, b, c}; - ImportList(ImportSource, Vec), + List(ImportSource, Vec), } #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] @@ -2725,7 +2702,7 @@ fn name_from_pat(p: &hir::Pat) -> String { }, PatKind::Range(..) => panic!("tried to get argument name from PatKind::Range, \ which is not allowed in function arguments"), - PatKind::Vec(ref begin, ref mid, ref end) => { + PatKind::Slice(ref begin, ref mid, ref end) => { let begin = begin.iter().map(|p| name_from_pat(&**p)); let mid = mid.as_ref().map(|p| format!("..{}", name_from_pat(&**p))).into_iter(); let end = end.iter().map(|p| name_from_pat(&**p)); @@ -2779,32 +2756,27 @@ fn resolve_type(cx: &DocContext, fn register_def(cx: &DocContext, def: Def) -> DefId { debug!("register_def({:?})", def); + let tcx = cx.tcx(); + let (did, kind) = match def { - Def::Fn(i) => (i, TypeFunction), - Def::TyAlias(i) => (i, TypeTypedef), - Def::Enum(i) => (i, TypeEnum), - Def::Trait(i) => (i, TypeTrait), - Def::Struct(i) => (i, TypeStruct), - Def::Union(i) => (i, TypeUnion), - Def::Mod(i) => (i, TypeModule), - Def::Static(i, _) => (i, TypeStatic), - Def::Variant(i, _) => (i, TypeEnum), - Def::SelfTy(Some(def_id), _) => (def_id, TypeTrait), - Def::SelfTy(_, Some(impl_id)) => { - // For Def::SelfTy() values inlined from another crate, the - // impl_id will be DUMMY_NODE_ID, which would cause problems. - // But we should never run into an impl from another crate here. - return cx.map.local_def_id(impl_id) + Def::Fn(i) => (i, TypeKind::Function), + Def::TyAlias(i) => (i, TypeKind::Typedef), + Def::Enum(i) => (i, TypeKind::Enum), + Def::Trait(i) => (i, TypeKind::Trait), + Def::Struct(i) => (i, TypeKind::Struct), + Def::Union(i) => (i, TypeKind::Union), + Def::Mod(i) => (i, TypeKind::Module), + Def::Static(i, _) => (i, TypeKind::Static), + Def::Variant(i) => (tcx.parent_def_id(i).unwrap(), TypeKind::Enum), + Def::SelfTy(Some(def_id), _) => (def_id, TypeKind::Trait), + Def::SelfTy(_, Some(impl_def_id)) => { + return impl_def_id } _ => return def.def_id() }; if did.is_local() { return did } - let tcx = match cx.tcx_opt() { - Some(tcx) => tcx, - None => return did - }; inline::record_extern_fqn(cx, did, kind); - if let TypeTrait = kind { + if let TypeKind::Trait = kind { let t = inline::build_external_trait(cx, tcx, did); cx.external_traits.borrow_mut().insert(did, t); } @@ -2992,7 +2964,7 @@ fn lang_struct(cx: &DocContext, did: Option, Some(did) => did, None => return fallback(box t.clean(cx)), }; - inline::record_extern_fqn(cx, did, TypeStruct); + inline::record_extern_fqn(cx, did, TypeKind::Struct); ResolvedPath { typarams: None, did: did, diff --git a/src/librustdoc/clean/simplify.rs b/src/librustdoc/clean/simplify.rs index 7ae177439064f..15e042f8c0809 100644 --- a/src/librustdoc/clean/simplify.rs +++ b/src/librustdoc/clean/simplify.rs @@ -141,7 +141,7 @@ pub fn ty_params(mut params: Vec) -> Vec { for param in &mut params { param.bounds = ty_bounds(mem::replace(&mut param.bounds, Vec::new())); } - return params; + params } fn ty_bounds(bounds: Vec) -> Vec { diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 1e3804955e940..f03b6a5ab3f1f 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -14,11 +14,12 @@ use rustc_driver::{driver, target_features, abort_on_err}; use rustc::dep_graph::DepGraph; use rustc::session::{self, config}; use rustc::hir::def_id::DefId; +use rustc::hir::def::Def; use rustc::middle::privacy::AccessLevels; use rustc::ty::{self, 
TyCtxt}; use rustc::hir::map as hir_map; use rustc::lint; -use rustc::util::nodemap::{FnvHashMap, FnvHashSet}; +use rustc::util::nodemap::FnvHashMap; use rustc_trans::back::link; use rustc_resolve as resolve; use rustc_metadata::cstore::CStore; @@ -29,7 +30,9 @@ use errors; use errors::emitter::ColorConfig; use std::cell::{RefCell, Cell}; +use std::mem; use std::rc::Rc; +use std::path::PathBuf; use visit_ast::RustdocVisitor; use clean; @@ -51,7 +54,7 @@ pub struct DocContext<'a, 'tcx: 'a> { pub map: &'a hir_map::Map<'tcx>, pub maybe_typed: MaybeTyped<'a, 'tcx>, pub input: Input, - pub populated_crate_impls: RefCell>, + pub populated_all_crate_impls: Cell, pub deref_trait_did: Cell>, pub deref_mut_trait_did: Cell>, // Note that external items for which `doc(hidden)` applies to are shown as @@ -63,6 +66,14 @@ pub struct DocContext<'a, 'tcx: 'a> { pub renderinfo: RefCell, /// Later on moved through `clean::Crate` into `html::render::CACHE_KEY` pub external_traits: RefCell>, + + // The current set of type and lifetime substitutions, + // for expanding type aliases at the HIR level: + + /// Table type parameter definition -> substituted type + pub ty_substs: RefCell>, + /// Table node id of lifetime parameter definition -> substituted lifetime + pub lt_substs: RefCell>, } impl<'b, 'tcx> DocContext<'b, 'tcx> { @@ -84,6 +95,22 @@ impl<'b, 'tcx> DocContext<'b, 'tcx> { let tcx_opt = self.tcx_opt(); tcx_opt.expect("tcx not present") } + + /// Call the closure with the given parameters set as + /// the substitutions for a type alias' RHS. + pub fn enter_alias(&self, + ty_substs: FnvHashMap, + lt_substs: FnvHashMap, + f: F) -> R + where F: FnOnce() -> R { + let (old_tys, old_lts) = + (mem::replace(&mut *self.ty_substs.borrow_mut(), ty_substs), + mem::replace(&mut *self.lt_substs.borrow_mut(), lt_substs)); + let r = f(); + *self.ty_substs.borrow_mut() = old_tys; + *self.lt_substs.borrow_mut() = old_lts; + r + } } pub trait DocAccessLevels { @@ -101,7 +128,8 @@ pub fn run_core(search_paths: SearchPaths, cfgs: Vec, externs: config::Externs, input: Input, - triple: Option) -> (clean::Crate, RenderInfo) + triple: Option, + maybe_sysroot: Option) -> (clean::Crate, RenderInfo) { // Parse, resolve, and typecheck the given crate. 
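The `enter_alias` helper added to `DocContext` above temporarily installs a set of type and lifetime substitutions, runs a closure, and then restores the previous tables. A minimal sketch of that save-and-restore pattern, with plain `String` maps standing in for rustdoc's HIR types (the `Ctx` type and its field are illustrative assumptions, not rustdoc's actual API):

```rust
use std::cell::RefCell;
use std::collections::HashMap;
use std::mem;

// Illustrative stand-in for rustdoc's DocContext: only the substitution
// table that matters for the pattern is modelled.
struct Ctx {
    ty_substs: RefCell<HashMap<String, String>>,
}

impl Ctx {
    /// Run `f` with `substs` installed as the current table, then put the
    /// previous table back so callers never observe the temporary state.
    fn enter_alias<F, R>(&self, substs: HashMap<String, String>, f: F) -> R
        where F: FnOnce() -> R
    {
        let old = mem::replace(&mut *self.ty_substs.borrow_mut(), substs);
        let r = f();
        *self.ty_substs.borrow_mut() = old;
        r
    }
}

fn main() {
    let cx = Ctx { ty_substs: RefCell::new(HashMap::new()) };

    let mut substs = HashMap::new();
    substs.insert("T".to_string(), "u32".to_string());

    // Inside the closure the substitution for `T` is visible...
    let resolved = cx.enter_alias(substs, || cx.ty_substs.borrow().get("T").cloned());
    assert_eq!(resolved, Some("u32".to_string()));

    // ...and afterwards the original (empty) table is back in place.
    assert!(cx.ty_substs.borrow().is_empty());
}
```

Because the tables live in `RefCell`s, the swap works through a shared `&self`, which is what lets `Clean` impls holding only a `&DocContext` expand a private type alias and then leave the context exactly as they found it.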
@@ -113,15 +141,16 @@ pub fn run_core(search_paths: SearchPaths, let warning_lint = lint::builtin::WARNINGS.name_lower(); let sessopts = config::Options { - maybe_sysroot: None, + maybe_sysroot: maybe_sysroot, search_paths: search_paths, - crate_types: vec!(config::CrateTypeRlib), - lint_opts: vec!((warning_lint, lint::Allow)), + crate_types: vec![config::CrateTypeRlib], + lint_opts: vec![(warning_lint, lint::Allow)], lint_cap: Some(lint::Allow), externs: externs, target_triple: triple.unwrap_or(config::host_triple().to_string()), // Ensure that rustdoc works even if rustc is feature-staged unstable_features: UnstableFeatures::Allow, + actually_rustdoc: true, ..config::basic_options().clone() }; @@ -134,14 +163,16 @@ pub fn run_core(search_paths: SearchPaths, let dep_graph = DepGraph::new(false); let _ignore = dep_graph.in_ignore(); let cstore = Rc::new(CStore::new(&dep_graph)); - let sess = session::build_session_(sessopts, &dep_graph, cpath, diagnostic_handler, - codemap, cstore.clone()); + let mut sess = session::build_session_( + sessopts, &dep_graph, cpath, diagnostic_handler, codemap, cstore.clone() + ); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let mut cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs)); target_features::add_configuration(&mut cfg, &sess); + sess.parse_sess.config = cfg; - let krate = panictry!(driver::phase_1_parse_input(&sess, cfg, &input)); + let krate = panictry!(driver::phase_1_parse_input(&sess, &input)); let name = link::find_crate_name(Some(&sess), &krate.attrs, &input); @@ -160,7 +191,7 @@ pub fn run_core(search_paths: SearchPaths, resolutions, &arenas, &name, - |tcx, _, analysis, _, result| { + |tcx, analysis, _, result| { if let Err(_) = result { sess.fatal("Compilation failed, aborting rustdoc"); } @@ -179,12 +210,14 @@ pub fn run_core(search_paths: SearchPaths, map: &tcx.map, maybe_typed: Typed(tcx), input: input, - populated_crate_impls: RefCell::new(FnvHashSet()), + populated_all_crate_impls: Cell::new(false), deref_trait_did: Cell::new(None), deref_mut_trait_did: Cell::new(None), access_levels: RefCell::new(access_levels), - external_traits: RefCell::new(FnvHashMap()), - renderinfo: RefCell::new(Default::default()), + external_traits: Default::default(), + renderinfo: Default::default(), + ty_substs: Default::default(), + lt_substs: Default::default(), }; debug!("crate: {:?}", ctxt.map.krate()); diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs index c2404f4294e96..609ae0c0e6daf 100644 --- a/src/librustdoc/doctree.rs +++ b/src/librustdoc/doctree.rs @@ -21,6 +21,7 @@ use syntax::ptr::P; use syntax_pos::{self, Span}; use rustc::hir; +use rustc::hir::def_id::CrateNum; pub struct Module { pub name: Option, @@ -53,7 +54,7 @@ impl Module { pub fn new(name: Option) -> Module { Module { name : name, - id: 0, + id: ast::CRATE_NODE_ID, vis: hir::Inherited, stab: None, depr: None, @@ -245,7 +246,7 @@ pub struct Macro { pub struct ExternCrate { pub name: Name, - pub cnum: ast::CrateNum, + pub cnum: CrateNum, pub path: Option, pub vis: hir::Visibility, pub attrs: hir::HirVec, diff --git a/src/librustdoc/externalfiles.rs b/src/librustdoc/externalfiles.rs index 2ecb071fcc2a4..d78f00497ca55 100644 --- a/src/librustdoc/externalfiles.rs +++ b/src/librustdoc/externalfiles.rs @@ -11,64 +11,79 @@ use std::fs::File; use std::io::prelude::*; use std::io; -use std::path::{PathBuf, Path}; +use std::path::Path; use std::str; #[derive(Clone)] pub struct ExternalHtml{ + /// Content that will be included 
inline in the section of a + /// rendered Markdown file or generated documentation pub in_header: String, + /// Content that will be included inline between and the content of + /// a rendered Markdown file or generated documentation pub before_content: String, + /// Content that will be included inline between the content and of + /// a rendered Markdown file or generated documentation pub after_content: String } impl ExternalHtml { pub fn load(in_header: &[String], before_content: &[String], after_content: &[String]) -> Option { - match (load_external_files(in_header), - load_external_files(before_content), - load_external_files(after_content)) { - (Some(ih), Some(bc), Some(ac)) => Some(ExternalHtml { - in_header: ih, - before_content: bc, - after_content: ac - }), - _ => None - } + load_external_files(in_header) + .and_then(|ih| + load_external_files(before_content) + .map(|bc| (ih, bc)) + ) + .and_then(|(ih, bc)| + load_external_files(after_content) + .map(|ac| (ih, bc, ac)) + ) + .map(|(ih, bc, ac)| + ExternalHtml { + in_header: ih, + before_content: bc, + after_content: ac, + } + ) } } -pub fn load_string(input: &Path) -> io::Result> { - let mut f = File::open(input)?; - let mut d = Vec::new(); - f.read_to_end(&mut d)?; - Ok(str::from_utf8(&d).map(|s| s.to_string()).ok()) +pub enum LoadStringError { + ReadFail, + BadUtf8, } -macro_rules! load_or_return { - ($input: expr, $cant_read: expr, $not_utf8: expr) => { - { - let input = PathBuf::from(&$input[..]); - match ::externalfiles::load_string(&input) { - Err(e) => { - let _ = writeln!(&mut io::stderr(), - "error reading `{}`: {}", input.display(), e); - return $cant_read; - } - Ok(None) => { - let _ = writeln!(&mut io::stderr(), - "error reading `{}`: not UTF-8", input.display()); - return $not_utf8; - } - Ok(Some(s)) => s - } +pub fn load_string>(file_path: P) -> Result { + let file_path = file_path.as_ref(); + let mut contents = vec![]; + let result = File::open(file_path) + .and_then(|mut f| f.read_to_end(&mut contents)); + if let Err(e) = result { + let _ = writeln!(&mut io::stderr(), + "error reading `{}`: {}", + file_path.display(), e); + return Err(LoadStringError::ReadFail); + } + match str::from_utf8(&contents) { + Ok(s) => Ok(s.to_string()), + Err(_) => { + let _ = writeln!(&mut io::stderr(), + "error reading `{}`: not UTF-8", + file_path.display()); + Err(LoadStringError::BadUtf8) } } } -pub fn load_external_files(names: &[String]) -> Option { +fn load_external_files(names: &[String]) -> Option { let mut out = String::new(); for name in names { - out.push_str(&*load_or_return!(&name, None, None)); + let s = match load_string(name) { + Ok(s) => s, + Err(_) => return None, + }; + out.push_str(&s); out.push('\n'); } Some(out) diff --git a/src/librustdoc/fold.rs b/src/librustdoc/fold.rs index 8d6ab221c4fce..e269d940bfabf 100644 --- a/src/librustdoc/fold.rs +++ b/src/librustdoc/fold.rs @@ -74,12 +74,12 @@ pub trait DocFolder : Sized { VariantItem(i) => { let i2 = i.clone(); // this clone is small match i.kind { - StructVariant(mut j) => { + VariantKind::Struct(mut j) => { let num_fields = j.fields.len(); j.fields = j.fields.into_iter().filter_map(|x| self.fold_item(x)).collect(); j.fields_stripped |= num_fields != j.fields.len() || j.fields.iter().any(|f| f.is_stripped()); - VariantItem(Variant {kind: StructVariant(j), ..i2}) + VariantItem(Variant {kind: VariantKind::Struct(j), ..i2}) }, _ => VariantItem(i2) } diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index 65992798ab099..625acce27bd64 100644 --- 
a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -18,8 +18,7 @@ use std::fmt; use std::iter::repeat; -use rustc::middle::cstore::LOCAL_CRATE; -use rustc::hir::def_id::DefId; +use rustc::hir::def_id::{DefId, LOCAL_CRATE}; use syntax::abi::Abi; use rustc::hir; @@ -43,7 +42,7 @@ pub struct UnsafetySpace(pub hir::Unsafety); #[derive(Copy, Clone)] pub struct ConstnessSpace(pub hir::Constness); /// Wrapper struct for properly emitting a method declaration. -pub struct Method<'a>(pub &'a clean::FnDecl); +pub struct Method<'a>(pub &'a clean::FnDecl, pub &'a str); /// Similar to VisSpace, but used for mutability #[derive(Copy, Clone)] pub struct MutableSpace(pub clean::Mutability); @@ -85,7 +84,7 @@ impl<'a, T: fmt::Display> fmt::Display for CommaSep<'a, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for (i, item) in self.0.iter().enumerate() { if i != 0 { write!(f, ", ")?; } - write!(f, "{}", item)?; + fmt::Display::fmt(item, f)?; } Ok(()) } @@ -98,7 +97,7 @@ impl<'a> fmt::Display for TyParamBounds<'a> { if i > 0 { f.write_str(" + ")?; } - write!(f, "{}", *bound)?; + fmt::Display::fmt(bound, f)?; } Ok(()) } @@ -107,35 +106,51 @@ impl<'a> fmt::Display for TyParamBounds<'a> { impl fmt::Display for clean::Generics { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.lifetimes.is_empty() && self.type_params.is_empty() { return Ok(()) } - f.write_str("<")?; + if f.alternate() { + f.write_str("<")?; + } else { + f.write_str("<")?; + } for (i, life) in self.lifetimes.iter().enumerate() { if i > 0 { - f.write_str(", ")?; + f.write_str(", ")?; } write!(f, "{}", *life)?; } if !self.type_params.is_empty() { if !self.lifetimes.is_empty() { - f.write_str(", ")?; + f.write_str(", ")?; } for (i, tp) in self.type_params.iter().enumerate() { if i > 0 { - f.write_str(", ")? + f.write_str(", ")? 
} f.write_str(&tp.name)?; if !tp.bounds.is_empty() { - write!(f, ": {}", TyParamBounds(&tp.bounds))?; + if f.alternate() { + write!(f, ": {:#}", TyParamBounds(&tp.bounds))?; + } else { + write!(f, ": {}", TyParamBounds(&tp.bounds))?; + } } if let Some(ref ty) = tp.default { - write!(f, " = {}", ty)?; + if f.alternate() { + write!(f, " = {:#}", ty)?; + } else { + write!(f, " = {}", ty)?; + } }; } } - f.write_str(">")?; + if f.alternate() { + f.write_str(">")?; + } else { + f.write_str(">")?; + } Ok(()) } } @@ -146,7 +161,11 @@ impl<'a> fmt::Display for WhereClause<'a> { if gens.where_predicates.is_empty() { return Ok(()); } - f.write_str(" where ")?; + if f.alternate() { + f.write_str(" ")?; + } else { + f.write_str(" where ")?; + } for (i, pred) in gens.where_predicates.iter().enumerate() { if i > 0 { f.write_str(", ")?; @@ -154,7 +173,11 @@ impl<'a> fmt::Display for WhereClause<'a> { match pred { &clean::WherePredicate::BoundPredicate { ref ty, ref bounds } => { let bounds = bounds; - write!(f, "{}: {}", ty, TyParamBounds(bounds))?; + if f.alternate() { + write!(f, "{:#}: {:#}", ty, TyParamBounds(bounds))?; + } else { + write!(f, "{}: {}", ty, TyParamBounds(bounds))?; + } } &clean::WherePredicate::RegionPredicate { ref lifetime, ref bounds } => { @@ -168,11 +191,17 @@ impl<'a> fmt::Display for WhereClause<'a> { } } &clean::WherePredicate::EqPredicate { ref lhs, ref rhs } => { - write!(f, "{} == {}", lhs, rhs)?; + if f.alternate() { + write!(f, "{:#} == {:#}", lhs, rhs)?; + } else { + write!(f, "{} == {}", lhs, rhs)?; + } } } } - f.write_str("")?; + if !f.alternate() { + f.write_str("")?; + } Ok(()) } } @@ -187,16 +216,28 @@ impl fmt::Display for clean::Lifetime { impl fmt::Display for clean::PolyTrait { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if !self.lifetimes.is_empty() { - f.write_str("for<")?; + if f.alternate() { + f.write_str("for<")?; + } else { + f.write_str("for<")?; + } for (i, lt) in self.lifetimes.iter().enumerate() { if i > 0 { f.write_str(", ")?; } write!(f, "{}", lt)?; } - f.write_str("> ")?; + if f.alternate() { + f.write_str("> ")?; + } else { + f.write_str("> ")?; + } + } + if f.alternate() { + write!(f, "{:#}", self.trait_) + } else { + write!(f, "{}", self.trait_) } - write!(f, "{}", self.trait_) } } @@ -211,7 +252,11 @@ impl fmt::Display for clean::TyParamBound { hir::TraitBoundModifier::None => "", hir::TraitBoundModifier::Maybe => "?", }; - write!(f, "{}{}", modifier_str, *ty) + if f.alternate() { + write!(f, "{}{:#}", modifier_str, *ty) + } else { + write!(f, "{}{}", modifier_str, *ty) + } } } } @@ -224,30 +269,46 @@ impl fmt::Display for clean::PathParameters { ref lifetimes, ref types, ref bindings } => { if !lifetimes.is_empty() || !types.is_empty() || !bindings.is_empty() { - f.write_str("<")?; + if f.alternate() { + f.write_str("<")?; + } else { + f.write_str("<")?; + } let mut comma = false; for lifetime in lifetimes { if comma { - f.write_str(", ")?; + f.write_str(", ")?; } comma = true; write!(f, "{}", *lifetime)?; } for ty in types { if comma { - f.write_str(", ")?; + f.write_str(", ")?; } comma = true; - write!(f, "{}", *ty)?; + if f.alternate() { + write!(f, "{:#}", *ty)?; + } else { + write!(f, "{}", *ty)?; + } } for binding in bindings { if comma { - f.write_str(", ")?; + f.write_str(", ")?; } comma = true; - write!(f, "{}", *binding)?; + if f.alternate() { + write!(f, "{:#}", *binding)?; + } else { + write!(f, "{}", *binding)?; + } + } + if f.alternate() { + f.write_str(">")?; + } else { + f.write_str(">")?; } - f.write_str(">")?; } } 
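
Much of the `html/format.rs` diff around here branches on `f.alternate()`: the same `Display` impl prints HTML when formatted with `{}` and an unlinked plain-text form when formatted with `{:#}`, which the renderer can then use for things such as measuring how wide a signature will print. A small self-contained sketch of the technique; the `TyLink` type and its markup are invented for illustration:

```rust
use std::fmt;

/// Toy stand-in for a cleaned type: renders as an HTML link by default and as
/// plain text when the "alternate" flag (`{:#}`) is set.
struct TyLink<'a> {
    name: &'a str,
    href: &'a str,
}

impl<'a> fmt::Display for TyLink<'a> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if f.alternate() {
            // `{:#}`: plain form, e.g. for computing the printed width.
            write!(f, "{}", self.name)
        } else {
            // `{}`: HTML form with the link markup.
            write!(f, "<a href='{}'>{}</a>", self.href, self.name)
        }
    }
}

fn main() {
    let ty = TyLink { name: "String", href: "std/string/struct.String.html" };
    assert_eq!(format!("{:#}", ty), "String");
    assert_eq!(format!("{}", ty),
               "<a href='std/string/struct.String.html'>String</a>");
}
```

This is also why several calls in the diff change from `write!(f, "{}", item)` to `fmt::Display::fmt(item, f)`: forwarding the same formatter keeps the alternate flag (and any other formatting flags) intact for nested items instead of silently resetting them.
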
clean::PathParameters::Parenthesized { ref inputs, ref output } => { @@ -258,12 +319,19 @@ impl fmt::Display for clean::PathParameters { f.write_str(", ")?; } comma = true; - write!(f, "{}", *ty)?; + if f.alternate() { + write!(f, "{:#}", *ty)?; + } else { + write!(f, "{}", *ty)?; + } } f.write_str(")")?; if let Some(ref ty) = *output { - f.write_str(" -> ")?; - write!(f, "{}", ty)?; + if f.alternate() { + write!(f, " -> {:#}", ty)?; + } else { + write!(f, " -> {}", ty)?; + } } } } @@ -274,7 +342,11 @@ impl fmt::Display for clean::PathParameters { impl fmt::Display for clean::PathSegment { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(&self.name)?; - write!(f, "{}", self.params) + if f.alternate() { + write!(f, "{:#}", self.params) + } else { + write!(f, "{}", self.params) + } } } @@ -288,7 +360,11 @@ impl fmt::Display for clean::Path { if i > 0 { f.write_str("::")? } - write!(f, "{}", seg)?; + if f.alternate() { + write!(f, "{:#}", seg)?; + } else { + write!(f, "{}", seg)?; + } } Ok(()) } @@ -350,7 +426,7 @@ fn resolved_path(w: &mut fmt::Formatter, did: DefId, path: &clean::Path, match rel_root { Some(mut root) => { for seg in &path.segments[..amt] { - if "super" == seg.name || "self" == seg.name { + if "super" == seg.name || "self" == seg.name || w.alternate() { write!(w, "{}::", seg.name)?; } else { root.push_str(&seg.name); @@ -369,7 +445,11 @@ fn resolved_path(w: &mut fmt::Formatter, did: DefId, path: &clean::Path, } } } - write!(w, "{}{}", HRef::new(did, &last.name), last.params)?; + if w.alternate() { + write!(w, "{:#}{:#}", HRef::new(did, &last.name), last.params)?; + } else { + write!(w, "{}{}", HRef::new(did, &last.name), last.params)?; + } Ok(()) } @@ -378,33 +458,35 @@ fn primitive_link(f: &mut fmt::Formatter, name: &str) -> fmt::Result { let m = cache(); let mut needs_termination = false; - match m.primitive_locations.get(&prim) { - Some(&LOCAL_CRATE) => { - let len = CURRENT_LOCATION_KEY.with(|s| s.borrow().len()); - let len = if len == 0 {0} else {len - 1}; - write!(f, "", - repeat("../").take(len).collect::(), - prim.to_url_str())?; - needs_termination = true; - } - Some(&cnum) => { - let loc = match m.extern_locations[&cnum] { - (ref cname, render::Remote(ref s)) => Some((cname, s.to_string())), - (ref cname, render::Local) => { - let len = CURRENT_LOCATION_KEY.with(|s| s.borrow().len()); - Some((cname, repeat("../").take(len).collect::())) - } - (_, render::Unknown) => None, - }; - if let Some((cname, root)) = loc { - write!(f, "", - root, - cname, + if !f.alternate() { + match m.primitive_locations.get(&prim) { + Some(&LOCAL_CRATE) => { + let len = CURRENT_LOCATION_KEY.with(|s| s.borrow().len()); + let len = if len == 0 {0} else {len - 1}; + write!(f, "", + repeat("../").take(len).collect::(), prim.to_url_str())?; needs_termination = true; } + Some(&cnum) => { + let loc = match m.extern_locations[&cnum] { + (ref cname, render::Remote(ref s)) => Some((cname, s.to_string())), + (ref cname, render::Local) => { + let len = CURRENT_LOCATION_KEY.with(|s| s.borrow().len()); + Some((cname, repeat("../").take(len).collect::())) + } + (_, render::Unknown) => None, + }; + if let Some((cname, root)) = loc { + write!(f, "", + root, + cname, + prim.to_url_str())?; + needs_termination = true; + } + } + None => {} } - None => {} } write!(f, "{}", name)?; if needs_termination { @@ -420,7 +502,7 @@ fn tybounds(w: &mut fmt::Formatter, Some(ref params) => { for param in params { write!(w, " + ")?; - write!(w, "{}", *param)?; + fmt::Display::fmt(param, w)?; } Ok(()) } @@ 
-437,10 +519,12 @@ impl<'a> HRef<'a> { impl<'a> fmt::Display for HRef<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match href(self.did) { - Some((url, shortty, fqp)) => { + Some((url, shortty, fqp)) => if !f.alternate() { write!(f, "{}", shortty, url, fqp.join("::"), self.text) - } + } else { + write!(f, "{}", self.text) + }, _ => write!(f, "{}", self.text), } } @@ -458,51 +542,70 @@ impl fmt::Display for clean::Type { tybounds(f, typarams) } clean::Infer => write!(f, "_"), - clean::Primitive(prim) => primitive_link(f, prim, prim.to_string()), + clean::Primitive(prim) => primitive_link(f, prim, prim.as_str()), clean::BareFunction(ref decl) => { - write!(f, "{}{}fn{}{}", - UnsafetySpace(decl.unsafety), - AbiSpace(decl.abi), - decl.generics, - decl.decl) + if f.alternate() { + write!(f, "{}{}fn{:#}{:#}", + UnsafetySpace(decl.unsafety), + AbiSpace(decl.abi), + decl.generics, + decl.decl) + } else { + write!(f, "{}{}fn{}{}", + UnsafetySpace(decl.unsafety), + AbiSpace(decl.abi), + decl.generics, + decl.decl) + } } clean::Tuple(ref typs) => { match &typs[..] { &[] => primitive_link(f, PrimitiveType::Tuple, "()"), &[ref one] => { primitive_link(f, PrimitiveType::Tuple, "(")?; - write!(f, "{},", one)?; - primitive_link(f, PrimitiveType::Tuple, ")") + //carry f.alternate() into this display w/o branching manually + fmt::Display::fmt(one, f)?; + primitive_link(f, PrimitiveType::Tuple, ",)") } many => { primitive_link(f, PrimitiveType::Tuple, "(")?; - write!(f, "{}", CommaSep(&many))?; + fmt::Display::fmt(&CommaSep(&many), f)?; primitive_link(f, PrimitiveType::Tuple, ")") } } } clean::Vector(ref t) => { primitive_link(f, PrimitiveType::Slice, &format!("["))?; - write!(f, "{}", t)?; + fmt::Display::fmt(t, f)?; primitive_link(f, PrimitiveType::Slice, &format!("]")) } clean::FixedVector(ref t, ref s) => { primitive_link(f, PrimitiveType::Array, "[")?; - write!(f, "{}", t)?; - primitive_link(f, PrimitiveType::Array, - &format!("; {}]", Escape(s))) + fmt::Display::fmt(t, f)?; + if f.alternate() { + primitive_link(f, PrimitiveType::Array, + &format!("; {}]", s)) + } else { + primitive_link(f, PrimitiveType::Array, + &format!("; {}]", Escape(s))) + } } clean::Never => f.write_str("!"), clean::RawPointer(m, ref t) => { match **t { clean::Generic(_) | clean::ResolvedPath {is_generic: true, ..} => { - primitive_link(f, clean::PrimitiveType::RawPointer, - &format!("*{}{}", RawMutableSpace(m), t)) + if f.alternate() { + primitive_link(f, clean::PrimitiveType::RawPointer, + &format!("*{}{:#}", RawMutableSpace(m), t)) + } else { + primitive_link(f, clean::PrimitiveType::RawPointer, + &format!("*{}{}", RawMutableSpace(m), t)) + } } _ => { primitive_link(f, clean::PrimitiveType::RawPointer, &format!("*{}", RawMutableSpace(m)))?; - write!(f, "{}", t) + fmt::Display::fmt(t, f) } } } @@ -516,18 +619,33 @@ impl fmt::Display for clean::Type { clean::Vector(ref bt) => { // BorrowedRef{ ... 
Vector(T) } is &[T] match **bt { clean::Generic(_) => - primitive_link(f, PrimitiveType::Slice, - &format!("&{}{}[{}]", lt, m, **bt)), + if f.alternate() { + primitive_link(f, PrimitiveType::Slice, + &format!("&{}{}[{:#}]", lt, m, **bt)) + } else { + primitive_link(f, PrimitiveType::Slice, + &format!("&{}{}[{}]", lt, m, **bt)) + }, _ => { - primitive_link(f, PrimitiveType::Slice, - &format!("&{}{}[", lt, m))?; - write!(f, "{}", **bt)?; + if f.alternate() { + primitive_link(f, PrimitiveType::Slice, + &format!("&{}{}[", lt, m))?; + write!(f, "{:#}", **bt)?; + } else { + primitive_link(f, PrimitiveType::Slice, + &format!("&{}{}[", lt, m))?; + write!(f, "{}", **bt)?; + } primitive_link(f, PrimitiveType::Slice, "]") } } } _ => { - write!(f, "&{}{}{}", lt, m, **ty) + if f.alternate() { + write!(f, "&{}{}{:#}", lt, m, **ty) + } else { + write!(f, "&{}{}{}", lt, m, **ty) + } } } } @@ -536,7 +654,11 @@ impl fmt::Display for clean::Type { if i != 0 { write!(f, " + ")?; } - write!(f, "{}", *bound)?; + if f.alternate() { + write!(f, "{:#}", *bound)?; + } else { + write!(f, "{}", *bound)?; + } } Ok(()) } @@ -546,7 +668,11 @@ impl fmt::Display for clean::Type { if i != 0 { write!(f, " + ")?; } - write!(f, "{}", *bound)?; + if f.alternate() { + write!(f, "{:#}", *bound)?; + } else { + write!(f, "{}", *bound)?; + } } Ok(()) } @@ -565,7 +691,11 @@ impl fmt::Display for clean::Type { ref self_type, trait_: box clean::ResolvedPath { did, ref typarams, .. }, } => { - write!(f, "{}::", self_type)?; + if f.alternate() { + write!(f, "{:#}::", self_type)?; + } else { + write!(f, "{}::", self_type)?; + } let path = clean::Path::singleton(name.clone()); resolved_path(f, did, &path, false)?; @@ -574,7 +704,11 @@ impl fmt::Display for clean::Type { Ok(()) } clean::QPath { ref name, ref self_type, ref trait_ } => { - write!(f, "<{} as {}>::{}", self_type, trait_, name) + if f.alternate() { + write!(f, "<{:#} as {:#}>::{}", self_type, trait_, name) + } else { + write!(f, "<{} as {}>::{}", self_type, trait_, name) + } } clean::Unique(..) => { panic!("should have been cleaned") @@ -584,24 +718,30 @@ impl fmt::Display for clean::Type { } fn fmt_impl(i: &clean::Impl, f: &mut fmt::Formatter, link_trait: bool) -> fmt::Result { - write!(f, "impl{} ", i.generics)?; + if f.alternate() { + write!(f, "impl{:#} ", i.generics)?; + } else { + write!(f, "impl{} ", i.generics)?; + } if let Some(ref ty) = i.trait_ { write!(f, "{}", if i.polarity == Some(clean::ImplPolarity::Negative) { "!" } else { "" })?; if link_trait { - write!(f, "{}", *ty)?; + fmt::Display::fmt(ty, f)?; } else { match *ty { clean::ResolvedPath{ typarams: None, ref path, is_generic: false, .. 
} => { let last = path.segments.last().unwrap(); - write!(f, "{}{}", last.name, last.params)?; + fmt::Display::fmt(&last.name, f)?; + fmt::Display::fmt(&last.params, f)?; } _ => unreachable!(), } } write!(f, " for ")?; } - write!(f, "{}{}", i.for_, WhereClause(&i.generics))?; + fmt::Display::fmt(&i.for_, f)?; + fmt::Display::fmt(&WhereClause(&i.generics), f)?; Ok(()) } @@ -619,11 +759,15 @@ pub fn fmt_impl_for_trait_page(i: &clean::Impl, f: &mut fmt::Formatter) -> fmt:: impl fmt::Display for clean::Arguments { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for (i, input) in self.values.iter().enumerate() { - if i > 0 { write!(f, ", ")?; } if !input.name.is_empty() { write!(f, "{}: ", input.name)?; } - write!(f, "{}", input.type_)?; + if f.alternate() { + write!(f, "{:#}", input.type_)?; + } else { + write!(f, "{}", input.type_)?; + } + if i + 1 < self.values.len() { write!(f, ", ")?; } } Ok(()) } @@ -633,6 +777,7 @@ impl fmt::Display for clean::FunctionRetTy { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { clean::Return(clean::Tuple(ref tys)) if tys.is_empty() => Ok(()), + clean::Return(ref ty) if f.alternate() => write!(f, " -> {:#}", ty), clean::Return(ref ty) => write!(f, " -> {}", ty), clean::DefaultReturn => Ok(()), } @@ -642,9 +787,17 @@ impl fmt::Display for clean::FunctionRetTy { impl fmt::Display for clean::FnDecl { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if self.variadic { - write!(f, "({args}, ...){arrow}", args = self.inputs, arrow = self.output) + if f.alternate() { + write!(f, "({args:#}, ...){arrow:#}", args = self.inputs, arrow = self.output) + } else { + write!(f, "({args}, ...){arrow}", args = self.inputs, arrow = self.output) + } } else { - write!(f, "({args}){arrow}", args = self.inputs, arrow = self.output) + if f.alternate() { + write!(f, "({args:#}){arrow:#}", args = self.inputs, arrow = self.output) + } else { + write!(f, "({args}){arrow}", args = self.inputs, arrow = self.output) + } } } } @@ -652,30 +805,89 @@ impl fmt::Display for clean::FnDecl { impl<'a> fmt::Display for Method<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let decl = self.0; + let indent = self.1; + let amp = if f.alternate() { "&" } else { "&" }; let mut args = String::new(); + let mut args_plain = String::new(); for (i, input) in decl.inputs.values.iter().enumerate() { - if i > 0 || !args.is_empty() { args.push_str(", "); } if let Some(selfty) = input.to_self() { match selfty { - clean::SelfValue => args.push_str("self"), + clean::SelfValue => { + args.push_str("self"); + args_plain.push_str("self"); + } clean::SelfBorrowed(Some(ref lt), mtbl) => { - args.push_str(&format!("&{} {}self", *lt, MutableSpace(mtbl))); + args.push_str(&format!("{}{} {}self", amp, *lt, MutableSpace(mtbl))); + args_plain.push_str(&format!("&{} {}self", *lt, MutableSpace(mtbl))); } clean::SelfBorrowed(None, mtbl) => { - args.push_str(&format!("&{}self", MutableSpace(mtbl))); + args.push_str(&format!("{}{}self", amp, MutableSpace(mtbl))); + args_plain.push_str(&format!("&{}self", MutableSpace(mtbl))); } clean::SelfExplicit(ref typ) => { - args.push_str(&format!("self: {}", *typ)); + if f.alternate() { + args.push_str(&format!("self: {:#}", *typ)); + } else { + args.push_str(&format!("self: {}", *typ)); + } + args_plain.push_str(&format!("self: {:#}", *typ)); } } } else { + if i > 0 { + args.push_str("
"); + args_plain.push_str(" "); + } if !input.name.is_empty() { args.push_str(&format!("{}: ", input.name)); + args_plain.push_str(&format!("{}: ", input.name)); } - args.push_str(&format!("{}", input.type_)); + + if f.alternate() { + args.push_str(&format!("{:#}", input.type_)); + } else { + args.push_str(&format!("{}", input.type_)); + } + args_plain.push_str(&format!("{:#}", input.type_)); + } + if i + 1 < decl.inputs.values.len() { + args.push_str(","); + args_plain.push_str(","); } } - write!(f, "({args}){arrow}", args = args, arrow = decl.output) + + if decl.variadic { + args.push_str(",
..."); + args_plain.push_str(", ..."); + } + + let arrow_plain = format!("{:#}", decl.output); + let arrow = if f.alternate() { + format!("{:#}", decl.output) + } else { + format!("{}", decl.output) + }; + + let mut output: String; + let plain: String; + if arrow.is_empty() { + output = format!("({})", args); + plain = format!("{}({})", indent.replace(" ", " "), args_plain); + } else { + output = format!("({args})
{arrow}", args = args, arrow = arrow); + plain = format!("{indent}({args}){arrow}", + indent = indent.replace(" ", " "), + args = args_plain, + arrow = arrow_plain); + } + + if plain.len() > 80 { + let pad = format!("
{}", indent); + output = output.replace("
", &pad); + } else { + output = output.replace("
", ""); + } + write!(f, "{}", output) } } @@ -709,17 +921,17 @@ impl fmt::Display for ConstnessSpace { impl fmt::Display for clean::Import { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { - clean::SimpleImport(ref name, ref src) => { + clean::Import::Simple(ref name, ref src) => { if *name == src.path.last_name() { write!(f, "use {};", *src) } else { write!(f, "use {} as {};", *src, *name) } } - clean::GlobImport(ref src) => { + clean::Import::Glob(ref src) => { write!(f, "use {}::*;", *src) } - clean::ImportList(ref src, ref names) => { + clean::Import::List(ref src, ref names) => { write!(f, "use {}::{{", *src)?; for (i, n) in names.iter().enumerate() { if i > 0 { @@ -769,7 +981,11 @@ impl fmt::Display for clean::ViewListIdent { impl fmt::Display for clean::TypeBinding { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "{}={}", self.name, self.ty) + if f.alternate() { + write!(f, "{}={:#}", self.name, self.ty) + } else { + write!(f, "{}={}", self.name, self.ty) + } } } @@ -793,10 +1009,11 @@ impl fmt::Display for RawMutableSpace { impl fmt::Display for AbiSpace { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let quot = if f.alternate() { "\"" } else { """ }; match self.0 { Abi::Rust => Ok(()), Abi::C => write!(f, "extern "), - abi => write!(f, "extern "{}" ", abi.name()), + abi => write!(f, "extern {0}{1}{0} ", quot, abi.name()), } } } diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 855588a4c3a4b..bd47b1e7c121c 100644 --- a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -17,7 +17,7 @@ //! the `render_inner_with_highlighting` or `render_with_highlighting` //! functions. For more advanced use cases (if you want to supply your own css //! classes or control how the HTML is generated, or even generate something -//! other then HTML), then you should implement the the `Writer` trait and use a +//! other then HTML), then you should implement the `Writer` trait and use a //! `Classifier`. use html::escape::Escape; @@ -104,6 +104,7 @@ pub enum Class { Lifetime, PreludeTy, PreludeVal, + QuestionMark, } /// Trait that controls writing the output of syntax highlighting. Users should @@ -237,8 +238,10 @@ impl<'a> Classifier<'a> { token::Dot | token::DotDot | token::DotDotDot | token::Comma | token::Semi | token::Colon | token::ModSep | token::LArrow | token::OpenDelim(_) | token::CloseDelim(token::Brace) | token::CloseDelim(token::Paren) | - token::CloseDelim(token::NoDelim) | - token::Question => Class::None, + token::CloseDelim(token::NoDelim) => Class::None, + + token::Question => Class::QuestionMark, + token::Dollar => { if self.lexer.peek().tok.is_ident() { self.in_macro_nonterminal = true; @@ -292,7 +295,9 @@ impl<'a> Classifier<'a> { "Option" | "Result" => Class::PreludeTy, "Some" | "None" | "Ok" | "Err" => Class::PreludeVal, + "$crate" => Class::KeyWord, _ if tas.tok.is_any_keyword() => Class::KeyWord, + _ => { if self.in_macro_nonterminal { self.in_macro_nonterminal = false; @@ -307,9 +312,6 @@ impl<'a> Classifier<'a> { } } - // Special macro vars are like keywords. - token::SpecialVarNt(_) => Class::KeyWord, - token::Lifetime(..) => Class::Lifetime, token::Underscore | token::Eof | token::Interpolated(..) 
| @@ -348,6 +350,7 @@ impl Class { Class::Lifetime => "lifetime", Class::PreludeTy => "prelude-ty", Class::PreludeVal => "prelude-val", + Class::QuestionMark => "question-mark" } } } diff --git a/src/librustdoc/html/item_type.rs b/src/librustdoc/html/item_type.rs index b93dc17dbdd7d..f584c4e2f4d9c 100644 --- a/src/librustdoc/html/item_type.rs +++ b/src/librustdoc/html/item_type.rs @@ -90,16 +90,16 @@ impl<'a> From<&'a clean::Item> for ItemType { impl From for ItemType { fn from(kind: clean::TypeKind) -> ItemType { match kind { - clean::TypeStruct => ItemType::Struct, - clean::TypeUnion => ItemType::Union, - clean::TypeEnum => ItemType::Enum, - clean::TypeFunction => ItemType::Function, - clean::TypeTrait => ItemType::Trait, - clean::TypeModule => ItemType::Module, - clean::TypeStatic => ItemType::Static, - clean::TypeConst => ItemType::Constant, - clean::TypeVariant => ItemType::Variant, - clean::TypeTypedef => ItemType::Typedef, + clean::TypeKind::Struct => ItemType::Struct, + clean::TypeKind::Union => ItemType::Union, + clean::TypeKind::Enum => ItemType::Enum, + clean::TypeKind::Function => ItemType::Function, + clean::TypeKind::Trait => ItemType::Trait, + clean::TypeKind::Module => ItemType::Module, + clean::TypeKind::Static => ItemType::Static, + clean::TypeKind::Const => ItemType::Constant, + clean::TypeKind::Variant => ItemType::Variant, + clean::TypeKind::Typedef => ItemType::Typedef, } } } diff --git a/src/librustdoc/html/layout.rs b/src/librustdoc/html/layout.rs index 151e138efefbf..5353642e29425 100644 --- a/src/librustdoc/html/layout.rs +++ b/src/librustdoc/html/layout.rs @@ -19,7 +19,6 @@ pub struct Layout { pub favicon: String, pub external_html: ExternalHtml, pub krate: String, - pub playground_url: String, } pub struct Page<'a> { @@ -136,11 +135,9 @@ r##" - {play_js} "##, @@ -174,12 +171,6 @@ r##" after_content = layout.external_html.after_content, sidebar = *sidebar, krate = layout.krate, - play_url = layout.playground_url, - play_js = if layout.playground_url.is_empty() { - "".to_string() - } else { - format!(r#""#, page.root_path) - }, ) } diff --git a/src/librustdoc/html/markdown.rs b/src/librustdoc/html/markdown.rs index aff5a964f75cc..67cf12f4f4a6e 100644 --- a/src/librustdoc/html/markdown.rs +++ b/src/librustdoc/html/markdown.rs @@ -27,12 +27,11 @@ #![allow(non_camel_case_types)] use libc; -use rustc::session::config::get_unstable_features_setting; use std::ascii::AsciiExt; use std::cell::RefCell; use std::default::Default; use std::ffi::CString; -use std::fmt; +use std::fmt::{self, Write}; use std::slice; use std::str; use syntax::feature_gate::UnstableFeatures; @@ -215,7 +214,9 @@ fn collapse_whitespace(s: &str) -> String { s.split_whitespace().collect::>().join(" ") } -thread_local!(pub static PLAYGROUND_KRATE: RefCell>> = { +// Information about the playground if a URL has been specified, containing an +// optional crate name and the URL. 
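
Per the comment above, playground information now lives in a thread-local slot holding an optional pair of crate name and URL: it is filled in once, from the crate's `html_playground_url` attribute or the `--markdown-playground-url` flag, and read whenever a code block is rendered. A minimal sketch of that set-once/read-later pattern; the helper names, the example URL, and the bare `?code=` query are simplifications (the real renderer also percent-encodes the snippet and appends a version channel):

```rust
use std::cell::RefCell;

// Optional playground configuration: an optional crate name plus the URL.
thread_local!(static PLAYGROUND: RefCell<Option<(Option<String>, String)>> = {
    RefCell::new(None)
});

/// Record the playground URL the first time one is seen; later calls are ignored.
fn set_playground(krate: Option<String>, url: String) {
    PLAYGROUND.with(|slot| {
        if slot.borrow().is_none() {
            *slot.borrow_mut() = Some((krate, url));
        }
    });
}

/// Build a "Run" link target for a code block, or `None` if nothing was configured.
fn playground_link(code: &str) -> Option<String> {
    PLAYGROUND.with(|slot| {
        slot.borrow().as_ref().and_then(|&(_, ref url)| {
            if url.is_empty() {
                None
            } else {
                Some(format!("{}?code={}", url, code))
            }
        })
    })
}

fn main() {
    assert!(playground_link("fn main() {}").is_none());
    set_playground(Some("mycrate".to_string()),
                   "https://play.example.org/".to_string());
    println!("{:?}", playground_link("fn main() {}"));
}
```
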
+thread_local!(pub static PLAYGROUND: RefCell, String)>> = { RefCell::new(None) }); @@ -249,24 +250,53 @@ pub fn render(w: &mut fmt::Formatter, s: &str, print_toc: bool) -> fmt::Result { }); let text = lines.collect::>().join("\n"); if rendered { return } - PLAYGROUND_KRATE.with(|krate| { + PLAYGROUND.with(|play| { // insert newline to clearly separate it from the // previous block so we can shorten the html output let mut s = String::from("\n"); - krate.borrow().as_ref().map(|krate| { + let playground_button = play.borrow().as_ref().and_then(|&(ref krate, ref url)| { + if url.is_empty() { + return None; + } let test = origtext.lines().map(|l| { stripped_filtered_line(l).unwrap_or(l) }).collect::>().join("\n"); let krate = krate.as_ref().map(|s| &**s); let test = test::maketest(&test, krate, false, &Default::default()); - s.push_str(&format!("{}", Escape(&test))); + let channel = if test.contains("#![feature(") { + "&version=nightly" + } else { + "" + }; + // These characters don't need to be escaped in a URI. + // FIXME: use a library function for percent encoding. + fn dont_escape(c: u8) -> bool { + (b'a' <= c && c <= b'z') || + (b'A' <= c && c <= b'Z') || + (b'0' <= c && c <= b'9') || + c == b'-' || c == b'_' || c == b'.' || + c == b'~' || c == b'!' || c == b'\'' || + c == b'(' || c == b')' || c == b'*' + } + let mut test_escaped = String::new(); + for b in test.bytes() { + if dont_escape(b) { + test_escaped.push(char::from(b)); + } else { + write!(test_escaped, "%{:02X}", b).unwrap(); + } + } + Some(format!( + r#"Run"#, + url, test_escaped, channel + )) }); s.push_str(&highlight::render_with_highlighting( &text, Some("rust-example-rendered"), None, - Some("Run"))); + playground_button.as_ref().map(String::as_str))); let output = CString::new(s).unwrap(); hoedown_buffer_puts(ob, output.as_ptr()); }) @@ -478,13 +508,10 @@ impl LangString { let mut data = LangString::all_false(); let mut allow_compile_fail = false; let mut allow_error_code_check = false; - match get_unstable_features_setting() { - UnstableFeatures::Allow | UnstableFeatures::Cheat => { - allow_compile_fail = true; - allow_error_code_check = true; - } - _ => {}, - }; + if UnstableFeatures::from_environment().is_nightly_build() { + allow_compile_fail = true; + allow_error_code_check = true; + } let tokens = string.split(|c: char| !(c == '_' || c == '-' || c.is_alphanumeric()) @@ -617,7 +644,7 @@ mod tests { t("test_harness", false, false, false, true, true, false, Vec::new()); t("compile_fail", false, true, false, true, false, true, Vec::new()); t("E0450", false, false, false, true, false, false, - vec!("E0450".to_owned())); + vec!["E0450".to_owned()]); t("{.no_run .example}", false, true, false, true, false, false, Vec::new()); t("{.sh .should_panic}", true, false, false, true, false, false, Vec::new()); t("{.example .rust}", false, false, false, true, false, false, Vec::new()); diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index 8cc9bbb422ae6..a848a011f88db 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -53,13 +53,11 @@ use std::sync::Arc; use externalfiles::ExternalHtml; use serialize::json::{ToJson, Json, as_json}; -use syntax::{abi, ast}; +use syntax::abi; use syntax::feature_gate::UnstableFeatures; -use rustc::middle::cstore::LOCAL_CRATE; -use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; +use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, LOCAL_CRATE}; use rustc::middle::privacy::AccessLevels; use rustc::middle::stability; -use 
rustc::session::config::get_unstable_features_setting; use rustc::hir; use rustc::util::nodemap::{FnvHashMap, FnvHashSet}; use rustc_data_structures::flock; @@ -91,9 +89,6 @@ pub struct Context { /// Current hierarchy of components leading down to what's currently being /// rendered pub current: Vec, - /// String representation of how to get back to the root path of the 'doc/' - /// folder in terms of a relative URL. - pub root_path: String, /// The current destination folder of where HTML artifacts should be placed. /// This changes as the context descends into the module hierarchy. pub dst: PathBuf, @@ -246,10 +241,10 @@ pub struct Cache { pub implementors: FnvHashMap>, /// Cache of where external crate documentation can be found. - pub extern_locations: FnvHashMap, + pub extern_locations: FnvHashMap, /// Cache of where documentation for primitives can be found. - pub primitive_locations: FnvHashMap, + pub primitive_locations: FnvHashMap, // Note that external items for which `doc(hidden)` applies to are shown as // non-reachable while local items aren't. This is because we're reusing @@ -454,7 +449,6 @@ pub fn run(mut krate: clean::Crate, favicon: "".to_string(), external_html: external_html.clone(), krate: krate.name.clone(), - playground_url: "".to_string(), }, css_file_extension: css_file_extension.clone(), }; @@ -474,11 +468,10 @@ pub fn run(mut krate: clean::Crate, } clean::NameValue(ref x, ref s) if "html_playground_url" == *x => { - scx.layout.playground_url = s.to_string(); - markdown::PLAYGROUND_KRATE.with(|slot| { + markdown::PLAYGROUND.with(|slot| { if slot.borrow().is_none() { let name = krate.name.clone(); - *slot.borrow_mut() = Some(Some(name)); + *slot.borrow_mut() = Some((Some(name), s.clone())); } }); } @@ -498,7 +491,6 @@ pub fn run(mut krate: clean::Crate, krate = render_sources(&dst, &mut scx, krate)?; let cx = Context { current: Vec::new(), - root_path: String::new(), dst: dst, render_redirect_pages: false, shared: Arc::new(scx), @@ -593,7 +585,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String { for &(did, ref item) in orphan_impl_items { if let Some(&(ref fqp, _)) = paths.get(&did) { search_index.push(IndexItem { - ty: item_type(item), + ty: item.type_(), name: item.name.clone().unwrap(), path: fqp[..fqp.len() - 1].join("::"), desc: Escape(&shorter(item.doc_value())).to_string(), @@ -665,8 +657,6 @@ fn write_shared(cx: &Context, include_bytes!("static/jquery-2.1.4.min.js"))?; write(cx.dst.join("main.js"), include_bytes!("static/main.js"))?; - write(cx.dst.join("playpen.js"), - include_bytes!("static/playpen.js"))?; write(cx.dst.join("rustdoc.css"), include_bytes!("static/rustdoc.css"))?; write(cx.dst.join("main.css"), @@ -728,7 +718,7 @@ fn write_shared(cx: &Context, ret.push(line.to_string()); } } - return Ok(ret); + Ok(ret) } // Update the search index @@ -837,11 +827,6 @@ fn mkdir(path: &Path) -> io::Result<()> { } } -/// Returns a documentation-level item type from the item. -fn item_type(item: &clean::Item) -> ItemType { - ItemType::from(item) -} - /// Takes a path to a source file and cleans the path to it. This canonicalizes /// things like ".." to components which preserve the "top down" hierarchy of a /// static HTML tree. Each component in the cleaned path will be passed as an @@ -1077,7 +1062,7 @@ impl DocFolder for Cache { // inserted later on when serializing the search-index. 
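
The markdown renderer above builds that playground link by percent-encoding the example source itself, passing a byte through only if it is unreserved in a URI (the `dont_escape` helper). A standalone sketch of the same encoding step; the `playground_url` wrapper and the base URL are illustrative, and, as the FIXME in the diff notes, a real implementation would normally lean on a library for percent encoding:

```rust
use std::fmt::Write;

/// Bytes that may appear literally in a URI query component; everything else
/// is emitted as a %XX escape.
fn dont_escape(c: u8) -> bool {
    (b'a' <= c && c <= b'z') ||
    (b'A' <= c && c <= b'Z') ||
    (b'0' <= c && c <= b'9') ||
    c == b'-' || c == b'_' || c == b'.' ||
    c == b'~' || c == b'!' || c == b'\'' ||
    c == b'(' || c == b')' || c == b'*'
}

/// Percent-encode `test` and splice it into a playground URL.
fn playground_url(base: &str, test: &str) -> String {
    let mut test_escaped = String::new();
    for b in test.bytes() {
        if dont_escape(b) {
            test_escaped.push(char::from(b));
        } else {
            write!(test_escaped, "%{:02X}", b).unwrap();
        }
    }
    format!("{}?code={}", base, test_escaped)
}

fn main() {
    let url = playground_url("https://play.example.org/",
                             "fn main() { println!(\"hi\"); }");
    println!("{}", url);
}
```
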
if item.def_id.index != CRATE_DEF_INDEX { self.search_index.push(IndexItem { - ty: item_type(&item), + ty: item.type_(), name: s.to_string(), path: path.join("::").to_string(), desc: Escape(&shorter(item.doc_value())).to_string(), @@ -1124,7 +1109,7 @@ impl DocFolder for Cache { self.access_levels.is_public(item.def_id) { self.paths.insert(item.def_id, - (self.stack.clone(), item_type(&item))); + (self.stack.clone(), item.type_())); } } // link variants to their parent enum because pages aren't emitted @@ -1137,7 +1122,7 @@ impl DocFolder for Cache { clean::PrimitiveItem(..) if item.visibility.is_some() => { self.paths.insert(item.def_id, (self.stack.clone(), - item_type(&item))); + item.type_())); } _ => {} @@ -1219,7 +1204,7 @@ impl DocFolder for Cache { self.seen_mod = orig_seen_mod; self.stripped_mod = orig_stripped_mod; self.parent_is_trait_impl = orig_parent_is_trait_impl; - return ret; + ret } } @@ -1232,6 +1217,12 @@ impl<'a> Cache { } impl Context { + /// String representation of how to get back to the root path of the 'doc/' + /// folder in terms of a relative URL. + fn root_path(&self) -> String { + repeat("../").take(self.current.len()).collect::() + } + /// Recurse in the directory structure and change the "root path" to make /// sure it always points to the top (relatively) fn recurse(&mut self, s: String, f: F) -> T where @@ -1242,7 +1233,6 @@ impl Context { } let prev = self.dst.clone(); self.dst.push(&s); - self.root_path.push_str("../"); self.current.push(s); info!("Recursing into {}", self.dst.display()); @@ -1253,11 +1243,9 @@ impl Context { // Go back to where we were at self.dst = prev; - let len = self.root_path.len(); - self.root_path.truncate(len - 3); self.current.pop().unwrap(); - return ret; + ret } /// Main method for rendering a crate. 
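
`Context::root_path` above replaces a mutable `root_path` string that `recurse` used to push onto and truncate: the relative prefix back to the documentation root is now derived on demand from the depth of `self.current`. A tiny sketch of the same idea, with a plain `Vec<String>` standing in for the real rendering context:

```rust
use std::iter::repeat;

/// Stand-in for the rendering context: `current` is the module path currently
/// being rendered, e.g. ["mycrate", "foo", "bar"].
struct Context {
    current: Vec<String>,
}

impl Context {
    /// Relative URL prefix that leads back to the `doc/` root: one "../" per
    /// path component.
    fn root_path(&self) -> String {
        repeat("../").take(self.current.len()).collect()
    }
}

fn main() {
    let cx = Context {
        current: vec!["mycrate".to_string(), "foo".to_string(), "bar".to_string()],
    };
    assert_eq!(cx.root_path(), "../../../");
}
```

Deriving the prefix from the path length removes the bookkeeping in `recurse` (the paired `push_str("../")` and `truncate` calls) and makes it impossible for the two pieces of state to drift apart.
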
@@ -1272,7 +1260,7 @@ impl Context { item.name = Some(krate.name); // render the crate documentation - let mut work = vec!((self, item)); + let mut work = vec![(self, item)]; while let Some((mut cx, item)) = work.pop() { cx.item(item, |cx, item| { @@ -1306,7 +1294,7 @@ impl Context { title.push_str(it.name.as_ref().unwrap()); } title.push_str(" - Rust"); - let tyname = item_type(it).css_class(); + let tyname = it.type_().css_class(); let desc = if it.is_crate() { format!("API documentation for the Rust `{}` crate.", self.shared.layout.krate) @@ -1317,7 +1305,7 @@ impl Context { let keywords = make_item_keywords(it); let page = layout::Page { css_class: tyname, - root_path: &self.root_path, + root_path: &self.root_path(), title: &title, description: &desc, keywords: &keywords, @@ -1331,8 +1319,7 @@ impl Context { &Item{ cx: self, item: it }, self.shared.css_file_extension.is_some())?; } else { - let mut url = repeat("../").take(self.current.len()) - .collect::(); + let mut url = self.root_path(); if let Some(&(ref names, ty)) = cache().paths.get(&it.def_id) { for name in &names[..names.len() - 1] { url.push_str(name); @@ -1409,7 +1396,7 @@ impl Context { // buf will be empty if the item is stripped and there is no redirect for it if !buf.is_empty() { let name = item.name.as_ref().unwrap(); - let item_type = item_type(&item); + let item_type = item.type_(); let file_name = &item_path(item_type, name); let joint_dst = self.dst.join(file_name); try_err!(fs::create_dir_all(&self.dst), &self.dst); @@ -1446,7 +1433,7 @@ impl Context { for item in &m.items { if maybe_ignore_item(item) { continue } - let short = item_type(item).css_class(); + let short = item.type_().css_class(); let myname = match item.name { None => continue, Some(ref s) => s.to_string(), @@ -1459,7 +1446,7 @@ impl Context { for (_, items) in &mut map { items.sort(); } - return map; + map } } @@ -1494,7 +1481,7 @@ impl<'a> Item<'a> { }).map(|l| &l.1); let root = match root { Some(&Remote(ref s)) => s.to_string(), - Some(&Local) => self.cx.root_path.clone(), + Some(&Local) => self.cx.root_path(), None | Some(&Unknown) => return None, }; Some(format!("{root}/{krate}/macro.{name}.html?gotomacrosrc=1", @@ -1509,7 +1496,7 @@ impl<'a> Item<'a> { let path = PathBuf::from(&self.item.source.filename); self.cx.shared.local_sources.get(&path).map(|path| { format!("{root}src/{krate}/{path}#{href}", - root = self.cx.root_path, + root = self.cx.root_path(), krate = self.cx.shared.layout.krate, path = path, href = href) @@ -1533,7 +1520,7 @@ impl<'a> Item<'a> { }; let mut path = match cache.extern_locations.get(&self.item.def_id.krate) { Some(&(_, Remote(ref s))) => s.to_string(), - Some(&(_, Local)) => self.cx.root_path.clone(), + Some(&(_, Local)) => self.cx.root_path(), Some(&(_, Unknown)) => return None, None => return None, }; @@ -1543,7 +1530,7 @@ impl<'a> Item<'a> { } Some(format!("{path}{file}?gotosrc={goto}", path = path, - file = item_path(item_type(self.item), external_path.last().unwrap()), + file = item_path(self.item.type_(), external_path.last().unwrap()), goto = self.item.def_id.index.as_usize())) } } @@ -1588,7 +1575,7 @@ impl<'a> fmt::Display for Item<'a> { } } write!(fmt, "{}", - item_type(self.item), self.item.name.as_ref().unwrap())?; + self.item.type_(), self.item.name.as_ref().unwrap())?; write!(fmt, "")?; // in-band write!(fmt, "")?; @@ -1656,7 +1643,7 @@ fn full_path(cx: &Context, item: &clean::Item) -> String { let mut s = cx.current.join("::"); s.push_str("::"); s.push_str(item.name.as_ref().unwrap()); - return s + s 
} fn shorter<'a>(s: Option<&'a str>) -> String { @@ -1741,8 +1728,8 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, } fn cmp(i1: &clean::Item, i2: &clean::Item, idx1: usize, idx2: usize) -> Ordering { - let ty1 = item_type(i1); - let ty2 = item_type(i2); + let ty1 = i1.type_(); + let ty2 = i2.type_(); if ty1 != ty2 { return (reorder(ty1), idx1).cmp(&(reorder(ty2), idx2)) } @@ -1766,7 +1753,7 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, continue; } - let myty = Some(item_type(myitem)); + let myty = Some(myitem.type_()); if curty == Some(ItemType::ExternCrate) && myty == Some(ItemType::Import) { // Put `extern crate` and `use` re-exports in the same section. curty = myty; @@ -1853,9 +1840,9 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, name = *myitem.name.as_ref().unwrap(), stab_docs = stab_docs, docs = shorter(Some(&Markdown(doc_value).to_string())), - class = item_type(myitem), + class = myitem.type_(), stab = myitem.stability_class(), - href = item_path(item_type(myitem), myitem.name.as_ref().unwrap()), + href = item_path(myitem.type_(), myitem.name.as_ref().unwrap()), title = full_path(cx, myitem))?; } } @@ -1972,10 +1959,18 @@ fn item_static(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, fn item_function(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, f: &clean::Function) -> fmt::Result { // FIXME(#24111): remove when `const_fn` is stabilized - let vis_constness = match get_unstable_features_setting() { + let vis_constness = match UnstableFeatures::from_environment() { UnstableFeatures::Allow => f.constness, _ => hir::Constness::NotConst }; + let prefix = format!("{}{}{}{:#}fn {}{:#}", + VisSpace(&it.visibility), + ConstnessSpace(vis_constness), + UnsafetySpace(f.unsafety), + AbiSpace(f.abi), + it.name.as_ref().unwrap(), + f.generics); + let indent = repeat(" ").take(prefix.len()).collect::(); write!(w, "
<pre class='rust fn'>{vis}{constness}{unsafety}{abi}fn \
                {name}{generics}{decl}{where_clause}</pre>
", vis = VisSpace(&it.visibility), @@ -1985,7 +1980,7 @@ fn item_function(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, name = it.name.as_ref().unwrap(), generics = f.generics, where_clause = WhereClause(&f.generics), - decl = f.decl)?; + decl = Method(&f.decl, &indent))?; document(w, cx, it) } @@ -2061,7 +2056,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, fn trait_item(w: &mut fmt::Formatter, cx: &Context, m: &clean::Item, t: &clean::Item) -> fmt::Result { let name = m.name.as_ref().unwrap(); - let item_type = item_type(m); + let item_type = m.type_(); let id = derive_id(format!("{}.{}", item_type, name)); let ns_id = derive_id(format!("{}.{}", name, item_type.name_space())); write!(w, "

\ @@ -2147,7 +2142,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, let (ref path, _) = cache.external_paths[&it.def_id]; path[..path.len() - 1].join("/") }, - ty = item_type(it).css_class(), + ty = it.type_().css_class(), name = *it.name.as_ref().unwrap())?; Ok(()) } @@ -2156,7 +2151,7 @@ fn naive_assoc_href(it: &clean::Item, link: AssocItemLink) -> String { use html::item_type::ItemType::*; let name = it.name.as_ref().unwrap(); - let ty = match item_type(it) { + let ty = match it.type_() { Typedef | AssociatedType => AssociatedType, s@_ => s, }; @@ -2234,7 +2229,7 @@ fn render_assoc_item(w: &mut fmt::Formatter, link: AssocItemLink) -> fmt::Result { let name = meth.name.as_ref().unwrap(); - let anchor = format!("#{}.{}", item_type(meth), name); + let anchor = format!("#{}.{}", meth.type_(), name); let href = match link { AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id), AssocItemLink::Anchor(None) => anchor, @@ -2251,10 +2246,17 @@ fn render_assoc_item(w: &mut fmt::Formatter, } }; // FIXME(#24111): remove when `const_fn` is stabilized - let vis_constness = match get_unstable_features_setting() { + let vis_constness = match UnstableFeatures::from_environment() { UnstableFeatures::Allow => constness, _ => hir::Constness::NotConst }; + let prefix = format!("{}{}{:#}fn {}{:#}", + ConstnessSpace(vis_constness), + UnsafetySpace(unsafety), + AbiSpace(abi), + name, + *g); + let indent = repeat(" ").take(prefix.len()).collect::(); write!(w, "{}{}{}fn {name}\ {generics}{decl}{where_clause}", ConstnessSpace(vis_constness), @@ -2263,7 +2265,7 @@ fn render_assoc_item(w: &mut fmt::Formatter, href = href, name = name, generics = *g, - decl = Method(d), + decl = Method(d, &indent), where_clause = WhereClause(g)) } match item.inner { @@ -2387,8 +2389,8 @@ fn item_enum(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, match v.inner { clean::VariantItem(ref var) => { match var.kind { - clean::CLikeVariant => write!(w, "{}", name)?, - clean::TupleVariant(ref tys) => { + clean::VariantKind::CLike => write!(w, "{}", name)?, + clean::VariantKind::Tuple(ref tys) => { write!(w, "{}(", name)?; for (i, ty) in tys.iter().enumerate() { if i > 0 { @@ -2398,7 +2400,7 @@ fn item_enum(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, } write!(w, ")")?; } - clean::StructVariant(ref s) => { + clean::VariantKind::Struct(ref s) => { render_struct(w, v, None, @@ -2438,7 +2440,7 @@ fn item_enum(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, ns_id = ns_id, name = variant.name.as_ref().unwrap())?; if let clean::VariantItem(ref var) = variant.inner { - if let clean::TupleVariant(ref tys) = var.kind { + if let clean::VariantKind::Tuple(ref tys) = var.kind { write!(w, "(")?; for (i, ty) in tys.iter().enumerate() { if i > 0 { @@ -2452,8 +2454,10 @@ fn item_enum(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, write!(w, "")?; document(w, cx, variant)?; - use clean::{Variant, StructVariant}; - if let clean::VariantItem( Variant { kind: StructVariant(ref s) } ) = variant.inner { + use clean::{Variant, VariantKind}; + if let clean::VariantItem(Variant { + kind: VariantKind::Struct(ref s) + }) = variant.inner { write!(w, "

Fields

\n ")?; for field in &s.fields { @@ -2742,7 +2746,7 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi link: AssocItemLink, render_mode: RenderMode, is_default_item: bool, outer_version: Option<&str>, trait_: Option<&clean::Trait>) -> fmt::Result { - let item_type = item_type(item); + let item_type = item.type_(); let name = item.name.as_ref().unwrap(); let render_method_item: bool = match render_mode { @@ -2920,7 +2924,7 @@ impl<'a> fmt::Display for Sidebar<'a> { write!(fmt, "::")?; } write!(fmt, "{}", - &cx.root_path[..(cx.current.len() - i - 1) * 3], + &cx.root_path()[..(cx.current.len() - i - 1) * 3], *name)?; } write!(fmt, "

")?; @@ -2934,7 +2938,7 @@ impl<'a> fmt::Display for Sidebar<'a> { relpath: '{path}'\ }};", name = it.name.as_ref().map(|x| &x[..]).unwrap_or(""), - ty = item_type(it).css_class(), + ty = it.type_().css_class(), path = relpath)?; if parentlen == 0 { // there is no sidebar-items.js beyond the crate root path diff --git a/src/librustdoc/html/static/playpen.js b/src/librustdoc/html/static/playpen.js deleted file mode 100644 index cad97c04e1ac0..0000000000000 --- a/src/librustdoc/html/static/playpen.js +++ /dev/null @@ -1,43 +0,0 @@ -// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -/*jslint browser: true, es5: true */ -/*globals $: true, rootPath: true */ - -document.addEventListener('DOMContentLoaded', function() { - 'use strict'; - - if (!window.playgroundUrl) { - return; - } - - var featureRegexp = new RegExp('^\s*#!\\[feature\\(\.*?\\)\\]'); - var elements = document.querySelectorAll('pre.rust-example-rendered'); - - Array.prototype.forEach.call(elements, function(el) { - el.onmouseover = function(e) { - if (el.contains(e.relatedTarget)) { - return; - } - - var a = el.querySelectorAll('a.test-arrow')[0]; - - var code = el.previousElementSibling.textContent; - - var channel = ''; - if (featureRegexp.test(code)) { - channel = '&version=nightly'; - } - - a.setAttribute('href', window.playgroundUrl + '?code=' + - encodeURIComponent(code) + channel); - }; - }); -}); diff --git a/src/librustdoc/html/static/rustdoc.css b/src/librustdoc/html/static/rustdoc.css index e2fb63fc7f974..f49b8556f66cb 100644 --- a/src/librustdoc/html/static/rustdoc.css +++ b/src/librustdoc/html/static/rustdoc.css @@ -1,3 +1,5 @@ +@import "normalize.css"; + /** * Copyright 2013 The Rust Project Developers. 
See the COPYRIGHT * file at the top-level directory of this distribution and at @@ -59,8 +61,6 @@ src: local('Source Code Pro Semibold'), url("SourceCodePro-Semibold.woff") format('woff'); } -@import "normalize.css"; - * { -webkit-box-sizing: border-box; -moz-box-sizing: border-box; @@ -284,7 +284,7 @@ h3.impl > .out-of-band { font-size: 21px; } -h4 > code, h3 > code, invisible > code { +h4 > code, h3 > code, .invisible > code { position: inherit; } @@ -378,6 +378,11 @@ h4 > code, h3 > code, invisible > code { font-size: 90%; } +/* Shift where in trait listing down a line */ +pre.trait .where::before { + content: '\a '; +} + nav { border-bottom: 1px solid; padding-bottom: 10px; @@ -459,7 +464,8 @@ a { .content .search-results td:first-child { padding-right: 0; } .content .search-results td:first-child a { padding-right: 10px; } -tr.result span.primitive::after { content: ' (primitive type)'; font-style: italic; color: black} +tr.result span.primitive::after { content: ' (primitive type)'; font-style: italic; color: black; +} body.blur > :not(#help) { filter: blur(8px); @@ -564,8 +570,11 @@ pre.rust .self, pre.rust .bool-val, pre.rust .prelude-val, pre.rust .attribute, pre.rust .attribute .ident { color: #C82829; } pre.rust .macro, pre.rust .macro-nonterminal { color: #3E999F; } pre.rust .lifetime { color: #B76514; } +pre.rust .question-mark { + color: #ff9011; + font-weight: bold; +} -.rusttest { display: none; } pre.rust { position: relative; } a.test-arrow { background-color: rgba(78, 139, 202, 0.2); @@ -579,6 +588,7 @@ a.test-arrow { } a.test-arrow:hover{ background-color: #4e8bca; + text-decoration: none; } .section-header:hover a:after { diff --git a/src/librustdoc/html/static/styles/main.css b/src/librustdoc/html/static/styles/main.css index fceea85cc704f..6a9a24f69960a 100644 --- a/src/librustdoc/html/static/styles/main.css +++ b/src/librustdoc/html/static/styles/main.css @@ -107,7 +107,7 @@ nav.main .current { border-bottom-color: #000; } nav.main .separator { - border-color: 1px solid #000; + border: 1px solid #000; } a { color: #000; diff --git a/src/librustdoc/html/toc.rs b/src/librustdoc/html/toc.rs index 305e6258baa41..a7da1c5cca48c 100644 --- a/src/librustdoc/html/toc.rs +++ b/src/librustdoc/html/toc.rs @@ -247,7 +247,7 @@ mod tests { macro_rules! 
toc { ($(($level: expr, $name: expr, $(($sub: tt))* )),*) => { Toc { - entries: vec!( + entries: vec![ $( TocEntry { level: $level, @@ -257,7 +257,7 @@ mod tests { children: toc!($($sub),*) } ),* - ) + ] } } } diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index cc5cdf9f4e74c..cf5e8e5e34a3c 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -28,7 +28,7 @@ #![feature(staged_api)] #![feature(test)] #![feature(unicode)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] extern crate arena; extern crate getopts; @@ -91,31 +91,6 @@ pub mod test; use clean::Attributes; -type Pass = (&'static str, // name - fn(clean::Crate) -> plugins::PluginResult, // fn - &'static str); // description - -const PASSES: &'static [Pass] = &[ - ("strip-hidden", passes::strip_hidden, - "strips all doc(hidden) items from the output"), - ("unindent-comments", passes::unindent_comments, - "removes excess indentation on comments in order for markdown to like it"), - ("collapse-docs", passes::collapse_docs, - "concatenates all document attributes into one document attribute"), - ("strip-private", passes::strip_private, - "strips all private items from a crate which cannot be seen externally, \ - implies strip-priv-imports"), - ("strip-priv-imports", passes::strip_priv_imports, - "strips all private import statements (`use`, `extern crate`) from a crate"), -]; - -const DEFAULT_PASSES: &'static [&'static str] = &[ - "strip-hidden", - "strip-private", - "collapse-docs", - "unindent-comments", -]; - struct Output { krate: clean::Crate, renderinfo: html::render::RenderInfo, @@ -123,7 +98,7 @@ struct Output { } pub fn main() { - const STACK_SIZE: usize = 32000000; // 32MB + const STACK_SIZE: usize = 32_000_000; // 32MB let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || { let s = env::args().collect::>(); main_args(&s) @@ -136,7 +111,7 @@ fn unstable(g: getopts::OptGroup) -> RustcOptGroup { RustcOptGroup::unstable(g) pub fn opts() -> Vec { use getopts::*; - vec!( + vec![ stable(optflag("h", "help", "show this help message")), stable(optflag("V", "version", "print rustdoc's version")), stable(optflag("v", "verbose", "use verbose output")), @@ -186,7 +161,8 @@ pub fn opts() -> Vec { own theme", "PATH")), unstable(optmulti("Z", "", "internal and debugging options (only on nightly build)", "FLAG")), - ) + stable(optopt("", "sysroot", "Override the system root", "PATH")), + ] } pub fn usage(argv0: &str) { @@ -222,11 +198,11 @@ pub fn main_args(args: &[String]) -> isize { if matches.opt_strs("passes") == ["list"] { println!("Available passes for running rustdoc:"); - for &(name, _, description) in PASSES { + for &(name, _, description) in passes::PASSES { println!("{:>20} - {}", name, description); } println!("\nDefault passes for rustdoc:"); - for &name in DEFAULT_PASSES { + for &name in passes::DEFAULT_PASSES { println!("{:>20}", name); } return 0; @@ -235,7 +211,8 @@ pub fn main_args(args: &[String]) -> isize { if matches.free.is_empty() { println!("expected an input file to act on"); return 1; - } if matches.free.len() > 1 { + } + if matches.free.len() > 1 { println!("only one input file may be specified"); return 1; } @@ -311,15 +288,14 @@ pub fn main_args(args: &[String]) -> isize { passes.into_iter().collect(), css_file_extension, renderinfo) - .expect("failed to generate documentation") + .expect("failed to generate documentation"); + 0 } Some(s) => { println!("unknown output format: {}", s); - return 1; + 1 } } - - return 0; } /// Looks inside the 
command line arguments to extract the relevant input format @@ -370,6 +346,7 @@ fn rust_input(cratefile: &str, externs: Externs, matches: &getopts::Matches) -> } let cfgs = matches.opt_strs("cfg"); let triple = matches.opt_str("target"); + let maybe_sysroot = matches.opt_str("sysroot").map(PathBuf::from); let cr = PathBuf::from(cratefile); info!("starting to run rustc"); @@ -379,7 +356,7 @@ fn rust_input(cratefile: &str, externs: Externs, matches: &getopts::Matches) -> use rustc::session::config::Input; tx.send(core::run_core(paths, cfgs, externs, Input::File(cr), - triple)).unwrap(); + triple, maybe_sysroot)).unwrap(); }); let (mut krate, renderinfo) = rx.recv().unwrap(); info!("finished with rustc"); @@ -410,7 +387,7 @@ fn rust_input(cratefile: &str, externs: Externs, matches: &getopts::Matches) -> } if default_passes { - for name in DEFAULT_PASSES.iter().rev() { + for name in passes::DEFAULT_PASSES.iter().rev() { passes.insert(0, name.to_string()); } } @@ -420,11 +397,11 @@ fn rust_input(cratefile: &str, externs: Externs, matches: &getopts::Matches) -> .unwrap_or("/tmp/rustdoc/plugins".to_string()); let mut pm = plugins::PluginManager::new(PathBuf::from(path)); for pass in &passes { - let plugin = match PASSES.iter() - .position(|&(p, ..)| { - p == *pass - }) { - Some(i) => PASSES[i].1, + let plugin = match passes::PASSES.iter() + .position(|&(p, ..)| { + p == *pass + }) { + Some(i) => passes::PASSES[i].1, None => { error!("unknown pass {}, skipping", *pass); continue diff --git a/src/librustdoc/markdown.rs b/src/librustdoc/markdown.rs index 1421a3c78fc5a..b617acfabbbf1 100644 --- a/src/librustdoc/markdown.rs +++ b/src/librustdoc/markdown.rs @@ -19,7 +19,7 @@ use testing; use rustc::session::search_paths::SearchPaths; use rustc::session::config::Externs; -use externalfiles::ExternalHtml; +use externalfiles::{ExternalHtml, LoadStringError, load_string}; use html::render::reset_ids; use html::escape::Escape; @@ -58,12 +58,14 @@ pub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches, css.push_str(&s) } - let input_str = load_or_return!(input, 1, 2); - let playground = matches.opt_str("markdown-playground-url"); - if playground.is_some() { - markdown::PLAYGROUND_KRATE.with(|s| { *s.borrow_mut() = Some(None); }); + let input_str = match load_string(input) { + Ok(s) => s, + Err(LoadStringError::ReadFail) => return 1, + Err(LoadStringError::BadUtf8) => return 2, + }; + if let Some(playground) = matches.opt_str("markdown-playground-url") { + markdown::PLAYGROUND.with(|s| { *s.borrow_mut() = Some((None, playground)); }); } - let playground = playground.unwrap_or("".to_string()); let mut out = match File::create(&output) { Err(e) => { @@ -115,9 +117,6 @@ pub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches, {before_content}

{title}

{text} - {after_content} "#, @@ -127,7 +126,6 @@ pub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches, before_content = external_html.before_content, text = rendered, after_content = external_html.after_content, - playground = playground, ); match err { @@ -144,7 +142,11 @@ pub fn render(input: &str, mut output: PathBuf, matches: &getopts::Matches, /// Run any tests/code examples in the markdown file `input`. pub fn test(input: &str, cfgs: Vec, libs: SearchPaths, externs: Externs, mut test_args: Vec) -> isize { - let input_str = load_or_return!(input, 1, 2); + let input_str = match load_string(input) { + Ok(s) => s, + Err(LoadStringError::ReadFail) => return 1, + Err(LoadStringError::BadUtf8) => return 2, + }; let mut opts = TestOptions::default(); opts.no_crate_inject = true; diff --git a/src/librustdoc/passes.rs b/src/librustdoc/passes.rs deleted file mode 100644 index c60e22824965f..0000000000000 --- a/src/librustdoc/passes.rs +++ /dev/null @@ -1,416 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use rustc::hir::def_id::DefId; -use rustc::middle::privacy::AccessLevels; -use rustc::util::nodemap::DefIdSet; -use std::cmp; -use std::mem; -use std::string::String; -use std::usize; - -use clean::{self, Attributes, GetDefId}; -use clean::Item; -use plugins; -use fold; -use fold::DocFolder; -use fold::FoldItem::Strip; - -/// Strip items marked `#[doc(hidden)]` -pub fn strip_hidden(krate: clean::Crate) -> plugins::PluginResult { - let mut retained = DefIdSet(); - - // strip all #[doc(hidden)] items - let krate = { - struct Stripper<'a> { - retained: &'a mut DefIdSet, - update_retained: bool, - } - impl<'a> fold::DocFolder for Stripper<'a> { - fn fold_item(&mut self, i: Item) -> Option { - if i.attrs.list("doc").has_word("hidden") { - debug!("found one in strip_hidden; removing"); - // use a dedicated hidden item for given item type if any - match i.inner { - clean::StructFieldItem(..) | clean::ModuleItem(..) => { - // We need to recurse into stripped modules to - // strip things like impl methods but when doing so - // we must not add any items to the `retained` set. - let old = mem::replace(&mut self.update_retained, false); - let ret = Strip(self.fold_item_recur(i).unwrap()).fold(); - self.update_retained = old; - return ret; - } - _ => return None, - } - } else { - if self.update_retained { - self.retained.insert(i.def_id); - } - } - self.fold_item_recur(i) - } - } - let mut stripper = Stripper{ retained: &mut retained, update_retained: true }; - stripper.fold_crate(krate) - }; - - // strip all impls referencing stripped items - let mut stripper = ImplStripper { retained: &retained }; - stripper.fold_crate(krate) -} - -/// Strip private items from the point of view of a crate or externally from a -/// crate, specified by the `xcrate` flag. -pub fn strip_private(mut krate: clean::Crate) -> plugins::PluginResult { - // This stripper collects all *retained* nodes. 
- let mut retained = DefIdSet(); - let access_levels = krate.access_levels.clone(); - - // strip all private items - { - let mut stripper = Stripper { - retained: &mut retained, - access_levels: &access_levels, - update_retained: true, - }; - krate = ImportStripper.fold_crate(stripper.fold_crate(krate)); - } - - // strip all impls referencing private items - let mut stripper = ImplStripper { retained: &retained }; - stripper.fold_crate(krate) -} - -struct Stripper<'a> { - retained: &'a mut DefIdSet, - access_levels: &'a AccessLevels, - update_retained: bool, -} - -impl<'a> fold::DocFolder for Stripper<'a> { - fn fold_item(&mut self, i: Item) -> Option { - match i.inner { - clean::StrippedItem(..) => { - // We need to recurse into stripped modules to strip things - // like impl methods but when doing so we must not add any - // items to the `retained` set. - let old = mem::replace(&mut self.update_retained, false); - let ret = self.fold_item_recur(i); - self.update_retained = old; - return ret; - } - // These items can all get re-exported - clean::TypedefItem(..) | clean::StaticItem(..) | - clean::StructItem(..) | clean::EnumItem(..) | - clean::TraitItem(..) | clean::FunctionItem(..) | - clean::VariantItem(..) | clean::MethodItem(..) | - clean::ForeignFunctionItem(..) | clean::ForeignStaticItem(..) | - clean::ConstantItem(..) | clean::UnionItem(..) => { - if i.def_id.is_local() { - if !self.access_levels.is_exported(i.def_id) { - return None; - } - } - } - - clean::StructFieldItem(..) => { - if i.visibility != Some(clean::Public) { - return Strip(i).fold(); - } - } - - clean::ModuleItem(..) => { - if i.def_id.is_local() && i.visibility != Some(clean::Public) { - let old = mem::replace(&mut self.update_retained, false); - let ret = Strip(self.fold_item_recur(i).unwrap()).fold(); - self.update_retained = old; - return ret; - } - } - - // handled in the `strip-priv-imports` pass - clean::ExternCrateItem(..) | clean::ImportItem(..) => {} - - clean::DefaultImplItem(..) | clean::ImplItem(..) => {} - - // tymethods/macros have no control over privacy - clean::MacroItem(..) | clean::TyMethodItem(..) => {} - - // Primitives are never stripped - clean::PrimitiveItem(..) => {} - - // Associated consts and types are never stripped - clean::AssociatedConstItem(..) | - clean::AssociatedTypeItem(..) => {} - } - - let fastreturn = match i.inner { - // nothing left to do for traits (don't want to filter their - // methods out, visibility controlled by the trait) - clean::TraitItem(..) => true, - - // implementations of traits are always public. - clean::ImplItem(ref imp) if imp.trait_.is_some() => true, - // Struct variant fields have inherited visibility - clean::VariantItem(clean::Variant { - kind: clean::StructVariant(..) 
- }) => true, - _ => false, - }; - - let i = if fastreturn { - if self.update_retained { - self.retained.insert(i.def_id); - } - return Some(i); - } else { - self.fold_item_recur(i) - }; - - i.and_then(|i| { - match i.inner { - // emptied modules have no need to exist - clean::ModuleItem(ref m) - if m.items.is_empty() && - i.doc_value().is_none() => None, - _ => { - if self.update_retained { - self.retained.insert(i.def_id); - } - Some(i) - } - } - }) - } -} - -// This stripper discards all impls which reference stripped items -struct ImplStripper<'a> { - retained: &'a DefIdSet -} - -impl<'a> fold::DocFolder for ImplStripper<'a> { - fn fold_item(&mut self, i: Item) -> Option { - if let clean::ImplItem(ref imp) = i.inner { - // emptied none trait impls can be stripped - if imp.trait_.is_none() && imp.items.is_empty() { - return None; - } - if let Some(did) = imp.for_.def_id() { - if did.is_local() && !imp.for_.is_generic() && - !self.retained.contains(&did) - { - return None; - } - } - if let Some(did) = imp.trait_.def_id() { - if did.is_local() && !self.retained.contains(&did) { - return None; - } - } - } - self.fold_item_recur(i) - } -} - -// This stripper discards all private import statements (`use`, `extern crate`) -struct ImportStripper; -impl fold::DocFolder for ImportStripper { - fn fold_item(&mut self, i: Item) -> Option { - match i.inner { - clean::ExternCrateItem(..) | - clean::ImportItem(..) if i.visibility != Some(clean::Public) => None, - _ => self.fold_item_recur(i) - } - } -} - -pub fn strip_priv_imports(krate: clean::Crate) -> plugins::PluginResult { - ImportStripper.fold_crate(krate) -} - -pub fn unindent_comments(krate: clean::Crate) -> plugins::PluginResult { - struct CommentCleaner; - impl fold::DocFolder for CommentCleaner { - fn fold_item(&mut self, mut i: Item) -> Option { - let mut avec: Vec = Vec::new(); - for attr in &i.attrs { - match attr { - &clean::NameValue(ref x, ref s) - if "doc" == *x => { - avec.push(clean::NameValue("doc".to_string(), - unindent(s))) - } - x => avec.push(x.clone()) - } - } - i.attrs = avec; - self.fold_item_recur(i) - } - } - let mut cleaner = CommentCleaner; - let krate = cleaner.fold_crate(krate); - krate -} - -pub fn collapse_docs(krate: clean::Crate) -> plugins::PluginResult { - struct Collapser; - impl fold::DocFolder for Collapser { - fn fold_item(&mut self, mut i: Item) -> Option { - let mut docstr = String::new(); - for attr in &i.attrs { - if let clean::NameValue(ref x, ref s) = *attr { - if "doc" == *x { - docstr.push_str(s); - docstr.push('\n'); - } - } - } - let mut a: Vec = i.attrs.iter().filter(|&a| match a { - &clean::NameValue(ref x, _) if "doc" == *x => false, - _ => true - }).cloned().collect(); - if !docstr.is_empty() { - a.push(clean::NameValue("doc".to_string(), docstr)); - } - i.attrs = a; - self.fold_item_recur(i) - } - } - let mut collapser = Collapser; - let krate = collapser.fold_crate(krate); - krate -} - -pub fn unindent(s: &str) -> String { - let lines = s.lines().collect:: >(); - let mut saw_first_line = false; - let mut saw_second_line = false; - let min_indent = lines.iter().fold(usize::MAX, |min_indent, line| { - - // After we see the first non-whitespace line, look at - // the line we have. 
If it is not whitespace, and therefore - // part of the first paragraph, then ignore the indentation - // level of the first line - let ignore_previous_indents = - saw_first_line && - !saw_second_line && - !line.chars().all(|c| c.is_whitespace()); - - let min_indent = if ignore_previous_indents { - usize::MAX - } else { - min_indent - }; - - if saw_first_line { - saw_second_line = true; - } - - if line.chars().all(|c| c.is_whitespace()) { - min_indent - } else { - saw_first_line = true; - let mut whitespace = 0; - line.chars().all(|char| { - // Compare against either space or tab, ignoring whether they - // are mixed or not - if char == ' ' || char == '\t' { - whitespace += 1; - true - } else { - false - } - }); - cmp::min(min_indent, whitespace) - } - }); - - if !lines.is_empty() { - let mut unindented = vec![ lines[0].trim().to_string() ]; - unindented.extend_from_slice(&lines[1..].iter().map(|&line| { - if line.chars().all(|c| c.is_whitespace()) { - line.to_string() - } else { - assert!(line.len() >= min_indent); - line[min_indent..].to_string() - } - }).collect::>()); - unindented.join("\n") - } else { - s.to_string() - } -} - -#[cfg(test)] -mod unindent_tests { - use super::unindent; - - #[test] - fn should_unindent() { - let s = " line1\n line2".to_string(); - let r = unindent(&s); - assert_eq!(r, "line1\nline2"); - } - - #[test] - fn should_unindent_multiple_paragraphs() { - let s = " line1\n\n line2".to_string(); - let r = unindent(&s); - assert_eq!(r, "line1\n\nline2"); - } - - #[test] - fn should_leave_multiple_indent_levels() { - // Line 2 is indented another level beyond the - // base indentation and should be preserved - let s = " line1\n\n line2".to_string(); - let r = unindent(&s); - assert_eq!(r, "line1\n\n line2"); - } - - #[test] - fn should_ignore_first_line_indent() { - // The first line of the first paragraph may not be indented as - // far due to the way the doc string was written: - // - // #[doc = "Start way over here - // and continue here"] - let s = "line1\n line2".to_string(); - let r = unindent(&s); - assert_eq!(r, "line1\nline2"); - } - - #[test] - fn should_not_ignore_first_line_indent_in_a_single_line_para() { - let s = "line1\n\n line2".to_string(); - let r = unindent(&s); - assert_eq!(r, "line1\n\n line2"); - } - - #[test] - fn should_unindent_tabs() { - let s = "\tline1\n\tline2".to_string(); - let r = unindent(&s); - assert_eq!(r, "line1\nline2"); - } - - #[test] - fn should_trim_mixed_indentation() { - let s = "\t line1\n\t line2".to_string(); - let r = unindent(&s); - assert_eq!(r, "line1\nline2"); - - let s = " \tline1\n \tline2".to_string(); - let r = unindent(&s); - assert_eq!(r, "line1\nline2"); - } -} diff --git a/src/librustdoc/passes/collapse_docs.rs b/src/librustdoc/passes/collapse_docs.rs new file mode 100644 index 0000000000000..c034ef9326846 --- /dev/null +++ b/src/librustdoc/passes/collapse_docs.rs @@ -0,0 +1,47 @@ +// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+
+use std::string::String;
+
+use clean::{self, Item};
+use plugins;
+use fold;
+use fold::DocFolder;
+
+pub fn collapse_docs(krate: clean::Crate) -> plugins::PluginResult {
+    let mut collapser = Collapser;
+    let krate = collapser.fold_crate(krate);
+    krate
+}
+
+struct Collapser;
+
+impl fold::DocFolder for Collapser {
+    fn fold_item(&mut self, mut i: Item) -> Option<Item> {
+        let mut docstr = String::new();
+        for attr in &i.attrs {
+            if let clean::NameValue(ref x, ref s) = *attr {
+                if "doc" == *x {
+                    docstr.push_str(s);
+                    docstr.push('\n');
+                }
+            }
+        }
+        let mut a: Vec<clean::Attribute> = i.attrs.iter().filter(|&a| match a {
+            &clean::NameValue(ref x, _) if "doc" == *x => false,
+            _ => true
+        }).cloned().collect();
+        if !docstr.is_empty() {
+            a.push(clean::NameValue("doc".to_string(), docstr));
+        }
+        i.attrs = a;
+        self.fold_item_recur(i)
+    }
+}
diff --git a/src/librustdoc/passes/mod.rs b/src/librustdoc/passes/mod.rs
new file mode 100644
index 0000000000000..1cc4f9371cb68
--- /dev/null
+++ b/src/librustdoc/passes/mod.rs
@@ -0,0 +1,204 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::hir::def_id::DefId;
+use rustc::middle::privacy::AccessLevels;
+use rustc::util::nodemap::DefIdSet;
+use std::mem;
+
+use clean::{self, GetDefId, Item};
+use fold;
+use fold::FoldItem::Strip;
+use plugins;
+
+mod collapse_docs;
+pub use self::collapse_docs::collapse_docs;
+
+mod strip_hidden;
+pub use self::strip_hidden::strip_hidden;
+
+mod strip_private;
+pub use self::strip_private::strip_private;
+
+mod strip_priv_imports;
+pub use self::strip_priv_imports::strip_priv_imports;
+
+mod unindent_comments;
+pub use self::unindent_comments::unindent_comments;
+
+type Pass = (&'static str,                                // name
+             fn(clean::Crate) -> plugins::PluginResult,   // fn
+             &'static str);                               // description
+
+pub const PASSES: &'static [Pass] = &[
+    ("strip-hidden", strip_hidden,
+     "strips all doc(hidden) items from the output"),
+    ("unindent-comments", unindent_comments,
+     "removes excess indentation on comments in order for markdown to like it"),
+    ("collapse-docs", collapse_docs,
+     "concatenates all document attributes into one document attribute"),
+    ("strip-private", strip_private,
+     "strips all private items from a crate which cannot be seen externally, \
+      implies strip-priv-imports"),
+    ("strip-priv-imports", strip_priv_imports,
+     "strips all private import statements (`use`, `extern crate`) from a crate"),
+];
+
+pub const DEFAULT_PASSES: &'static [&'static str] = &[
+    "strip-hidden",
+    "strip-private",
+    "collapse-docs",
+    "unindent-comments",
+];
+
+
+struct Stripper<'a> {
+    retained: &'a mut DefIdSet,
+    access_levels: &'a AccessLevels,
+    update_retained: bool,
+}
+
+impl<'a> fold::DocFolder for Stripper<'a> {
+    fn fold_item(&mut self, i: Item) -> Option<Item> {
+        match i.inner {
+            clean::StrippedItem(..) => {
+                // We need to recurse into stripped modules to strip things
+                // like impl methods but when doing so we must not add any
+                // items to the `retained` set.
+                let old = mem::replace(&mut self.update_retained, false);
+                let ret = self.fold_item_recur(i);
+                self.update_retained = old;
+                return ret;
+            }
+            // These items can all get re-exported
+            clean::TypedefItem(..) | clean::StaticItem(..) |
+            clean::StructItem(..) | clean::EnumItem(..) |
+            clean::TraitItem(..) | clean::FunctionItem(..) |
+            clean::VariantItem(..) | clean::MethodItem(..) |
+            clean::ForeignFunctionItem(..) | clean::ForeignStaticItem(..) |
+            clean::ConstantItem(..) | clean::UnionItem(..) => {
+                if i.def_id.is_local() {
+                    if !self.access_levels.is_exported(i.def_id) {
+                        return None;
+                    }
+                }
+            }
+
+            clean::StructFieldItem(..) => {
+                if i.visibility != Some(clean::Public) {
+                    return Strip(i).fold();
+                }
+            }
+
+            clean::ModuleItem(..) => {
+                if i.def_id.is_local() && i.visibility != Some(clean::Public) {
+                    let old = mem::replace(&mut self.update_retained, false);
+                    let ret = Strip(self.fold_item_recur(i).unwrap()).fold();
+                    self.update_retained = old;
+                    return ret;
+                }
+            }
+
+            // handled in the `strip-priv-imports` pass
+            clean::ExternCrateItem(..) | clean::ImportItem(..) => {}
+
+            clean::DefaultImplItem(..) | clean::ImplItem(..) => {}
+
+            // tymethods/macros have no control over privacy
+            clean::MacroItem(..) | clean::TyMethodItem(..) => {}
+
+            // Primitives are never stripped
+            clean::PrimitiveItem(..) => {}
+
+            // Associated consts and types are never stripped
+            clean::AssociatedConstItem(..) |
+            clean::AssociatedTypeItem(..) => {}
+        }
+
+        let fastreturn = match i.inner {
+            // nothing left to do for traits (don't want to filter their
+            // methods out, visibility controlled by the trait)
+            clean::TraitItem(..) => true,
+
+            // implementations of traits are always public.
+            clean::ImplItem(ref imp) if imp.trait_.is_some() => true,
+            // Struct variant fields have inherited visibility
+            clean::VariantItem(clean::Variant {
+                kind: clean::VariantKind::Struct(..)
+            }) => true,
+            _ => false,
+        };
+
+        let i = if fastreturn {
+            if self.update_retained {
+                self.retained.insert(i.def_id);
+            }
+            return Some(i);
+        } else {
+            self.fold_item_recur(i)
+        };
+
+        i.and_then(|i| {
+            match i.inner {
+                // emptied modules have no need to exist
+                clean::ModuleItem(ref m)
+                    if m.items.is_empty() &&
+                       i.doc_value().is_none() => None,
+                _ => {
+                    if self.update_retained {
+                        self.retained.insert(i.def_id);
+                    }
+                    Some(i)
+                }
+            }
+        })
+    }
+}
+
+// This stripper discards all impls which reference stripped items
+struct ImplStripper<'a> {
+    retained: &'a DefIdSet
+}
+
+impl<'a> fold::DocFolder for ImplStripper<'a> {
+    fn fold_item(&mut self, i: Item) -> Option<Item> {
+        if let clean::ImplItem(ref imp) = i.inner {
+            // emptied none trait impls can be stripped
+            if imp.trait_.is_none() && imp.items.is_empty() {
+                return None;
+            }
+            if let Some(did) = imp.for_.def_id() {
+                if did.is_local() && !imp.for_.is_generic() &&
+                    !self.retained.contains(&did)
+                {
+                    return None;
+                }
+            }
+            if let Some(did) = imp.trait_.def_id() {
+                if did.is_local() && !self.retained.contains(&did) {
+                    return None;
+                }
+            }
+        }
+        self.fold_item_recur(i)
+    }
+}
+
+// This stripper discards all private import statements (`use`, `extern crate`)
+struct ImportStripper;
+impl fold::DocFolder for ImportStripper {
+    fn fold_item(&mut self, i: Item) -> Option<Item> {
+        match i.inner {
+            clean::ExternCrateItem(..) |
+            clean::ImportItem(..) if i.visibility != Some(clean::Public) => None,
+            _ => self.fold_item_recur(i)
+        }
+    }
+}
diff --git a/src/librustdoc/passes/strip_hidden.rs b/src/librustdoc/passes/strip_hidden.rs
new file mode 100644
index 0000000000000..927ccf9171999
--- /dev/null
+++ b/src/librustdoc/passes/strip_hidden.rs
@@ -0,0 +1,66 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::util::nodemap::DefIdSet;
+use std::mem;
+
+use clean::{self, Attributes};
+use clean::Item;
+use plugins;
+use fold;
+use fold::DocFolder;
+use fold::FoldItem::Strip;
+use passes::ImplStripper;
+
+/// Strip items marked `#[doc(hidden)]`
+pub fn strip_hidden(krate: clean::Crate) -> plugins::PluginResult {
+    let mut retained = DefIdSet();
+
+    // strip all #[doc(hidden)] items
+    let krate = {
+        let mut stripper = Stripper{ retained: &mut retained, update_retained: true };
+        stripper.fold_crate(krate)
+    };
+
+    // strip all impls referencing stripped items
+    let mut stripper = ImplStripper { retained: &retained };
+    stripper.fold_crate(krate)
+}
+
+struct Stripper<'a> {
+    retained: &'a mut DefIdSet,
+    update_retained: bool,
+}
+
+impl<'a> fold::DocFolder for Stripper<'a> {
+    fn fold_item(&mut self, i: Item) -> Option<Item> {
+        if i.attrs.list("doc").has_word("hidden") {
+            debug!("found one in strip_hidden; removing");
+            // use a dedicated hidden item for given item type if any
+            match i.inner {
+                clean::StructFieldItem(..) | clean::ModuleItem(..) => {
+                    // We need to recurse into stripped modules to
+                    // strip things like impl methods but when doing so
+                    // we must not add any items to the `retained` set.
+                    let old = mem::replace(&mut self.update_retained, false);
+                    let ret = Strip(self.fold_item_recur(i).unwrap()).fold();
+                    self.update_retained = old;
+                    return ret;
+                }
+                _ => return None,
+            }
+        } else {
+            if self.update_retained {
+                self.retained.insert(i.def_id);
+            }
+        }
+        self.fold_item_recur(i)
+    }
+}
diff --git a/src/librustdoc/passes/strip_priv_imports.rs b/src/librustdoc/passes/strip_priv_imports.rs
new file mode 100644
index 0000000000000..91f8be43c281a
--- /dev/null
+++ b/src/librustdoc/passes/strip_priv_imports.rs
@@ -0,0 +1,18 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use clean;
+use fold::DocFolder;
+use plugins;
+use passes::ImportStripper;
+
+pub fn strip_priv_imports(krate: clean::Crate) -> plugins::PluginResult {
+    ImportStripper.fold_crate(krate)
+}
diff --git a/src/librustdoc/passes/strip_private.rs b/src/librustdoc/passes/strip_private.rs
new file mode 100644
index 0000000000000..acd735739e488
--- /dev/null
+++ b/src/librustdoc/passes/strip_private.rs
@@ -0,0 +1,38 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::util::nodemap::DefIdSet;
+
+use clean;
+use plugins;
+use fold::DocFolder;
+use passes::{ImplStripper, ImportStripper, Stripper};
+
+/// Strip private items from the point of view of a crate or externally from a
+/// crate, specified by the `xcrate` flag.
+pub fn strip_private(mut krate: clean::Crate) -> plugins::PluginResult {
+    // This stripper collects all *retained* nodes.
+    let mut retained = DefIdSet();
+    let access_levels = krate.access_levels.clone();
+
+    // strip all private items
+    {
+        let mut stripper = Stripper {
+            retained: &mut retained,
+            access_levels: &access_levels,
+            update_retained: true,
+        };
+        krate = ImportStripper.fold_crate(stripper.fold_crate(krate));
+    }
+
+    // strip all impls referencing private items
+    let mut stripper = ImplStripper { retained: &retained };
+    stripper.fold_crate(krate)
+}
diff --git a/src/librustdoc/passes/unindent_comments.rs b/src/librustdoc/passes/unindent_comments.rs
new file mode 100644
index 0000000000000..20640f3f88518
--- /dev/null
+++ b/src/librustdoc/passes/unindent_comments.rs
@@ -0,0 +1,168 @@
+// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::cmp;
+use std::string::String;
+use std::usize;
+
+use clean::{self, Item};
+use plugins;
+use fold::{self, DocFolder};
+
+pub fn unindent_comments(krate: clean::Crate) -> plugins::PluginResult {
+    let mut cleaner = CommentCleaner;
+    let krate = cleaner.fold_crate(krate);
+    krate
+}
+
+struct CommentCleaner;
+
+impl fold::DocFolder for CommentCleaner {
+    fn fold_item(&mut self, mut i: Item) -> Option<Item> {
+        let mut avec: Vec<clean::Attribute> = Vec::new();
+        for attr in &i.attrs {
+            match attr {
+                &clean::NameValue(ref x, ref s)
+                        if "doc" == *x => {
+                    avec.push(clean::NameValue("doc".to_string(),
+                                               unindent(s)))
+                }
+                x => avec.push(x.clone())
+            }
+        }
+        i.attrs = avec;
+        self.fold_item_recur(i)
+    }
+}
+
+fn unindent(s: &str) -> String {
+    let lines = s.lines().collect::<Vec<&str> >();
+    let mut saw_first_line = false;
+    let mut saw_second_line = false;
+    let min_indent = lines.iter().fold(usize::MAX, |min_indent, line| {
+
+        // After we see the first non-whitespace line, look at
+        // the line we have.
If it is not whitespace, and therefore + // part of the first paragraph, then ignore the indentation + // level of the first line + let ignore_previous_indents = + saw_first_line && + !saw_second_line && + !line.chars().all(|c| c.is_whitespace()); + + let min_indent = if ignore_previous_indents { + usize::MAX + } else { + min_indent + }; + + if saw_first_line { + saw_second_line = true; + } + + if line.chars().all(|c| c.is_whitespace()) { + min_indent + } else { + saw_first_line = true; + let mut whitespace = 0; + line.chars().all(|char| { + // Compare against either space or tab, ignoring whether they + // are mixed or not + if char == ' ' || char == '\t' { + whitespace += 1; + true + } else { + false + } + }); + cmp::min(min_indent, whitespace) + } + }); + + if !lines.is_empty() { + let mut unindented = vec![ lines[0].trim().to_string() ]; + unindented.extend_from_slice(&lines[1..].iter().map(|&line| { + if line.chars().all(|c| c.is_whitespace()) { + line.to_string() + } else { + assert!(line.len() >= min_indent); + line[min_indent..].to_string() + } + }).collect::>()); + unindented.join("\n") + } else { + s.to_string() + } +} + +#[cfg(test)] +mod unindent_tests { + use super::unindent; + + #[test] + fn should_unindent() { + let s = " line1\n line2".to_string(); + let r = unindent(&s); + assert_eq!(r, "line1\nline2"); + } + + #[test] + fn should_unindent_multiple_paragraphs() { + let s = " line1\n\n line2".to_string(); + let r = unindent(&s); + assert_eq!(r, "line1\n\nline2"); + } + + #[test] + fn should_leave_multiple_indent_levels() { + // Line 2 is indented another level beyond the + // base indentation and should be preserved + let s = " line1\n\n line2".to_string(); + let r = unindent(&s); + assert_eq!(r, "line1\n\n line2"); + } + + #[test] + fn should_ignore_first_line_indent() { + // The first line of the first paragraph may not be indented as + // far due to the way the doc string was written: + // + // #[doc = "Start way over here + // and continue here"] + let s = "line1\n line2".to_string(); + let r = unindent(&s); + assert_eq!(r, "line1\nline2"); + } + + #[test] + fn should_not_ignore_first_line_indent_in_a_single_line_para() { + let s = "line1\n\n line2".to_string(); + let r = unindent(&s); + assert_eq!(r, "line1\n\n line2"); + } + + #[test] + fn should_unindent_tabs() { + let s = "\tline1\n\tline2".to_string(); + let r = unindent(&s); + assert_eq!(r, "line1\nline2"); + } + + #[test] + fn should_trim_mixed_indentation() { + let s = "\t line1\n\t line2".to_string(); + let r = unindent(&s); + assert_eq!(r, "line1\nline2"); + + let s = " \tline1\n \tline2".to_string(); + let r = unindent(&s); + assert_eq!(r, "line1\nline2"); + } +} diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 851cf95f9968f..1bbd67fb9be3a 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use std::cell::{RefCell, Cell}; +use std::cell::Cell; use std::env; use std::ffi::OsString; use std::io::prelude::*; @@ -25,10 +25,8 @@ use rustc_lint; use rustc::dep_graph::DepGraph; use rustc::hir::map as hir_map; use rustc::session::{self, config}; -use rustc::session::config::{get_unstable_features_setting, OutputType, - OutputTypes, Externs}; +use rustc::session::config::{OutputType, OutputTypes, Externs}; use rustc::session::search_paths::{SearchPaths, PathKind}; -use rustc::util::nodemap::{FnvHashMap, FnvHashSet}; use rustc_back::dynamic_lib::DynamicLibrary; use rustc_back::tempdir::TempDir; use rustc_driver::{driver, Compilation}; @@ -36,6 +34,7 @@ use rustc_driver::driver::phase_2_configure_and_expand; use rustc_metadata::cstore::CStore; use rustc_resolve::MakeGlobMap; use syntax::codemap::CodeMap; +use syntax::feature_gate::UnstableFeatures; use errors; use errors::emitter::ColorConfig; @@ -67,31 +66,27 @@ pub fn run(input: &str, maybe_sysroot: Some(env::current_exe().unwrap().parent().unwrap() .parent().unwrap().to_path_buf()), search_paths: libs.clone(), - crate_types: vec!(config::CrateTypeDylib), + crate_types: vec![config::CrateTypeDylib], externs: externs.clone(), - unstable_features: get_unstable_features_setting(), + unstable_features: UnstableFeatures::from_environment(), ..config::basic_options().clone() }; let codemap = Rc::new(CodeMap::new()); - let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto, - true, - false, - Some(codemap.clone())); + let handler = + errors::Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(codemap.clone())); let dep_graph = DepGraph::new(false); let _ignore = dep_graph.in_ignore(); let cstore = Rc::new(CStore::new(&dep_graph)); - let sess = session::build_session_(sessopts, - &dep_graph, - Some(input_path.clone()), - diagnostic_handler, - codemap, - cstore.clone()); + let mut sess = session::build_session_( + sessopts, &dep_graph, Some(input_path.clone()), handler, codemap, cstore.clone(), + ); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); + sess.parse_sess.config = + config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone())); - let cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone())); - let krate = panictry!(driver::phase_1_parse_input(&sess, cfg, &input)); + let krate = panictry!(driver::phase_1_parse_input(&sess, &input)); let driver::ExpansionResult { defs, mut hir_forest, .. 
} = { phase_2_configure_and_expand( &sess, &cstore, krate, None, "rustdoc-test", None, MakeGlobMap::No, |_| Ok(()) @@ -107,12 +102,14 @@ pub fn run(input: &str, map: &map, maybe_typed: core::NotTyped(&sess), input: input, - external_traits: RefCell::new(FnvHashMap()), - populated_crate_impls: RefCell::new(FnvHashSet()), + populated_all_crate_impls: Cell::new(false), + external_traits: Default::default(), deref_trait_did: Cell::new(None), deref_mut_trait_did: Cell::new(None), access_levels: Default::default(), renderinfo: Default::default(), + ty_substs: Default::default(), + lt_substs: Default::default(), }; let mut v = RustdocVisitor::new(&ctx); @@ -168,7 +165,7 @@ fn scrape_test_config(krate: &::rustc::hir::Crate) -> TestOptions { } } - return opts; + opts } fn runtest(test: &str, cratename: &str, cfgs: Vec, libs: SearchPaths, @@ -188,7 +185,7 @@ fn runtest(test: &str, cratename: &str, cfgs: Vec, libs: SearchPaths, maybe_sysroot: Some(env::current_exe().unwrap().parent().unwrap() .parent().unwrap().to_path_buf()), search_paths: libs, - crate_types: vec!(config::CrateTypeExecutable), + crate_types: vec![config::CrateTypeExecutable], output_types: outputs, externs: externs, cg: config::CodegenOptions { @@ -196,7 +193,7 @@ fn runtest(test: &str, cratename: &str, cfgs: Vec, libs: SearchPaths, .. config::basic_codegen_options() }, test: as_test_harness, - unstable_features: get_unstable_features_setting(), + unstable_features: UnstableFeatures::from_environment(), ..config::basic_options().clone() }; @@ -227,7 +224,7 @@ fn runtest(test: &str, cratename: &str, cfgs: Vec, libs: SearchPaths, let codemap = Rc::new(CodeMap::new()); let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()), Some(codemap.clone())); - let old = io::set_panic(box Sink(data.clone())); + let old = io::set_panic(Some(box Sink(data.clone()))); let _bomb = Bomb(data.clone(), old.unwrap_or(box io::stdout())); // Compile the code @@ -235,18 +232,16 @@ fn runtest(test: &str, cratename: &str, cfgs: Vec, libs: SearchPaths, let dep_graph = DepGraph::new(false); let cstore = Rc::new(CStore::new(&dep_graph)); - let sess = session::build_session_(sessopts, - &dep_graph, - None, - diagnostic_handler, - codemap, - cstore.clone()); + let mut sess = session::build_session_( + sessopts, &dep_graph, None, diagnostic_handler, codemap, cstore.clone(), + ); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let outdir = Mutex::new(TempDir::new("rustdoctest").ok().expect("rustdoc needs a tempdir")); let libdir = sess.target_filesearch(PathKind::All).get_lib_path(); let mut control = driver::CompileController::basic(); - let cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone())); + sess.parse_sess.config = + config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone())); let out = Some(outdir.lock().unwrap().path().to_path_buf()); if no_run { @@ -254,18 +249,16 @@ fn runtest(test: &str, cratename: &str, cfgs: Vec, libs: SearchPaths, } let res = panic::catch_unwind(AssertUnwindSafe(|| { - driver::compile_input(&sess, &cstore, cfg.clone(), - &input, &out, - &None, None, &control) + driver::compile_input(&sess, &cstore, &input, &out, &None, None, &control) })); match res { Ok(r) => { match r { Err(count) => { - if count > 0 && compile_fail == false { + if count > 0 && !compile_fail { sess.fatal("aborting due to previous error(s)") - } else if count == 0 && compile_fail == true { + } else if count == 0 && compile_fail { panic!("test compiled while it wasn't supposed to") 
} if count > 0 && error_codes.len() > 0 { @@ -278,7 +271,7 @@ fn runtest(test: &str, cratename: &str, cfgs: Vec, libs: SearchPaths, } } Err(_) => { - if compile_fail == false { + if !compile_fail { panic!("couldn't compile the test"); } if error_codes.len() > 0 { @@ -354,7 +347,7 @@ pub fn maketest(s: &str, cratename: Option<&str>, dont_insert_main: bool, if dont_insert_main || s.contains("fn main") { prog.push_str(&everything_else); } else { - prog.push_str("fn main() {\n "); + prog.push_str("fn main() {\n"); prog.push_str(&everything_else); prog = prog.trim().into(); prog.push_str("\n}"); @@ -362,7 +355,7 @@ pub fn maketest(s: &str, cratename: Option<&str>, dont_insert_main: bool, info!("final test program: {}", prog); - return prog + prog } fn partition_source(s: &str) -> (String, String) { @@ -386,7 +379,7 @@ fn partition_source(s: &str) -> (String, String) { } } - return (before, after); + (before, after) } pub struct Collector { @@ -442,7 +435,7 @@ impl Collector { // compiler failures are test failures should_panic: testing::ShouldPanic::No, }, - testfn: testing::DynTestFn(box move|| { + testfn: testing::DynTestFn(box move |()| { runtest(&test, &cratename, cfgs, diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 16a6e994b5a0b..4d1af1622724a 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -20,6 +20,7 @@ use syntax_pos::Span; use rustc::hir::map as hir_map; use rustc::hir::def::Def; +use rustc::hir::def_id::LOCAL_CRATE; use rustc::middle::privacy::AccessLevel; use rustc::util::nodemap::FnvHashSet; @@ -327,7 +328,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { _ => false, }; self.view_item_stack.remove(&def_node_id); - return ret; + ret } pub fn visit_item(&mut self, item: &hir::Item, @@ -339,7 +340,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { let cstore = &self.cx.sess().cstore; om.extern_crates.push(ExternCrate { cnum: cstore.extern_mod_stmt_cnum(item.id) - .unwrap_or(ast::CrateNum::max_value()), + .unwrap_or(LOCAL_CRATE), name: name, path: p.map(|x|x.to_string()), vis: item.vis.clone(), diff --git a/src/librustdoc/visit_lib.rs b/src/librustdoc/visit_lib.rs index 3af030706b739..6d2830c56192b 100644 --- a/src/librustdoc/visit_lib.rs +++ b/src/librustdoc/visit_lib.rs @@ -8,12 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use rustc::middle::cstore::{CrateStore, ChildItem, DefLike}; +use rustc::middle::cstore::CrateStore; use rustc::middle::privacy::{AccessLevels, AccessLevel}; use rustc::hir::def::Def; -use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; +use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId}; use rustc::ty::Visibility; -use syntax::ast; use std::cell::RefMut; @@ -42,7 +41,7 @@ impl<'a, 'b, 'tcx> LibEmbargoVisitor<'a, 'b, 'tcx> { } } - pub fn visit_lib(&mut self, cnum: ast::CrateNum) { + pub fn visit_lib(&mut self, cnum: CrateNum) { let did = DefId { krate: cnum, index: CRATE_DEF_INDEX }; self.update(did, Some(AccessLevel::Public)); self.visit_mod(did); @@ -65,45 +64,28 @@ impl<'a, 'b, 'tcx> LibEmbargoVisitor<'a, 'b, 'tcx> { } } - pub fn visit_mod(&mut self, did: DefId) { - for item in self.cstore.item_children(did) { - if let DefLike::DlDef(def) = item.def { - match def { - Def::Mod(did) | - Def::ForeignMod(did) | - Def::Trait(did) | - Def::Struct(did) | - Def::Union(did) | - Def::Enum(did) | - Def::TyAlias(did) | - Def::Fn(did) | - Def::Method(did) | - Def::Static(did, _) | - Def::Const(did) => self.visit_item(did, item), - _ => {} - } - } + pub fn visit_mod(&mut self, def_id: DefId) { + for item in self.cstore.item_children(def_id) { + self.visit_item(item.def); } } - fn visit_item(&mut self, did: DefId, item: ChildItem) { - let inherited_item_level = match item.def { - DefLike::DlImpl(..) | DefLike::DlField => unreachable!(), - DefLike::DlDef(def) => { - match def { - Def::ForeignMod(..) => self.prev_level, - _ => if item.vis == Visibility::Public { self.prev_level } else { None } - } - } + fn visit_item(&mut self, def: Def) { + let def_id = def.def_id(); + let vis = self.cstore.visibility(def_id); + let inherited_item_level = if vis == Visibility::Public { + self.prev_level + } else { + None }; - let item_level = self.update(did, inherited_item_level); + let item_level = self.update(def_id, inherited_item_level); - if let DefLike::DlDef(Def::Mod(did)) = item.def { + if let Def::Mod(..) 
= def { let orig_level = self.prev_level; self.prev_level = item_level; - self.visit_mod(did); + self.visit_mod(def_id); self.prev_level = orig_level; } } diff --git a/src/libserialize/collection_impls.rs b/src/libserialize/collection_impls.rs index 7b5092e8848e4..ba9bf2b86a60f 100644 --- a/src/libserialize/collection_impls.rs +++ b/src/libserialize/collection_impls.rs @@ -134,7 +134,7 @@ impl< fn encode(&self, s: &mut S) -> Result<(), S::Error> { let mut bits = 0; for item in self { - bits |= item.to_usize(); + bits |= 1 << item.to_usize(); } s.emit_usize(bits) } @@ -148,7 +148,7 @@ impl< let mut set = EnumSet::new(); for bit in 0..(mem::size_of::()*8) { if bits & (1 << bit) != 0 { - set.insert(CLike::from_usize(1 << bit)); + set.insert(CLike::from_usize(bit)); } } Ok(set) diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index 6ccc0be41bc0f..239d32c8fc8dd 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -199,6 +199,7 @@ use self::DecoderError::*; use self::ParserState::*; use self::InternalStackElement::*; +use std::borrow::Cow; use std::collections::{HashMap, BTreeMap}; use std::io::prelude::*; use std::io; @@ -433,9 +434,7 @@ fn escape_str(wr: &mut fmt::Write, v: &str) -> EncodeResult { } fn escape_char(writer: &mut fmt::Write, v: char) -> EncodeResult { - escape_str(writer, unsafe { - str::from_utf8_unchecked(v.encode_utf8().as_slice()) - }) + escape_str(writer, v.encode_utf8(&mut [0; 4])) } fn spaces(wr: &mut fmt::Write, mut n: usize) -> EncodeResult { @@ -2083,9 +2082,7 @@ impl Decoder { pub fn new(json: Json) -> Decoder { Decoder { stack: vec![json] } } -} -impl Decoder { fn pop(&mut self) -> Json { self.stack.pop().unwrap() } @@ -2184,8 +2181,8 @@ impl ::Decoder for Decoder { Err(ExpectedError("single character string".to_owned(), format!("{}", s))) } - fn read_str(&mut self) -> DecodeResult { - expect!(self.pop(), String) + fn read_str(&mut self) -> DecodeResult> { + expect!(self.pop(), String).map(Cow::Owned) } fn read_enum(&mut self, _name: &str, f: F) -> DecodeResult where @@ -3883,8 +3880,8 @@ mod tests { use std::collections::{HashMap,BTreeMap}; use super::ToJson; - let array2 = Array(vec!(U64(1), U64(2))); - let array3 = Array(vec!(U64(1), U64(2), U64(3))); + let array2 = Array(vec![U64(1), U64(2)]); + let array3 = Array(vec![U64(1), U64(2), U64(3)]); let object = { let mut tree_map = BTreeMap::new(); tree_map.insert("a".to_string(), U64(1)); @@ -3918,7 +3915,7 @@ mod tests { assert_eq!([1_usize, 2_usize].to_json(), array2); assert_eq!((&[1_usize, 2_usize, 3_usize]).to_json(), array3); assert_eq!((vec![1_usize, 2_usize]).to_json(), array2); - assert_eq!(vec!(1_usize, 2_usize, 3_usize).to_json(), array3); + assert_eq!(vec![1_usize, 2_usize, 3_usize].to_json(), array3); let mut tree_map = BTreeMap::new(); tree_map.insert("a".to_string(), 1 as usize); tree_map.insert("b".to_string(), 2); diff --git a/src/librbml/leb128.rs b/src/libserialize/leb128.rs similarity index 99% rename from src/librbml/leb128.rs rename to src/libserialize/leb128.rs index 0c5356c022235..8e8e03f1f8ebb 100644 --- a/src/librbml/leb128.rs +++ b/src/libserialize/leb128.rs @@ -38,6 +38,7 @@ pub fn write_unsigned_leb128(out: &mut Vec, start_position: usize, mut value return position - start_position; } +#[inline] pub fn read_unsigned_leb128(data: &[u8], start_position: usize) -> (u64, usize) { let mut result = 0; let mut shift = 0; @@ -78,6 +79,7 @@ pub fn write_signed_leb128(out: &mut Vec, start_position: usize, mut value: return position - start_position; } +#[inline] 
pub fn read_signed_leb128(data: &[u8], start_position: usize) -> (i64, usize) { let mut result = 0; let mut shift = 0; diff --git a/src/libserialize/lib.rs b/src/libserialize/lib.rs index 80cd47c85ccdf..884f24ddc4c75 100644 --- a/src/libserialize/lib.rs +++ b/src/libserialize/lib.rs @@ -29,11 +29,13 @@ Core encoding and decoding interfaces. #![feature(box_syntax)] #![feature(collections)] +#![feature(core_intrinsics)] #![feature(enumset)] #![feature(rustc_private)] +#![feature(specialization)] #![feature(staged_api)] #![feature(unicode)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #![cfg_attr(test, feature(test))] // test harness access @@ -43,8 +45,10 @@ Core encoding and decoding interfaces. extern crate rustc_unicode; extern crate collections; -pub use self::serialize::{Decoder, Encoder, Decodable, Encodable, - DecoderHelpers, EncoderHelpers}; +pub use self::serialize::{Decoder, Encoder, Decodable, Encodable}; + +pub use self::serialize::{SpecializationError, SpecializedEncoder, SpecializedDecoder}; +pub use self::serialize::{UseSpecializedEncodable, UseSpecializedDecodable}; mod serialize; mod collection_impls; @@ -52,6 +56,9 @@ mod collection_impls; pub mod hex; pub mod json; +pub mod opaque; +pub mod leb128; + mod rustc_serialize { pub use serialize::*; } diff --git a/src/librbml/opaque.rs b/src/libserialize/opaque.rs similarity index 63% rename from src/librbml/opaque.rs rename to src/libserialize/opaque.rs index 6dc7a72b1b1bb..87b6ed2ed403c 100644 --- a/src/librbml/opaque.rs +++ b/src/libserialize/opaque.rs @@ -8,9 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use Error as DecodeError; -use writer::EncodeResult; use leb128::{read_signed_leb128, read_unsigned_leb128, write_signed_leb128, write_unsigned_leb128}; +use std::borrow::Cow; use std::io::{self, Write}; use serialize; @@ -18,6 +17,8 @@ use serialize; // Encoder // ----------------------------------------------------------------------------- +pub type EncodeResult = io::Result<()>; + pub struct Encoder<'a> { pub cursor: &'a mut io::Cursor>, } @@ -124,141 +125,12 @@ impl<'a> serialize::Encoder for Encoder<'a> { let _ = self.cursor.write_all(v.as_bytes()); Ok(()) } - - fn emit_enum(&mut self, _name: &str, f: F) -> EncodeResult - where F: FnOnce(&mut Self) -> EncodeResult - { - f(self) - } - - fn emit_enum_variant(&mut self, - _v_name: &str, - v_id: usize, - _len: usize, - f: F) - -> EncodeResult - where F: FnOnce(&mut Self) -> EncodeResult - { - self.emit_usize(v_id)?; - f(self) - } - - fn emit_enum_variant_arg(&mut self, _: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - f(self) - } - - fn emit_enum_struct_variant(&mut self, - v_name: &str, - v_id: usize, - cnt: usize, - f: F) - -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_enum_variant(v_name, v_id, cnt, f) - } - - fn emit_enum_struct_variant_field(&mut self, _: &str, idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_enum_variant_arg(idx, f) - } - - fn emit_struct(&mut self, _: &str, _len: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - f(self) - } - - fn emit_struct_field(&mut self, _name: &str, _: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - f(self) - } - - fn emit_tuple(&mut self, len: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - 
self.emit_seq(len, f) - } - - fn emit_tuple_arg(&mut self, idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_seq_elt(idx, f) - } - - fn emit_tuple_struct(&mut self, _: &str, len: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_seq(len, f) - } - - fn emit_tuple_struct_arg(&mut self, idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_seq_elt(idx, f) - } - - fn emit_option(&mut self, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_enum("Option", f) - } - - fn emit_option_none(&mut self) -> EncodeResult { - self.emit_enum_variant("None", 0, 0, |_| Ok(())) - } - - fn emit_option_some(&mut self, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_enum_variant("Some", 1, 1, f) - } - - fn emit_seq(&mut self, len: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_usize(len)?; - f(self) - } - - fn emit_seq_elt(&mut self, _idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - f(self) - } - - fn emit_map(&mut self, len: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - self.emit_usize(len)?; - f(self) - } - - fn emit_map_elt_key(&mut self, _idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - f(self) - } - - fn emit_map_elt_val(&mut self, _idx: usize, f: F) -> EncodeResult - where F: FnOnce(&mut Encoder<'a>) -> EncodeResult - { - f(self) - } } impl<'a> Encoder<'a> { pub fn position(&self) -> usize { self.cursor.position() as usize } - - pub fn from_rbml<'b: 'c, 'c>(rbml: &'c mut ::writer::Encoder<'b>) -> Encoder<'c> { - Encoder { cursor: rbml.writer } - } } // ----------------------------------------------------------------------------- @@ -305,215 +177,101 @@ macro_rules! 
read_sleb128 { impl<'a> serialize::Decoder for Decoder<'a> { - type Error = DecodeError; + type Error = String; + #[inline] fn read_nil(&mut self) -> Result<(), Self::Error> { Ok(()) } + #[inline] fn read_u64(&mut self) -> Result { read_uleb128!(self, u64) } + #[inline] fn read_u32(&mut self) -> Result { read_uleb128!(self, u32) } + #[inline] fn read_u16(&mut self) -> Result { read_uleb128!(self, u16) } + #[inline] fn read_u8(&mut self) -> Result { let value = self.data[self.position]; self.position += 1; Ok(value) } + #[inline] fn read_usize(&mut self) -> Result { read_uleb128!(self, usize) } + #[inline] fn read_i64(&mut self) -> Result { read_sleb128!(self, i64) } + #[inline] fn read_i32(&mut self) -> Result { read_sleb128!(self, i32) } + #[inline] fn read_i16(&mut self) -> Result { read_sleb128!(self, i16) } + #[inline] fn read_i8(&mut self) -> Result { let as_u8 = self.data[self.position]; self.position += 1; unsafe { Ok(::std::mem::transmute(as_u8)) } } + #[inline] fn read_isize(&mut self) -> Result { read_sleb128!(self, isize) } + #[inline] fn read_bool(&mut self) -> Result { let value = self.read_u8()?; Ok(value != 0) } + #[inline] fn read_f64(&mut self) -> Result { let bits = self.read_u64()?; Ok(unsafe { ::std::mem::transmute(bits) }) } + #[inline] fn read_f32(&mut self) -> Result { let bits = self.read_u32()?; Ok(unsafe { ::std::mem::transmute(bits) }) } + #[inline] fn read_char(&mut self) -> Result { let bits = self.read_u32()?; Ok(::std::char::from_u32(bits).unwrap()) } - fn read_str(&mut self) -> Result { + #[inline] + fn read_str(&mut self) -> Result, Self::Error> { let len = self.read_usize()?; let s = ::std::str::from_utf8(&self.data[self.position..self.position + len]).unwrap(); self.position += len; - Ok(s.to_string()) - } - - fn read_enum(&mut self, _name: &str, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - f(self) - } - - fn read_enum_variant(&mut self, _: &[&str], mut f: F) -> Result - where F: FnMut(&mut Decoder<'a>, usize) -> Result - { - let disr = self.read_usize()?; - f(self, disr) - } - - fn read_enum_variant_arg(&mut self, _idx: usize, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - f(self) - } - - fn read_enum_struct_variant(&mut self, _: &[&str], mut f: F) -> Result - where F: FnMut(&mut Decoder<'a>, usize) -> Result - { - let disr = self.read_usize()?; - f(self, disr) - } - - fn read_enum_struct_variant_field(&mut self, - _name: &str, - _idx: usize, - f: F) - -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - f(self) - } - - fn read_struct(&mut self, _name: &str, _: usize, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - f(self) - } - - fn read_struct_field(&mut self, _name: &str, _idx: usize, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - f(self) - } - - fn read_tuple(&mut self, tuple_len: usize, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - self.read_seq(move |d, len| { - if len == tuple_len { - f(d) - } else { - let err = format!("Invalid tuple length. 
Expected {}, found {}", - tuple_len, - len); - Err(DecodeError::Expected(err)) - } - }) - } - - fn read_tuple_arg(&mut self, idx: usize, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - self.read_seq_elt(idx, f) - } - - fn read_tuple_struct(&mut self, _name: &str, len: usize, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - self.read_tuple(len, f) - } - - fn read_tuple_struct_arg(&mut self, idx: usize, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - self.read_tuple_arg(idx, f) - } - - fn read_option(&mut self, mut f: F) -> Result - where F: FnMut(&mut Decoder<'a>, bool) -> Result - { - self.read_enum("Option", move |this| { - this.read_enum_variant(&["None", "Some"], move |this, idx| { - match idx { - 0 => f(this, false), - 1 => f(this, true), - _ => { - let msg = format!("Invalid Option index: {}", idx); - Err(DecodeError::Expected(msg)) - } - } - }) - }) - } - - fn read_seq(&mut self, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>, usize) -> Result - { - let len = self.read_usize()?; - f(self, len) - } - - fn read_seq_elt(&mut self, _idx: usize, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - f(self) - } - - fn read_map(&mut self, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>, usize) -> Result - { - let len = self.read_usize()?; - f(self, len) - } - - fn read_map_elt_key(&mut self, _idx: usize, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - f(self) - } - - fn read_map_elt_val(&mut self, _idx: usize, f: F) -> Result - where F: FnOnce(&mut Decoder<'a>) -> Result - { - f(self) + Ok(Cow::Borrowed(s)) } fn error(&mut self, err: &str) -> Self::Error { - DecodeError::ApplicationError(err.to_string()) + err.to_string() } } diff --git a/src/libserialize/serialize.rs b/src/libserialize/serialize.rs index 8e271597dfcb6..c4613c661a84b 100644 --- a/src/libserialize/serialize.rs +++ b/src/libserialize/serialize.rs @@ -14,6 +14,8 @@ Core encoding and decoding interfaces. 
*/ +use std::borrow::Cow; +use std::intrinsics; use std::path; use std::rc::Rc; use std::cell::{Cell, RefCell}; @@ -41,66 +43,99 @@ pub trait Encoder { fn emit_str(&mut self, v: &str) -> Result<(), Self::Error>; // Compound types: - fn emit_enum(&mut self, name: &str, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_enum(&mut self, _name: &str, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) } - fn emit_enum_variant(&mut self, v_name: &str, + fn emit_enum_variant(&mut self, _v_name: &str, v_id: usize, - len: usize, + _len: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_enum_variant_arg(&mut self, a_idx: usize, f: F) + where F: FnOnce(&mut Self) -> Result<(), Self::Error> + { + self.emit_usize(v_id)?; + f(self) + } + fn emit_enum_variant_arg(&mut self, _a_idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) } fn emit_enum_struct_variant(&mut self, v_name: &str, v_id: usize, len: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + where F: FnOnce(&mut Self) -> Result<(), Self::Error> + { + self.emit_enum_variant(v_name, v_id, len, f) + } fn emit_enum_struct_variant_field(&mut self, - f_name: &str, + _f_name: &str, f_idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + where F: FnOnce(&mut Self) -> Result<(), Self::Error> + { + self.emit_enum_variant_arg(f_idx, f) + } - fn emit_struct(&mut self, name: &str, len: usize, f: F) + fn emit_struct(&mut self, _name: &str, _len: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_struct_field(&mut self, f_name: &str, f_idx: usize, f: F) + where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) } + fn emit_struct_field(&mut self, _f_name: &str, _f_idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) } - fn emit_tuple(&mut self, len: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_tuple_arg(&mut self, idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + fn emit_tuple(&mut self, _len: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) } + fn emit_tuple_arg(&mut self, _idx: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) } - fn emit_tuple_struct(&mut self, name: &str, len: usize, f: F) + fn emit_tuple_struct(&mut self, _name: &str, len: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + where F: FnOnce(&mut Self) -> Result<(), Self::Error> + { + self.emit_tuple(len, f) + } fn emit_tuple_struct_arg(&mut self, f_idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + where F: FnOnce(&mut Self) -> Result<(), Self::Error> + { + self.emit_tuple_arg(f_idx, f) + } // Specialized types: fn emit_option(&mut self, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_option_none(&mut self) -> Result<(), Self::Error>; + where F: FnOnce(&mut Self) -> Result<(), 
Self::Error> + { + self.emit_enum("Option", f) + } + fn emit_option_none(&mut self) -> Result<(), Self::Error> { + self.emit_enum_variant("None", 0, 0, |_| Ok(())) + } fn emit_option_some(&mut self, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + where F: FnOnce(&mut Self) -> Result<(), Self::Error> + { + + self.emit_enum_variant("Some", 1, 1, f) + } fn emit_seq(&mut self, len: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_seq_elt(&mut self, idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + where F: FnOnce(&mut Self) -> Result<(), Self::Error> + { + self.emit_usize(len)?; + f(self) + } + fn emit_seq_elt(&mut self, _idx: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) } fn emit_map(&mut self, len: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_map_elt_key(&mut self, idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; - fn emit_map_elt_val(&mut self, idx: usize, f: F) -> Result<(), Self::Error> - where F: FnOnce(&mut Self) -> Result<(), Self::Error>; + where F: FnOnce(&mut Self) -> Result<(), Self::Error> + { + self.emit_usize(len)?; + f(self) + } + fn emit_map_elt_key(&mut self, _idx: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) } + fn emit_map_elt_val(&mut self, _idx: usize, f: F) -> Result<(), Self::Error> + where F: FnOnce(&mut Self) -> Result<(), Self::Error> { f(self) } } pub trait Decoder { @@ -122,69 +157,104 @@ pub trait Decoder { fn read_f64(&mut self) -> Result; fn read_f32(&mut self) -> Result; fn read_char(&mut self) -> Result; - fn read_str(&mut self) -> Result; + fn read_str(&mut self) -> Result, Self::Error>; // Compound types: - fn read_enum(&mut self, name: &str, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; + fn read_enum(&mut self, _name: &str, f: F) -> Result + where F: FnOnce(&mut Self) -> Result { f(self) } - fn read_enum_variant(&mut self, names: &[&str], f: F) + fn read_enum_variant(&mut self, _names: &[&str], mut f: F) -> Result - where F: FnMut(&mut Self, usize) -> Result; - fn read_enum_variant_arg(&mut self, a_idx: usize, f: F) + where F: FnMut(&mut Self, usize) -> Result + { + let disr = self.read_usize()?; + f(self, disr) + } + fn read_enum_variant_arg(&mut self, _a_idx: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; + where F: FnOnce(&mut Self) -> Result { f(self) } fn read_enum_struct_variant(&mut self, names: &[&str], f: F) -> Result - where F: FnMut(&mut Self, usize) -> Result; + where F: FnMut(&mut Self, usize) -> Result + { + self.read_enum_variant(names, f) + } fn read_enum_struct_variant_field(&mut self, - &f_name: &str, + _f_name: &str, f_idx: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; + where F: FnOnce(&mut Self) -> Result + { + self.read_enum_variant_arg(f_idx, f) + } - fn read_struct(&mut self, s_name: &str, len: usize, f: F) + fn read_struct(&mut self, _s_name: &str, _len: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; + where F: FnOnce(&mut Self) -> Result { f(self) } fn read_struct_field(&mut self, - f_name: &str, - f_idx: usize, + _f_name: &str, + _f_idx: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; + where F: FnOnce(&mut Self) -> Result { f(self) } - fn read_tuple(&mut self, len: 
usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; - fn read_tuple_arg(&mut self, a_idx: usize, f: F) + fn read_tuple(&mut self, _len: usize, f: F) -> Result + where F: FnOnce(&mut Self) -> Result { f(self) } + fn read_tuple_arg(&mut self, _a_idx: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; + where F: FnOnce(&mut Self) -> Result { f(self) } - fn read_tuple_struct(&mut self, s_name: &str, len: usize, f: F) + fn read_tuple_struct(&mut self, _s_name: &str, len: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; + where F: FnOnce(&mut Self) -> Result + { + self.read_tuple(len, f) + } fn read_tuple_struct_arg(&mut self, a_idx: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; + where F: FnOnce(&mut Self) -> Result + { + self.read_tuple_arg(a_idx, f) + } // Specialized types: - fn read_option(&mut self, f: F) -> Result - where F: FnMut(&mut Self, bool) -> Result; + fn read_option(&mut self, mut f: F) -> Result + where F: FnMut(&mut Self, bool) -> Result + { + self.read_enum("Option", move |this| { + this.read_enum_variant(&["None", "Some"], move |this, idx| { + match idx { + 0 => f(this, false), + 1 => f(this, true), + _ => Err(this.error("read_option: expected 0 for None or 1 for Some")), + } + }) + }) + } fn read_seq(&mut self, f: F) -> Result - where F: FnOnce(&mut Self, usize) -> Result; - fn read_seq_elt(&mut self, idx: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; + where F: FnOnce(&mut Self, usize) -> Result + { + let len = self.read_usize()?; + f(self, len) + } + fn read_seq_elt(&mut self, _idx: usize, f: F) -> Result + where F: FnOnce(&mut Self) -> Result { f(self) } fn read_map(&mut self, f: F) -> Result - where F: FnOnce(&mut Self, usize) -> Result; - fn read_map_elt_key(&mut self, idx: usize, f: F) + where F: FnOnce(&mut Self, usize) -> Result + { + let len = self.read_usize()?; + f(self, len) + } + fn read_map_elt_key(&mut self, _idx: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; - fn read_map_elt_val(&mut self, idx: usize, f: F) + where F: FnOnce(&mut Self) -> Result { f(self) } + fn read_map_elt_val(&mut self, _idx: usize, f: F) -> Result - where F: FnOnce(&mut Self) -> Result; + where F: FnOnce(&mut Self) -> Result { f(self) } // Failure fn error(&mut self, err: &str) -> Self::Error; @@ -332,7 +402,7 @@ impl Encodable for String { impl Decodable for String { fn decode(d: &mut D) -> Result { - d.read_str() + Ok(d.read_str()?.into_owned()) } } @@ -593,45 +663,97 @@ impl Decodable for Arc { } // ___________________________________________________________________________ -// Helper routines +// Specialization-based interface for multi-dispatch Encodable/Decodable. -pub trait EncoderHelpers: Encoder { - fn emit_from_vec(&mut self, v: &[T], f: F) - -> Result<(), Self::Error> - where F: FnMut(&mut Self, &T) -> Result<(), Self::Error>; +/// Implement this trait on your `{Encodable,Decodable}::Error` types +/// to override the default panic behavior for missing specializations. +pub trait SpecializationError { + /// Create an error for a missing method specialization. + /// Defaults to panicking with type, trait & method names. + /// `S` is the encoder/decoder state type, + /// `T` is the type being encoded/decoded, and + /// the arguments are the names of the trait + /// and method that should've been overriden. 
+ fn not_found(trait_name: &'static str, + method_name: &'static str) -> Self; } -impl EncoderHelpers for S { - fn emit_from_vec(&mut self, v: &[T], mut f: F) -> Result<(), S::Error> where - F: FnMut(&mut S, &T) -> Result<(), S::Error>, - { - self.emit_seq(v.len(), |this| { - for (i, e) in v.iter().enumerate() { - this.emit_seq_elt(i, |this| { - f(this, e) - })?; - } - Ok(()) - }) +impl SpecializationError for E { + default fn not_found(trait_name: &'static str, + method_name: &'static str) -> E { + panic!("missing specializaiton: `<{} as {}<{}>>::{}` not overriden", + unsafe { intrinsics::type_name::() }, + trait_name, + unsafe { intrinsics::type_name::() }, + method_name); } } -pub trait DecoderHelpers: Decoder { - fn read_to_vec(&mut self, f: F) - -> Result, Self::Error> where - F: FnMut(&mut Self) -> Result; +/// Implement this trait on encoders, with `T` being the type +/// you want to encode (employing `UseSpecializedEncodable`), +/// using a strategy specific to the encoder. +pub trait SpecializedEncoder: Encoder { + /// Encode the value in a manner specific to this encoder state. + fn specialized_encode(&mut self, value: &T) -> Result<(), Self::Error>; } -impl DecoderHelpers for D { - fn read_to_vec(&mut self, mut f: F) -> Result, D::Error> where F: - FnMut(&mut D) -> Result, - { - self.read_seq(|this, len| { - let mut v = Vec::with_capacity(len); - for i in 0..len { - v.push(this.read_seq_elt(i, |this| f(this))?); - } - Ok(v) - }) +impl SpecializedEncoder for E { + default fn specialized_encode(&mut self, value: &T) -> Result<(), E::Error> { + value.default_encode(self) + } +} + +/// Implement this trait on decoders, with `T` being the type +/// you want to decode (employing `UseSpecializedDecodable`), +/// using a strategy specific to the decoder. +pub trait SpecializedDecoder: Decoder { + /// Decode a value in a manner specific to this decoder state. + fn specialized_decode(&mut self) -> Result; +} + +impl SpecializedDecoder for D { + default fn specialized_decode(&mut self) -> Result { + T::default_decode(self) + } +} + +/// Implement this trait on your type to get an `Encodable` +/// implementation which goes through `SpecializedEncoder`. +pub trait UseSpecializedEncodable { + /// Defaults to returning an error (see `SpecializationError`). + fn default_encode(&self, _: &mut E) -> Result<(), E::Error> { + Err(E::Error::not_found::("SpecializedEncoder", "specialized_encode")) + } +} + +impl Encodable for T { + default fn encode(&self, e: &mut E) -> Result<(), E::Error> { + E::specialized_encode(e, self) + } +} + +/// Implement this trait on your type to get an `Decodable` +/// implementation which goes through `SpecializedDecoder`. +pub trait UseSpecializedDecodable: Sized { + /// Defaults to returning an error (see `SpecializationError`). + fn default_decode(_: &mut D) -> Result { + Err(D::Error::not_found::("SpecializedDecoder", "specialized_decode")) } } + +impl Decodable for T { + default fn decode(d: &mut D) -> Result { + D::specialized_decode(d) + } +} + +// Can't avoid specialization for &T and Box impls, +// as proxy impls on them are blankets that conflict +// with the Encodable and Decodable impls above, +// which only have `default` on their methods +// for this exact reason. +// May be fixable in a simpler fashion via the +// more complex lattice model for specialization. 
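The specialization-based dispatch introduced in this hunk can be hard to follow from the trait definitions alone. Below is a minimal, self-contained sketch of the same pattern, not the libserialize code itself: a blanket `default` impl routes every type through its fallback encoding, while one concrete encoder overrides the behaviour for a single type. All names here (`MiniEncoder`, `DefaultEncode`, `SpecializedEncode`, `CountingEncoder`) are invented for illustration, and the example needs a nightly compiler because `specialization` is an unstable feature.

```rust
// Sketch only: mirrors the SpecializedEncoder shape above with invented names.
// Requires nightly; the `specialization` feature has never been stabilized.
#![feature(specialization)]
#![allow(incomplete_features)]

trait MiniEncoder {
    fn emit_usize(&mut self, v: usize);
}

/// Fallback encoding a type provides for itself (stand-in for `UseSpecializedEncodable`).
trait DefaultEncode {
    fn default_encode<E: MiniEncoder + ?Sized>(&self, e: &mut E);
}

/// Encoder-side hook (stand-in for `SpecializedEncoder<T>`).
trait SpecializedEncode<T: ?Sized>: MiniEncoder {
    fn specialized_encode(&mut self, value: &T);
}

// Blanket impl: by default every encoder defers to the value's own encoding.
impl<T: ?Sized + DefaultEncode, E: MiniEncoder> SpecializedEncode<T> for E {
    default fn specialized_encode(&mut self, value: &T) {
        value.default_encode(self);
    }
}

impl DefaultEncode for usize {
    fn default_encode<E: MiniEncoder + ?Sized>(&self, e: &mut E) {
        e.emit_usize(*self);
    }
}

struct CountingEncoder {
    emits: usize,
}

impl MiniEncoder for CountingEncoder {
    fn emit_usize(&mut self, _v: usize) {
        self.emits += 1;
    }
}

// A more specific impl: this particular encoder handles `usize` its own way.
// Specialization selects this over the blanket impl above.
impl SpecializedEncode<usize> for CountingEncoder {
    fn specialized_encode(&mut self, value: &usize) {
        self.emit_usize(*value);
        self.emit_usize(*value); // deliberately emits twice so the override is visible
    }
}

fn main() {
    let mut enc = CountingEncoder { emits: 0 };
    enc.specialized_encode(&7usize);
    assert_eq!(enc.emits, 2); // the specialized impl ran, not the blanket default
}
```

Only the blanket impl marks its method `default`, which is exactly what lets the concrete per-encoder impl win without the two impls conflicting.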
+impl<'a, T: ?Sized + Encodable> UseSpecializedEncodable for &'a T {} +impl UseSpecializedEncodable for Box {} +impl UseSpecializedDecodable for Box {} diff --git a/src/libstd/build.rs b/src/libstd/build.rs index 2d540c6b59ac0..72cd6e4830bca 100644 --- a/src/libstd/build.rs +++ b/src/libstd/build.rs @@ -23,10 +23,10 @@ fn main() { println!("cargo:rustc-cfg=cargobuild"); println!("cargo:rerun-if-changed=build.rs"); - let target = env::var("TARGET").unwrap(); - let host = env::var("HOST").unwrap(); + let target = env::var("TARGET").expect("TARGET was not set"); + let host = env::var("HOST").expect("HOST was not set"); if cfg!(feature = "backtrace") && !target.contains("apple") && !target.contains("msvc") && - !target.contains("emscripten") { + !target.contains("emscripten") && !target.contains("fuchsia") { build_libbacktrace(&host, &target); } @@ -58,6 +58,8 @@ fn main() { println!("cargo:rustc-link-lib=ws2_32"); println!("cargo:rustc-link-lib=userenv"); println!("cargo:rustc-link-lib=shell32"); + } else if target.contains("fuchsia") { + println!("cargo:rustc-link-lib=magenta"); } } @@ -103,5 +105,5 @@ fn build_libbacktrace(host: &str, target: &str) { run(Command::new("make") .current_dir(&build_dir) .arg(format!("INCDIR={}", src_dir.display())) - .arg("-j").arg(env::var("NUM_JOBS").unwrap())); + .arg("-j").arg(env::var("NUM_JOBS").expect("NUM_JOBS was not set"))); } diff --git a/src/libstd/collections/hash/bench.rs b/src/libstd/collections/hash/bench.rs index a1275d23d5710..ff6cb7985a4de 100644 --- a/src/libstd/collections/hash/bench.rs +++ b/src/libstd/collections/hash/bench.rs @@ -15,17 +15,17 @@ extern crate test; use self::test::Bencher; #[bench] -fn new_drop(b : &mut Bencher) { +fn new_drop(b: &mut Bencher) { use super::map::HashMap; b.iter(|| { - let m : HashMap = HashMap::new(); + let m: HashMap = HashMap::new(); assert_eq!(m.len(), 0); }) } #[bench] -fn new_insert_drop(b : &mut Bencher) { +fn new_insert_drop(b: &mut Bencher) { use super::map::HashMap; b.iter(|| { diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs index eb1653f18cba1..fb8a0c3c26554 100644 --- a/src/libstd/collections/hash/map.rs +++ b/src/libstd/collections/hash/map.rs @@ -14,33 +14,19 @@ use self::VacantEntryState::*; use borrow::Borrow; use cmp::max; use fmt::{self, Debug}; +#[allow(deprecated)] use hash::{Hash, Hasher, BuildHasher, SipHasher13}; use iter::{FromIterator, FusedIterator}; use mem::{self, replace}; use ops::{Deref, Index}; use rand::{self, Rng}; -use super::table::{ - self, - Bucket, - EmptyBucket, - FullBucket, - FullBucketMut, - RawTable, - SafeHash -}; -use super::table::BucketState::{ - Empty, - Full, -}; - -const INITIAL_LOG2_CAP: usize = 5; -const INITIAL_CAPACITY: usize = 1 << INITIAL_LOG2_CAP; // 2^5 - -/// The default behavior of HashMap implements a load factor of 90.9%. -/// This behavior is characterized by the following condition: -/// -/// - if size > 0.909 * capacity: grow the map +use super::table::{self, Bucket, EmptyBucket, FullBucket, FullBucketMut, RawTable, SafeHash}; +use super::table::BucketState::{Empty, Full}; + +const MIN_NONZERO_RAW_CAPACITY: usize = 32; // must be a power of two + +/// The default behavior of HashMap implements a maximum load factor of 90.9%. #[derive(Clone)] struct DefaultResizePolicy; @@ -49,40 +35,35 @@ impl DefaultResizePolicy { DefaultResizePolicy } + /// A hash map's "capacity" is the number of elements it can hold without + /// being resized. 
Its "raw capacity" is the number of slots required to + /// provide that capacity, accounting for maximum loading. The raw capacity + /// is always zero or a power of two. #[inline] - fn min_capacity(&self, usable_size: usize) -> usize { - // Here, we are rephrasing the logic by specifying the lower limit - // on capacity: - // - // - if `cap < size * 1.1`: grow the map - usable_size * 11 / 10 + fn raw_capacity(&self, len: usize) -> usize { + if len == 0 { + 0 + } else { + // 1. Account for loading: `raw_capacity >= len * 1.1`. + // 2. Ensure it is a power of two. + // 3. Ensure it is at least the minimum size. + let mut raw_cap = len * 11 / 10; + assert!(raw_cap >= len, "raw_cap overflow"); + raw_cap = raw_cap.checked_next_power_of_two().expect("raw_capacity overflow"); + raw_cap = max(MIN_NONZERO_RAW_CAPACITY, raw_cap); + raw_cap + } } - /// An inverse of `min_capacity`, approximately. + /// The capacity of the given raw capacity. #[inline] - fn usable_capacity(&self, cap: usize) -> usize { - // As the number of entries approaches usable capacity, - // min_capacity(size) must be smaller than the internal capacity, - // so that the map is not resized: - // `min_capacity(usable_capacity(x)) <= x`. - // The left-hand side can only be smaller due to flooring by integer - // division. - // + fn capacity(&self, raw_cap: usize) -> usize { // This doesn't have to be checked for overflow since allocation size // in bytes will overflow earlier than multiplication by 10. // // As per https://github.com/rust-lang/rust/pull/30991 this is updated - // to be: (cap * den + den - 1) / num - (cap * 10 + 10 - 1) / 11 - } -} - -#[test] -fn test_resize_policy() { - let rp = DefaultResizePolicy; - for n in 0..1000 { - assert!(rp.min_capacity(rp.usable_capacity(n)) <= n); - assert!(rp.usable_capacity(rp.min_capacity(n)) <= n); + // to be: (raw_cap * den + den - 1) / num + (raw_cap * 10 + 10 - 1) / 11 } } @@ -196,15 +177,29 @@ fn test_resize_policy() { // // FIXME(Gankro, pczarn): review the proof and put it all in a separate README.md -/// A hash map implementation which uses linear probing with Robin -/// Hood bucket stealing. +/// A hash map implementation which uses linear probing with Robin Hood bucket +/// stealing. /// -/// By default, HashMap uses a somewhat slow hashing algorithm which can provide resistance -/// to DoS attacks. Rust makes a best attempt at acquiring random numbers without IO -/// blocking from your system. Because of this HashMap is not guaranteed to provide -/// DoS resistance since the numbers generated might not be truly random. If you do -/// require this behavior you can create your own hashing function using -/// [BuildHasherDefault](../hash/struct.BuildHasherDefault.html). +/// By default, `HashMap` uses a hashing algorithm selected to provide +/// resistance against HashDoS attacks. The algorithm is randomly seeded, and a +/// reasonable best-effort is made to generate this seed from a high quality, +/// secure source of randomness provided by the host without blocking the +/// program. Because of this, the randomness of the seed is dependant on the +/// quality of the system's random number generator at the time it is created. +/// In particular, seeds generated when the system's entropy pool is abnormally +/// low such as during system boot may be of a lower quality. +/// +/// The default hashing algorithm is currently SipHash 1-3, though this is +/// subject to change at any point in the future. 
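The resize-policy arithmetic earlier in this hunk is easy to sanity-check in isolation. The sketch below re-implements the two formulas as free functions (`raw_capacity` and `usable_capacity`, invented names used purely for illustration); the constant 32 and the 11/10 ratio are taken from the hunk above, and the loop plays the same role as the removed `test_resize_policy` round-trip check.

```rust
// Illustration only: free-function versions of DefaultResizePolicy's arithmetic.
const MIN_NONZERO_RAW_CAPACITY: usize = 32; // must be a power of two

/// Raw slots needed to hold `len` elements at a maximum load factor of ~90.9%.
fn raw_capacity(len: usize) -> usize {
    if len == 0 {
        0
    } else {
        let raw = (len * 11 / 10)
            .checked_next_power_of_two()
            .expect("raw_capacity overflow");
        raw.max(MIN_NONZERO_RAW_CAPACITY)
    }
}

/// How many elements a table with `raw_cap` slots can hold before resizing.
/// Mirrors the `(raw_cap * 10 + 10 - 1) / 11` formula from the hunk above.
fn usable_capacity(raw_cap: usize) -> usize {
    (raw_cap * 10 + 10 - 1) / 11
}

fn main() {
    for len in 0..1_000 {
        let raw = raw_capacity(len);
        assert!(raw == 0 || raw.is_power_of_two());
        assert!(usable_capacity(raw) >= len, "requested capacity must fit");
    }
    // e.g. asking for 100 elements: 100 * 11 / 10 = 110, next power of two = 128,
    // and a 128-slot table holds (128 * 10 + 9) / 11 = 117 elements.
    assert_eq!(raw_capacity(100), 128);
    assert_eq!(usable_capacity(128), 117);
}
```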
While its performance is very +/// competitive for medium sized keys, other hashing algorithms will outperform +/// it for small keys such as integers as well as large keys such as long +/// strings, though those algorithms will typically *not* protect against +/// attacks such as HashDoS. +/// +/// The hashing algorithm can be replaced on a per-`HashMap` basis using the +/// `HashMap::default`, `HashMap::with_hasher`, and +/// `HashMap::with_capacity_and_hasher` methods. Many alternative algorithms +/// are available on crates.io, such as the `fnv` crate. /// /// It is required that the keys implement the [`Eq`] and [`Hash`] traits, although /// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`. @@ -335,6 +330,22 @@ fn test_resize_policy() { /// println!("{:?} has {} hp", viking, health); /// } /// ``` +/// +/// A HashMap with fixed list of elements can be initialized from an array: +/// +/// ``` +/// use std::collections::HashMap; +/// +/// fn main() { +/// let timber_resources: HashMap<&str, i32> = +/// [("Norway", 100), +/// ("Denmark", 50), +/// ("Iceland", 10)] +/// .iter().cloned().collect(); +/// // use the values stored in map +/// } +/// ``` + #[derive(Clone)] #[stable(feature = "rust1", since = "1.0.0")] pub struct HashMap { @@ -348,12 +359,9 @@ pub struct HashMap { /// Search for a pre-hashed key. #[inline] -fn search_hashed(table: M, - hash: SafeHash, - mut is_match: F) - -> InternalEntry where - M: Deref>, - F: FnMut(&K) -> bool, +fn search_hashed(table: M, hash: SafeHash, mut is_match: F) -> InternalEntry + where M: Deref>, + F: FnMut(&K) -> bool { // This is the only function where capacity can be zero. To avoid // undefined behavior when Bucket::new gets the raw bucket in this @@ -375,7 +383,7 @@ fn search_hashed(table: M, elem: NoElem(bucket), }; } - Full(bucket) => bucket + Full(bucket) => bucket, }; let robin_ib = full.index() as isize - full.displacement() as isize; @@ -394,9 +402,7 @@ fn search_hashed(table: M, if hash == full.hash() { // If the key doesn't match, it can't be this one.. if is_match(full.read().0) { - return InternalEntry::Occupied { - elem: full - }; + return InternalEntry::Occupied { elem: full }; } } @@ -409,13 +415,13 @@ fn pop_internal(starting_bucket: FullBucketMut) -> (K, V) { let (empty, retkey, retval) = starting_bucket.take(); let mut gap = match empty.gap_peek() { Some(b) => b, - None => return (retkey, retval) + None => return (retkey, retval), }; while gap.full().displacement() != 0 { gap = match gap.shift() { Some(b) => b, - None => break + None => break, }; } @@ -429,11 +435,11 @@ fn pop_internal(starting_bucket: FullBucketMut) -> (K, V) { /// /// `hash`, `k`, and `v` are the elements to "robin hood" into the hashtable. fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>, - mut ib: usize, - mut hash: SafeHash, - mut key: K, - mut val: V) - -> &'a mut V { + mut ib: usize, + mut hash: SafeHash, + mut key: K, + mut val: V) + -> &'a mut V { let starting_index = bucket.index(); let size = bucket.table().size(); // Save the *starting point*. @@ -465,8 +471,8 @@ fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>, // FullBucketMut, into just one FullBucketMut. The "table" // refers to the inner FullBucketMut in this context. 
return bucket.into_table().into_mut_refs().1; - }, - Full(bucket) => bucket + } + Full(bucket) => bucket, }; let probe_ib = full_bucket.index() - full_bucket.displacement(); @@ -483,9 +489,12 @@ fn robin_hood<'a, K: 'a, V: 'a>(bucket: FullBucketMut<'a, K, V>, } impl HashMap - where K: Eq + Hash, S: BuildHasher + where K: Eq + Hash, + S: BuildHasher { - fn make_hash(&self, x: &X) -> SafeHash where X: Hash { + fn make_hash(&self, x: &X) -> SafeHash + where X: Hash + { table::make_hash(&self.hash_builder, x) } @@ -494,7 +503,8 @@ impl HashMap /// search_hashed. #[inline] fn search<'a, Q: ?Sized>(&'a self, q: &Q) -> InternalEntry> - where K: Borrow, Q: Eq + Hash + where K: Borrow, + Q: Eq + Hash { let hash = self.make_hash(q); search_hashed(&self.table, hash, |k| q.eq(k.borrow())) @@ -502,7 +512,8 @@ impl HashMap #[inline] fn search_mut<'a, Q: ?Sized>(&'a mut self, q: &Q) -> InternalEntry> - where K: Borrow, Q: Eq + Hash + where K: Borrow, + Q: Eq + Hash { let hash = self.make_hash(q); search_hashed(&mut self.table, hash, |k| q.eq(k.borrow())) @@ -510,11 +521,11 @@ impl HashMap // The caller should ensure that invariants by Robin Hood Hashing hold. fn insert_hashed_ordered(&mut self, hash: SafeHash, k: K, v: V) { - let cap = self.table.capacity(); + let raw_cap = self.raw_capacity(); let mut buckets = Bucket::new(&mut self.table, hash); let ib = buckets.index(); - while buckets.index() != ib + cap { + while buckets.index() != ib + raw_cap { // We don't need to compare hashes for value swap. // Not even DIBs for Robin Hood. buckets = match buckets.peek() { @@ -522,7 +533,7 @@ impl HashMap empty.put(hash, k, v); return; } - Full(b) => b.into_bucket() + Full(b) => b.into_bucket(), }; buckets.next(); } @@ -545,7 +556,10 @@ impl HashMap { Default::default() } - /// Creates an empty `HashMap` with the given initial capacity. + /// Creates an empty `HashMap` with the specified capacity. + /// + /// The hash map will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash map will not allocate. /// /// # Examples /// @@ -561,7 +575,8 @@ impl HashMap { } impl HashMap - where K: Eq + Hash, S: BuildHasher + where K: Eq + Hash, + S: BuildHasher { /// Creates an empty `HashMap` which will use the given hash builder to hash /// keys. @@ -593,9 +608,11 @@ impl HashMap } } - /// Creates an empty `HashMap` with space for at least `capacity` - /// elements, using `hasher` to hash the keys. + /// Creates an empty `HashMap` with the specified capacity, using `hasher` + /// to hash the keys. /// + /// The hash map will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash map will not allocate. /// Warning: `hasher` is normally randomly generated, and /// is designed to allow HashMaps to be resistant to attacks that /// cause many collisions and very poor performance. 
Setting it @@ -613,16 +630,13 @@ impl HashMap /// ``` #[inline] #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] - pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) - -> HashMap { + pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashMap { let resize_policy = DefaultResizePolicy::new(); - let min_cap = max(INITIAL_CAPACITY, resize_policy.min_capacity(capacity)); - let internal_cap = min_cap.checked_next_power_of_two().expect("capacity overflow"); - assert!(internal_cap >= capacity, "capacity overflow"); + let raw_cap = resize_policy.raw_capacity(capacity); HashMap { hash_builder: hash_builder, resize_policy: resize_policy, - table: RawTable::new(internal_cap), + table: RawTable::new(raw_cap), } } @@ -647,7 +661,13 @@ impl HashMap #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn capacity(&self) -> usize { - self.resize_policy.usable_capacity(self.table.capacity()) + self.resize_policy.capacity(self.raw_capacity()) + } + + /// Returns the hash map's raw capacity. + #[inline] + fn raw_capacity(&self) -> usize { + self.table.capacity() } /// Reserves capacity for at least `additional` more elements to be inserted @@ -667,28 +687,24 @@ impl HashMap /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn reserve(&mut self, additional: usize) { - let new_size = self.len().checked_add(additional).expect("capacity overflow"); - let min_cap = self.resize_policy.min_capacity(new_size); - - // An invalid value shouldn't make us run out of space. This includes - // an overflow check. - assert!(new_size <= min_cap); - - if self.table.capacity() < min_cap { - let new_capacity = max(min_cap.next_power_of_two(), INITIAL_CAPACITY); - self.resize(new_capacity); + let remaining = self.capacity() - self.len(); // this can't overflow + if remaining < additional { + let min_cap = self.len().checked_add(additional).expect("reserve overflow"); + let raw_cap = self.resize_policy.raw_capacity(min_cap); + self.resize(raw_cap); } } - /// Resizes the internal vectors to a new capacity. It's your responsibility to: - /// 1) Make sure the new capacity is enough for all the elements, accounting + /// Resizes the internal vectors to a new capacity. It's your + /// responsibility to: + /// 1) Ensure `new_raw_cap` is enough for all the elements, accounting /// for the load factor. - /// 2) Ensure `new_capacity` is a power of two or zero. - fn resize(&mut self, new_capacity: usize) { - assert!(self.table.size() <= new_capacity); - assert!(new_capacity.is_power_of_two() || new_capacity == 0); + /// 2) Ensure `new_raw_cap` is a power of two or zero. + fn resize(&mut self, new_raw_cap: usize) { + assert!(self.table.size() <= new_raw_cap); + assert!(new_raw_cap.is_power_of_two() || new_raw_cap == 0); - let mut old_table = replace(&mut self.table, RawTable::new(new_capacity)); + let mut old_table = replace(&mut self.table, RawTable::new(new_raw_cap)); let old_size = old_table.size(); if old_table.capacity() == 0 || old_table.size() == 0 { @@ -752,7 +768,7 @@ impl HashMap } b.into_bucket() } - Empty(b) => b.into_bucket() + Empty(b) => b.into_bucket(), }; bucket.next(); } @@ -778,14 +794,9 @@ impl HashMap /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn shrink_to_fit(&mut self) { - let min_capacity = self.resize_policy.min_capacity(self.len()); - let min_capacity = max(min_capacity.next_power_of_two(), INITIAL_CAPACITY); - - // An invalid value shouldn't make us run out of space. 
- debug_assert!(self.len() <= min_capacity); - - if self.table.capacity() != min_capacity { - let old_table = replace(&mut self.table, RawTable::new(min_capacity)); + let new_raw_cap = self.resize_policy.raw_capacity(self.len()); + if self.raw_capacity() != new_raw_cap { + let old_table = replace(&mut self.table, RawTable::new(new_raw_cap)); let old_size = old_table.size(); // Shrink the table. Naive algorithm for resizing: @@ -806,16 +817,12 @@ impl HashMap fn insert_hashed_nocheck(&mut self, hash: SafeHash, k: K, v: V) -> Option { let entry = search_hashed(&mut self.table, hash, |key| *key == k).into_entry(k); match entry { - Some(Occupied(mut elem)) => { - Some(elem.insert(v)) - } + Some(Occupied(mut elem)) => Some(elem.insert(v)), Some(Vacant(elem)) => { elem.insert(v); None } - None => { - unreachable!() - } + None => unreachable!(), } } @@ -979,7 +986,9 @@ impl HashMap /// assert_eq!(a.len(), 1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn len(&self) -> usize { self.table.size() } + pub fn len(&self) -> usize { + self.table.size() + } /// Returns true if the map contains no elements. /// @@ -995,7 +1004,9 @@ impl HashMap /// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] - pub fn is_empty(&self) -> bool { self.len() == 0 } + pub fn is_empty(&self) -> bool { + self.len() == 0 + } /// Clears the map, returning all key-value pairs as an iterator. Keeps the /// allocated memory for reuse. @@ -1019,9 +1030,7 @@ impl HashMap #[inline] #[stable(feature = "drain", since = "1.6.0")] pub fn drain(&mut self) -> Drain { - Drain { - inner: self.table.drain(), - } + Drain { inner: self.table.drain() } } /// Clears the map, removing all key-value pairs. Keeps the allocated memory @@ -1064,7 +1073,8 @@ impl HashMap /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get(&self, k: &Q) -> Option<&V> - where K: Borrow, Q: Hash + Eq + where K: Borrow, + Q: Hash + Eq { self.search(k).into_occupied_bucket().map(|bucket| bucket.into_refs().1) } @@ -1090,7 +1100,8 @@ impl HashMap /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn contains_key(&self, k: &Q) -> bool - where K: Borrow, Q: Hash + Eq + where K: Borrow, + Q: Hash + Eq { self.search(k).into_occupied_bucket().is_some() } @@ -1118,7 +1129,8 @@ impl HashMap /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn get_mut(&mut self, k: &Q) -> Option<&mut V> - where K: Borrow, Q: Hash + Eq + where K: Borrow, + Q: Hash + Eq { self.search_mut(k).into_occupied_bucket().map(|bucket| bucket.into_mut_refs().1) } @@ -1176,10 +1188,11 @@ impl HashMap /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn remove(&mut self, k: &Q) -> Option - where K: Borrow, Q: Hash + Eq + where K: Borrow, + Q: Hash + Eq { if self.table.size() == 0 { - return None + return None; } self.search_mut(k).into_occupied_bucket().map(|bucket| pop_internal(bucket).1) @@ -1188,25 +1201,32 @@ impl HashMap #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for HashMap - where K: Eq + Hash, V: PartialEq, S: BuildHasher + where K: Eq + Hash, + V: PartialEq, + S: BuildHasher { fn eq(&self, other: &HashMap) -> bool { - if self.len() != other.len() { return false; } + if self.len() != other.len() { + return false; + } - self.iter().all(|(key, value)| - other.get(key).map_or(false, |v| *value == *v) - ) + self.iter().all(|(key, value)| other.get(key).map_or(false, |v| *value == *v)) } } #[stable(feature = "rust1", since = "1.0.0")] impl Eq for HashMap - where K: Eq + Hash, V: Eq, S: BuildHasher -{} + where K: Eq + Hash, + V: 
Eq, + S: BuildHasher +{ +} #[stable(feature = "rust1", since = "1.0.0")] impl Debug for HashMap - where K: Eq + Hash + Debug, V: Debug, S: BuildHasher + where K: Eq + Hash + Debug, + V: Debug, + S: BuildHasher { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_map().entries(self.iter()).finish() @@ -1216,7 +1236,7 @@ impl Debug for HashMap #[stable(feature = "rust1", since = "1.0.0")] impl Default for HashMap where K: Eq + Hash, - S: BuildHasher + Default, + S: BuildHasher + Default { /// Creates an empty `HashMap`, with the `Default` value for the hasher. fn default() -> HashMap { @@ -1228,7 +1248,7 @@ impl Default for HashMap impl<'a, K, Q: ?Sized, V, S> Index<&'a Q> for HashMap where K: Eq + Hash + Borrow, Q: Eq + Hash, - S: BuildHasher, + S: BuildHasher { type Output = V; @@ -1241,79 +1261,71 @@ impl<'a, K, Q: ?Sized, V, S> Index<&'a Q> for HashMap /// HashMap iterator. #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, K: 'a, V: 'a> { - inner: table::Iter<'a, K, V> + inner: table::Iter<'a, K, V>, } // FIXME(#19839) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Iter<'a, K, V> { fn clone(&self) -> Iter<'a, K, V> { - Iter { - inner: self.inner.clone() - } + Iter { inner: self.inner.clone() } } } /// HashMap mutable values iterator. #[stable(feature = "rust1", since = "1.0.0")] pub struct IterMut<'a, K: 'a, V: 'a> { - inner: table::IterMut<'a, K, V> + inner: table::IterMut<'a, K, V>, } /// HashMap move iterator. #[stable(feature = "rust1", since = "1.0.0")] pub struct IntoIter { - inner: table::IntoIter + inner: table::IntoIter, } /// HashMap keys iterator. #[stable(feature = "rust1", since = "1.0.0")] pub struct Keys<'a, K: 'a, V: 'a> { - inner: Iter<'a, K, V> + inner: Iter<'a, K, V>, } // FIXME(#19839) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Keys<'a, K, V> { fn clone(&self) -> Keys<'a, K, V> { - Keys { - inner: self.inner.clone() - } + Keys { inner: self.inner.clone() } } } /// HashMap values iterator. #[stable(feature = "rust1", since = "1.0.0")] pub struct Values<'a, K: 'a, V: 'a> { - inner: Iter<'a, K, V> + inner: Iter<'a, K, V>, } // FIXME(#19839) Remove in favor of `#[derive(Clone)]` #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> Clone for Values<'a, K, V> { fn clone(&self) -> Values<'a, K, V> { - Values { - inner: self.inner.clone() - } + Values { inner: self.inner.clone() } } } /// HashMap drain iterator. #[stable(feature = "drain", since = "1.6.0")] pub struct Drain<'a, K: 'a, V: 'a> { - inner: table::Drain<'a, K, V> + inner: table::Drain<'a, K, V>, } /// Mutable HashMap values iterator. #[stable(feature = "map_values_mut", since = "1.10.0")] pub struct ValuesMut<'a, K: 'a, V: 'a> { - inner: IterMut<'a, K, V> + inner: IterMut<'a, K, V>, } enum InternalEntry { - Occupied { - elem: FullBucket, - }, + Occupied { elem: FullBucket }, Vacant { hash: SafeHash, elem: VacantEntryState, @@ -1338,7 +1350,7 @@ impl<'a, K, V> InternalEntry> { InternalEntry::Occupied { elem } => { Some(Occupied(OccupiedEntry { key: Some(key), - elem: elem + elem: elem, })) } InternalEntry::Vacant { hash, elem } => { @@ -1348,7 +1360,7 @@ impl<'a, K, V> InternalEntry> { elem: elem, })) } - InternalEntry::TableIsEmpty => None + InternalEntry::TableIsEmpty => None, } } } @@ -1362,27 +1374,29 @@ impl<'a, K, V> InternalEntry> { pub enum Entry<'a, K: 'a, V: 'a> { /// An occupied Entry. 
#[stable(feature = "rust1", since = "1.0.0")] - Occupied( - #[stable(feature = "rust1", since = "1.0.0")] OccupiedEntry<'a, K, V> - ), + Occupied(#[stable(feature = "rust1", since = "1.0.0")] + OccupiedEntry<'a, K, V>), /// A vacant Entry. #[stable(feature = "rust1", since = "1.0.0")] - Vacant( - #[stable(feature = "rust1", since = "1.0.0")] VacantEntry<'a, K, V> - ), + Vacant(#[stable(feature = "rust1", since = "1.0.0")] + VacantEntry<'a, K, V>), } #[stable(feature= "debug_hash_map", since = "1.12.0")] impl<'a, K: 'a + Debug, V: 'a + Debug> Debug for Entry<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { - Vacant(ref v) => f.debug_tuple("Entry") - .field(v) - .finish(), - Occupied(ref o) => f.debug_tuple("Entry") - .field(o) - .finish(), + Vacant(ref v) => { + f.debug_tuple("Entry") + .field(v) + .finish() + } + Occupied(ref o) => { + f.debug_tuple("Entry") + .field(o) + .finish() + } } } } @@ -1401,9 +1415,9 @@ pub struct OccupiedEntry<'a, K: 'a, V: 'a> { impl<'a, K: 'a + Debug, V: 'a + Debug> Debug for OccupiedEntry<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("OccupiedEntry") - .field("key", self.key()) - .field("value", self.get()) - .finish() + .field("key", self.key()) + .field("value", self.get()) + .finish() } } @@ -1422,8 +1436,8 @@ pub struct VacantEntry<'a, K: 'a, V: 'a> { impl<'a, K: 'a + Debug, V: 'a> Debug for VacantEntry<'a, K, V> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_tuple("VacantEntry") - .field(self.key()) - .finish() + .field(self.key()) + .finish() } } @@ -1438,7 +1452,8 @@ enum VacantEntryState { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V, S> IntoIterator for &'a HashMap - where K: Eq + Hash, S: BuildHasher + where K: Eq + Hash, + S: BuildHasher { type Item = (&'a K, &'a V); type IntoIter = Iter<'a, K, V>; @@ -1450,7 +1465,8 @@ impl<'a, K, V, S> IntoIterator for &'a HashMap #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V, S> IntoIterator for &'a mut HashMap - where K: Eq + Hash, S: BuildHasher + where K: Eq + Hash, + S: BuildHasher { type Item = (&'a K, &'a mut V); type IntoIter = IterMut<'a, K, V>; @@ -1462,7 +1478,8 @@ impl<'a, K, V, S> IntoIterator for &'a mut HashMap #[stable(feature = "rust1", since = "1.0.0")] impl IntoIterator for HashMap - where K: Eq + Hash, S: BuildHasher + where K: Eq + Hash, + S: BuildHasher { type Item = (K, V); type IntoIter = IntoIter; @@ -1485,9 +1502,7 @@ impl IntoIterator for HashMap /// let vec: Vec<(&str, isize)> = map.into_iter().collect(); /// ``` fn into_iter(self) -> IntoIter { - IntoIter { - inner: self.table.into_iter() - } + IntoIter { inner: self.table.into_iter() } } } @@ -1495,12 +1510,21 @@ impl IntoIterator for HashMap impl<'a, K, V> Iterator for Iter<'a, K, V> { type Item = (&'a K, &'a V); - #[inline] fn next(&mut self) -> Option<(&'a K, &'a V)> { self.inner.next() } - #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + #[inline] + fn next(&mut self) -> Option<(&'a K, &'a V)> { + self.inner.next() + } + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> ExactSizeIterator for Iter<'a, K, V> { - #[inline] fn len(&self) -> usize { self.inner.len() } + #[inline] + fn len(&self) -> usize { + self.inner.len() + } } #[unstable(feature = "fused", issue = "35602")] @@ -1510,12 +1534,21 @@ impl<'a, K, V> FusedIterator for Iter<'a, K, V> {} impl<'a, K, V> Iterator for IterMut<'a, K, V> { 
type Item = (&'a K, &'a mut V); - #[inline] fn next(&mut self) -> Option<(&'a K, &'a mut V)> { self.inner.next() } - #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + #[inline] + fn next(&mut self) -> Option<(&'a K, &'a mut V)> { + self.inner.next() + } + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> ExactSizeIterator for IterMut<'a, K, V> { - #[inline] fn len(&self) -> usize { self.inner.len() } + #[inline] + fn len(&self) -> usize { + self.inner.len() + } } #[unstable(feature = "fused", issue = "35602")] impl<'a, K, V> FusedIterator for IterMut<'a, K, V> {} @@ -1524,12 +1557,21 @@ impl<'a, K, V> FusedIterator for IterMut<'a, K, V> {} impl Iterator for IntoIter { type Item = (K, V); - #[inline] fn next(&mut self) -> Option<(K, V)> { self.inner.next().map(|(_, k, v)| (k, v)) } - #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + #[inline] + fn next(&mut self) -> Option<(K, V)> { + self.inner.next().map(|(_, k, v)| (k, v)) + } + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IntoIter { - #[inline] fn len(&self) -> usize { self.inner.len() } + #[inline] + fn len(&self) -> usize { + self.inner.len() + } } #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for IntoIter {} @@ -1538,12 +1580,21 @@ impl FusedIterator for IntoIter {} impl<'a, K, V> Iterator for Keys<'a, K, V> { type Item = &'a K; - #[inline] fn next(&mut self) -> Option<(&'a K)> { self.inner.next().map(|(k, _)| k) } - #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + #[inline] + fn next(&mut self) -> Option<(&'a K)> { + self.inner.next().map(|(k, _)| k) + } + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> ExactSizeIterator for Keys<'a, K, V> { - #[inline] fn len(&self) -> usize { self.inner.len() } + #[inline] + fn len(&self) -> usize { + self.inner.len() + } } #[unstable(feature = "fused", issue = "35602")] impl<'a, K, V> FusedIterator for Keys<'a, K, V> {} @@ -1552,12 +1603,21 @@ impl<'a, K, V> FusedIterator for Keys<'a, K, V> {} impl<'a, K, V> Iterator for Values<'a, K, V> { type Item = &'a V; - #[inline] fn next(&mut self) -> Option<(&'a V)> { self.inner.next().map(|(_, v)| v) } - #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + #[inline] + fn next(&mut self) -> Option<(&'a V)> { + self.inner.next().map(|(_, v)| v) + } + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> { - #[inline] fn len(&self) -> usize { self.inner.len() } + #[inline] + fn len(&self) -> usize { + self.inner.len() + } } #[unstable(feature = "fused", issue = "35602")] impl<'a, K, V> FusedIterator for Values<'a, K, V> {} @@ -1566,26 +1626,44 @@ impl<'a, K, V> FusedIterator for Values<'a, K, V> {} impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { type Item = &'a mut V; - #[inline] fn next(&mut self) -> Option<(&'a mut V)> { self.inner.next().map(|(_, v)| v) } - #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + #[inline] + fn next(&mut self) -> Option<(&'a mut V)> { + self.inner.next().map(|(_, v)| v) + } + #[inline] + fn size_hint(&self) -> (usize, Option) { + 
self.inner.size_hint() + } } #[stable(feature = "map_values_mut", since = "1.10.0")] impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> { - #[inline] fn len(&self) -> usize { self.inner.len() } + #[inline] + fn len(&self) -> usize { + self.inner.len() + } } #[unstable(feature = "fused", issue = "35602")] impl<'a, K, V> FusedIterator for ValuesMut<'a, K, V> {} -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, K, V> Iterator for Drain<'a, K, V> { type Item = (K, V); - #[inline] fn next(&mut self) -> Option<(K, V)> { self.inner.next().map(|(_, k, v)| (k, v)) } - #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } + #[inline] + fn next(&mut self) -> Option<(K, V)> { + self.inner.next().map(|(_, k, v)| (k, v)) + } + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "drain", since = "1.6.0")] impl<'a, K, V> ExactSizeIterator for Drain<'a, K, V> { - #[inline] fn len(&self) -> usize { self.inner.len() } + #[inline] + fn len(&self) -> usize { + self.inner.len() + } } #[unstable(feature = "fused", issue = "35602")] impl<'a, K, V> FusedIterator for Drain<'a, K, V> {} @@ -1880,21 +1958,18 @@ impl<'a, K: 'a, V: 'a> VacantEntry<'a, K, V> { #[stable(feature = "rust1", since = "1.0.0")] pub fn insert(self, value: V) -> &'a mut V { match self.elem { - NeqElem(bucket, ib) => { - robin_hood(bucket, ib, self.hash, self.key, value) - } - NoElem(bucket) => { - bucket.put(self.hash, self.key, value).into_mut_refs().1 - } + NeqElem(bucket, ib) => robin_hood(bucket, ib, self.hash, self.key, value), + NoElem(bucket) => bucket.put(self.hash, self.key, value).into_mut_refs().1, } } } #[stable(feature = "rust1", since = "1.0.0")] impl FromIterator<(K, V)> for HashMap - where K: Eq + Hash, S: BuildHasher + Default + where K: Eq + Hash, + S: BuildHasher + Default { - fn from_iter>(iter: T) -> HashMap { + fn from_iter>(iter: T) -> HashMap { let iterator = iter.into_iter(); let lower = iterator.size_hint().0; let mut map = HashMap::with_capacity_and_hasher(lower, Default::default()); @@ -1905,9 +1980,10 @@ impl FromIterator<(K, V)> for HashMap #[stable(feature = "rust1", since = "1.0.0")] impl Extend<(K, V)> for HashMap - where K: Eq + Hash, S: BuildHasher + where K: Eq + Hash, + S: BuildHasher { - fn extend>(&mut self, iter: T) { + fn extend>(&mut self, iter: T) { for (k, v) in iter { self.insert(k, v); } @@ -1916,9 +1992,11 @@ impl Extend<(K, V)> for HashMap #[stable(feature = "hash_extend_copy", since = "1.4.0")] impl<'a, K, V, S> Extend<(&'a K, &'a V)> for HashMap - where K: Eq + Hash + Copy, V: Copy, S: BuildHasher + where K: Eq + Hash + Copy, + V: Copy, + S: BuildHasher { - fn extend>(&mut self, iter: T) { + fn extend>(&mut self, iter: T) { self.extend(iter.into_iter().map(|(&key, &value)| (key, value))); } } @@ -1960,7 +2038,8 @@ impl RandomState { /// let s = RandomState::new(); /// ``` #[inline] - #[allow(deprecated)] // rand + #[allow(deprecated)] + // rand #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] pub fn new() -> RandomState { // Historically this function did not cache keys from the OS and instead @@ -1987,9 +2066,7 @@ impl RandomState { (r.gen(), r.gen()) }); - KEYS.with(|&(k0, k1)| { - RandomState { k0: k0, k1: k1 } - }) + KEYS.with(|&(k0, k1)| RandomState { k0: k0, k1: k1 }) } } @@ -1997,6 +2074,7 @@ impl RandomState { impl BuildHasher for RandomState { type Hasher = DefaultHasher; #[inline] + 
#[allow(deprecated)] fn build_hasher(&self) -> DefaultHasher { DefaultHasher(SipHasher13::new_with_keys(self.k0, self.k1)) } @@ -2009,10 +2087,32 @@ impl BuildHasher for RandomState { /// /// [`RandomState`]: struct.RandomState.html /// [`Hasher`]: ../../hash/trait.Hasher.html -#[unstable(feature = "hashmap_default_hasher", issue = "0")] +#[stable(feature = "hashmap_default_hasher", since = "1.13.0")] +#[allow(deprecated)] +#[derive(Debug)] pub struct DefaultHasher(SipHasher13); -#[unstable(feature = "hashmap_default_hasher", issue = "0")] +impl DefaultHasher { + /// Creates a new `DefaultHasher`. + /// + /// This hasher is not guaranteed to be the same as all other + /// `DefaultHasher` instances, but is the same as all other `DefaultHasher` + /// instances created through `new` or `default`. + #[stable(feature = "hashmap_default_hasher", since = "1.13.0")] + #[allow(deprecated)] + pub fn new() -> DefaultHasher { + DefaultHasher(SipHasher13::new_with_keys(0, 0)) + } +} + +#[stable(feature = "hashmap_default_hasher", since = "1.13.0")] +impl Default for DefaultHasher { + fn default() -> DefaultHasher { + DefaultHasher::new() + } +} + +#[stable(feature = "hashmap_default_hasher", since = "1.13.0")] impl Hasher for DefaultHasher { #[inline] fn write(&mut self, msg: &[u8]) { @@ -2025,7 +2125,7 @@ impl Hasher for DefaultHasher { } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "hashmap_build_hasher", since = "1.7.0")] impl Default for RandomState { /// Constructs a new `RandomState`. #[inline] @@ -2035,7 +2135,9 @@ impl Default for RandomState { } impl super::Recover for HashMap - where K: Eq + Hash + Borrow, S: BuildHasher, Q: Eq + Hash + where K: Eq + Hash + Borrow, + S: BuildHasher, + Q: Eq + Hash { type Key = K; @@ -2045,7 +2147,7 @@ impl super::Recover for HashMap fn take(&mut self, key: &Q) -> Option { if self.table.size() == 0 { - return None + return None; } self.search_mut(key).into_occupied_bucket().map(|bucket| pop_internal(bucket).0) @@ -2069,27 +2171,82 @@ impl super::Recover for HashMap #[allow(dead_code)] fn assert_covariance() { - fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> { v } - fn map_val<'new>(v: HashMap) -> HashMap { v } - fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> { v } - fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> { v } - fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> { v } - fn into_iter_val<'new>(v: IntoIter) -> IntoIter { v } - fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> { v } - fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, &'new str> { v } - fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> { v } - fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> { v } + fn map_key<'new>(v: HashMap<&'static str, u8>) -> HashMap<&'new str, u8> { + v + } + fn map_val<'new>(v: HashMap) -> HashMap { + v + } + fn iter_key<'a, 'new>(v: Iter<'a, &'static str, u8>) -> Iter<'a, &'new str, u8> { + v + } + fn iter_val<'a, 'new>(v: Iter<'a, u8, &'static str>) -> Iter<'a, u8, &'new str> { + v + } + fn into_iter_key<'new>(v: IntoIter<&'static str, u8>) -> IntoIter<&'new str, u8> { + v + } + fn into_iter_val<'new>(v: IntoIter) -> IntoIter { + v + } + fn keys_key<'a, 'new>(v: Keys<'a, &'static str, u8>) -> Keys<'a, &'new str, u8> { + v + } + fn keys_val<'a, 'new>(v: Keys<'a, u8, &'static str>) -> Keys<'a, u8, 
&'new str> { + v + } + fn values_key<'a, 'new>(v: Values<'a, &'static str, u8>) -> Values<'a, &'new str, u8> { + v + } + fn values_val<'a, 'new>(v: Values<'a, u8, &'static str>) -> Values<'a, u8, &'new str> { + v + } fn drain<'new>(d: Drain<'static, &'static str, &'static str>) - -> Drain<'new, &'new str, &'new str> { d } + -> Drain<'new, &'new str, &'new str> { + d + } } #[cfg(test)] mod test_map { use super::HashMap; use super::Entry::{Occupied, Vacant}; + use super::RandomState; use cell::RefCell; use rand::{thread_rng, Rng}; + #[test] + fn test_zero_capacities() { + type HM = HashMap; + + let m = HM::new(); + assert_eq!(m.capacity(), 0); + + let m = HM::default(); + assert_eq!(m.capacity(), 0); + + let m = HM::with_hasher(RandomState::new()); + assert_eq!(m.capacity(), 0); + + let m = HM::with_capacity(0); + assert_eq!(m.capacity(), 0); + + let m = HM::with_capacity_and_hasher(0, RandomState::new()); + assert_eq!(m.capacity(), 0); + + let mut m = HM::new(); + m.insert(1, 1); + m.insert(2, 2); + m.remove(&1); + m.remove(&2); + m.shrink_to_fit(); + assert_eq!(m.capacity(), 0); + + let mut m = HM::new(); + m.reserve(0); + assert_eq!(m.capacity(), 0); + } + #[test] fn test_create_capacity_zero() { let mut m = HashMap::with_capacity(0); @@ -2130,7 +2287,7 @@ mod test_map { #[derive(Hash, PartialEq, Eq)] struct Dropable { - k: usize + k: usize, } impl Dropable { @@ -2174,7 +2331,7 @@ mod test_map { for i in 0..100 { let d1 = Dropable::new(i); - let d2 = Dropable::new(i+100); + let d2 = Dropable::new(i + 100); m.insert(d1, d2); } @@ -2233,7 +2390,7 @@ mod test_map { for i in 0..100 { let d1 = Dropable::new(i); - let d2 = Dropable::new(i+100); + let d2 = Dropable::new(i + 100); hm.insert(d1, d2); } @@ -2261,13 +2418,13 @@ mod test_map { for _ in half.by_ref() {} DROP_VECTOR.with(|v| { - let nk = (0..100).filter(|&i| { - v.borrow()[i] == 1 - }).count(); + let nk = (0..100) + .filter(|&i| v.borrow()[i] == 1) + .count(); - let nv = (0..100).filter(|&i| { - v.borrow()[i+100] == 1 - }).count(); + let nv = (0..100) + .filter(|&i| v.borrow()[i + 100] == 1) + .count(); assert_eq!(nk, 50); assert_eq!(nv, 50); @@ -2324,12 +2481,12 @@ mod test_map { for i in 1..1001 { assert!(m.insert(i, i).is_none()); - for j in 1..i+1 { + for j in 1..i + 1 { let r = m.get(&j); assert_eq!(r, Some(&j)); } - for j in i+1..1001 { + for j in i + 1..1001 { let r = m.get(&j); assert_eq!(r, None); } @@ -2343,11 +2500,11 @@ mod test_map { for i in 1..1001 { assert!(m.remove(&i).is_some()); - for j in 1..i+1 { + for j in 1..i + 1 { assert!(!m.contains_key(&j)); } - for j in i+1..1001 { + for j in i + 1..1001 { assert!(m.contains_key(&j)); } } @@ -2383,7 +2540,8 @@ mod test_map { assert!(m.insert(5, 14).is_none()); let new = 100; match m.get_mut(&5) { - None => panic!(), Some(x) => *x = new + None => panic!(), + Some(x) => *x = new, } assert_eq!(m.get(&5), Some(&new)); } @@ -2502,7 +2660,7 @@ mod test_map { m.insert(1, 2); match m.get(&1) { None => panic!(), - Some(v) => assert_eq!(*v, 2) + Some(v) => assert_eq!(*v, 2), } } @@ -2547,8 +2705,8 @@ mod test_map { assert!(m.is_empty()); let mut i = 0; - let old_cap = m.table.capacity(); - while old_cap == m.table.capacity() { + let old_raw_cap = m.raw_capacity(); + while old_raw_cap == m.raw_capacity() { m.insert(i, i); i += 1; } @@ -2562,47 +2720,47 @@ mod test_map { let mut m = HashMap::new(); assert_eq!(m.len(), 0); - assert_eq!(m.table.capacity(), 0); + assert_eq!(m.raw_capacity(), 0); assert!(m.is_empty()); m.insert(0, 0); m.remove(&0); assert!(m.is_empty()); - let initial_cap = 
m.table.capacity(); - m.reserve(initial_cap); - let cap = m.table.capacity(); + let initial_raw_cap = m.raw_capacity(); + m.reserve(initial_raw_cap); + let raw_cap = m.raw_capacity(); - assert_eq!(cap, initial_cap * 2); + assert_eq!(raw_cap, initial_raw_cap * 2); let mut i = 0; - for _ in 0..cap * 3 / 4 { + for _ in 0..raw_cap * 3 / 4 { m.insert(i, i); i += 1; } // three quarters full assert_eq!(m.len(), i); - assert_eq!(m.table.capacity(), cap); + assert_eq!(m.raw_capacity(), raw_cap); - for _ in 0..cap / 4 { + for _ in 0..raw_cap / 4 { m.insert(i, i); i += 1; } // half full - let new_cap = m.table.capacity(); - assert_eq!(new_cap, cap * 2); + let new_raw_cap = m.raw_capacity(); + assert_eq!(new_raw_cap, raw_cap * 2); - for _ in 0..cap / 2 - 1 { + for _ in 0..raw_cap / 2 - 1 { i -= 1; m.remove(&i); - assert_eq!(m.table.capacity(), new_cap); + assert_eq!(m.raw_capacity(), new_raw_cap); } // A little more than one quarter full. m.shrink_to_fit(); - assert_eq!(m.table.capacity(), cap); + assert_eq!(m.raw_capacity(), raw_cap); // again, a little more than half full - for _ in 0..cap / 2 - 1 { + for _ in 0..raw_cap / 2 - 1 { i -= 1; m.remove(&i); } @@ -2610,7 +2768,7 @@ mod test_map { assert_eq!(m.len(), i); assert!(!m.is_empty()); - assert_eq!(m.table.capacity(), initial_cap); + assert_eq!(m.raw_capacity(), initial_raw_cap); } #[test] @@ -2665,7 +2823,7 @@ mod test_map { fn test_size_hint() { let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; - let map: HashMap<_, _> = xs.iter().cloned().collect(); + let map: HashMap<_, _> = xs.iter().cloned().collect(); let mut iter = map.iter(); @@ -2678,7 +2836,7 @@ mod test_map { fn test_iter_len() { let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; - let map: HashMap<_, _> = xs.iter().cloned().collect(); + let map: HashMap<_, _> = xs.iter().cloned().collect(); let mut iter = map.iter(); @@ -2691,7 +2849,7 @@ mod test_map { fn test_mut_size_hint() { let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; - let mut map: HashMap<_, _> = xs.iter().cloned().collect(); + let mut map: HashMap<_, _> = xs.iter().cloned().collect(); let mut iter = map.iter_mut(); @@ -2704,7 +2862,7 @@ mod test_map { fn test_iter_mut_len() { let xs = [(1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6)]; - let mut map: HashMap<_, _> = xs.iter().cloned().collect(); + let mut map: HashMap<_, _> = xs.iter().cloned().collect(); let mut iter = map.iter_mut(); @@ -2737,7 +2895,7 @@ mod test_map { } #[test] - fn test_entry(){ + fn test_entry() { let xs = [(1, 10), (2, 20), (3, 30), (4, 40), (5, 50), (6, 60)]; let mut map: HashMap<_, _> = xs.iter().cloned().collect(); @@ -2811,11 +2969,11 @@ mod test_map { for i in 0..1000 { let x = rng.gen_range(-10, 10); match m.entry(x) { - Vacant(_) => {}, + Vacant(_) => {} Occupied(e) => { println!("{}: remove {}", i, x); e.remove(); - }, + } } check(&m); @@ -2893,7 +3051,7 @@ mod test_map { Vacant(e) => { assert_eq!(key, *e.key()); e.insert(value.clone()); - }, + } } assert_eq!(a.len(), 1); assert_eq!(a[key], value); diff --git a/src/libstd/collections/hash/set.rs b/src/libstd/collections/hash/set.rs index ff56747fee6af..1ec7a4a7b639b 100644 --- a/src/libstd/collections/hash/set.rs +++ b/src/libstd/collections/hash/set.rs @@ -17,8 +17,6 @@ use ops::{BitOr, BitAnd, BitXor, Sub}; use super::Recover; use super::map::{self, HashMap, Keys, RandomState}; -const INITIAL_CAPACITY: usize = 32; - // Future Optimization (FIXME!) 
// ============================= // @@ -100,10 +98,24 @@ const INITIAL_CAPACITY: usize = 32; /// println!("{:?}", x); /// } /// ``` +/// +/// HashSet with fixed list of elements can be initialized from an array: +/// +/// ``` +/// use std::collections::HashSet; +/// +/// fn main() { +/// let viking_names: HashSet<&str> = +/// [ "Einar", "Olaf", "Harald" ].iter().cloned().collect(); +/// // use the values stored in the set +/// } +/// ``` + + #[derive(Clone)] #[stable(feature = "rust1", since = "1.0.0")] pub struct HashSet { - map: HashMap + map: HashMap, } impl HashSet { @@ -118,11 +130,13 @@ impl HashSet { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn new() -> HashSet { - HashSet::with_capacity(INITIAL_CAPACITY) + HashSet { map: HashMap::new() } } - /// Creates an empty HashSet with space for at least `n` elements in - /// the hash table. + /// Creates an empty `HashSet` with the specified capacity. + /// + /// The hash set will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash set will not allocate. /// /// # Examples /// @@ -138,7 +152,8 @@ impl HashSet { } impl HashSet - where T: Eq + Hash, S: BuildHasher + where T: Eq + Hash, + S: BuildHasher { /// Creates a new empty hash set which will use the given hasher to hash /// keys. @@ -163,11 +178,14 @@ impl HashSet #[inline] #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] pub fn with_hasher(hasher: S) -> HashSet { - HashSet::with_capacity_and_hasher(INITIAL_CAPACITY, hasher) + HashSet { map: HashMap::with_hasher(hasher) } } - /// Creates an empty HashSet with space for at least `capacity` - /// elements in the hash table, using `hasher` to hash the keys. + /// Creates an empty HashSet with with the specified capacity, using + /// `hasher` to hash the keys. + /// + /// The hash set will be able to hold at least `capacity` elements without + /// reallocating. If `capacity` is 0, the hash set will not allocate. /// /// Warning: `hasher` is normally randomly generated, and /// is designed to allow `HashSet`s to be resistant to attacks that @@ -186,11 +204,8 @@ impl HashSet /// ``` #[inline] #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] - pub fn with_capacity_and_hasher(capacity: usize, hasher: S) - -> HashSet { - HashSet { - map: HashMap::with_capacity_and_hasher(capacity, hasher), - } + pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet { + HashSet { map: HashMap::with_capacity_and_hasher(capacity, hasher) } } /// Returns a reference to the set's hasher. @@ -327,8 +342,9 @@ impl HashSet /// assert_eq!(diff1, [1, 4].iter().cloned().collect()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn symmetric_difference<'a>(&'a self, other: &'a HashSet) - -> SymmetricDifference<'a, T, S> { + pub fn symmetric_difference<'a>(&'a self, + other: &'a HashSet) + -> SymmetricDifference<'a, T, S> { SymmetricDifference { iter: self.difference(other).chain(other.difference(self)) } } @@ -392,7 +408,9 @@ impl HashSet /// assert_eq!(v.len(), 1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn len(&self) -> usize { self.map.len() } + pub fn len(&self) -> usize { + self.map.len() + } /// Returns true if the set contains no elements. 
/// @@ -407,7 +425,9 @@ impl HashSet /// assert!(!v.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn is_empty(&self) -> bool { self.map.is_empty() } + pub fn is_empty(&self) -> bool { + self.map.is_empty() + } /// Clears the set, returning all elements in an iterator. #[inline] @@ -429,7 +449,9 @@ impl HashSet /// assert!(v.is_empty()); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn clear(&mut self) { self.map.clear() } + pub fn clear(&mut self) { + self.map.clear() + } /// Returns `true` if the set contains a value. /// @@ -448,7 +470,8 @@ impl HashSet /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn contains(&self, value: &Q) -> bool - where T: Borrow, Q: Hash + Eq + where T: Borrow, + Q: Hash + Eq { self.map.contains_key(value) } @@ -460,7 +483,8 @@ impl HashSet /// the value type. #[stable(feature = "set_recovery", since = "1.9.0")] pub fn get(&self, value: &Q) -> Option<&T> - where T: Borrow, Q: Hash + Eq + where T: Borrow, + Q: Hash + Eq { Recover::get(&self.map, value) } @@ -551,7 +575,9 @@ impl HashSet /// assert_eq!(set.len(), 1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn insert(&mut self, value: T) -> bool { self.map.insert(value, ()).is_none() } + pub fn insert(&mut self, value: T) -> bool { + self.map.insert(value, ()).is_none() + } /// Adds a value to the set, replacing the existing value, if any, that is equal to the given /// one. Returns the replaced value. @@ -580,7 +606,8 @@ impl HashSet /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn remove(&mut self, value: &Q) -> bool - where T: Borrow, Q: Hash + Eq + where T: Borrow, + Q: Hash + Eq { self.map.remove(value).is_some() } @@ -592,7 +619,8 @@ impl HashSet /// the value type. #[stable(feature = "set_recovery", since = "1.9.0")] pub fn take(&mut self, value: &Q) -> Option - where T: Borrow, Q: Hash + Eq + where T: Borrow, + Q: Hash + Eq { Recover::take(&mut self.map, value) } @@ -600,10 +628,13 @@ impl HashSet #[stable(feature = "rust1", since = "1.0.0")] impl PartialEq for HashSet - where T: Eq + Hash, S: BuildHasher + where T: Eq + Hash, + S: BuildHasher { fn eq(&self, other: &HashSet) -> bool { - if self.len() != other.len() { return false; } + if self.len() != other.len() { + return false; + } self.iter().all(|key| other.contains(key)) } @@ -611,8 +642,10 @@ impl PartialEq for HashSet #[stable(feature = "rust1", since = "1.0.0")] impl Eq for HashSet - where T: Eq + Hash, S: BuildHasher -{} + where T: Eq + Hash, + S: BuildHasher +{ +} #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for HashSet @@ -627,9 +660,9 @@ impl fmt::Debug for HashSet #[stable(feature = "rust1", since = "1.0.0")] impl FromIterator for HashSet where T: Eq + Hash, - S: BuildHasher + Default, + S: BuildHasher + Default { - fn from_iter>(iter: I) -> HashSet { + fn from_iter>(iter: I) -> HashSet { let iterator = iter.into_iter(); let lower = iterator.size_hint().0; let mut set = HashSet::with_capacity_and_hasher(lower, Default::default()); @@ -641,9 +674,9 @@ impl FromIterator for HashSet #[stable(feature = "rust1", since = "1.0.0")] impl Extend for HashSet where T: Eq + Hash, - S: BuildHasher, + S: BuildHasher { - fn extend>(&mut self, iter: I) { + fn extend>(&mut self, iter: I) { for k in iter { self.insert(k); } @@ -653,9 +686,9 @@ impl Extend for HashSet #[stable(feature = "hash_extend_copy", since = "1.4.0")] impl<'a, T, S> Extend<&'a T> for HashSet where T: 'a + Eq + Hash + Copy, - S: BuildHasher, + S: BuildHasher { - fn extend>(&mut self, iter: 
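The `Borrow`-based lookup methods shown above (`contains`, `get`, `remove`, `take`) accept any borrowed form of the element type; a short sketch of what that buys in practice:

```rust
use std::collections::HashSet;

fn main() {
    let mut langs: HashSet<String> = HashSet::new();
    langs.insert("Rust".to_string());
    langs.insert("C".to_string());

    // A &str works for lookups even though the set stores String values.
    assert!(langs.contains("Rust"));
    assert_eq!(langs.get("C").map(String::as_str), Some("C"));

    // take() removes and returns the stored value itself...
    let owned = langs.take("Rust");
    assert_eq!(owned, Some("Rust".to_string()));

    // ...while remove() just reports whether anything was removed.
    assert!(langs.remove("C"));
    assert!(!langs.remove("C"));
    assert!(langs.is_empty());
}
```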
I) { + fn extend>(&mut self, iter: I) { self.extend(iter.into_iter().cloned()); } } @@ -663,18 +696,18 @@ impl<'a, T, S> Extend<&'a T> for HashSet #[stable(feature = "rust1", since = "1.0.0")] impl Default for HashSet where T: Eq + Hash, - S: BuildHasher + Default, + S: BuildHasher + Default { /// Creates an empty `HashSet` with the `Default` value for the hasher. fn default() -> HashSet { - HashSet::with_hasher(Default::default()) + HashSet { map: HashMap::default() } } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, 'b, T, S> BitOr<&'b HashSet> for &'a HashSet where T: Eq + Hash + Clone, - S: BuildHasher + Default, + S: BuildHasher + Default { type Output = HashSet; @@ -706,7 +739,7 @@ impl<'a, 'b, T, S> BitOr<&'b HashSet> for &'a HashSet #[stable(feature = "rust1", since = "1.0.0")] impl<'a, 'b, T, S> BitAnd<&'b HashSet> for &'a HashSet where T: Eq + Hash + Clone, - S: BuildHasher + Default, + S: BuildHasher + Default { type Output = HashSet; @@ -738,7 +771,7 @@ impl<'a, 'b, T, S> BitAnd<&'b HashSet> for &'a HashSet #[stable(feature = "rust1", since = "1.0.0")] impl<'a, 'b, T, S> BitXor<&'b HashSet> for &'a HashSet where T: Eq + Hash + Clone, - S: BuildHasher + Default, + S: BuildHasher + Default { type Output = HashSet; @@ -770,7 +803,7 @@ impl<'a, 'b, T, S> BitXor<&'b HashSet> for &'a HashSet #[stable(feature = "rust1", since = "1.0.0")] impl<'a, 'b, T, S> Sub<&'b HashSet> for &'a HashSet where T: Eq + Hash + Clone, - S: BuildHasher + Default, + S: BuildHasher + Default { type Output = HashSet; @@ -802,13 +835,13 @@ impl<'a, 'b, T, S> Sub<&'b HashSet> for &'a HashSet /// HashSet iterator #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, K: 'a> { - iter: Keys<'a, K, ()> + iter: Keys<'a, K, ()>, } /// HashSet move iterator #[stable(feature = "rust1", since = "1.0.0")] pub struct IntoIter { - iter: map::IntoIter + iter: map::IntoIter, } /// HashSet drain iterator @@ -838,18 +871,19 @@ pub struct Difference<'a, T: 'a, S: 'a> { /// Symmetric difference iterator. #[stable(feature = "rust1", since = "1.0.0")] pub struct SymmetricDifference<'a, T: 'a, S: 'a> { - iter: Chain, Difference<'a, T, S>> + iter: Chain, Difference<'a, T, S>>, } /// Set union iterator. 
#[stable(feature = "rust1", since = "1.0.0")] pub struct Union<'a, T: 'a, S: 'a> { - iter: Chain, Difference<'a, T, S>> + iter: Chain, Difference<'a, T, S>>, } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, S> IntoIterator for &'a HashSet - where T: Eq + Hash, S: BuildHasher + where T: Eq + Hash, + S: BuildHasher { type Item = &'a T; type IntoIter = Iter<'a, T>; @@ -894,18 +928,26 @@ impl IntoIterator for HashSet #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K> Clone for Iter<'a, K> { - fn clone(&self) -> Iter<'a, K> { Iter { iter: self.iter.clone() } } + fn clone(&self) -> Iter<'a, K> { + Iter { iter: self.iter.clone() } + } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K> Iterator for Iter<'a, K> { type Item = &'a K; - fn next(&mut self) -> Option<&'a K> { self.iter.next() } - fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } + fn next(&mut self) -> Option<&'a K> { + self.iter.next() + } + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K> ExactSizeIterator for Iter<'a, K> { - fn len(&self) -> usize { self.iter.len() } + fn len(&self) -> usize { + self.iter.len() + } } #[unstable(feature = "fused", issue = "35602")] impl<'a, K> FusedIterator for Iter<'a, K> {} @@ -914,12 +956,18 @@ impl<'a, K> FusedIterator for Iter<'a, K> {} impl Iterator for IntoIter { type Item = K; - fn next(&mut self) -> Option { self.iter.next().map(|(k, _)| k) } - fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } + fn next(&mut self) -> Option { + self.iter.next().map(|(k, _)| k) + } + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl ExactSizeIterator for IntoIter { - fn len(&self) -> usize { self.iter.len() } + fn len(&self) -> usize { + self.iter.len() + } } #[unstable(feature = "fused", issue = "35602")] impl FusedIterator for IntoIter {} @@ -928,12 +976,18 @@ impl FusedIterator for IntoIter {} impl<'a, K> Iterator for Drain<'a, K> { type Item = K; - fn next(&mut self) -> Option { self.iter.next().map(|(k, _)| k) } - fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } + fn next(&mut self) -> Option { + self.iter.next().map(|(k, _)| k) + } + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } } #[stable(feature = "rust1", since = "1.0.0")] impl<'a, K> ExactSizeIterator for Drain<'a, K> { - fn len(&self) -> usize { self.iter.len() } + fn len(&self) -> usize { + self.iter.len() + } } #[unstable(feature = "fused", issue = "35602")] impl<'a, K> FusedIterator for Drain<'a, K> {} @@ -947,7 +1001,8 @@ impl<'a, T, S> Clone for Intersection<'a, T, S> { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, S> Iterator for Intersection<'a, T, S> - where T: Eq + Hash, S: BuildHasher + where T: Eq + Hash, + S: BuildHasher { type Item = &'a T; @@ -955,9 +1010,11 @@ impl<'a, T, S> Iterator for Intersection<'a, T, S> loop { match self.iter.next() { None => return None, - Some(elt) => if self.other.contains(elt) { - return Some(elt) - }, + Some(elt) => { + if self.other.contains(elt) { + return Some(elt); + } + } } } } @@ -970,8 +1027,10 @@ impl<'a, T, S> Iterator for Intersection<'a, T, S> #[unstable(feature = "fused", issue = "35602")] impl<'a, T, S> FusedIterator for Intersection<'a, T, S> - where T: Eq + Hash, S: BuildHasher -{} + where T: Eq + Hash, + S: BuildHasher +{ +} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, S> Clone for Difference<'a, T, S> { @@ -982,7 +1041,8 
@@ impl<'a, T, S> Clone for Difference<'a, T, S> { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, S> Iterator for Difference<'a, T, S> - where T: Eq + Hash, S: BuildHasher + where T: Eq + Hash, + S: BuildHasher { type Item = &'a T; @@ -990,9 +1050,11 @@ impl<'a, T, S> Iterator for Difference<'a, T, S> loop { match self.iter.next() { None => return None, - Some(elt) => if !self.other.contains(elt) { - return Some(elt) - }, + Some(elt) => { + if !self.other.contains(elt) { + return Some(elt); + } + } } } } @@ -1005,8 +1067,10 @@ impl<'a, T, S> Iterator for Difference<'a, T, S> #[unstable(feature = "fused", issue = "35602")] impl<'a, T, S> FusedIterator for Difference<'a, T, S> - where T: Eq + Hash, S: BuildHasher -{} + where T: Eq + Hash, + S: BuildHasher +{ +} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, S> Clone for SymmetricDifference<'a, T, S> { @@ -1017,58 +1081,123 @@ impl<'a, T, S> Clone for SymmetricDifference<'a, T, S> { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, S> Iterator for SymmetricDifference<'a, T, S> - where T: Eq + Hash, S: BuildHasher + where T: Eq + Hash, + S: BuildHasher { type Item = &'a T; - fn next(&mut self) -> Option<&'a T> { self.iter.next() } - fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } + fn next(&mut self) -> Option<&'a T> { + self.iter.next() + } + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } } #[unstable(feature = "fused", issue = "35602")] impl<'a, T, S> FusedIterator for SymmetricDifference<'a, T, S> - where T: Eq + Hash, S: BuildHasher -{} + where T: Eq + Hash, + S: BuildHasher +{ +} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, S> Clone for Union<'a, T, S> { - fn clone(&self) -> Union<'a, T, S> { Union { iter: self.iter.clone() } } + fn clone(&self) -> Union<'a, T, S> { + Union { iter: self.iter.clone() } + } } #[unstable(feature = "fused", issue = "35602")] impl<'a, T, S> FusedIterator for Union<'a, T, S> - where T: Eq + Hash, S: BuildHasher -{} + where T: Eq + Hash, + S: BuildHasher +{ +} #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T, S> Iterator for Union<'a, T, S> - where T: Eq + Hash, S: BuildHasher + where T: Eq + Hash, + S: BuildHasher { type Item = &'a T; - fn next(&mut self) -> Option<&'a T> { self.iter.next() } - fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } + fn next(&mut self) -> Option<&'a T> { + self.iter.next() + } + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } } #[allow(dead_code)] fn assert_covariance() { - fn set<'new>(v: HashSet<&'static str>) -> HashSet<&'new str> { v } - fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> { v } - fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> { v } + fn set<'new>(v: HashSet<&'static str>) -> HashSet<&'new str> { + v + } + fn iter<'a, 'new>(v: Iter<'a, &'static str>) -> Iter<'a, &'new str> { + v + } + fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> { + v + } fn difference<'a, 'new>(v: Difference<'a, &'static str, RandomState>) - -> Difference<'a, &'new str, RandomState> { v } + -> Difference<'a, &'new str, RandomState> { + v + } fn symmetric_difference<'a, 'new>(v: SymmetricDifference<'a, &'static str, RandomState>) - -> SymmetricDifference<'a, &'new str, RandomState> { v } + -> SymmetricDifference<'a, &'new str, RandomState> { + v + } fn intersection<'a, 'new>(v: Intersection<'a, &'static str, RandomState>) - -> Intersection<'a, &'new str, RandomState> { v } + -> Intersection<'a, &'new str, 
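The `Difference`, `SymmetricDifference`, `Intersection` and `Union` types above are lazy iterator adapters over the underlying maps; a small sketch of how they are typically consumed (sorting only so the asserts are order-independent):

```rust
use std::collections::HashSet;

fn main() {
    let a: HashSet<i32> = (1..7).collect();  // {1, 2, 3, 4, 5, 6}
    let b: HashSet<i32> = (4..10).collect(); // {4, 5, 6, 7, 8, 9}

    // difference() and friends borrow both sets and yield references lazily.
    let mut only_in_a: Vec<i32> = a.difference(&b).cloned().collect();
    only_in_a.sort();
    assert_eq!(only_in_a, [1, 2, 3]);

    let mut either_but_not_both: Vec<i32> = a.symmetric_difference(&b).cloned().collect();
    either_but_not_both.sort();
    assert_eq!(either_but_not_both, [1, 2, 3, 7, 8, 9]);

    // union() chains a's iterator with b.difference(&a), so nothing repeats.
    assert_eq!(a.union(&b).count(), 9);
    assert_eq!(a.intersection(&b).count(), 3);
}
```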
RandomState> { + v + } fn union<'a, 'new>(v: Union<'a, &'static str, RandomState>) - -> Union<'a, &'new str, RandomState> { v } - fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { d } + -> Union<'a, &'new str, RandomState> { + v + } + fn drain<'new>(d: Drain<'static, &'static str>) -> Drain<'new, &'new str> { + d + } } #[cfg(test)] mod test_set { use super::HashSet; + use super::super::map::RandomState; + + #[test] + fn test_zero_capacities() { + type HS = HashSet; + + let s = HS::new(); + assert_eq!(s.capacity(), 0); + + let s = HS::default(); + assert_eq!(s.capacity(), 0); + + let s = HS::with_hasher(RandomState::new()); + assert_eq!(s.capacity(), 0); + + let s = HS::with_capacity(0); + assert_eq!(s.capacity(), 0); + + let s = HS::with_capacity_and_hasher(0, RandomState::new()); + assert_eq!(s.capacity(), 0); + + let mut s = HS::new(); + s.insert(1); + s.insert(2); + s.remove(&1); + s.remove(&2); + s.shrink_to_fit(); + assert_eq!(s.capacity(), 0); + + let mut s = HS::new(); + s.reserve(0); + assert_eq!(s.capacity(), 0); + } #[test] fn test_disjoint() { @@ -1335,7 +1464,9 @@ mod test_set { assert_eq!(last_i, 49); } - for _ in &s { panic!("s should be empty!"); } + for _ in &s { + panic!("s should be empty!"); + } // reset to try again. s.extend(1..100); diff --git a/src/libstd/collections/hash/table.rs b/src/libstd/collections/hash/table.rs index 8f02c9c7d3de0..ec0e457dc6aa3 100644 --- a/src/libstd/collections/hash/table.rs +++ b/src/libstd/collections/hash/table.rs @@ -21,13 +21,24 @@ use ptr::{self, Unique, Shared}; use self::BucketState::*; -const EMPTY_BUCKET: u64 = 0; +/// Integer type used for stored hash values. +/// +/// No more than bit_width(usize) bits are needed to select a bucket. +/// +/// The most significant bit is ours to use for tagging `SafeHash`. +/// +/// (Even if we could have usize::MAX bytes allocated for buckets, +/// each bucket stores at least a `HashUint`, so there can be no more than +/// usize::MAX / size_of(usize) buckets.) +type HashUint = usize; + +const EMPTY_BUCKET: HashUint = 0; /// The raw hashtable, providing safe-ish access to the unzipped and highly -/// optimized arrays of hashes, keys, and values. +/// optimized arrays of hashes, and key-value pairs. /// -/// This design uses less memory and is a lot faster than the naive -/// `Vec>`, because we don't pay for the overhead of an +/// This design is a lot faster than the naive +/// `Vec>`, because we don't pay for the overhead of an /// option on every element, and we get a generally more cache-aware design. /// /// Essential invariants of this structure: @@ -48,22 +59,23 @@ const EMPTY_BUCKET: u64 = 0; /// which will likely map to the same bucket, while not being confused /// with "empty". /// -/// - All three "arrays represented by pointers" are the same length: +/// - Both "arrays represented by pointers" are the same length: /// `capacity`. This is set at creation and never changes. The arrays -/// are unzipped to save space (we don't have to pay for the padding -/// between odd sized elements, such as in a map from u64 to u8), and -/// be more cache aware (scanning through 8 hashes brings in at most -/// 2 cache lines, since they're all right beside each other). +/// are unzipped and are more cache aware (scanning through 8 hashes +/// brings in at most 2 cache lines, since they're all right beside each +/// other). 
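Alongside the new zero-capacity tests, a quick sketch of `drain()`, which the set tests above also rely on; it empties the set by value, while the backing table is only shrunk on an explicit `shrink_to_fit()`:

```rust
use std::collections::HashSet;

fn main() {
    let mut s: HashSet<i32> = (0..50).collect();

    // drain() empties the set while yielding the elements by value.
    let drained: Vec<i32> = s.drain().collect();
    assert_eq!(drained.len(), 50);
    assert!(s.is_empty());

    // The table is not shrunk automatically, so refilling can reuse it.
    s.extend(0..50);
    assert_eq!(s.len(), 50);
}
```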
This layout may waste space in padding such as in a map from +/// u64 to u8, but is a more cache conscious layout as the key-value pairs +/// are only very shortly probed and the desired value will be in the same +/// or next cache line. /// /// You can kind of think of this module/data structure as a safe wrapper /// around just the "table" part of the hashtable. It enforces some /// invariants at the type level and employs some performance trickery, -/// but in general is just a tricked out `Vec>`. -#[cfg_attr(stage0, unsafe_no_drop_flag)] +/// but in general is just a tricked out `Vec>`. pub struct RawTable { capacity: usize, size: usize, - hashes: Unique, + hashes: Unique, // Because K/V do not appear directly in any of the types in the struct, // inform rustc that in fact instances of K and V are reachable from here. @@ -74,11 +86,9 @@ unsafe impl Send for RawTable {} unsafe impl Sync for RawTable {} struct RawBucket { - hash: *mut u64, - + hash: *mut HashUint, // We use *const to ensure covariance with respect to K and V - key: *const K, - val: *const V, + pair: *const (K, V), _marker: marker::PhantomData<(K, V)>, } @@ -137,15 +147,27 @@ pub struct GapThenFull { /// buckets. #[derive(PartialEq, Copy, Clone)] pub struct SafeHash { - hash: u64, + hash: HashUint, } impl SafeHash { /// Peek at the hash value, which is guaranteed to be non-zero. #[inline(always)] - pub fn inspect(&self) -> u64 { + pub fn inspect(&self) -> HashUint { self.hash } + + #[inline(always)] + pub fn new(hash: u64) -> Self { + // We need to avoid 0 in order to prevent collisions with + // EMPTY_HASH. We can maintain our precious uniform distribution + // of initial indexes by unconditionally setting the MSB, + // effectively reducing the hashes by one bit. + // + // Truncate hash to fit in `HashUint`. + let hash_bits = size_of::() * 8; + SafeHash { hash: (1 << (hash_bits - 1)) | (hash as HashUint) } + } } /// We need to remove hashes of 0. That's reserved for empty buckets. @@ -157,33 +179,28 @@ pub fn make_hash(hash_state: &S, t: &T) -> SafeHash { let mut state = hash_state.build_hasher(); t.hash(&mut state); - // We need to avoid 0 in order to prevent collisions with - // EMPTY_HASH. We can maintain our precious uniform distribution - // of initial indexes by unconditionally setting the MSB, - // effectively reducing 64-bits hashes to 63 bits. - SafeHash { hash: 0x8000_0000_0000_0000 | state.finish() } + SafeHash::new(state.finish()) } -// `replace` casts a `*u64` to a `*SafeHash`. Since we statically +// `replace` casts a `*HashUint` to a `*SafeHash`. Since we statically // ensure that a `FullBucket` points to an index with a non-zero hash, -// and a `SafeHash` is just a `u64` with a different name, this is +// and a `SafeHash` is just a `HashUint` with a different name, this is // safe. // // This test ensures that a `SafeHash` really IS the same size as a -// `u64`. If you need to change the size of `SafeHash` (and +// `HashUint`. If you need to change the size of `SafeHash` (and // consequently made this test fail), `replace` needs to be // modified to no longer assume this. 
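A standalone sketch of the hash-tagging scheme `SafeHash::new` introduces above; `tag_hash` here is an illustrative reimplementation for the example, not the std function itself:

```rust
use std::mem::size_of;

type HashUint = usize;

const EMPTY_BUCKET: HashUint = 0;

// Force the most significant bit on, so a stored hash can never collide with
// EMPTY_BUCKET, at the cost of one bit of the original 64-bit hash.
fn tag_hash(hash: u64) -> HashUint {
    let hash_bits = size_of::<HashUint>() * 8;
    (1 << (hash_bits - 1)) | (hash as HashUint)
}

fn main() {
    // Even an original hash of zero becomes a non-zero stored value...
    assert_ne!(tag_hash(0), EMPTY_BUCKET);

    // ...and only the most significant bit is sacrificed: the low bits of
    // the original hash survive unchanged.
    let msb = 1 << (size_of::<HashUint>() * 8 - 1);
    assert_eq!(tag_hash(0x1234_5678) & !msb, 0x1234_5678);
    assert_ne!(tag_hash(0x1234_5678) & msb, 0);
}
```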
#[test] -fn can_alias_safehash_as_u64() { - assert_eq!(size_of::(), size_of::()) +fn can_alias_safehash_as_hash() { + assert_eq!(size_of::(), size_of::()) } impl RawBucket { unsafe fn offset(self, count: isize) -> RawBucket { RawBucket { hash: self.hash.offset(count), - key: self.key.offset(count), - val: self.val.offset(count), + pair: self.pair.offset(count), _marker: marker::PhantomData, } } @@ -371,8 +388,7 @@ impl EmptyBucket pub fn put(mut self, hash: SafeHash, key: K, value: V) -> FullBucket { unsafe { *self.raw.hash = hash.inspect(); - ptr::write(self.raw.key as *mut K, key); - ptr::write(self.raw.val as *mut V, value); + ptr::write(self.raw.pair as *mut (K, V), (key, value)); self.table.borrow_table_mut().size += 1; } @@ -431,7 +447,7 @@ impl>> FullBucket { /// Gets references to the key and value at a given index. pub fn read(&self) -> (&K, &V) { - unsafe { (&*self.raw.key, &*self.raw.val) } + unsafe { (&(*self.raw.pair).0, &(*self.raw.pair).1) } } } @@ -448,13 +464,14 @@ impl<'t, K, V> FullBucket> { unsafe { *self.raw.hash = EMPTY_BUCKET; + let (k, v) = ptr::read(self.raw.pair); (EmptyBucket { - raw: self.raw, - idx: self.idx, - table: self.table, - }, - ptr::read(self.raw.key), - ptr::read(self.raw.val)) + raw: self.raw, + idx: self.idx, + table: self.table, + }, + k, + v) } } } @@ -467,8 +484,7 @@ impl FullBucket pub fn replace(&mut self, h: SafeHash, k: K, v: V) -> (SafeHash, K, V) { unsafe { let old_hash = ptr::replace(self.raw.hash as *mut SafeHash, h); - let old_key = ptr::replace(self.raw.key as *mut K, k); - let old_val = ptr::replace(self.raw.val as *mut V, v); + let (old_key, old_val) = ptr::replace(self.raw.pair as *mut (K, V), (k, v)); (old_hash, old_key, old_val) } @@ -480,7 +496,8 @@ impl FullBucket { /// Gets mutable references to the key and value at a given index. pub fn read_mut(&mut self) -> (&mut K, &mut V) { - unsafe { (&mut *(self.raw.key as *mut K), &mut *(self.raw.val as *mut V)) } + let pair_mut = self.raw.pair as *mut (K, V); + unsafe { (&mut (*pair_mut).0, &mut (*pair_mut).1) } } } @@ -493,7 +510,7 @@ impl<'t, K, V, M> FullBucket /// in exchange for this, the returned references have a longer lifetime /// than the references returned by `read()`. pub fn into_refs(self) -> (&'t K, &'t V) { - unsafe { (&*self.raw.key, &*self.raw.val) } + unsafe { (&(*self.raw.pair).0, &(*self.raw.pair).1) } } } @@ -503,7 +520,8 @@ impl<'t, K, V, M> FullBucket /// This works similarly to `into_refs`, exchanging a bucket state /// for mutable references into the table. pub fn into_mut_refs(self) -> (&'t mut K, &'t mut V) { - unsafe { (&mut *(self.raw.key as *mut K), &mut *(self.raw.val as *mut V)) } + let pair_mut = self.raw.pair as *mut (K, V); + unsafe { (&mut (*pair_mut).0, &mut (*pair_mut).1) } } } @@ -518,8 +536,7 @@ impl GapThenFull pub fn shift(mut self) -> Option> { unsafe { *self.gap.raw.hash = mem::replace(&mut *self.full.raw.hash, EMPTY_BUCKET); - ptr::copy_nonoverlapping(self.full.raw.key, self.gap.raw.key as *mut K, 1); - ptr::copy_nonoverlapping(self.full.raw.val, self.gap.raw.val as *mut V, 1); + ptr::copy_nonoverlapping(self.full.raw.pair, self.gap.raw.pair as *mut (K, V), 1); } let FullBucket { raw: prev_raw, idx: prev_idx, .. } = self.full; @@ -561,49 +578,42 @@ fn test_rounding() { assert_eq!(round_up_to_next(5, 4), 8); } -// Returns a tuple of (key_offset, val_offset), +// Returns a tuple of (pairs_offset, end_of_pairs_offset), // from the start of a mallocated array. 
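The switch from separate key and value arrays to a single `(K, V)` pair array trades a little padding for locality, as the updated comment notes; a small sketch making that trade-off concrete for a `u64 -> u8` map (the byte counts in the comment assume a typical 64-bit target):

```rust
use std::mem::size_of;

fn main() {
    type K = u64;
    type V = u8;

    // Old layout: unzipped hash, key and value arrays. Per bucket:
    // one hash + one key + one value, with no per-pair padding.
    let unzipped = size_of::<usize>() + size_of::<K>() + size_of::<V>();

    // New layout: an unzipped hash array plus an array of (K, V) pairs,
    // each pair padded to its own alignment.
    let zipped = size_of::<usize>() + size_of::<(K, V)>();

    // On a typical 64-bit target that is 17 vs 24 bytes per bucket: some
    // padding is wasted, but the key and its value share a cache line.
    assert!(zipped >= unzipped);
    println!("unzipped: {} bytes/bucket, zipped: {} bytes/bucket", unzipped, zipped);
}
```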
#[inline] fn calculate_offsets(hashes_size: usize, - keys_size: usize, - keys_align: usize, - vals_align: usize) + pairs_size: usize, + pairs_align: usize) -> (usize, usize, bool) { - let keys_offset = round_up_to_next(hashes_size, keys_align); - let (end_of_keys, oflo) = keys_offset.overflowing_add(keys_size); + let pairs_offset = round_up_to_next(hashes_size, pairs_align); + let (end_of_pairs, oflo) = pairs_offset.overflowing_add(pairs_size); - let vals_offset = round_up_to_next(end_of_keys, vals_align); - - (keys_offset, vals_offset, oflo) + (pairs_offset, end_of_pairs, oflo) } // Returns a tuple of (minimum required malloc alignment, hash_offset, // array_size), from the start of a mallocated array. fn calculate_allocation(hash_size: usize, hash_align: usize, - keys_size: usize, - keys_align: usize, - vals_size: usize, - vals_align: usize) + pairs_size: usize, + pairs_align: usize) -> (usize, usize, usize, bool) { let hash_offset = 0; - let (_, vals_offset, oflo) = calculate_offsets(hash_size, keys_size, keys_align, vals_align); - let (end_of_vals, oflo2) = vals_offset.overflowing_add(vals_size); + let (_, end_of_pairs, oflo) = calculate_offsets(hash_size, pairs_size, pairs_align); - let align = cmp::max(hash_align, cmp::max(keys_align, vals_align)); + let align = cmp::max(hash_align, pairs_align); - (align, hash_offset, end_of_vals, oflo || oflo2) + (align, hash_offset, end_of_pairs, oflo) } #[test] fn test_offset_calculation() { - assert_eq!(calculate_allocation(128, 8, 15, 1, 4, 4), - (8, 0, 148, false)); - assert_eq!(calculate_allocation(3, 1, 2, 1, 1, 1), (1, 0, 6, false)); - assert_eq!(calculate_allocation(6, 2, 12, 4, 24, 8), (8, 0, 48, false)); - assert_eq!(calculate_offsets(128, 15, 1, 4), (128, 144, false)); - assert_eq!(calculate_offsets(3, 2, 1, 1), (3, 5, false)); - assert_eq!(calculate_offsets(6, 12, 4, 8), (8, 24, false)); + assert_eq!(calculate_allocation(128, 8, 16, 8), (8, 0, 144, false)); + assert_eq!(calculate_allocation(3, 1, 2, 1), (1, 0, 5, false)); + assert_eq!(calculate_allocation(6, 2, 12, 4), (4, 0, 20, false)); + assert_eq!(calculate_offsets(128, 15, 4), (128, 143, false)); + assert_eq!(calculate_offsets(3, 2, 4), (4, 6, false)); + assert_eq!(calculate_offsets(6, 12, 4), (8, 20, false)); } impl RawTable { @@ -614,18 +624,17 @@ impl RawTable { return RawTable { size: 0, capacity: 0, - hashes: Unique::new(EMPTY as *mut u64), + hashes: Unique::new(EMPTY as *mut HashUint), marker: marker::PhantomData, }; } // No need for `checked_mul` before a more restrictive check performed // later in this method. - let hashes_size = capacity * size_of::(); - let keys_size = capacity * size_of::(); - let vals_size = capacity * size_of::(); + let hashes_size = capacity.wrapping_mul(size_of::()); + let pairs_size = capacity.wrapping_mul(size_of::<(K, V)>()); - // Allocating hashmaps is a little tricky. We need to allocate three + // Allocating hashmaps is a little tricky. We need to allocate two // arrays, but since we know their sizes and alignments up front, // we just allocate a single array, and then have the subarrays // point into it. @@ -633,32 +642,25 @@ impl RawTable { // This is great in theory, but in practice getting the alignment // right is a little subtle. Therefore, calculating offsets has been // factored out into a different function. 
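An illustrative reimplementation of the two-array layout math exercised by `test_offset_calculation` above (same shapes as in the patch, reproduced here only to check the expected values):

```rust
// Round `unrounded` up to the next multiple of a power-of-two alignment.
fn round_up_to_next(unrounded: usize, target_alignment: usize) -> usize {
    assert!(target_alignment.is_power_of_two());
    (unrounded + target_alignment - 1) & !(target_alignment - 1)
}

// The pair array starts at the next pairs_align boundary after the hashes,
// and the allocation ends right after the last pair.
fn calculate_offsets(hashes_size: usize,
                     pairs_size: usize,
                     pairs_align: usize)
                     -> (usize, usize, bool) {
    let pairs_offset = round_up_to_next(hashes_size, pairs_align);
    let (end_of_pairs, oflo) = pairs_offset.overflowing_add(pairs_size);
    (pairs_offset, end_of_pairs, oflo)
}

fn main() {
    // The same expected values as in the patch's test.
    assert_eq!(calculate_offsets(128, 15, 4), (128, 143, false));
    assert_eq!(calculate_offsets(3, 2, 4), (4, 6, false));
    assert_eq!(calculate_offsets(6, 12, 4), (8, 20, false));
}
```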
- let (malloc_alignment, hash_offset, size, oflo) = calculate_allocation(hashes_size, - align_of::(), - keys_size, - align_of::(), - vals_size, - align_of::()); - + let (alignment, hash_offset, size, oflo) = calculate_allocation(hashes_size, + align_of::(), + pairs_size, + align_of::<(K, V)>()); assert!(!oflo, "capacity overflow"); // One check for overflow that covers calculation and rounding of size. - let size_of_bucket = size_of::() - .checked_add(size_of::()) - .unwrap() - .checked_add(size_of::()) - .unwrap(); + let size_of_bucket = size_of::().checked_add(size_of::<(K, V)>()).unwrap(); assert!(size >= capacity.checked_mul(size_of_bucket) - .expect("capacity overflow"), + .expect("capacity overflow"), "capacity overflow"); - let buffer = allocate(size, malloc_alignment); + let buffer = allocate(size, alignment); if buffer.is_null() { ::alloc::oom() } - let hashes = buffer.offset(hash_offset as isize) as *mut u64; + let hashes = buffer.offset(hash_offset as isize) as *mut HashUint; RawTable { capacity: capacity, @@ -669,20 +671,17 @@ impl RawTable { } fn first_bucket_raw(&self) -> RawBucket { - let hashes_size = self.capacity * size_of::(); - let keys_size = self.capacity * size_of::(); - - let buffer = *self.hashes as *const u8; - let (keys_offset, vals_offset, oflo) = calculate_offsets(hashes_size, - keys_size, - align_of::(), - align_of::()); + let hashes_size = self.capacity * size_of::(); + let pairs_size = self.capacity * size_of::<(K, V)>(); + + let buffer = *self.hashes as *mut u8; + let (pairs_offset, _, oflo) = + calculate_offsets(hashes_size, pairs_size, align_of::<(K, V)>()); debug_assert!(!oflo, "capacity overflow"); unsafe { RawBucket { hash: *self.hashes, - key: buffer.offset(keys_offset as isize) as *const K, - val: buffer.offset(vals_offset as isize) as *const V, + pair: buffer.offset(pairs_offset as isize) as *const _, _marker: marker::PhantomData, } } @@ -776,7 +775,7 @@ impl RawTable { /// this interface is safe, it's not used outside this module. struct RawBuckets<'a, K, V> { raw: RawBucket, - hashes_end: *mut u64, + hashes_end: *mut HashUint, // Strictly speaking, this should be &'a (K,V), but that would // require that K:'a, and we often use RawBuckets<'static...> for @@ -822,7 +821,7 @@ impl<'a, K, V> Iterator for RawBuckets<'a, K, V> { /// the table's remaining entries. It's used in the implementation of Drop. 
struct RevMoveBuckets<'a, K, V> { raw: RawBucket, - hashes_end: *mut u64, + hashes_end: *mut HashUint, elems_left: usize, // As above, `&'a (K,V)` would seem better, but we often use @@ -847,7 +846,7 @@ impl<'a, K, V> Iterator for RevMoveBuckets<'a, K, V> { if *self.raw.hash != EMPTY_BUCKET { self.elems_left -= 1; - return Some((ptr::read(self.raw.key), ptr::read(self.raw.val))); + return Some(ptr::read(self.raw.pair)); } } } @@ -912,7 +911,7 @@ impl<'a, K, V> Iterator for Iter<'a, K, V> { fn next(&mut self) -> Option<(&'a K, &'a V)> { self.iter.next().map(|bucket| { self.elems_left -= 1; - unsafe { (&*bucket.key, &*bucket.val) } + unsafe { (&(*bucket.pair).0, &(*bucket.pair).1) } }) } @@ -932,7 +931,8 @@ impl<'a, K, V> Iterator for IterMut<'a, K, V> { fn next(&mut self) -> Option<(&'a K, &'a mut V)> { self.iter.next().map(|bucket| { self.elems_left -= 1; - unsafe { (&*bucket.key, &mut *(bucket.val as *mut V)) } + let pair_mut = bucket.pair as *mut (K, V); + unsafe { (&(*pair_mut).0, &mut (*pair_mut).1) } }) } @@ -953,7 +953,8 @@ impl Iterator for IntoIter { self.iter.next().map(|bucket| { self.table.size -= 1; unsafe { - (SafeHash { hash: *bucket.hash }, ptr::read(bucket.key), ptr::read(bucket.val)) + let (k, v) = ptr::read(bucket.pair); + (SafeHash { hash: *bucket.hash }, k, v) } }) } @@ -977,9 +978,8 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> { self.iter.next().map(|bucket| { unsafe { (**self.table).size -= 1; - (SafeHash { hash: ptr::replace(bucket.hash, EMPTY_BUCKET) }, - ptr::read(bucket.key), - ptr::read(bucket.val)) + let (k, v) = ptr::read(bucket.pair); + (SafeHash { hash: ptr::replace(bucket.hash, EMPTY_BUCKET) }, k, v) } }) } @@ -991,9 +991,7 @@ impl<'a, K, V> Iterator for Drain<'a, K, V> { } impl<'a, K, V> ExactSizeIterator for Drain<'a, K, V> { fn len(&self) -> usize { - unsafe { - (**self.table).size() - } + unsafe { (**self.table).size() } } } @@ -1020,8 +1018,7 @@ impl Clone for RawTable { (full.hash(), k.clone(), v.clone()) }; *new_buckets.raw.hash = h.inspect(); - ptr::write(new_buckets.raw.key as *mut K, k); - ptr::write(new_buckets.raw.val as *mut V, v); + ptr::write(new_buckets.raw.pair as *mut (K, V), (k, v)); } Empty(..) => { *new_buckets.raw.hash = EMPTY_BUCKET; @@ -1058,15 +1055,12 @@ impl Drop for RawTable { } } - let hashes_size = self.capacity * size_of::(); - let keys_size = self.capacity * size_of::(); - let vals_size = self.capacity * size_of::(); + let hashes_size = self.capacity * size_of::(); + let pairs_size = self.capacity * size_of::<(K, V)>(); let (align, _, size, oflo) = calculate_allocation(hashes_size, - align_of::(), - keys_size, - align_of::(), - vals_size, - align_of::()); + align_of::(), + pairs_size, + align_of::<(K, V)>()); debug_assert!(!oflo, "should be impossible"); diff --git a/src/libstd/collections/mod.rs b/src/libstd/collections/mod.rs index 9b13d54230078..b9e92a01b2f8e 100644 --- a/src/libstd/collections/mod.rs +++ b/src/libstd/collections/mod.rs @@ -15,7 +15,7 @@ //! standard implementations, it should be possible for two libraries to //! communicate without significant data conversion. //! -//! To get this out of the way: you should probably just use `Vec` or `HashMap`. +//! To get this out of the way: you should probably just use [`Vec`] or [`HashMap`]. //! These two collections cover most use cases for generic data storage and //! processing. They are exceptionally good at doing what they do. All the other //! collections in the standard library have specific use cases where they are @@ -25,10 +25,10 @@ //! //! 
Rust's collections can be grouped into four major categories: //! -//! * Sequences: `Vec`, `VecDeque`, `LinkedList` -//! * Maps: `HashMap`, `BTreeMap` -//! * Sets: `HashSet`, `BTreeSet` -//! * Misc: `BinaryHeap` +//! * Sequences: [`Vec`], [`VecDeque`], [`LinkedList`] +//! * Maps: [`HashMap`], [`BTreeMap`] +//! * Sets: [`HashSet`], [`BTreeSet`] +//! * Misc: [`BinaryHeap`] //! //! # When Should You Use Which Collection? //! @@ -46,13 +46,13 @@ //! * You want a heap-allocated array. //! //! ### Use a `VecDeque` when: -//! * You want a `Vec` that supports efficient insertion at both ends of the +//! * You want a [`Vec`] that supports efficient insertion at both ends of the //! sequence. //! * You want a queue. //! * You want a double-ended queue (deque). //! //! ### Use a `LinkedList` when: -//! * You want a `Vec` or `VecDeque` of unknown size, and can't tolerate +//! * You want a [`Vec`] or [`VecDeque`] of unknown size, and can't tolerate //! amortization. //! * You want to efficiently split and append lists. //! * You are *absolutely* certain you *really*, *truly*, want a doubly linked @@ -92,38 +92,38 @@ //! Throughout the documentation, we will follow a few conventions. For all //! operations, the collection's size is denoted by n. If another collection is //! involved in the operation, it contains m elements. Operations which have an -//! *amortized* cost are suffixed with a `*`. Operations with an *expected* +//! *amortized* cost are suffixed with a `*`. Operations with an *expected* //! cost are suffixed with a `~`. //! //! All amortized costs are for the potential need to resize when capacity is -//! exhausted. If a resize occurs it will take O(n) time. Our collections never +//! exhausted. If a resize occurs it will take O(n) time. Our collections never //! automatically shrink, so removal operations aren't amortized. Over a //! sufficiently large series of operations, the average cost per operation will //! deterministically equal the given cost. //! -//! Only HashMap has expected costs, due to the probabilistic nature of hashing. -//! It is theoretically possible, though very unlikely, for HashMap to +//! Only [`HashMap`] has expected costs, due to the probabilistic nature of hashing. +//! It is theoretically possible, though very unlikely, for [`HashMap`] to //! experience worse performance. //! //! ## Sequences //! -//! | | get(i) | insert(i) | remove(i) | append | split_off(i) | -//! |--------------|----------------|-----------------|----------------|--------|----------------| -//! | Vec | O(1) | O(n-i)* | O(n-i) | O(m)* | O(n-i) | -//! | VecDeque | O(1) | O(min(i, n-i))* | O(min(i, n-i)) | O(m)* | O(min(i, n-i)) | -//! | LinkedList | O(min(i, n-i)) | O(min(i, n-i)) | O(min(i, n-i)) | O(1) | O(min(i, n-i)) | +//! | | get(i) | insert(i) | remove(i) | append | split_off(i) | +//! |----------------|----------------|-----------------|----------------|--------|----------------| +//! | [`Vec`] | O(1) | O(n-i)* | O(n-i) | O(m)* | O(n-i) | +//! | [`VecDeque`] | O(1) | O(min(i, n-i))* | O(min(i, n-i)) | O(m)* | O(min(i, n-i)) | +//! | [`LinkedList`] | O(min(i, n-i)) | O(min(i, n-i)) | O(min(i, n-i)) | O(1) | O(min(i, n-i)) | //! -//! Note that where ties occur, Vec is generally going to be faster than VecDeque, and VecDeque -//! is generally going to be faster than LinkedList. +//! Note that where ties occur, [`Vec`] is generally going to be faster than [`VecDeque`], and +//! [`VecDeque`] is generally going to be faster than [`LinkedList`]. //! //! ## Maps //! //! 
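As a small companion to the sequence table above, a sketch contrasting `Vec` and `VecDeque` for stack-like versus queue-like use:

```rust
use std::collections::VecDeque;

fn main() {
    // A Vec excels at pushing and popping at the back...
    let mut stack = Vec::new();
    stack.push(1);
    stack.push(2);
    assert_eq!(stack.pop(), Some(2));

    // ...but a queue needs cheap operations at both ends, which is the
    // VecDeque row in the table: O(1) indexing plus amortized O(1) pushes
    // at either end.
    let mut queue = VecDeque::new();
    queue.push_back("first");
    queue.push_back("second");
    queue.push_front("urgent");
    assert_eq!(queue.pop_front(), Some("urgent"));
    assert_eq!(queue[0], "first");
}
```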
For Sets, all operations have the cost of the equivalent Map operation. //! -//! | | get | insert | remove | predecessor | append | -//! |----------|-----------|----------|----------|-------------|--------| -//! | HashMap | O(1)~ | O(1)~* | O(1)~ | N/A | N/A | -//! | BTreeMap | O(log n) | O(log n) | O(log n) | O(log n) | O(n+m) | +//! | | get | insert | remove | predecessor | append | +//! |--------------|-----------|----------|----------|-------------|--------| +//! | [`HashMap`] | O(1)~ | O(1)~* | O(1)~ | N/A | N/A | +//! | [`BTreeMap`] | O(log n) | O(log n) | O(log n) | O(log n) | O(n+m) | //! //! # Correct and Efficient Usage of Collections //! @@ -136,7 +136,7 @@ //! ## Capacity Management //! //! Many collections provide several constructors and methods that refer to -//! "capacity". These collections are generally built on top of an array. +//! "capacity". These collections are generally built on top of an array. //! Optimally, this array would be exactly the right size to fit only the //! elements stored in the collection, but for the collection to do this would //! be very inefficient. If the backing array was exactly the right size at all @@ -157,29 +157,29 @@ //! information to do this itself. Therefore, it is up to us programmers to give //! it hints. //! -//! Any `with_capacity` constructor will instruct the collection to allocate +//! Any `with_capacity()` constructor will instruct the collection to allocate //! enough space for the specified number of elements. Ideally this will be for //! exactly that many elements, but some implementation details may prevent -//! this. `Vec` and `VecDeque` can be relied on to allocate exactly the -//! requested amount, though. Use `with_capacity` when you know exactly how many +//! this. [`Vec`] and [`VecDeque`] can be relied on to allocate exactly the +//! requested amount, though. Use `with_capacity()` when you know exactly how many //! elements will be inserted, or at least have a reasonable upper-bound on that //! number. //! -//! When anticipating a large influx of elements, the `reserve` family of +//! When anticipating a large influx of elements, the `reserve()` family of //! methods can be used to hint to the collection how much room it should make -//! for the coming items. As with `with_capacity`, the precise behavior of +//! for the coming items. As with `with_capacity()`, the precise behavior of //! these methods will be specific to the collection of interest. //! //! For optimal performance, collections will generally avoid shrinking -//! themselves. If you believe that a collection will not soon contain any more -//! elements, or just really need the memory, the `shrink_to_fit` method prompts +//! themselves. If you believe that a collection will not soon contain any more +//! elements, or just really need the memory, the `shrink_to_fit()` method prompts //! the collection to shrink the backing array to the minimum size capable of //! holding its elements. //! //! Finally, if ever you're interested in what the actual capacity of the -//! collection is, most collections provide a `capacity` method to query this -//! information on demand. This can be useful for debugging purposes, or for -//! use with the `reserve` methods. +//! collection is, most collections provide a `capacity()` method to query this +//! information on demand. This can be useful for debugging purposes, or for +//! use with the `reserve()` methods. //! //! ## Iterators //! @@ -194,15 +194,15 @@ //! //! 
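A short sketch of the capacity-management guidance above, using `Vec` since it is documented here to allocate exactly the requested amount:

```rust
fn main() {
    // Ask for the space up front when the final size is known...
    let mut v: Vec<u32> = Vec::with_capacity(1000);
    let initial_cap = v.capacity();
    assert!(initial_cap >= 1000);

    // ...so that pushing those elements never triggers a reallocation.
    for i in 0..1000 {
        v.push(i);
    }
    assert_eq!(v.capacity(), initial_cap);

    // reserve() hints at a coming influx of additional elements.
    v.reserve(500);
    assert!(v.capacity() >= 1500);

    // Collections never shrink on their own; shrink_to_fit() gives the
    // excess back once the extra room is no longer needed.
    v.truncate(10);
    v.shrink_to_fit();
    assert!(v.capacity() >= v.len());
}
```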
All of the standard collections provide several iterators for performing //! bulk manipulation of their contents. The three primary iterators almost -//! every collection should provide are `iter`, `iter_mut`, and `into_iter`. +//! every collection should provide are `iter()`, `iter_mut()`, and `into_iter()`. //! Some of these are not provided on collections where it would be unsound or //! unreasonable to provide them. //! -//! `iter` provides an iterator of immutable references to all the contents of a -//! collection in the most "natural" order. For sequence collections like `Vec`, +//! `iter()` provides an iterator of immutable references to all the contents of a +//! collection in the most "natural" order. For sequence collections like [`Vec`], //! this means the items will be yielded in increasing order of index starting -//! at 0. For ordered collections like `BTreeMap`, this means that the items -//! will be yielded in sorted order. For unordered collections like `HashMap`, +//! at 0. For ordered collections like [`BTreeMap`], this means that the items +//! will be yielded in sorted order. For unordered collections like [`HashMap`], //! the items will be yielded in whatever order the internal representation made //! most convenient. This is great for reading through all the contents of the //! collection. @@ -214,8 +214,8 @@ //! } //! ``` //! -//! `iter_mut` provides an iterator of *mutable* references in the same order as -//! `iter`. This is great for mutating all the contents of the collection. +//! `iter_mut()` provides an iterator of *mutable* references in the same order as +//! `iter()`. This is great for mutating all the contents of the collection. //! //! ``` //! let mut vec = vec![1, 2, 3, 4]; @@ -224,12 +224,12 @@ //! } //! ``` //! -//! `into_iter` transforms the actual collection into an iterator over its +//! `into_iter()` transforms the actual collection into an iterator over its //! contents by-value. This is great when the collection itself is no longer -//! needed, and the values are needed elsewhere. Using `extend` with `into_iter` +//! needed, and the values are needed elsewhere. Using `extend()` with `into_iter()` //! is the main way that contents of one collection are moved into another. -//! `extend` automatically calls `into_iter`, and takes any `T: IntoIterator`. -//! Calling `collect` on an iterator itself is also a great way to convert one +//! `extend()` automatically calls `into_iter()`, and takes any `T: `[`IntoIterator`]. +//! Calling `collect()` on an iterator itself is also a great way to convert one //! collection into another. Both of these methods should internally use the //! capacity management tools discussed in the previous section to do this as //! efficiently as possible. @@ -248,9 +248,9 @@ //! ``` //! //! Iterators also provide a series of *adapter* methods for performing common -//! threads to sequences. Among the adapters are functional favorites like `map`, -//! `fold`, `skip`, and `take`. Of particular interest to collections is the -//! `rev` adapter, that reverses any iterator that supports this operation. Most +//! threads to sequences. Among the adapters are functional favorites like `map()`, +//! `fold()`, `skip()` and `take()`. Of particular interest to collections is the +//! `rev()` adapter, that reverses any iterator that supports this operation. Most //! collections provide reversible iterators as the way to iterate over them in //! reverse order. //! @@ -263,27 +263,27 @@ //! //! 
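A sketch tying together `into_iter()`/`extend()`, `collect()` and the `rev()` adapter discussed above:

```rust
fn main() {
    let names = vec!["alpha", "beta", "gamma"];

    // extend() moves the contents of one collection into another without
    // cloning; it calls into_iter() through the IntoIterator bound.
    let mut all: Vec<&str> = Vec::new();
    all.extend(names);
    assert_eq!(all.len(), 3);

    // collect() is the other direction: pipe any iterator into a collection.
    let lengths: Vec<usize> = all.iter().map(|s| s.len()).collect();
    assert_eq!(lengths, [5, 4, 5]);

    // rev() reverses any reversible iterator, which is how collections are
    // iterated in reverse order.
    let backwards: Vec<&str> = all.iter().rev().cloned().collect();
    assert_eq!(backwards, ["gamma", "beta", "alpha"]);
}
```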
Several other collection methods also return iterators to yield a sequence //! of results but avoid allocating an entire collection to store the result in. -//! This provides maximum flexibility as `collect` or `extend` can be called to +//! This provides maximum flexibility as `collect()` or `extend()` can be called to //! "pipe" the sequence into any collection if desired. Otherwise, the sequence //! can be looped over with a `for` loop. The iterator can also be discarded //! after partial use, preventing the computation of the unused items. //! //! ## Entries //! -//! The `entry` API is intended to provide an efficient mechanism for +//! The `entry()` API is intended to provide an efficient mechanism for //! manipulating the contents of a map conditionally on the presence of a key or //! not. The primary motivating use case for this is to provide efficient //! accumulator maps. For instance, if one wishes to maintain a count of the //! number of times each key has been seen, they will have to perform some //! conditional logic on whether this is the first time the key has been seen or -//! not. Normally, this would require a `find` followed by an `insert`, +//! not. Normally, this would require a `find()` followed by an `insert()`, //! effectively duplicating the search effort on each insertion. //! //! When a user calls `map.entry(&key)`, the map will search for the key and //! then yield a variant of the `Entry` enum. //! //! If a `Vacant(entry)` is yielded, then the key *was not* found. In this case -//! the only valid operation is to `insert` a value into the entry. When this is +//! the only valid operation is to `insert()` a value into the entry. When this is //! done, the vacant entry is consumed and converted into a mutable reference to //! the value that was inserted. This allows for further manipulation of the //! value beyond the lifetime of the search itself. This is useful if complex @@ -291,14 +291,14 @@ //! just inserted. //! //! If an `Occupied(entry)` is yielded, then the key *was* found. In this case, -//! the user has several options: they can `get`, `insert`, or `remove` the +//! the user has several options: they can `get()`, `insert()` or `remove()` the //! value of the occupied entry. Additionally, they can convert the occupied //! entry into a mutable reference to its value, providing symmetry to the -//! vacant `insert` case. +//! vacant `insert()` case. //! //! ### Examples //! -//! Here are the two primary ways in which `entry` is used. First, a simple +//! Here are the two primary ways in which `entry()` is used. First, a simple //! example where the logic performed on the values is trivial. //! //! #### Counting the number of times each character in a string occurs @@ -322,7 +322,7 @@ //! ``` //! //! When the logic to be performed on the value is more complex, we may simply -//! use the `entry` API to ensure that the value is initialized, and perform the +//! use the `entry()` API to ensure that the value is initialized and perform the //! logic afterwards. //! //! #### Tracking the inebriation of customers at a bar @@ -406,6 +406,16 @@ //! // ...but the key hasn't changed. b is still "baz", not "xyz". //! assert_eq!(map.keys().next().unwrap().b, "baz"); //! ``` +//! +//! [`Vec`]: ../../std/vec/struct.Vec.html +//! [`HashMap`]: ../../std/collections/struct.HashMap.html +//! [`VecDeque`]: ../../std/collections/struct.VecDeque.html +//! [`LinkedList`]: ../../std/collections/struct.LinkedList.html +//! 
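A compact version of the accumulator pattern described above, using `entry().or_insert()`, the convenience method over the `Vacant`/`Occupied` match:

```rust
use std::collections::HashMap;

fn main() {
    let text = "to be or not to be";

    // One hash lookup per word, whether or not the word was seen before.
    let mut counts: HashMap<&str, u32> = HashMap::new();
    for word in text.split_whitespace() {
        *counts.entry(word).or_insert(0) += 1;
    }

    assert_eq!(counts["to"], 2);
    assert_eq!(counts["be"], 2);
    assert_eq!(counts["or"], 1);
    assert_eq!(counts["not"], 1);
}
```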
[`BTreeMap`]: ../../std/collections/struct.BTreeMap.html +//! [`HashSet`]: ../../std/collections/struct.HashSet.html +//! [`BTreeSet`]: ../../std/collections/struct.BTreeSet.html +//! [`BinaryHeap`]: ../../std/collections/struct.BinaryHeap.html +//! [`IntoIterator`]: ../../std/iter/trait.IntoIterator.html #![stable(feature = "rust1", since = "1.0.0")] @@ -425,6 +435,9 @@ pub use self::hash_map::HashMap; #[stable(feature = "rust1", since = "1.0.0")] pub use self::hash_set::HashSet; +#[stable(feature = "rust1", since = "1.0.0")] +pub use core_collections::range; + mod hash; #[stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libstd/env.rs b/src/libstd/env.rs index 63bf051c9bcd0..e29dbe35c5a59 100644 --- a/src/libstd/env.rs +++ b/src/libstd/env.rs @@ -21,6 +21,7 @@ use ffi::{OsStr, OsString}; use fmt; use io; use path::{Path, PathBuf}; +use sys; use sys::os as os_imp; /// Returns the current working directory as a `PathBuf`. @@ -83,7 +84,7 @@ pub struct VarsOs { inner: os_imp::Env } /// environment variables of the current process. /// /// The returned iterator contains a snapshot of the process's environment -/// variables at the time of this invocation, modifications to environment +/// variables at the time of this invocation. Modifications to environment /// variables afterwards will not be reflected in the returned iterator. /// /// # Panics @@ -112,7 +113,7 @@ pub fn vars() -> Vars { /// environment variables of the current process. /// /// The returned iterator contains a snapshot of the process's environment -/// variables at the time of this invocation, modifications to environment +/// variables at the time of this invocation. Modifications to environment /// variables afterwards will not be reflected in the returned iterator. /// /// # Examples @@ -557,7 +558,7 @@ pub struct Args { inner: ArgsOs } /// /// This structure is created through the `std::env::args_os` method. #[stable(feature = "env", since = "1.0.0")] -pub struct ArgsOs { inner: os_imp::Args } +pub struct ArgsOs { inner: sys::args::Args } /// Returns the arguments which this program was started with (normally passed /// via the command line). @@ -606,7 +607,7 @@ pub fn args() -> Args { /// ``` #[stable(feature = "env", since = "1.0.0")] pub fn args_os() -> ArgsOs { - ArgsOs { inner: os_imp::args() } + ArgsOs { inner: sys::args::args() } } #[stable(feature = "env", since = "1.0.0")] @@ -649,6 +650,8 @@ impl DoubleEndedIterator for ArgsOs { /// Constants associated with the current target #[stable(feature = "env", since = "1.0.0")] pub mod consts { + use sys::env::os; + /// A string describing the architecture of the CPU that is currently /// in use. /// @@ -673,7 +676,7 @@ pub mod consts { /// - unix /// - windows #[stable(feature = "env", since = "1.0.0")] - pub const FAMILY: &'static str = super::os::FAMILY; + pub const FAMILY: &'static str = os::FAMILY; /// A string describing the specific operating system in use. /// Example value is `linux`. @@ -692,7 +695,7 @@ pub mod consts { /// - android /// - windows #[stable(feature = "env", since = "1.0.0")] - pub const OS: &'static str = super::os::OS; + pub const OS: &'static str = os::OS; /// Specifies the filename prefix used for shared libraries on this /// platform. Example value is `lib`. 
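A sketch of the snapshot semantics documented for `vars()` above; this is single-threaded by construction, and note that newer editions may require an `unsafe` block around `set_var`:

```rust
use std::env;

fn main() {
    env::set_var("SNAPSHOT_DEMO", "one");

    // vars() captures the environment at the moment it is called...
    let snapshot = env::vars();

    // ...so later modifications are not visible through that iterator.
    env::set_var("SNAPSHOT_DEMO", "two");
    let captured: Vec<(String, String)> = snapshot.collect();
    assert!(captured.iter().any(|(k, v)| k == "SNAPSHOT_DEMO" && v == "one"));

    // A fresh lookup does see the new value.
    assert_eq!(env::var("SNAPSHOT_DEMO"), Ok("two".to_string()));
}
```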
@@ -702,7 +705,7 @@ pub mod consts { /// - lib /// - `""` (an empty string) #[stable(feature = "env", since = "1.0.0")] - pub const DLL_PREFIX: &'static str = super::os::DLL_PREFIX; + pub const DLL_PREFIX: &'static str = os::DLL_PREFIX; /// Specifies the filename suffix used for shared libraries on this /// platform. Example value is `.so`. @@ -713,7 +716,7 @@ pub mod consts { /// - .dylib /// - .dll #[stable(feature = "env", since = "1.0.0")] - pub const DLL_SUFFIX: &'static str = super::os::DLL_SUFFIX; + pub const DLL_SUFFIX: &'static str = os::DLL_SUFFIX; /// Specifies the file extension used for shared libraries on this /// platform that goes after the dot. Example value is `so`. @@ -724,7 +727,7 @@ pub mod consts { /// - dylib /// - dll #[stable(feature = "env", since = "1.0.0")] - pub const DLL_EXTENSION: &'static str = super::os::DLL_EXTENSION; + pub const DLL_EXTENSION: &'static str = os::DLL_EXTENSION; /// Specifies the filename suffix used for executable binaries on this /// platform. Example value is `.exe`. @@ -736,7 +739,7 @@ pub mod consts { /// - .pexe /// - `""` (an empty string) #[stable(feature = "env", since = "1.0.0")] - pub const EXE_SUFFIX: &'static str = super::os::EXE_SUFFIX; + pub const EXE_SUFFIX: &'static str = os::EXE_SUFFIX; /// Specifies the file extension, if any, used for executable binaries /// on this platform. Example value is `exe`. @@ -746,161 +749,7 @@ pub mod consts { /// - exe /// - `""` (an empty string) #[stable(feature = "env", since = "1.0.0")] - pub const EXE_EXTENSION: &'static str = super::os::EXE_EXTENSION; - -} - -#[cfg(target_os = "linux")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "linux"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".so"; - pub const DLL_EXTENSION: &'static str = "so"; - pub const EXE_SUFFIX: &'static str = ""; - pub const EXE_EXTENSION: &'static str = ""; -} - -#[cfg(target_os = "macos")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "macos"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".dylib"; - pub const DLL_EXTENSION: &'static str = "dylib"; - pub const EXE_SUFFIX: &'static str = ""; - pub const EXE_EXTENSION: &'static str = ""; -} - -#[cfg(target_os = "ios")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "ios"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".dylib"; - pub const DLL_EXTENSION: &'static str = "dylib"; - pub const EXE_SUFFIX: &'static str = ""; - pub const EXE_EXTENSION: &'static str = ""; -} - -#[cfg(target_os = "freebsd")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "freebsd"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".so"; - pub const DLL_EXTENSION: &'static str = "so"; - pub const EXE_SUFFIX: &'static str = ""; - pub const EXE_EXTENSION: &'static str = ""; -} - -#[cfg(target_os = "dragonfly")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "dragonfly"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".so"; - pub const DLL_EXTENSION: &'static str = "so"; - pub const EXE_SUFFIX: &'static str = ""; - pub const EXE_EXTENSION: &'static str = ""; -} - -#[cfg(target_os = "bitrig")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "bitrig"; - pub const DLL_PREFIX: &'static str = 
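A small usage sketch of the `env::consts` constants being re-routed above; `mylib` and `mytool` are made-up names for illustration:

```rust
use std::env::consts;

fn main() {
    // These constants describe the compile-time target, e.g. "linux" or
    // "windows" for OS and "unix" or "windows" for FAMILY.
    println!("family = {}", consts::FAMILY);
    println!("os     = {}", consts::OS);
    println!("arch   = {}", consts::ARCH);

    // Handy for building platform-specific file names.
    let lib_name = format!("{}mylib{}", consts::DLL_PREFIX, consts::DLL_SUFFIX);
    let exe_name = format!("mytool{}", consts::EXE_SUFFIX);
    println!("{} / {}", lib_name, exe_name); // e.g. "libmylib.so / mytool" on Linux
}
```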
"lib"; - pub const DLL_SUFFIX: &'static str = ".so"; - pub const DLL_EXTENSION: &'static str = "so"; - pub const EXE_SUFFIX: &'static str = ""; - pub const EXE_EXTENSION: &'static str = ""; -} - -#[cfg(target_os = "netbsd")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "netbsd"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".so"; - pub const DLL_EXTENSION: &'static str = "so"; - pub const EXE_SUFFIX: &'static str = ""; - pub const EXE_EXTENSION: &'static str = ""; -} - -#[cfg(target_os = "openbsd")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "openbsd"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".so"; - pub const DLL_EXTENSION: &'static str = "so"; - pub const EXE_SUFFIX: &'static str = ""; - pub const EXE_EXTENSION: &'static str = ""; -} - -#[cfg(target_os = "android")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "android"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".so"; - pub const DLL_EXTENSION: &'static str = "so"; - pub const EXE_SUFFIX: &'static str = ""; - pub const EXE_EXTENSION: &'static str = ""; -} - -#[cfg(target_os = "solaris")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "solaris"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".so"; - pub const DLL_EXTENSION: &'static str = "so"; - pub const EXE_SUFFIX: &'static str = ""; - pub const EXE_EXTENSION: &'static str = ""; -} - -#[cfg(target_os = "windows")] -mod os { - pub const FAMILY: &'static str = "windows"; - pub const OS: &'static str = "windows"; - pub const DLL_PREFIX: &'static str = ""; - pub const DLL_SUFFIX: &'static str = ".dll"; - pub const DLL_EXTENSION: &'static str = "dll"; - pub const EXE_SUFFIX: &'static str = ".exe"; - pub const EXE_EXTENSION: &'static str = "exe"; -} - -#[cfg(all(target_os = "nacl", not(target_arch = "le32")))] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "nacl"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".so"; - pub const DLL_EXTENSION: &'static str = "so"; - pub const EXE_SUFFIX: &'static str = ".nexe"; - pub const EXE_EXTENSION: &'static str = "nexe"; -} -#[cfg(all(target_os = "nacl", target_arch = "le32"))] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "pnacl"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".pso"; - pub const DLL_EXTENSION: &'static str = "pso"; - pub const EXE_SUFFIX: &'static str = ".pexe"; - pub const EXE_EXTENSION: &'static str = "pexe"; -} - -#[cfg(target_os = "emscripten")] -mod os { - pub const FAMILY: &'static str = "unix"; - pub const OS: &'static str = "emscripten"; - pub const DLL_PREFIX: &'static str = "lib"; - pub const DLL_SUFFIX: &'static str = ".so"; - pub const DLL_EXTENSION: &'static str = "so"; - pub const EXE_SUFFIX: &'static str = ".js"; - pub const EXE_EXTENSION: &'static str = "js"; + pub const EXE_EXTENSION: &'static str = os::EXE_EXTENSION; } #[cfg(target_arch = "x86")] @@ -958,6 +807,11 @@ mod arch { pub const ARCH: &'static str = "asmjs"; } +#[cfg(target_arch = "wasm32")] +mod arch { + pub const ARCH: &'static str = "wasm32"; +} + #[cfg(test)] mod tests { use super::*; @@ -1006,6 +860,7 @@ mod tests { } #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn 
test_var_big() { let mut s = "".to_string(); let mut i = 0; @@ -1019,6 +874,7 @@ mod tests { } #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn test_self_exe_path() { let path = current_exe(); assert!(path.is_ok()); @@ -1029,6 +885,7 @@ mod tests { } #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn test_env_set_get_huge() { let n = make_rand_name(); let s = repeat("x").take(10000).collect::(); diff --git a/src/libstd/error.rs b/src/libstd/error.rs index 1629062001003..a1909b0f95789 100644 --- a/src/libstd/error.rs +++ b/src/libstd/error.rs @@ -13,9 +13,9 @@ //! # The `Error` trait //! //! `Error` is a trait representing the basic expectations for error values, -//! i.e. values of type `E` in `Result`. At a minimum, errors must provide +//! i.e. values of type `E` in [`Result`]. At a minimum, errors must provide //! a description, but they may optionally provide additional detail (via -//! `Display`) and cause chain information: +//! [`Display`]) and cause chain information: //! //! ``` //! use std::fmt::Display; @@ -27,12 +27,16 @@ //! } //! ``` //! -//! The `cause` method is generally used when errors cross "abstraction +//! The [`cause`] method is generally used when errors cross "abstraction //! boundaries", i.e. when a one module must report an error that is "caused" //! by an error from a lower-level module. This setup makes it possible for the //! high-level module to provide its own errors that do not commit to any //! particular implementation, but also reveal some of its implementation for -//! debugging via `cause` chains. +//! debugging via [`cause`] chains. +//! +//! [`Result`]: ../result/enum.Result.html +//! [`Display`]: ../fmt/trait.Display.html +//! [`cause`]: trait.Error.html#method.cause #![stable(feature = "rust1", since = "1.0.0")] @@ -51,7 +55,6 @@ use any::TypeId; use cell; use char; use fmt::{self, Debug, Display}; -use marker::Reflect; use mem::transmute; use num; use str; @@ -59,12 +62,14 @@ use string; /// Base functionality for all errors in Rust. #[stable(feature = "rust1", since = "1.0.0")] -pub trait Error: Debug + Display + Reflect { +pub trait Error: Debug + Display { /// A short description of the error. /// - /// The description should not contain newlines or sentence-ending - /// punctuation, to facilitate embedding in larger user-facing - /// strings. + /// The description should only be used for a simple message. + /// It should not contain newlines or sentence-ending punctuation, + /// to facilitate embedding in larger user-facing strings. + /// For showing formatted error messages with more information see + /// [Display](https://doc.rust-lang.org/std/fmt/trait.Display.html). 
/// /// # Examples /// @@ -288,15 +293,15 @@ impl Error for fmt::Error { } } -#[unstable(feature = "try_borrow", issue = "35070")] -impl<'a, T: ?Sized + Reflect> Error for cell::BorrowError<'a, T> { +#[stable(feature = "try_borrow", since = "1.13.0")] +impl Error for cell::BorrowError { fn description(&self) -> &str { "already mutably borrowed" } } -#[unstable(feature = "try_borrow", issue = "35070")] -impl<'a, T: ?Sized + Reflect> Error for cell::BorrowMutError<'a, T> { +#[stable(feature = "try_borrow", since = "1.13.0")] +impl Error for cell::BorrowMutError { fn description(&self) -> &str { "already borrowed" } diff --git a/src/libstd/ffi/c_str.rs b/src/libstd/ffi/c_str.rs index 1c449712e1f7a..3ad5b5627d319 100644 --- a/src/libstd/ffi/c_str.rs +++ b/src/libstd/ffi/c_str.rs @@ -146,19 +146,19 @@ pub struct CStr { /// An error returned from `CString::new` to indicate that a nul byte was found /// in the vector provided. -#[derive(Clone, PartialEq, Debug)] +#[derive(Clone, PartialEq, Eq, Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub struct NulError(usize, Vec); /// An error returned from `CStr::from_bytes_with_nul` to indicate that a nul /// byte was found too early in the slice provided or one wasn't found at all. -#[derive(Clone, PartialEq, Debug)] +#[derive(Clone, PartialEq, Eq, Debug)] #[stable(feature = "cstr_from_bytes", since = "1.10.0")] pub struct FromBytesWithNulError { _a: () } /// An error returned from `CString::into_string` to indicate that a UTF-8 error /// was encountered during the conversion. -#[derive(Clone, PartialEq, Debug)] +#[derive(Clone, PartialEq, Eq, Debug)] #[stable(feature = "cstring_into", since = "1.7.0")] pub struct IntoStringError { inner: CString, @@ -228,9 +228,14 @@ impl CString { /// Retakes ownership of a `CString` that was transferred to C. /// + /// Additionally, the length of the string will be recalculated from the pointer. + /// + /// # Safety + /// /// This should only ever be called with a pointer that was earlier - /// obtained by calling `into_raw` on a `CString`. Additionally, the length - /// of the string will be recalculated from the pointer. + /// obtained by calling `into_raw` on a `CString`. Other usage (e.g. trying to take + /// ownership of a string that was allocated by foreign code) is likely to lead + /// to undefined behavior or allocator corruption. #[stable(feature = "cstr_memory", since = "1.4.0")] pub unsafe fn from_raw(ptr: *mut c_char) -> CString { let len = libc::strlen(ptr) + 1; // Including the NUL byte @@ -309,9 +314,11 @@ impl CString { } // Turns this `CString` into an empty string to prevent -// memory unsafe code from working by accident. +// memory unsafe code from working by accident. Inline +// to prevent LLVM from optimizing it away in debug builds. 
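As a small illustration of the safety contract spelled out above for `CString::from_raw`, the round trip below only reclaims a pointer that was itself produced by `into_raw`. It is a sketch of the documented usage, not a general guide to FFI ownership.

```rust
use std::ffi::CString;
use std::os::raw::c_char;

fn main() {
    // Hand the allocation off as a raw pointer, as if passing it to C.
    let ptr: *mut c_char = CString::new("hello").unwrap().into_raw();

    // Safety: `ptr` came from `into_raw`, which is the only provenance the
    // documentation permits; reclaiming any other pointer risks undefined
    // behavior or allocator corruption.
    let s = unsafe { CString::from_raw(ptr) };
    assert_eq!(s.to_str(), Ok("hello"));
}
```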
#[stable(feature = "cstring_drop", since = "1.13.0")] impl Drop for CString { + #[inline] fn drop(&mut self) { unsafe { *self.inner.get_unchecked_mut(0) = 0; } } @@ -719,7 +726,8 @@ mod tests { use super::*; use os::raw::c_char; use borrow::Cow::{Borrowed, Owned}; - use hash::{SipHasher, Hash, Hasher}; + use hash::{Hash, Hasher}; + use collections::hash_map::DefaultHasher; #[test] fn c_to_rust() { @@ -801,10 +809,10 @@ mod tests { let ptr = data.as_ptr() as *const c_char; let cstr: &'static CStr = unsafe { CStr::from_ptr(ptr) }; - let mut s = SipHasher::new_with_keys(0, 0); + let mut s = DefaultHasher::new(); cstr.hash(&mut s); let cstr_hash = s.finish(); - let mut s = SipHasher::new_with_keys(0, 0); + let mut s = DefaultHasher::new(); CString::new(&data[..data.len() - 1]).unwrap().hash(&mut s); let cstring_hash = s.finish(); diff --git a/src/libstd/ffi/os_str.rs b/src/libstd/ffi/os_str.rs index d93d3c7362261..84b50f04463fe 100644 --- a/src/libstd/ffi/os_str.rs +++ b/src/libstd/ffi/os_str.rs @@ -53,17 +53,6 @@ impl OsString { OsString { inner: Buf::from_string(String::new()) } } - #[cfg(unix)] - fn _from_bytes(vec: Vec) -> Option { - use os::unix::ffi::OsStringExt; - Some(OsString::from_vec(vec)) - } - - #[cfg(windows)] - fn _from_bytes(vec: Vec) -> Option { - String::from_utf8(vec).ok().map(OsString::from) - } - /// Converts to an `OsStr` slice. #[stable(feature = "rust1", since = "1.0.0")] pub fn as_os_str(&self) -> &OsStr { diff --git a/src/libstd/fs.rs b/src/libstd/fs.rs index 698ec4f1b7389..df5741d00a2c1 100644 --- a/src/libstd/fs.rs +++ b/src/libstd/fs.rs @@ -83,6 +83,7 @@ pub struct Metadata(fs_imp::FileAttr); /// /// [`io::Result`]: ../io/type.Result.html #[stable(feature = "rust1", since = "1.0.0")] +#[derive(Debug)] pub struct ReadDir(fs_imp::ReadDir); /// Entries returned by the [`ReadDir`] iterator. 
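The test changes above swap the older `SipHasher::new_with_keys(0, 0)` pattern for `DefaultHasher::new()`. A minimal stand-alone sketch of the replacement pattern, using a plain `&str` in place of the tests' `CStr` values:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

fn main() {
    // DefaultHasher::new() takes over from SipHasher::new_with_keys(0, 0)
    // in the updated tests; any value implementing Hash can be fed to it.
    let mut hasher = DefaultHasher::new();
    "hello".hash(&mut hasher);
    println!("hash = {:x}", hasher.finish());
}
```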
@@ -1055,6 +1056,15 @@ impl DirEntry { } } +#[stable(feature = "dir_entry_debug", since = "1.13.0")] +impl fmt::Debug for DirEntry { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_tuple("DirEntry") + .field(&self.path()) + .finish() + } +} + impl AsInner for DirEntry { fn as_inner(&self) -> &fs_imp::DirEntry { &self.0 } } @@ -1677,7 +1687,7 @@ impl AsInnerMut for DirBuilder { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use io::prelude::*; @@ -1894,6 +1904,130 @@ mod tests { check!(fs::remove_file(filename)); } + #[test] + fn file_test_io_eof() { + let tmpdir = tmpdir(); + let filename = tmpdir.join("file_rt_io_file_test_eof.txt"); + let mut buf = [0; 256]; + { + let oo = OpenOptions::new().create_new(true).write(true).read(true).clone(); + let mut rw = check!(oo.open(&filename)); + assert_eq!(check!(rw.read(&mut buf)), 0); + assert_eq!(check!(rw.read(&mut buf)), 0); + } + check!(fs::remove_file(&filename)); + } + + #[test] + #[cfg(unix)] + fn file_test_io_read_write_at() { + use os::unix::fs::FileExt; + + let tmpdir = tmpdir(); + let filename = tmpdir.join("file_rt_io_file_test_read_write_at.txt"); + let mut buf = [0; 256]; + let write1 = "asdf"; + let write2 = "qwer-"; + let write3 = "-zxcv"; + let content = "qwer-asdf-zxcv"; + { + let oo = OpenOptions::new().create_new(true).write(true).read(true).clone(); + let mut rw = check!(oo.open(&filename)); + assert_eq!(check!(rw.write_at(write1.as_bytes(), 5)), write1.len()); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 0); + assert_eq!(check!(rw.read_at(&mut buf, 5)), write1.len()); + assert_eq!(str::from_utf8(&buf[..write1.len()]), Ok(write1)); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 0); + assert_eq!(check!(rw.read_at(&mut buf[..write2.len()], 0)), write2.len()); + assert_eq!(str::from_utf8(&buf[..write2.len()]), Ok("\0\0\0\0\0")); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 0); + assert_eq!(check!(rw.write(write2.as_bytes())), write2.len()); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 5); + assert_eq!(check!(rw.read(&mut buf)), write1.len()); + assert_eq!(str::from_utf8(&buf[..write1.len()]), Ok(write1)); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 9); + assert_eq!(check!(rw.read_at(&mut buf[..write2.len()], 0)), write2.len()); + assert_eq!(str::from_utf8(&buf[..write2.len()]), Ok(write2)); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 9); + assert_eq!(check!(rw.write_at(write3.as_bytes(), 9)), write3.len()); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 9); + } + { + let mut read = check!(File::open(&filename)); + assert_eq!(check!(read.read_at(&mut buf, 0)), content.len()); + assert_eq!(str::from_utf8(&buf[..content.len()]), Ok(content)); + assert_eq!(check!(read.seek(SeekFrom::Current(0))), 0); + assert_eq!(check!(read.seek(SeekFrom::End(-5))), 9); + assert_eq!(check!(read.read_at(&mut buf, 0)), content.len()); + assert_eq!(str::from_utf8(&buf[..content.len()]), Ok(content)); + assert_eq!(check!(read.seek(SeekFrom::Current(0))), 9); + assert_eq!(check!(read.read(&mut buf)), write3.len()); + assert_eq!(str::from_utf8(&buf[..write3.len()]), Ok(write3)); + assert_eq!(check!(read.seek(SeekFrom::Current(0))), 14); + assert_eq!(check!(read.read_at(&mut buf, 0)), content.len()); + assert_eq!(str::from_utf8(&buf[..content.len()]), Ok(content)); + assert_eq!(check!(read.seek(SeekFrom::Current(0))), 14); + assert_eq!(check!(read.read_at(&mut buf, 14)), 0); + assert_eq!(check!(read.read_at(&mut buf, 15)), 0); + 
assert_eq!(check!(read.seek(SeekFrom::Current(0))), 14); + } + check!(fs::remove_file(&filename)); + } + + #[test] + #[cfg(windows)] + fn file_test_io_seek_read_write() { + use os::windows::fs::FileExt; + + let tmpdir = tmpdir(); + let filename = tmpdir.join("file_rt_io_file_test_seek_read_write.txt"); + let mut buf = [0; 256]; + let write1 = "asdf"; + let write2 = "qwer-"; + let write3 = "-zxcv"; + let content = "qwer-asdf-zxcv"; + { + let oo = OpenOptions::new().create_new(true).write(true).read(true).clone(); + let mut rw = check!(oo.open(&filename)); + assert_eq!(check!(rw.seek_write(write1.as_bytes(), 5)), write1.len()); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 9); + assert_eq!(check!(rw.seek_read(&mut buf, 5)), write1.len()); + assert_eq!(str::from_utf8(&buf[..write1.len()]), Ok(write1)); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 9); + assert_eq!(check!(rw.seek(SeekFrom::Start(0))), 0); + assert_eq!(check!(rw.write(write2.as_bytes())), write2.len()); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 5); + assert_eq!(check!(rw.read(&mut buf)), write1.len()); + assert_eq!(str::from_utf8(&buf[..write1.len()]), Ok(write1)); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 9); + assert_eq!(check!(rw.seek_read(&mut buf[..write2.len()], 0)), write2.len()); + assert_eq!(str::from_utf8(&buf[..write2.len()]), Ok(write2)); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 5); + assert_eq!(check!(rw.seek_write(write3.as_bytes(), 9)), write3.len()); + assert_eq!(check!(rw.seek(SeekFrom::Current(0))), 14); + } + { + let mut read = check!(File::open(&filename)); + assert_eq!(check!(read.seek_read(&mut buf, 0)), content.len()); + assert_eq!(str::from_utf8(&buf[..content.len()]), Ok(content)); + assert_eq!(check!(read.seek(SeekFrom::Current(0))), 14); + assert_eq!(check!(read.seek(SeekFrom::End(-5))), 9); + assert_eq!(check!(read.seek_read(&mut buf, 0)), content.len()); + assert_eq!(str::from_utf8(&buf[..content.len()]), Ok(content)); + assert_eq!(check!(read.seek(SeekFrom::Current(0))), 14); + assert_eq!(check!(read.seek(SeekFrom::End(-5))), 9); + assert_eq!(check!(read.read(&mut buf)), write3.len()); + assert_eq!(str::from_utf8(&buf[..write3.len()]), Ok(write3)); + assert_eq!(check!(read.seek(SeekFrom::Current(0))), 14); + assert_eq!(check!(read.seek_read(&mut buf, 0)), content.len()); + assert_eq!(str::from_utf8(&buf[..content.len()]), Ok(content)); + assert_eq!(check!(read.seek(SeekFrom::Current(0))), 14); + assert_eq!(check!(read.seek_read(&mut buf, 14)), 0); + assert_eq!(check!(read.seek_read(&mut buf, 15)), 0); + } + check!(fs::remove_file(&filename)); + } + #[test] fn file_test_stat_is_correct_on_is_file() { let tmpdir = tmpdir(); @@ -2212,8 +2346,8 @@ mod tests { check!(fs::set_permissions(&out, attr.permissions())); } - #[cfg(windows)] #[test] + #[cfg(windows)] fn copy_file_preserves_streams() { let tmp = tmpdir(); check!(check!(File::create(tmp.join("in.txt:bunny"))).write("carrot".as_bytes())); @@ -2641,6 +2775,17 @@ mod tests { } } + #[test] + fn dir_entry_debug() { + let tmpdir = tmpdir(); + File::create(&tmpdir.join("b")).unwrap(); + let mut read_dir = tmpdir.path().read_dir().unwrap(); + let dir_entry = read_dir.next().unwrap().unwrap(); + let actual = format!("{:?}", dir_entry); + let expected = format!("DirEntry({:?})", dir_entry.0.path()); + assert_eq!(actual, expected); + } + #[test] fn read_dir_not_found() { let res = fs::read_dir("/path/that/does/not/exist"); diff --git a/src/libstd/io/buffered.rs b/src/libstd/io/buffered.rs index 
dbb45d54f38d1..44dd4e9874ac4 100644 --- a/src/libstd/io/buffered.rs +++ b/src/libstd/io/buffered.rs @@ -12,7 +12,6 @@ use io::prelude::*; -use marker::Reflect; use cmp; use error; use fmt; @@ -21,11 +20,15 @@ use memchr; /// The `BufReader` struct adds buffering to any reader. /// -/// It can be excessively inefficient to work directly with a `Read` instance. -/// For example, every call to `read` on `TcpStream` results in a system call. -/// A `BufReader` performs large, infrequent reads on the underlying `Read` +/// It can be excessively inefficient to work directly with a [`Read`] instance. +/// For example, every call to [`read`] on [`TcpStream`] results in a system call. +/// A `BufReader` performs large, infrequent reads on the underlying [`Read`] /// and maintains an in-memory buffer of the results. /// +/// [`Read`]: ../../std/io/trait.Read.html +/// [`read`]: ../../std/net/struct.TcpStream.html#method.read +/// [`TcpStream`]: ../../std/net/struct.TcpStream.html +/// /// # Examples /// /// ``` @@ -216,8 +219,8 @@ impl Seek for BufReader { /// /// Seeking always discards the internal buffer, even if the seek position /// would otherwise fall within it. This guarantees that calling - /// `.unwrap()` immediately after a seek yields the underlying reader at - /// the same position. + /// `.into_inner()` immediately after a seek yields the underlying reader + /// at the same position. /// /// See `std::io::Seek` for more details. /// @@ -231,7 +234,7 @@ impl Seek for BufReader { if let SeekFrom::Current(n) = pos { let remainder = (self.cap - self.pos) as i64; // it should be safe to assume that remainder fits within an i64 as the alternative - // means we managed to allocate 8 ebibytes and that's absurd. + // means we managed to allocate 8 exbibytes and that's absurd. // But it's not out of the realm of possibility for some weird underlying reader to // support seeking by i64::min_value() so we need to handle underflow when subtracting // remainder. @@ -255,7 +258,7 @@ impl Seek for BufReader { /// Wraps a writer and buffers its output. /// /// It can be excessively inefficient to work directly with something that -/// implements `Write`. For example, every call to `write` on `TcpStream` +/// implements [`Write`]. For example, every call to [`write`] on [`TcpStream`] /// results in a system call. A `BufWriter` keeps an in-memory buffer of data /// and writes it to an underlying writer in large, infrequent batches. /// @@ -263,7 +266,7 @@ impl Seek for BufReader { /// /// # Examples /// -/// Let's write the numbers one through ten to a `TcpStream`: +/// Let's write the numbers one through ten to a [`TcpStream`]: /// /// ```no_run /// use std::io::prelude::*; @@ -295,6 +298,10 @@ impl Seek for BufReader { /// By wrapping the stream with a `BufWriter`, these ten writes are all grouped /// together by the buffer, and will all be written out in one system call when /// the `stream` is dropped. 
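The `BufWriter` documentation above uses `TcpStream`; as a self-contained variant that runs without a network, the sketch below buffers the same ten writes over an in-memory `Vec<u8>` and relies on the flush that happens when the writer is dropped.

```rust
use std::io::{BufWriter, Write};

fn main() {
    let mut sink = Vec::new();
    {
        // Each write lands in BufWriter's internal buffer; the Vec only
        // sees the bytes once the writer is flushed (here, on drop).
        let mut writer = BufWriter::new(&mut sink);
        for i in 0..10 {
            write!(writer, "{}", i).unwrap();
        }
    }
    assert_eq!(sink, b"0123456789");
}
```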
+/// +/// [`Write`]: ../../std/io/trait.Write.html +/// [`write`]: ../../std/net/struct.TcpStream.html#method.write +/// [`TcpStream`]: ../../std/net/struct.TcpStream.html #[stable(feature = "rust1", since = "1.0.0")] pub struct BufWriter { inner: Option, @@ -578,7 +585,7 @@ impl From> for Error { } #[stable(feature = "rust1", since = "1.0.0")] -impl error::Error for IntoInnerError { +impl error::Error for IntoInnerError { fn description(&self) -> &str { error::Error::description(self.error()) } @@ -1107,6 +1114,7 @@ mod tests { } #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn panic_in_write_doesnt_flush_in_drop() { static WRITES: AtomicUsize = AtomicUsize::new(0); diff --git a/src/libstd/io/cursor.rs b/src/libstd/io/cursor.rs index 1b836b745372f..cb9e7bd327028 100644 --- a/src/libstd/io/cursor.rs +++ b/src/libstd/io/cursor.rs @@ -10,26 +10,33 @@ use io::prelude::*; +use core::convert::TryInto; use cmp; use io::{self, SeekFrom, Error, ErrorKind}; /// A `Cursor` wraps another type and provides it with a -/// [`Seek`](trait.Seek.html) implementation. +/// [`Seek`] implementation. /// -/// Cursors are typically used with in-memory buffers to allow them to -/// implement `Read` and/or `Write`, allowing these buffers to be used +/// `Cursor`s are typically used with in-memory buffers to allow them to +/// implement [`Read`] and/or [`Write`], allowing these buffers to be used /// anywhere you might use a reader or writer that does actual I/O. /// /// The standard library implements some I/O traits on various types which -/// are commonly used as a buffer, like `Cursor>` and `Cursor<&[u8]>`. +/// are commonly used as a buffer, like `Cursor<`[`Vec`]`>` and +/// `Cursor<`[`&[u8]`]`>`. /// /// # Examples /// -/// We may want to write bytes to a [`File`][file] in our production +/// We may want to write bytes to a [`File`] in our production /// code, but use an in-memory buffer in our tests. 
We can do this with /// `Cursor`: /// -/// [file]: ../fs/struct.File.html +/// [`Seek`]: trait.Seek.html +/// [`Read`]: ../../std/io/trait.Read.html +/// [`Write`]: ../../std/io/trait.Write.html +/// [`Vec`]: ../../std/vec/struct.Vec.html +/// [`&[u8]`]: ../../std/primitive.slice.html +/// [`File`]: ../fs/struct.File.html /// /// ```no_run /// use std::io::prelude::*; @@ -242,18 +249,20 @@ impl<'a> Write for Cursor<&'a mut [u8]> { #[stable(feature = "rust1", since = "1.0.0")] impl Write for Cursor> { fn write(&mut self, buf: &[u8]) -> io::Result { + let pos: usize = self.position().try_into().map_err(|_| { + Error::new(ErrorKind::InvalidInput, + "cursor position exceeds maximum possible vector length") + })?; // Make sure the internal buffer is as least as big as where we // currently are - let pos = self.position(); - let amt = pos.saturating_sub(self.inner.len() as u64); - // use `resize` so that the zero filling is as efficient as possible let len = self.inner.len(); - self.inner.resize(len + amt as usize, 0); - + if len < pos { + // use `resize` so that the zero filling is as efficient as possible + self.inner.resize(pos, 0); + } // Figure out what bytes will be used to overwrite what's currently // there (left), and what will be appended on the end (right) { - let pos = pos as usize; let space = self.inner.len() - pos; let (left, right) = buf.split_at(cmp::min(space, buf.len())); self.inner[pos..pos + left.len()].copy_from_slice(left); @@ -261,7 +270,7 @@ impl Write for Cursor> { } // Bump us forward - self.set_position(pos + buf.len() as u64); + self.set_position((pos + buf.len()) as u64); Ok(buf.len()) } fn flush(&mut self) -> io::Result<()> { Ok(()) } @@ -383,7 +392,7 @@ mod tests { #[test] fn test_mem_reader() { - let mut reader = Cursor::new(vec!(0, 1, 2, 3, 4, 5, 6, 7)); + let mut reader = Cursor::new(vec![0, 1, 2, 3, 4, 5, 6, 7]); let mut buf = []; assert_eq!(reader.read(&mut buf).unwrap(), 0); assert_eq!(reader.position(), 0); @@ -405,7 +414,7 @@ mod tests { #[test] fn test_boxed_slice_reader() { - let mut reader = Cursor::new(vec!(0, 1, 2, 3, 4, 5, 6, 7).into_boxed_slice()); + let mut reader = Cursor::new(vec![0, 1, 2, 3, 4, 5, 6, 7].into_boxed_slice()); let mut buf = []; assert_eq!(reader.read(&mut buf).unwrap(), 0); assert_eq!(reader.position(), 0); @@ -427,7 +436,7 @@ mod tests { #[test] fn read_to_end() { - let mut reader = Cursor::new(vec!(0, 1, 2, 3, 4, 5, 6, 7)); + let mut reader = Cursor::new(vec![0, 1, 2, 3, 4, 5, 6, 7]); let mut v = Vec::new(); reader.read_to_end(&mut v).unwrap(); assert_eq!(v, [0, 1, 2, 3, 4, 5, 6, 7]); @@ -503,7 +512,7 @@ mod tests { assert_eq!(r.seek(SeekFrom::Start(10)).unwrap(), 10); assert_eq!(r.read(&mut [0]).unwrap(), 0); - let mut r = Cursor::new(vec!(10)); + let mut r = Cursor::new(vec![10]); assert_eq!(r.seek(SeekFrom::Start(10)).unwrap(), 10); assert_eq!(r.read(&mut [0]).unwrap(), 0); @@ -523,14 +532,14 @@ mod tests { let mut r = Cursor::new(&buf[..]); assert!(r.seek(SeekFrom::End(-2)).is_err()); - let mut r = Cursor::new(vec!(10)); + let mut r = Cursor::new(vec![10]); assert!(r.seek(SeekFrom::End(-2)).is_err()); let mut buf = [0]; let mut r = Cursor::new(&mut buf[..]); assert!(r.seek(SeekFrom::End(-2)).is_err()); - let mut r = Cursor::new(vec!(10).into_boxed_slice()); + let mut r = Cursor::new(vec![10].into_boxed_slice()); assert!(r.seek(SeekFrom::End(-2)).is_err()); } @@ -580,4 +589,12 @@ mod tests { let mut r = Cursor::new(Vec::new()); assert!(r.seek(SeekFrom::End(-2)).is_err()); } + + #[test] + #[cfg(target_pointer_width = "32")] + 
fn vec_seek_and_write_past_usize_max() { + let mut c = Cursor::new(Vec::new()); + c.set_position(::max_value() as u64 + 1); + assert!(c.write_all(&[1, 2, 3]).is_err()); + } } diff --git a/src/libstd/io/impls.rs b/src/libstd/io/impls.rs index cd05e6b5de9d2..6b26c016638a7 100644 --- a/src/libstd/io/impls.rs +++ b/src/libstd/io/impls.rs @@ -147,6 +147,10 @@ impl BufRead for Box { // ============================================================================= // In-memory buffer implementations +/// Read is implemented for `&[u8]` by copying from the slice. +/// +/// Note that reading updates the slice to point to the yet unread part. +/// The slice will be empty when EOF is reached. #[stable(feature = "rust1", since = "1.0.0")] impl<'a> Read for &'a [u8] { #[inline] @@ -180,6 +184,11 @@ impl<'a> BufRead for &'a [u8] { fn consume(&mut self, amt: usize) { *self = &self[amt..]; } } +/// Write is implemented for `&mut [u8]` by copying into the slice, overwriting +/// its data. +/// +/// Note that writing updates the slice to point to the yet unwritten part. +/// The slice will be empty when it has been completely overwritten. #[stable(feature = "rust1", since = "1.0.0")] impl<'a> Write for &'a mut [u8] { #[inline] @@ -204,6 +213,8 @@ impl<'a> Write for &'a mut [u8] { fn flush(&mut self) -> io::Result<()> { Ok(()) } } +/// Write is implemented for `Vec` by appending to the vector. +/// The vector will grow as needed. #[stable(feature = "rust1", since = "1.0.0")] impl Write for Vec { #[inline] diff --git a/src/libstd/io/mod.rs b/src/libstd/io/mod.rs index 06609cfad152e..37c3f70d54dda 100644 --- a/src/libstd/io/mod.rs +++ b/src/libstd/io/mod.rs @@ -12,18 +12,15 @@ //! //! The `std::io` module contains a number of common things you'll need //! when doing input and output. The most core part of this module is -//! the [`Read`][read] and [`Write`][write] traits, which provide the +//! the [`Read`] and [`Write`] traits, which provide the //! most general interface for reading and writing input and output. //! -//! [read]: trait.Read.html -//! [write]: trait.Write.html -//! //! # Read and Write //! -//! Because they are traits, `Read` and `Write` are implemented by a number +//! Because they are traits, [`Read`] and [`Write`] are implemented by a number //! of other types, and you can implement them for your types too. As such, //! you'll see a few different types of I/O throughout the documentation in -//! this module: `File`s, `TcpStream`s, and sometimes even `Vec`s. For +//! this module: [`File`]s, [`TcpStream`]s, and sometimes even [`Vec`]s. For //! example, `Read` adds a `read()` method, which we can use on `File`s: //! //! ``` @@ -43,15 +40,15 @@ //! # } //! ``` //! -//! `Read` and `Write` are so important, implementors of the two traits have a +//! [`Read`] and [`Write`] are so important, implementors of the two traits have a //! nickname: readers and writers. So you'll sometimes see 'a reader' instead -//! of 'a type that implements the `Read` trait'. Much easier! +//! of 'a type that implements the [`Read`] trait'. Much easier! //! //! ## Seek and BufRead //! -//! Beyond that, there are two important traits that are provided: [`Seek`][seek] -//! and [`BufRead`][bufread]. Both of these build on top of a reader to control -//! how the reading happens. `Seek` lets you control where the next byte is +//! Beyond that, there are two important traits that are provided: [`Seek`] +//! and [`BufRead`]. Both of these build on top of a reader to control +//! how the reading happens. 
[`Seek`] lets you control where the next byte is //! coming from: //! //! ``` @@ -75,21 +72,18 @@ //! # } //! ``` //! -//! [seek]: trait.Seek.html -//! [bufread]: trait.BufRead.html -//! -//! `BufRead` uses an internal buffer to provide a number of other ways to read, but +//! [`BufRead`] uses an internal buffer to provide a number of other ways to read, but //! to show it off, we'll need to talk about buffers in general. Keep reading! //! //! ## BufReader and BufWriter //! //! Byte-based interfaces are unwieldy and can be inefficient, as we'd need to be //! making near-constant calls to the operating system. To help with this, -//! `std::io` comes with two structs, `BufReader` and `BufWriter`, which wrap +//! `std::io` comes with two structs, [`BufReader`] and [`BufWriter`], which wrap //! readers and writers. The wrapper uses a buffer, reducing the number of //! calls and providing nicer methods for accessing exactly what you want. //! -//! For example, `BufReader` works with the `BufRead` trait to add extra +//! For example, [`BufReader`] works with the [`BufRead`] trait to add extra //! methods to any reader: //! //! ``` @@ -111,8 +105,8 @@ //! # } //! ``` //! -//! `BufWriter` doesn't add any new ways of writing; it just buffers every call -//! to [`write()`][write()]: +//! [`BufWriter`] doesn't add any new ways of writing; it just buffers every call +//! to [`write()`]: //! //! ``` //! use std::io; @@ -134,8 +128,6 @@ //! # } //! ``` //! -//! [write()]: trait.Write.html#tymethod.write -//! //! ## Standard input and output //! //! A very common source of input is standard input: @@ -165,13 +157,13 @@ //! # } //! ``` //! -//! Of course, using `io::stdout()` directly is less common than something like -//! `println!`. +//! Of course, using [`io::stdout()`] directly is less common than something like +//! [`println!`]. //! //! ## Iterator types //! //! A large number of the structures provided by `std::io` are for various -//! ways of iterating over I/O. For example, `Lines` is used to split over +//! ways of iterating over I/O. For example, [`Lines`] is used to split over //! lines: //! //! ``` @@ -211,10 +203,10 @@ //! //! ## io::Result //! -//! Last, but certainly not least, is [`io::Result`][result]. This type is used +//! Last, but certainly not least, is [`io::Result`]. This type is used //! as the return type of many `std::io` functions that can cause an error, and //! can be returned from your own functions as well. Many of the examples in this -//! module use the [`try!`][try] macro: +//! module use the [`try!`] macro: //! //! ``` //! use std::io; @@ -230,14 +222,11 @@ //! } //! ``` //! -//! The return type of `read_input()`, `io::Result<()>`, is a very common type -//! for functions which don't have a 'real' return value, but do want to return -//! errors if they happen. In this case, the only purpose of this function is +//! The return type of `read_input()`, [`io::Result<()>`][`io::Result`], is a very +//! common type for functions which don't have a 'real' return value, but do want to +//! return errors if they happen. In this case, the only purpose of this function is //! to read the line and print it, so we use `()`. //! -//! [result]: type.Result.html -//! [try]: ../macro.try.html -//! //! ## Platform-specific behavior //! //! Many I/O functions throughout the standard library are documented to indicate @@ -246,6 +235,22 @@ //! any possibly unclear semantics. Note, however, that this is informative, not a binding //! contract. 
The implementation of many of these functions are subject to change over //! time and may call fewer or more syscalls/library functions. +//! +//! [`Read`]: trait.Read.html +//! [`Write`]: trait.Write.html +//! [`Seek`]: trait.Seek.html +//! [`BufRead`]: trait.BufRead.html +//! [`File`]: ../fs/struct.File.html +//! [`TcpStream`]: ../net/struct.TcpStream.html +//! [`Vec`]: ../vec/struct.Vec.html +//! [`BufReader`]: struct.BufReader.html +//! [`BufWriter`]: struct.BufWriter.html +//! [`write()`]: trait.Write.html#tymethod.write +//! [`io::stdout()`]: fn.stdout.html +//! [`println!`]: ../macro.println.html +//! [`Lines`]: struct.Lines.html +//! [`io::Result`]: type.Result.html +//! [`try!`]: ../macro.try.html #![stable(feature = "rust1", since = "1.0.0")] @@ -1149,10 +1154,7 @@ fn read_until(r: &mut R, delim: u8, buf: &mut Vec) /// /// For example, reading line-by-line is inefficient without using a buffer, so /// if you want to read by line, you'll need `BufRead`, which includes a -/// [`read_line()`][readline] method as well as a [`lines()`][lines] iterator. -/// -/// [readline]: #method.read_line -/// [lines]: #method.lines +/// [`read_line()`] method as well as a [`lines()`] iterator. /// /// # Examples /// @@ -1168,14 +1170,17 @@ fn read_until(r: &mut R, delim: u8, buf: &mut Vec) /// } /// ``` /// -/// If you have something that implements `Read`, you can use the [`BufReader` -/// type][bufreader] to turn it into a `BufRead`. +/// If you have something that implements [`Read`], you can use the [`BufReader` +/// type][`BufReader`] to turn it into a `BufRead`. /// -/// For example, [`File`][file] implements `Read`, but not `BufRead`. -/// `BufReader` to the rescue! +/// For example, [`File`] implements [`Read`], but not `BufRead`. +/// [`BufReader`] to the rescue! /// -/// [bufreader]: struct.BufReader.html -/// [file]: ../fs/struct.File.html +/// [`BufReader`]: struct.BufReader.html +/// [`File`]: ../fs/struct.File.html +/// [`read_line()`]: #method.read_line +/// [`lines()`]: #method.lines +/// [`Read`]: trait.Read.html /// /// ``` /// use std::io::{self, BufReader}; @@ -1199,13 +1204,13 @@ pub trait BufRead: Read { /// Fills the internal buffer of this object, returning the buffer contents. /// /// This function is a lower-level call. It needs to be paired with the - /// [`consume`][consume] method to function properly. When calling this + /// [`consume()`] method to function properly. When calling this /// method, none of the contents will be "read" in the sense that later - /// calling `read` may return the same contents. As such, `consume` must be - /// called with the number of bytes that are consumed from this buffer to + /// calling `read` may return the same contents. As such, [`consume()`] must + /// be called with the number of bytes that are consumed from this buffer to /// ensure that the bytes are never returned twice. /// - /// [consume]: #tymethod.consume + /// [`consume()`]: #tymethod.consume /// /// An empty buffer returned indicates that the stream has reached EOF. /// @@ -1246,38 +1251,36 @@ pub trait BufRead: Read { /// so they should no longer be returned in calls to `read`. /// /// This function is a lower-level call. It needs to be paired with the - /// [`fill_buf`][fillbuf] method to function properly. This function does + /// [`fill_buf()`] method to function properly. This function does /// not perform any I/O, it simply informs this object that some amount of - /// its buffer, returned from `fill_buf`, has been consumed and should no - /// longer be returned. 
As such, this function may do odd things if - /// `fill_buf` isn't called before calling it. - /// - /// [fillbuf]: #tymethod.fill_buf + /// its buffer, returned from [`fill_buf()`], has been consumed and should + /// no longer be returned. As such, this function may do odd things if + /// [`fill_buf()`] isn't called before calling it. /// /// The `amt` must be `<=` the number of bytes in the buffer returned by - /// `fill_buf`. + /// [`fill_buf()`]. /// /// # Examples /// - /// Since `consume()` is meant to be used with [`fill_buf()`][fillbuf], + /// Since `consume()` is meant to be used with [`fill_buf()`], /// that method's example includes an example of `consume()`. + /// + /// [`fill_buf()`]: #tymethod.fill_buf #[stable(feature = "rust1", since = "1.0.0")] fn consume(&mut self, amt: usize); - /// Read all bytes into `buf` until the delimiter `byte` is reached. + /// Read all bytes into `buf` until the delimiter `byte` or EOF is reached. /// /// This function will read bytes from the underlying stream until the /// delimiter or EOF is found. Once found, all bytes up to, and including, /// the delimiter (if found) will be appended to `buf`. /// - /// If this reader is currently at EOF then this function will not modify - /// `buf` and will return `Ok(n)` where `n` is the number of bytes which - /// were read. + /// If successful, this function will return the total number of bytes read. /// /// # Errors /// - /// This function will ignore all instances of `ErrorKind::Interrupted` and - /// will otherwise return any errors returned by `fill_buf`. + /// This function will ignore all instances of [`ErrorKind::Interrupted`] and + /// will otherwise return any errors returned by [`fill_buf()`]. /// /// If an I/O error is encountered then all bytes read so far will be /// present in `buf` and its length will have been adjusted appropriately. @@ -1287,6 +1290,9 @@ pub trait BufRead: Read { /// A locked standard input implements `BufRead`. In this example, we'll /// read from standard input until we see an `a` byte. /// + /// [`fill_buf()`]: #tymethod.fill_buf + /// [`ErrorKind::Interrupted`]: enum.ErrorKind.html#variant.Interrupted + /// /// ``` /// use std::io; /// use std::io::prelude::*; @@ -1315,25 +1321,24 @@ pub trait BufRead: Read { /// up to, and including, the delimiter (if found) will be appended to /// `buf`. /// - /// If this reader is currently at EOF then this function will not modify - /// `buf` and will return `Ok(n)` where `n` is the number of bytes which - /// were read. + /// If successful, this function will return the total number of bytes read. /// /// # Errors /// - /// This function has the same error semantics as `read_until` and will also - /// return an error if the read bytes are not valid UTF-8. If an I/O error - /// is encountered then `buf` may contain some bytes already read in the - /// event that all data read so far was valid UTF-8. + /// This function has the same error semantics as [`read_until()`] and will + /// also return an error if the read bytes are not valid UTF-8. If an I/O + /// error is encountered then `buf` may contain some bytes already read in + /// the event that all data read so far was valid UTF-8. /// /// # Examples /// /// A locked standard input implements `BufRead`. In this example, we'll /// read all of the lines from standard input. If we were to do this in - /// an actual project, the [`lines()`][lines] method would be easier, of + /// an actual project, the [`lines()`] method would be easier, of /// course. 
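Since locked standard input is awkward to demonstrate outside a terminal, here is a self-contained sketch of `read_line` using `Cursor<&[u8]>`, which also implements `BufRead`; the input text is made up for illustration.

```rust
use std::io::{BufRead, Cursor};

fn main() {
    // Cursor<&[u8]> implements BufRead, so it can stand in for locked stdin.
    let mut reader = Cursor::new(&b"first line\nsecond line\n"[..]);
    let mut line = String::new();
    reader.read_line(&mut line).unwrap();
    // The delimiter is kept: the returned line still ends with '\n'.
    assert_eq!(line, "first line\n");
}
```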
/// - /// [lines]: #method.lines + /// [`lines()`]: #method.lines + /// [`read_until()`]: #method.read_until /// /// ``` /// use std::io; @@ -1362,17 +1367,21 @@ pub trait BufRead: Read { /// `byte`. /// /// The iterator returned from this function will return instances of - /// `io::Result>`. Each vector returned will *not* have the - /// delimiter byte at the end. + /// [`io::Result`]`<`[`Vec`]`>`. Each vector returned will *not* have + /// the delimiter byte at the end. /// - /// This function will yield errors whenever `read_until` would have also - /// yielded an error. + /// This function will yield errors whenever [`read_until()`] would have + /// also yielded an error. /// /// # Examples /// /// A locked standard input implements `BufRead`. In this example, we'll /// read some input from standard input, splitting on commas. /// + /// [`io::Result`]: type.Result.html + /// [`Vec`]: ../vec/struct.Vec.html + /// [`read_until()`]: #method.read_until + /// /// ``` /// use std::io; /// use std::io::prelude::*; @@ -1391,9 +1400,12 @@ pub trait BufRead: Read { /// Returns an iterator over the lines of this reader. /// /// The iterator returned from this function will yield instances of - /// `io::Result`. Each string returned will *not* have a newline + /// [`io::Result`]`<`[`String`]`>`. Each string returned will *not* have a newline /// byte (the 0xA byte) or CRLF (0xD, 0xA bytes) at the end. /// + /// [`io::Result`]: type.Result.html + /// [`String`]: ../string/struct.String.html + /// /// # Examples /// /// A locked standard input implements `BufRead`: @@ -1416,10 +1428,10 @@ pub trait BufRead: Read { /// Adaptor to chain together two readers. /// -/// This struct is generally created by calling [`chain()`][chain] on a reader. -/// Please see the documentation of `chain()` for more details. +/// This struct is generally created by calling [`chain()`] on a reader. +/// Please see the documentation of [`chain()`] for more details. /// -/// [chain]: trait.Read.html#method.chain +/// [`chain()`]: trait.Read.html#method.chain #[stable(feature = "rust1", since = "1.0.0")] pub struct Chain { first: T, @@ -1432,7 +1444,7 @@ impl Read for Chain { fn read(&mut self, buf: &mut [u8]) -> Result { if !self.done_first { match self.first.read(buf)? { - 0 => { self.done_first = true; } + 0 if buf.len() != 0 => { self.done_first = true; } n => return Ok(n), } } @@ -1524,7 +1536,7 @@ impl Take { /// # Ok(()) /// # } /// ``` - #[unstable(feature = "io_take_into_inner", issue = "0")] + #[unstable(feature = "io_take_into_inner", issue = "23755")] pub fn into_inner(self) -> T { self.inner } @@ -1580,10 +1592,10 @@ fn read_one_byte(reader: &mut Read) -> Option> { /// An iterator over `u8` values of a reader. /// -/// This struct is generally created by calling [`bytes()`][bytes] on a reader. -/// Please see the documentation of `bytes()` for more details. +/// This struct is generally created by calling [`bytes()`] on a reader. +/// Please see the documentation of [`bytes()`] for more details. 
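For the `Bytes` adaptor described above, a small sketch using a byte slice as the reader; any other `Read` implementor would behave the same way.

```rust
use std::io::Read;

fn main() {
    let data = b"abc";
    // Read::bytes() turns a reader into an iterator of io::Result<u8>.
    let bytes: Vec<u8> = (&data[..]).bytes().map(|b| b.unwrap()).collect();
    assert_eq!(bytes, b"abc");
}
```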
/// -/// [bytes]: trait.Read.html#method.bytes +/// [`bytes()`]: trait.Read.html#method.bytes #[stable(feature = "rust1", since = "1.0.0")] pub struct Bytes { inner: R, @@ -1761,6 +1773,7 @@ mod tests { use super::repeat; #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn read_until() { let mut buf = Cursor::new(&b"12"[..]); let mut v = Vec::new(); @@ -1959,7 +1972,19 @@ mod tests { cmp_bufread(chain1, chain2, &testdata[..]); } + #[test] + fn chain_zero_length_read_is_not_eof() { + let a = b"A"; + let b = b"B"; + let mut s = String::new(); + let mut chain = (&a[..]).chain(&b[..]); + chain.read(&mut []).unwrap(); + chain.read_to_string(&mut s).unwrap(); + assert_eq!("AB", s); + } + #[bench] + #[cfg_attr(target_os = "emscripten", ignore)] fn bench_read_to_end(b: &mut test::Bencher) { b.iter(|| { let mut lr = repeat(1).take(10000000); diff --git a/src/libstd/io/stdio.rs b/src/libstd/io/stdio.rs index 9a782e95f6e5f..c24ee8ff303c8 100644 --- a/src/libstd/io/stdio.rs +++ b/src/libstd/io/stdio.rs @@ -125,13 +125,10 @@ impl io::Read for Maybe { } fn handle_ebadf(r: io::Result, default: T) -> io::Result { - #[cfg(windows)] - const ERR: i32 = ::sys::c::ERROR_INVALID_HANDLE as i32; - #[cfg(not(windows))] - const ERR: i32 = ::libc::EBADF as i32; + use sys::stdio::EBADF_ERR; match r { - Err(ref e) if e.raw_os_error() == Some(ERR) => Ok(default), + Err(ref e) if e.raw_os_error() == Some(EBADF_ERR) => Ok(default), r => r } } @@ -593,11 +590,11 @@ impl<'a> Write for StderrLock<'a> { with a more general mechanism", issue = "0")] #[doc(hidden)] -pub fn set_panic(sink: Box) -> Option> { +pub fn set_panic(sink: Option>) -> Option> { use panicking::LOCAL_STDERR; use mem; LOCAL_STDERR.with(move |slot| { - mem::replace(&mut *slot.borrow_mut(), Some(sink)) + mem::replace(&mut *slot.borrow_mut(), sink) }).and_then(|mut s| { let _ = s.flush(); Some(s) @@ -617,10 +614,10 @@ pub fn set_panic(sink: Box) -> Option> { with a more general mechanism", issue = "0")] #[doc(hidden)] -pub fn set_print(sink: Box) -> Option> { +pub fn set_print(sink: Option>) -> Option> { use mem; LOCAL_STDOUT.with(move |slot| { - mem::replace(&mut *slot.borrow_mut(), Some(sink)) + mem::replace(&mut *slot.borrow_mut(), sink) }).and_then(|mut s| { let _ = s.flush(); Some(s) @@ -668,6 +665,7 @@ mod tests { use super::*; #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn panic_doesnt_poison() { thread::spawn(|| { let _a = stdin(); diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index 115a24fc83c22..e470690b7c6b8 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -218,12 +218,14 @@ #![feature(associated_consts)] #![feature(borrow_state)] #![feature(box_syntax)] +#![feature(cfg_target_has_atomic)] #![feature(cfg_target_thread_local)] #![feature(cfg_target_vendor)] #![feature(char_escape_debug)] #![feature(char_internals)] #![feature(collections)] #![feature(collections_bound)] +#![feature(collections_range)] #![feature(compiler_builtins_lib)] #![feature(const_fn)] #![feature(core_float)] @@ -239,6 +241,7 @@ #![feature(heap_api)] #![feature(inclusive_range)] #![feature(int_error_internals)] +#![feature(integer_atomics)] #![feature(into_cow)] #![feature(lang_items)] #![feature(libc)] @@ -255,10 +258,9 @@ #![feature(panic_unwind)] #![feature(placement_in_syntax)] #![feature(prelude_import)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #![feature(rand)] #![feature(raw)] -#![feature(reflect_marker)] #![feature(repr_simd)] #![feature(rustc_attrs)] #![feature(shared)] @@ -273,12 +275,10 @@ 
#![feature(str_utf16)] #![feature(test, rustc_private)] #![feature(thread_local)] -#![feature(try_borrow)] #![feature(try_from)] #![feature(unboxed_closures)] #![feature(unicode)] #![feature(unique)] -#![cfg_attr(stage0, feature(unsafe_no_drop_flag))] #![feature(unwind_attributes)] #![feature(vec_push_all)] #![feature(zero_one)] @@ -304,8 +304,8 @@ use prelude::v1::*; // We want to reexport a few macros from core but libcore has already been // imported by the compiler (via our #[no_std] attribute) In this case we just // add a new crate name so we can attach the reexports to it. -#[macro_reexport(assert, assert_eq, debug_assert, debug_assert_eq, - unreachable, unimplemented, write, writeln, try)] +#[macro_reexport(assert, assert_eq, assert_ne, debug_assert, debug_assert_eq, + debug_assert_ne, unreachable, unimplemented, write, writeln, try)] extern crate core as __core; #[macro_use] diff --git a/src/libstd/macros.rs b/src/libstd/macros.rs index 6f0f6ecab5ba8..0ce6b0a9431d4 100644 --- a/src/libstd/macros.rs +++ b/src/libstd/macros.rs @@ -112,12 +112,14 @@ macro_rules! print { /// # Examples /// /// ``` +/// println!(); /// println!("hello there!"); /// println!("format {} arguments", "some"); /// ``` #[macro_export] #[stable(feature = "rust1", since = "1.0.0")] macro_rules! println { + () => (print!("\n")); ($fmt:expr) => (print!(concat!($fmt, "\n"))); ($fmt:expr, $($arg:tt)*) => (print!(concat!($fmt, "\n"), $($arg)*)); } @@ -193,12 +195,18 @@ macro_rules! assert_approx_eq { pub mod builtin { /// The core macro for formatted string creation & output. /// - /// This macro produces a value of type `fmt::Arguments`. This value can be - /// passed to the functions in `std::fmt` for performing useful functions. - /// All other formatting macros (`format!`, `write!`, `println!`, etc) are + /// This macro produces a value of type [`fmt::Arguments`]. This value can be + /// passed to the functions in [`std::fmt`] for performing useful functions. + /// All other formatting macros ([`format!`], [`write!`], [`println!`], etc) are /// proxied through this one. /// - /// For more information, see the documentation in `std::fmt`. + /// For more information, see the documentation in [`std::fmt`]. + /// + /// [`fmt::Arguments`]: ../std/fmt/struct.Arguments.html + /// [`std::fmt`]: ../std/fmt/index.html + /// [`format!`]: ../std/macro.format.html + /// [`write!`]: ../std/macro.write.html + /// [`println!`]: ../std/macro.println.html /// /// # Examples /// @@ -278,7 +286,7 @@ pub mod builtin { /// // fn concat_idents!(new, fun, name) { } // not usable in this way! /// # } /// ``` - #[unstable(feature = "concat_idents", issue = "29599")] + #[unstable(feature = "concat_idents_macro", issue = "29599")] #[macro_export] macro_rules! concat_idents { ($($e:ident),*) => ({ /* compiler built-in */ }) @@ -373,9 +381,11 @@ pub mod builtin { /// Includes a utf8-encoded file as a string. /// + /// The file is located relative to the current file. (similarly to how + /// modules are found) + /// /// This macro will yield an expression of type `&'static str` which is the - /// contents of the filename specified. The file is located relative to the - /// current file (similarly to how modules are found), + /// contents of the file. /// /// # Examples /// @@ -388,9 +398,11 @@ pub mod builtin { /// Includes a file as a reference to a byte array. /// + /// The file is located relative to the current file. 
(similarly to how + /// modules are found) + /// /// This macro will yield an expression of type `&'static [u8; N]` which is - /// the contents of the filename specified. The file is located relative to - /// the current file (similarly to how modules are found), + /// the contents of the file. /// /// # Examples /// @@ -444,9 +456,17 @@ pub mod builtin { #[macro_export] macro_rules! cfg { ($($cfg:tt)*) => ({ /* compiler built-in */ }) } - /// Parse the current given file as an expression. + /// Parse a file as an expression or an item according to the context. + /// + /// The file is located relative to the current file. (similarly to how + /// modules are found) /// - /// This is generally a bad idea, because it's going to behave unhygienically. + /// Using this macro is often a bad idea, because if the file is + /// parsed as an expression, it is going to be placed in the + /// surrounding code unhygenically. This could result in variables + /// or functions being different from what the file expected if + /// there are variables or functions that have the same name in + /// the current file. /// /// # Examples /// diff --git a/src/libstd/memchr.rs b/src/libstd/memchr.rs index 03f55f7ad6186..7c8c97a6caf9c 100644 --- a/src/libstd/memchr.rs +++ b/src/libstd/memchr.rs @@ -11,8 +11,6 @@ // Original implementation taken from rust-memchr // Copyright 2015 Andrew Gallant, bluss and Nicolas Koch - - /// A safe interface to `memchr`. /// /// Returns the index corresponding to the first occurrence of `needle` in @@ -32,32 +30,9 @@ /// let haystack = b"the quick brown fox"; /// assert_eq!(memchr(b'k', haystack), Some(8)); /// ``` +#[inline] pub fn memchr(needle: u8, haystack: &[u8]) -> Option { - // libc memchr - #[cfg(not(target_os = "windows"))] - fn memchr_specific(needle: u8, haystack: &[u8]) -> Option { - use libc; - - let p = unsafe { - libc::memchr( - haystack.as_ptr() as *const libc::c_void, - needle as libc::c_int, - haystack.len() as libc::size_t) - }; - if p.is_null() { - None - } else { - Some(p as usize - (haystack.as_ptr() as usize)) - } - } - - // use fallback on windows, since it's faster - #[cfg(target_os = "windows")] - fn memchr_specific(needle: u8, haystack: &[u8]) -> Option { - fallback::memchr(needle, haystack) - } - - memchr_specific(needle, haystack) + ::sys::memchr::memchr(needle, haystack) } /// A safe interface to `memrchr`. @@ -75,251 +50,9 @@ pub fn memchr(needle: u8, haystack: &[u8]) -> Option { /// let haystack = b"the quick brown fox"; /// assert_eq!(memrchr(b'o', haystack), Some(17)); /// ``` +#[inline] pub fn memrchr(needle: u8, haystack: &[u8]) -> Option { - - #[cfg(target_os = "linux")] - fn memrchr_specific(needle: u8, haystack: &[u8]) -> Option { - use libc; - - // GNU's memrchr() will - unlike memchr() - error if haystack is empty. 
- if haystack.is_empty() {return None} - let p = unsafe { - libc::memrchr( - haystack.as_ptr() as *const libc::c_void, - needle as libc::c_int, - haystack.len() as libc::size_t) - }; - if p.is_null() { - None - } else { - Some(p as usize - (haystack.as_ptr() as usize)) - } - } - - #[cfg(not(target_os = "linux"))] - fn memrchr_specific(needle: u8, haystack: &[u8]) -> Option { - fallback::memrchr(needle, haystack) - } - - memrchr_specific(needle, haystack) -} - -#[allow(dead_code)] -mod fallback { - use cmp; - use mem; - - const LO_U64: u64 = 0x0101010101010101; - const HI_U64: u64 = 0x8080808080808080; - - // use truncation - const LO_USIZE: usize = LO_U64 as usize; - const HI_USIZE: usize = HI_U64 as usize; - - /// Return `true` if `x` contains any zero byte. - /// - /// From *Matters Computational*, J. Arndt - /// - /// "The idea is to subtract one from each of the bytes and then look for - /// bytes where the borrow propagated all the way to the most significant - /// bit." - #[inline] - fn contains_zero_byte(x: usize) -> bool { - x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0 - } - - #[cfg(target_pointer_width = "32")] - #[inline] - fn repeat_byte(b: u8) -> usize { - let mut rep = (b as usize) << 8 | b as usize; - rep = rep << 16 | rep; - rep - } - - #[cfg(target_pointer_width = "64")] - #[inline] - fn repeat_byte(b: u8) -> usize { - let mut rep = (b as usize) << 8 | b as usize; - rep = rep << 16 | rep; - rep = rep << 32 | rep; - rep - } - - /// Return the first index matching the byte `a` in `text`. - pub fn memchr(x: u8, text: &[u8]) -> Option { - // Scan for a single byte value by reading two `usize` words at a time. - // - // Split `text` in three parts - // - unaligned initial part, before the first word aligned address in text - // - body, scan by 2 words at a time - // - the last remaining part, < 2 word size - let len = text.len(); - let ptr = text.as_ptr(); - let usize_bytes = mem::size_of::(); - - // search up to an aligned boundary - let align = (ptr as usize) & (usize_bytes- 1); - let mut offset; - if align > 0 { - offset = cmp::min(usize_bytes - align, len); - if let Some(index) = text[..offset].iter().position(|elt| *elt == x) { - return Some(index); - } - } else { - offset = 0; - } - - // search the body of the text - let repeated_x = repeat_byte(x); - - if len >= 2 * usize_bytes { - while offset <= len - 2 * usize_bytes { - unsafe { - let u = *(ptr.offset(offset as isize) as *const usize); - let v = *(ptr.offset((offset + usize_bytes) as isize) as *const usize); - - // break if there is a matching byte - let zu = contains_zero_byte(u ^ repeated_x); - let zv = contains_zero_byte(v ^ repeated_x); - if zu || zv { - break; - } - } - offset += usize_bytes * 2; - } - } - - // find the byte after the point the body loop stopped - text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i) - } - - /// Return the last index matching the byte `a` in `text`. - pub fn memrchr(x: u8, text: &[u8]) -> Option { - // Scan for a single byte value by reading two `usize` words at a time. 
- // - // Split `text` in three parts - // - unaligned tail, after the last word aligned address in text - // - body, scan by 2 words at a time - // - the first remaining bytes, < 2 word size - let len = text.len(); - let ptr = text.as_ptr(); - let usize_bytes = mem::size_of::(); - - // search to an aligned boundary - let end_align = (ptr as usize + len) & (usize_bytes - 1); - let mut offset; - if end_align > 0 { - offset = if end_align >= len { 0 } else { len - end_align }; - if let Some(index) = text[offset..].iter().rposition(|elt| *elt == x) { - return Some(offset + index); - } - } else { - offset = len; - } - - // search the body of the text - let repeated_x = repeat_byte(x); - - while offset >= 2 * usize_bytes { - unsafe { - let u = *(ptr.offset(offset as isize - 2 * usize_bytes as isize) as *const usize); - let v = *(ptr.offset(offset as isize - usize_bytes as isize) as *const usize); - - // break if there is a matching byte - let zu = contains_zero_byte(u ^ repeated_x); - let zv = contains_zero_byte(v ^ repeated_x); - if zu || zv { - break; - } - } - offset -= 2 * usize_bytes; - } - - // find the byte before the point the body loop stopped - text[..offset].iter().rposition(|elt| *elt == x) - } - - // test fallback implementations on all platforms - #[test] - fn matches_one() { - assert_eq!(Some(0), memchr(b'a', b"a")); - } - - #[test] - fn matches_begin() { - assert_eq!(Some(0), memchr(b'a', b"aaaa")); - } - - #[test] - fn matches_end() { - assert_eq!(Some(4), memchr(b'z', b"aaaaz")); - } - - #[test] - fn matches_nul() { - assert_eq!(Some(4), memchr(b'\x00', b"aaaa\x00")); - } - - #[test] - fn matches_past_nul() { - assert_eq!(Some(5), memchr(b'z', b"aaaa\x00z")); - } - - #[test] - fn no_match_empty() { - assert_eq!(None, memchr(b'a', b"")); - } - - #[test] - fn no_match() { - assert_eq!(None, memchr(b'a', b"xyz")); - } - - #[test] - fn matches_one_reversed() { - assert_eq!(Some(0), memrchr(b'a', b"a")); - } - - #[test] - fn matches_begin_reversed() { - assert_eq!(Some(3), memrchr(b'a', b"aaaa")); - } - - #[test] - fn matches_end_reversed() { - assert_eq!(Some(0), memrchr(b'z', b"zaaaa")); - } - - #[test] - fn matches_nul_reversed() { - assert_eq!(Some(4), memrchr(b'\x00', b"aaaa\x00")); - } - - #[test] - fn matches_past_nul_reversed() { - assert_eq!(Some(0), memrchr(b'z', b"z\x00aaaa")); - } - - #[test] - fn no_match_empty_reversed() { - assert_eq!(None, memrchr(b'a', b"")); - } - - #[test] - fn no_match_reversed() { - assert_eq!(None, memrchr(b'a', b"xyz")); - } - - #[test] - fn each_alignment_reversed() { - let mut data = [1u8; 64]; - let needle = 2; - let pos = 40; - data[pos] = needle; - for start in 0..16 { - assert_eq!(Some(pos - start), memrchr(needle, &data[start..])); - } - } + ::sys::memchr::memrchr(needle, haystack) } #[cfg(test)] diff --git a/src/libstd/net/addr.rs b/src/libstd/net/addr.rs index d0b59b42c1798..20dc5b3801ba4 100644 --- a/src/libstd/net/addr.rs +++ b/src/libstd/net/addr.rs @@ -93,6 +93,26 @@ impl SocketAddr { SocketAddr::V6(ref mut a) => a.set_port(new_port), } } + + /// Returns true if the IP in this `SocketAddr` is a valid IPv4 address, + /// false if it's a valid IPv6 address. + #[unstable(feature = "sockaddr_checker", issue = "36949")] + pub fn is_ipv4(&self) -> bool { + match *self { + SocketAddr::V4(_) => true, + SocketAddr::V6(_) => false, + } + } + + /// Returns true if the IP in this `SocketAddr` is a valid IPv6 address, + /// false if it's a valid IPv4 address. 
+ #[unstable(feature = "sockaddr_checker", issue = "36949")] + pub fn is_ipv6(&self) -> bool { + match *self { + SocketAddr::V4(_) => false, + SocketAddr::V6(_) => true, + } + } } impl SocketAddrV4 { @@ -519,7 +539,7 @@ impl<'a, T: ToSocketAddrs + ?Sized> ToSocketAddrs for &'a T { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use net::*; use net::test::{tsa, sa6, sa4}; @@ -631,4 +651,19 @@ mod tests { v6.set_scope_id(20); assert_eq!(v6.scope_id(), 20); } + + #[test] + fn is_v4() { + let v4 = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(77, 88, 21, 11), 80)); + assert!(v4.is_ipv4()); + assert!(!v4.is_ipv6()); + } + + #[test] + fn is_v6() { + let v6 = SocketAddr::V6(SocketAddrV6::new( + Ipv6Addr::new(0x2a02, 0x6b8, 0, 1, 0, 0, 0, 1), 80, 10, 0)); + assert!(!v6.is_ipv4()); + assert!(v6.is_ipv6()); + } } diff --git a/src/libstd/net/ip.rs b/src/libstd/net/ip.rs index 05ef559422f33..49080680fac63 100644 --- a/src/libstd/net/ip.rs +++ b/src/libstd/net/ip.rs @@ -130,6 +130,24 @@ impl IpAddr { IpAddr::V6(ref a) => a.is_documentation(), } } + + /// Returns true if this address is a valid IPv4 address, false if it's a valid IPv6 address. + #[unstable(feature = "ipaddr_checker", issue = "36949")] + pub fn is_ipv4(&self) -> bool { + match *self { + IpAddr::V4(_) => true, + IpAddr::V6(_) => false, + } + } + + /// Returns true if this address is a valid IPv6 address, false if it's a valid IPv4 address. + #[unstable(feature = "ipaddr_checker", issue = "36949")] + pub fn is_ipv6(&self) -> bool { + match *self { + IpAddr::V4(_) => false, + IpAddr::V6(_) => true, + } + } } impl Ipv4Addr { @@ -277,8 +295,7 @@ impl Ipv4Addr { } } -#[stable(feature = "rust1", since = "1.0.0")] -#[allow(deprecated)] +#[stable(feature = "ip_addr", since = "1.7.0")] impl fmt::Display for IpAddr { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match *self { @@ -669,7 +686,7 @@ impl From<[u8; 16]> for Ipv6Addr { } // Tests for this module -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use net::*; use net::Ipv6MulticastScope::*; @@ -1023,4 +1040,18 @@ mod tests { assert!("2001:db8:f00::1002".parse::().unwrap() < "2001:db8:f00::2001".parse::().unwrap()); } + + #[test] + fn is_v4() { + let ip = IpAddr::V4(Ipv4Addr::new(100, 64, 3, 3)); + assert!(ip.is_ipv4()); + assert!(!ip.is_ipv6()); + } + + #[test] + fn is_v6() { + let ip = IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0xffff, 0x1234, 0x5678)); + assert!(!ip.is_ipv4()); + assert!(ip.is_ipv6()); + } } diff --git a/src/libstd/net/mod.rs b/src/libstd/net/mod.rs index 2a78afa85f7f0..56286fbe25399 100644 --- a/src/libstd/net/mod.rs +++ b/src/libstd/net/mod.rs @@ -31,14 +31,15 @@ mod addr; mod tcp; mod udp; mod parser; -#[cfg(test)] mod test; +#[cfg(test)] +mod test; /// Possible values which can be passed to the [`shutdown`] method of /// [`TcpStream`]. 
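To show the new address-family checkers added in this patch, here is a small sketch. Note that `is_ipv4`/`is_ipv6` were still unstable (`ipaddr_checker`/`sockaddr_checker`, issue 36949) when this change landed, so at that point it would have required a nightly compiler with the corresponding feature enabled; later releases stabilized the methods.

```rust
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};

fn main() {
    let v4 = IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1));
    let v6 = IpAddr::V6(Ipv6Addr::new(0, 0, 0, 0, 0, 0, 0, 1));
    // Each address reports its family; the two methods are always opposites.
    assert!(v4.is_ipv4() && !v4.is_ipv6());
    assert!(v6.is_ipv6() && !v6.is_ipv4());
}
```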
/// /// [`shutdown`]: struct.TcpStream.html#method.shutdown /// [`TcpStream`]: struct.TcpStream.html -#[derive(Copy, Clone, PartialEq, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub enum Shutdown { /// Indicates that the reading portion of this stream/socket should be shut diff --git a/src/libstd/net/parser.rs b/src/libstd/net/parser.rs index 854d87c4cbead..d86711c10ac79 100644 --- a/src/libstd/net/parser.rs +++ b/src/libstd/net/parser.rs @@ -302,7 +302,7 @@ impl<'a> Parser<'a> { } } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "ip_addr", since = "1.7.0")] impl FromStr for IpAddr { type Err = AddrParseError; fn from_str(s: &str) -> Result { @@ -370,7 +370,7 @@ impl FromStr for SocketAddr { /// An error returned when parsing an IP address or a socket address. #[stable(feature = "rust1", since = "1.0.0")] -#[derive(Debug, Clone, PartialEq)] +#[derive(Debug, Clone, PartialEq, Eq)] pub struct AddrParseError(()); #[stable(feature = "addr_parse_error_error", since = "1.4.0")] diff --git a/src/libstd/net/tcp.rs b/src/libstd/net/tcp.rs index 3c5f07c3e33a6..0e7c5b06713fb 100644 --- a/src/libstd/net/tcp.rs +++ b/src/libstd/net/tcp.rs @@ -428,7 +428,7 @@ impl fmt::Debug for TcpListener { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use io::ErrorKind; use io::prelude::*; diff --git a/src/libstd/net/test.rs b/src/libstd/net/test.rs index 98ac61f646113..3f2eacda7d60e 100644 --- a/src/libstd/net/test.rs +++ b/src/libstd/net/test.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#[allow(dead_code)] // not used on emscripten + use env; use net::{SocketAddr, SocketAddrV4, SocketAddrV6, Ipv4Addr, Ipv6Addr, ToSocketAddrs}; use sync::atomic::{AtomicUsize, Ordering}; diff --git a/src/libstd/net/udp.rs b/src/libstd/net/udp.rs index 781f026c12c77..c03ac496adbb2 100644 --- a/src/libstd/net/udp.rs +++ b/src/libstd/net/udp.rs @@ -353,7 +353,7 @@ impl fmt::Debug for UdpSocket { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use io::ErrorKind; use net::*; diff --git a/src/libstd/os/fuchsia/fs.rs b/src/libstd/os/fuchsia/fs.rs new file mode 100644 index 0000000000000..d22f9a628bd17 --- /dev/null +++ b/src/libstd/os/fuchsia/fs.rs @@ -0,0 +1,103 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![stable(feature = "metadata_ext", since = "1.1.0")] + +use fs::Metadata; +use sys_common::AsInner; + +/// OS-specific extension methods for `fs::Metadata` +#[stable(feature = "metadata_ext", since = "1.1.0")] +pub trait MetadataExt { + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_dev(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_ino(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_mode(&self) -> u32; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_nlink(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_uid(&self) -> u32; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_gid(&self) -> u32; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_rdev(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_size(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_atime(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_atime_nsec(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_mtime(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_mtime_nsec(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_ctime(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_ctime_nsec(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_blksize(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_blocks(&self) -> u64; +} + +#[stable(feature = "metadata_ext", since = "1.1.0")] +impl MetadataExt for Metadata { + fn st_dev(&self) -> u64 { + self.as_inner().as_inner().st_dev as u64 + } + fn st_ino(&self) -> u64 { + self.as_inner().as_inner().st_ino as u64 + } + fn st_mode(&self) -> u32 { + self.as_inner().as_inner().st_mode as u32 + } + fn st_nlink(&self) -> u64 { + self.as_inner().as_inner().st_nlink as u64 + } + fn st_uid(&self) -> u32 { + self.as_inner().as_inner().st_uid as u32 + } + fn st_gid(&self) -> u32 { + self.as_inner().as_inner().st_gid as u32 + } + fn st_rdev(&self) -> u64 { + self.as_inner().as_inner().st_rdev as u64 + } + fn st_size(&self) -> u64 { + self.as_inner().as_inner().st_size as u64 + } + fn st_atime(&self) -> i64 { + self.as_inner().as_inner().st_atime as i64 + } + fn st_atime_nsec(&self) -> i64 { + self.as_inner().as_inner().st_atime_nsec as i64 + } + fn st_mtime(&self) -> i64 { + self.as_inner().as_inner().st_mtime as i64 + } + fn st_mtime_nsec(&self) -> i64 { + self.as_inner().as_inner().st_mtime_nsec as i64 + } + fn st_ctime(&self) -> i64 { + self.as_inner().as_inner().st_ctime as i64 + } + fn st_ctime_nsec(&self) -> i64 { + self.as_inner().as_inner().st_ctime_nsec as i64 + } + fn st_blksize(&self) -> u64 { + self.as_inner().as_inner().st_blksize as u64 + } + fn st_blocks(&self) -> u64 { + self.as_inner().as_inner().st_blocks as u64 + } +} diff --git a/src/libstd/os/fuchsia/mod.rs b/src/libstd/os/fuchsia/mod.rs new file mode 100644 index 0000000000000..1ebcbba9147e5 --- /dev/null +++ b/src/libstd/os/fuchsia/mod.rs @@ -0,0 +1,16 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! 
Fuchsia-specific definitions + +#![stable(feature = "raw_ext", since = "1.1.0")] + +pub mod raw; +pub mod fs; diff --git a/src/libstd/os/fuchsia/raw.rs b/src/libstd/os/fuchsia/raw.rs new file mode 100644 index 0000000000000..5d017351492dd --- /dev/null +++ b/src/libstd/os/fuchsia/raw.rs @@ -0,0 +1,270 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Fuchsia-specific raw type definitions + +#![stable(feature = "raw_ext", since = "1.1.0")] +#![rustc_deprecated(since = "1.8.0", + reason = "these type aliases are no longer supported by \ + the standard library, the `libc` crate on \ + crates.io should be used instead for the correct \ + definitions")] +#![allow(deprecated)] + +use os::raw::c_ulong; + +#[stable(feature = "raw_ext", since = "1.1.0")] pub type dev_t = u64; +#[stable(feature = "raw_ext", since = "1.1.0")] pub type mode_t = u32; + +#[stable(feature = "pthread_t", since = "1.8.0")] +pub type pthread_t = c_ulong; + +#[doc(inline)] +#[stable(feature = "raw_ext", since = "1.1.0")] +pub use self::arch::{off_t, ino_t, nlink_t, blksize_t, blkcnt_t, stat, time_t}; + +#[cfg(any(target_arch = "x86", + target_arch = "le32", + target_arch = "powerpc", + target_arch = "arm"))] +mod arch { + use os::raw::{c_long, c_short, c_uint}; + + #[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type blksize_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type nlink_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i64; + + #[repr(C)] + #[derive(Clone)] + #[stable(feature = "raw_ext", since = "1.1.0")] + pub struct stat { + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_dev: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub __pad1: c_short, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub __st_ino: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mode: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_nlink: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_uid: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_gid: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_rdev: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub __pad2: c_uint, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_size: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_blksize: i32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_blocks: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_atime: i32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_atime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mtime: i32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mtime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ctime: i32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ctime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ino: u64, + } +} + +#[cfg(target_arch = "mips")] +mod arch { + use os::raw::{c_long, 
c_ulong}; + + #[cfg(target_env = "musl")] + #[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = i64; + #[cfg(not(target_env = "musl"))] + #[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type blksize_t = u64; + #[cfg(target_env = "musl")] + #[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = u64; + #[cfg(not(target_env = "musl"))] + #[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type nlink_t = u64; + #[cfg(target_env = "musl")] + #[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = u64; + #[cfg(not(target_env = "musl"))] + #[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i64; + + #[repr(C)] + #[derive(Clone)] + #[stable(feature = "raw_ext", since = "1.1.0")] + pub struct stat { + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_dev: c_ulong, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_pad1: [c_long; 3], + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ino: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mode: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_nlink: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_uid: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_gid: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_rdev: c_ulong, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_pad2: [c_long; 2], + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_size: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_atime: i32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_atime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mtime: i32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mtime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ctime: i32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ctime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_blksize: i32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_blocks: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_pad5: [c_long; 14], + } +} + +#[cfg(target_arch = "mips64")] +mod arch { + pub use libc::{off_t, ino_t, nlink_t, blksize_t, blkcnt_t, stat, time_t}; +} + +#[cfg(target_arch = "aarch64")] +mod arch { + use os::raw::{c_long, c_int}; + + #[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type blksize_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type nlink_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i64; + + #[repr(C)] + #[derive(Clone)] + #[stable(feature = "raw_ext", since = "1.1.0")] + pub struct stat { + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_dev: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ino: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mode: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_nlink: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_uid: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_gid: u32, + 
#[stable(feature = "raw_ext", since = "1.1.0")] + pub st_rdev: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub __pad1: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_size: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_blksize: i32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub __pad2: c_int, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_blocks: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_atime: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_atime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mtime: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mtime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ctime: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ctime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub __unused: [c_int; 2], + } +} + +#[cfg(target_arch = "x86_64")] +mod arch { + use os::raw::{c_long, c_int}; + + #[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type blksize_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type nlink_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = u64; + #[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i64; + + #[repr(C)] + #[derive(Clone)] + #[stable(feature = "raw_ext", since = "1.1.0")] + pub struct stat { + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_dev: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ino: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_nlink: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mode: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_uid: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_gid: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub __pad0: c_int, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_rdev: u64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_size: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_blksize: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_blocks: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_atime: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_atime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mtime: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mtime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ctime: i64, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ctime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub __unused: [c_long; 3], + } +} diff --git a/src/libstd/os/haiku/fs.rs b/src/libstd/os/haiku/fs.rs new file mode 100644 index 0000000000000..54f8ea1b71b3e --- /dev/null +++ b/src/libstd/os/haiku/fs.rs @@ -0,0 +1,138 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![stable(feature = "metadata_ext", since = "1.1.0")] + +use libc; + +use fs::Metadata; +use sys_common::AsInner; + +#[allow(deprecated)] +use os::haiku::raw; + +/// OS-specific extension methods for `fs::Metadata` +#[stable(feature = "metadata_ext", since = "1.1.0")] +pub trait MetadataExt { + /// Gain a reference to the underlying `stat` structure which contains + /// the raw information returned by the OS. + /// + /// The contents of the returned `stat` are **not** consistent across + /// Unix platforms. The `os::unix::fs::MetadataExt` trait contains the + /// cross-Unix abstractions contained within the raw stat. + #[stable(feature = "metadata_ext", since = "1.1.0")] + #[rustc_deprecated(since = "1.8.0", + reason = "deprecated in favor of the accessor \ + methods of this trait")] + #[allow(deprecated)] + fn as_raw_stat(&self) -> &raw::stat; + + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_dev(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_ino(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_mode(&self) -> u32; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_nlink(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_uid(&self) -> u32; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_gid(&self) -> u32; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_rdev(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_size(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_atime(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_atime_nsec(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_mtime(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_mtime_nsec(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_ctime(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_ctime_nsec(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_crtime(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_crtime_nsec(&self) -> i64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_blksize(&self) -> u64; + #[stable(feature = "metadata_ext2", since = "1.8.0")] + fn st_blocks(&self) -> u64; +} + +#[stable(feature = "metadata_ext", since = "1.1.0")] +impl MetadataExt for Metadata { + #[allow(deprecated)] + fn as_raw_stat(&self) -> &raw::stat { + unsafe { + &*(self.as_inner().as_inner() as *const libc::stat + as *const raw::stat) + } + } + fn st_dev(&self) -> u64 { + self.as_inner().as_inner().st_dev as u64 + } + fn st_ino(&self) -> u64 { + self.as_inner().as_inner().st_ino as u64 + } + fn st_mode(&self) -> u32 { + self.as_inner().as_inner().st_mode as u32 + } + fn st_nlink(&self) -> u64 { + self.as_inner().as_inner().st_nlink as u64 + } + fn st_uid(&self) -> u32 { + self.as_inner().as_inner().st_uid as u32 + } + fn st_gid(&self) -> u32 { + self.as_inner().as_inner().st_gid as u32 + } + fn st_rdev(&self) -> u64 { + self.as_inner().as_inner().st_rdev as u64 + } + fn st_size(&self) -> u64 { + self.as_inner().as_inner().st_size as u64 + } + fn st_atime(&self) -> i64 { + self.as_inner().as_inner().st_atime as i64 + } + fn st_atime_nsec(&self) -> i64 { + self.as_inner().as_inner().st_atime_nsec as i64 + } + fn st_mtime(&self) -> i64 { + self.as_inner().as_inner().st_mtime as i64 + } + fn st_mtime_nsec(&self) -> i64 { + 
self.as_inner().as_inner().st_mtime_nsec as i64 + } + fn st_ctime(&self) -> i64 { + self.as_inner().as_inner().st_ctime as i64 + } + fn st_ctime_nsec(&self) -> i64 { + self.as_inner().as_inner().st_ctime_nsec as i64 + } + fn st_crtime(&self) -> i64 { + self.as_inner().as_inner().st_crtime as i64 + } + fn st_crtime_nsec(&self) -> i64 { + self.as_inner().as_inner().st_crtime_nsec as i64 + } + fn st_blksize(&self) -> u64 { + self.as_inner().as_inner().st_blksize as u64 + } + fn st_blocks(&self) -> u64 { + self.as_inner().as_inner().st_blocks as u64 + } +} diff --git a/src/libstd/os/haiku/mod.rs b/src/libstd/os/haiku/mod.rs new file mode 100644 index 0000000000000..dd1675cc9b51f --- /dev/null +++ b/src/libstd/os/haiku/mod.rs @@ -0,0 +1,16 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Haiku-specific definitions + +#![stable(feature = "raw_ext", since = "1.1.0")] + +pub mod raw; +pub mod fs; diff --git a/src/libstd/os/haiku/raw.rs b/src/libstd/os/haiku/raw.rs new file mode 100644 index 0000000000000..95353d999f952 --- /dev/null +++ b/src/libstd/os/haiku/raw.rs @@ -0,0 +1,74 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Haiku-specific raw type definitions + +#![stable(feature = "raw_ext", since = "1.1.0")] + +#![allow(deprecated)] + +use os::raw::{c_long}; +use os::unix::raw::{uid_t, gid_t}; + +// Use the direct definition of usize, instead of uintptr_t like in libc +#[stable(feature = "pthread_t", since = "1.8.0")] pub type pthread_t = usize; + +#[stable(feature = "raw_ext", since = "1.1.0")] pub type blkcnt_t = i64; +#[stable(feature = "raw_ext", since = "1.1.0")] pub type blksize_t = i32; +#[stable(feature = "raw_ext", since = "1.1.0")] pub type dev_t = i32; +#[stable(feature = "raw_ext", since = "1.1.0")] pub type ino_t = i64; +#[stable(feature = "raw_ext", since = "1.1.0")] pub type mode_t = u32; +#[stable(feature = "raw_ext", since = "1.1.0")] pub type nlink_t = i32; +#[stable(feature = "raw_ext", since = "1.1.0")] pub type off_t = i64; +#[stable(feature = "raw_ext", since = "1.1.0")] pub type time_t = i32; + +#[repr(C)] +#[derive(Clone)] +#[stable(feature = "raw_ext", since = "1.1.0")] +pub struct stat { + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_dev: dev_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ino: ino_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mode: mode_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_nlink: nlink_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_uid: uid_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_gid: gid_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_size: off_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_rdev: dev_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_blksize: blksize_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_atime: time_t, + #[stable(feature = "raw_ext", since = 
"1.1.0")] + pub st_atime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mtime: time_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_mtime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ctime: time_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_ctime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_crtime: time_t, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_crtime_nsec: c_long, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_type: u32, + #[stable(feature = "raw_ext", since = "1.1.0")] + pub st_blocks: blkcnt_t, +} diff --git a/src/libstd/os/linux/raw.rs b/src/libstd/os/linux/raw.rs index 1c19e58818d74..e6a95bc831ffb 100644 --- a/src/libstd/os/linux/raw.rs +++ b/src/libstd/os/linux/raw.rs @@ -34,7 +34,8 @@ pub use self::arch::{off_t, ino_t, nlink_t, blksize_t, blkcnt_t, stat, time_t}; target_arch = "le32", target_arch = "powerpc", target_arch = "arm", - target_arch = "asmjs"))] + target_arch = "asmjs", + target_arch = "wasm32"))] mod arch { use os::raw::{c_long, c_short, c_uint}; diff --git a/src/libstd/os/mod.rs b/src/libstd/os/mod.rs index a91d251fc120a..366a167415684 100644 --- a/src/libstd/os/mod.rs +++ b/src/libstd/os/mod.rs @@ -24,6 +24,7 @@ pub use sys::ext as windows; #[cfg(target_os = "bitrig")] pub mod bitrig; #[cfg(target_os = "dragonfly")] pub mod dragonfly; #[cfg(target_os = "freebsd")] pub mod freebsd; +#[cfg(target_os = "haiku")] pub mod haiku; #[cfg(target_os = "ios")] pub mod ios; #[cfg(target_os = "linux")] pub mod linux; #[cfg(target_os = "macos")] pub mod macos; @@ -32,5 +33,6 @@ pub use sys::ext as windows; #[cfg(target_os = "openbsd")] pub mod openbsd; #[cfg(target_os = "solaris")] pub mod solaris; #[cfg(target_os = "emscripten")] pub mod emscripten; +#[cfg(target_os = "fuchsia")] pub mod fuchsia; pub mod raw; diff --git a/src/libstd/os/raw.rs b/src/libstd/os/raw.rs index 6c5c1b90a4a92..2a918d8aeb7b8 100644 --- a/src/libstd/os/raw.rs +++ b/src/libstd/os/raw.rs @@ -18,7 +18,8 @@ target_arch = "arm", target_arch = "powerpc", target_arch = "powerpc64", - target_arch = "s390x"))))] + target_arch = "s390x")), + all(target_os = "fuchsia", target_arch = "aarch64")))] #[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = u8; #[cfg(not(any(target_os = "android", target_os = "emscripten", @@ -26,7 +27,8 @@ target_arch = "arm", target_arch = "powerpc", target_arch = "powerpc64", - target_arch = "s390x")))))] + target_arch = "s390x")), + all(target_os = "fuchsia", target_arch = "aarch64"))))] #[stable(feature = "raw_os", since = "1.1.0")] pub type c_char = i8; #[stable(feature = "raw_os", since = "1.1.0")] pub type c_schar = i8; #[stable(feature = "raw_os", since = "1.1.0")] pub type c_uchar = u8; diff --git a/src/libstd/panic.rs b/src/libstd/panic.rs index 47f594a9b0c1e..a7e8c4fab3735 100644 --- a/src/libstd/panic.rs +++ b/src/libstd/panic.rs @@ -18,7 +18,7 @@ use ops::{Deref, DerefMut}; use panicking; use ptr::{Unique, Shared}; use rc::Rc; -use sync::{Arc, Mutex, RwLock}; +use sync::{Arc, Mutex, RwLock, atomic}; use thread::Result; #[stable(feature = "panic_hooks", since = "1.10.0")] @@ -196,9 +196,9 @@ impl<'a, T: RefUnwindSafe + ?Sized> UnwindSafe for &'a T {} impl UnwindSafe for *const T {} #[stable(feature = "catch_unwind", since = "1.9.0")] impl UnwindSafe for *mut T {} -#[stable(feature = "catch_unwind", since = "1.9.0")] +#[unstable(feature = "unique", issue = "27730")] impl UnwindSafe for Unique {} -#[stable(feature = 
"catch_unwind", since = "1.9.0")] +#[unstable(feature = "shared", issue = "27730")] impl UnwindSafe for Shared {} #[stable(feature = "catch_unwind", since = "1.9.0")] impl UnwindSafe for Mutex {} @@ -231,6 +231,46 @@ impl RefUnwindSafe for Mutex {} #[stable(feature = "unwind_safe_lock_refs", since = "1.12.0")] impl RefUnwindSafe for RwLock {} +#[cfg(target_has_atomic = "ptr")] +#[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")] +impl RefUnwindSafe for atomic::AtomicIsize {} +#[cfg(target_has_atomic = "8")] +#[unstable(feature = "integer_atomics", issue = "32976")] +impl RefUnwindSafe for atomic::AtomicI8 {} +#[cfg(target_has_atomic = "16")] +#[unstable(feature = "integer_atomics", issue = "32976")] +impl RefUnwindSafe for atomic::AtomicI16 {} +#[cfg(target_has_atomic = "32")] +#[unstable(feature = "integer_atomics", issue = "32976")] +impl RefUnwindSafe for atomic::AtomicI32 {} +#[cfg(target_has_atomic = "64")] +#[unstable(feature = "integer_atomics", issue = "32976")] +impl RefUnwindSafe for atomic::AtomicI64 {} + +#[cfg(target_has_atomic = "ptr")] +#[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")] +impl RefUnwindSafe for atomic::AtomicUsize {} +#[cfg(target_has_atomic = "8")] +#[unstable(feature = "integer_atomics", issue = "32976")] +impl RefUnwindSafe for atomic::AtomicU8 {} +#[cfg(target_has_atomic = "16")] +#[unstable(feature = "integer_atomics", issue = "32976")] +impl RefUnwindSafe for atomic::AtomicU16 {} +#[cfg(target_has_atomic = "32")] +#[unstable(feature = "integer_atomics", issue = "32976")] +impl RefUnwindSafe for atomic::AtomicU32 {} +#[cfg(target_has_atomic = "64")] +#[unstable(feature = "integer_atomics", issue = "32976")] +impl RefUnwindSafe for atomic::AtomicU64 {} + +#[cfg(target_has_atomic = "8")] +#[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")] +impl RefUnwindSafe for atomic::AtomicBool {} + +#[cfg(target_has_atomic = "ptr")] +#[stable(feature = "unwind_safe_atomic_refs", since = "1.14.0")] +impl RefUnwindSafe for atomic::AtomicPtr {} + #[stable(feature = "catch_unwind", since = "1.9.0")] impl Deref for AssertUnwindSafe { type Target = T; diff --git a/src/libstd/panicking.rs b/src/libstd/panicking.rs index 0c10dcbdad646..1f5b3437b6155 100644 --- a/src/libstd/panicking.rs +++ b/src/libstd/panicking.rs @@ -84,6 +84,20 @@ static mut HOOK: Hook = Hook::Default; /// # Panics /// /// Panics if called from a panicking thread. +/// +/// # Examples +/// +/// The following will print "Custom panic hook": +/// +/// ```should_panic +/// use std::panic; +/// +/// panic::set_hook(Box::new(|_| { +/// println!("Custom panic hook"); +/// })); +/// +/// panic!("Normal panic"); +/// ``` #[stable(feature = "panic_hooks", since = "1.10.0")] pub fn set_hook(hook: Box) { if thread::panicking() { @@ -109,6 +123,22 @@ pub fn set_hook(hook: Box) { /// # Panics /// /// Panics if called from a panicking thread. 
+/// +/// # Examples +/// +/// The following will print "Normal panic": +/// +/// ```should_panic +/// use std::panic; +/// +/// panic::set_hook(Box::new(|_| { +/// println!("Custom panic hook"); +/// })); +/// +/// let _ = panic::take_hook(); +/// +/// panic!("Normal panic"); +/// ``` #[stable(feature = "panic_hooks", since = "1.10.0")] pub fn take_hook() -> Box { if thread::panicking() { diff --git a/src/libstd/path.rs b/src/libstd/path.rs index bd27bcf48a09d..a55318d388396 100644 --- a/src/libstd/path.rs +++ b/src/libstd/path.rs @@ -113,7 +113,7 @@ use ops::{self, Deref}; use ffi::{OsStr, OsString}; -use self::platform::{is_sep_byte, is_verbatim_sep, MAIN_SEP_STR, parse_prefix}; +use sys::path::{is_sep_byte, is_verbatim_sep, MAIN_SEP_STR, parse_prefix}; //////////////////////////////////////////////////////////////////////////////// // GENERAL NOTES @@ -125,130 +125,6 @@ use self::platform::{is_sep_byte, is_verbatim_sep, MAIN_SEP_STR, parse_prefix}; // OsStr APIs for parsing, but it will take a while for those to become // available. -//////////////////////////////////////////////////////////////////////////////// -// Platform-specific definitions -//////////////////////////////////////////////////////////////////////////////// - -// The following modules give the most basic tools for parsing paths on various -// platforms. The bulk of the code is devoted to parsing prefixes on Windows. - -#[cfg(unix)] -mod platform { - use super::Prefix; - use ffi::OsStr; - - #[inline] - pub fn is_sep_byte(b: u8) -> bool { - b == b'/' - } - - #[inline] - pub fn is_verbatim_sep(b: u8) -> bool { - b == b'/' - } - - pub fn parse_prefix(_: &OsStr) -> Option { - None - } - - pub const MAIN_SEP_STR: &'static str = "/"; - pub const MAIN_SEP: char = '/'; -} - -#[cfg(windows)] -mod platform { - use ascii::*; - - use super::{os_str_as_u8_slice, u8_slice_as_os_str, Prefix}; - use ffi::OsStr; - - #[inline] - pub fn is_sep_byte(b: u8) -> bool { - b == b'/' || b == b'\\' - } - - #[inline] - pub fn is_verbatim_sep(b: u8) -> bool { - b == b'\\' - } - - pub fn parse_prefix<'a>(path: &'a OsStr) -> Option { - use super::Prefix::*; - unsafe { - // The unsafety here stems from converting between &OsStr and &[u8] - // and back. This is safe to do because (1) we only look at ASCII - // contents of the encoding and (2) new &OsStr values are produced - // only from ASCII-bounded slices of existing &OsStr values. 
- let mut path = os_str_as_u8_slice(path); - - if path.starts_with(br"\\") { - // \\ - path = &path[2..]; - if path.starts_with(br"?\") { - // \\?\ - path = &path[2..]; - if path.starts_with(br"UNC\") { - // \\?\UNC\server\share - path = &path[4..]; - let (server, share) = match parse_two_comps(path, is_verbatim_sep) { - Some((server, share)) => - (u8_slice_as_os_str(server), u8_slice_as_os_str(share)), - None => (u8_slice_as_os_str(path), u8_slice_as_os_str(&[])), - }; - return Some(VerbatimUNC(server, share)); - } else { - // \\?\path - let idx = path.iter().position(|&b| b == b'\\'); - if idx == Some(2) && path[1] == b':' { - let c = path[0]; - if c.is_ascii() && (c as char).is_alphabetic() { - // \\?\C:\ path - return Some(VerbatimDisk(c.to_ascii_uppercase())); - } - } - let slice = &path[..idx.unwrap_or(path.len())]; - return Some(Verbatim(u8_slice_as_os_str(slice))); - } - } else if path.starts_with(b".\\") { - // \\.\path - path = &path[2..]; - let pos = path.iter().position(|&b| b == b'\\'); - let slice = &path[..pos.unwrap_or(path.len())]; - return Some(DeviceNS(u8_slice_as_os_str(slice))); - } - match parse_two_comps(path, is_sep_byte) { - Some((server, share)) if !server.is_empty() && !share.is_empty() => { - // \\server\share - return Some(UNC(u8_slice_as_os_str(server), u8_slice_as_os_str(share))); - } - _ => (), - } - } else if path.get(1) == Some(& b':') { - // C: - let c = path[0]; - if c.is_ascii() && (c as char).is_alphabetic() { - return Some(Disk(c.to_ascii_uppercase())); - } - } - return None; - } - - fn parse_two_comps(mut path: &[u8], f: fn(u8) -> bool) -> Option<(&[u8], &[u8])> { - let first = match path.iter().position(|x| f(*x)) { - None => return None, - Some(x) => &path[..x], - }; - path = &path[(first.len() + 1)..]; - let idx = path.iter().position(|x| f(*x)); - let second = &path[..idx.unwrap_or(path.len())]; - Some((first, second)) - } - } - - pub const MAIN_SEP_STR: &'static str = "\\"; - pub const MAIN_SEP: char = '\\'; -} - //////////////////////////////////////////////////////////////////////////////// // Windows Prefixes //////////////////////////////////////////////////////////////////////////////// @@ -373,7 +249,7 @@ pub fn is_separator(c: char) -> bool { /// The primary separator for the current platform #[stable(feature = "rust1", since = "1.0.0")] -pub const MAIN_SEPARATOR: char = platform::MAIN_SEP; +pub const MAIN_SEPARATOR: char = ::sys::path::MAIN_SEP; //////////////////////////////////////////////////////////////////////////////// // Misc helpers @@ -1418,14 +1294,18 @@ impl Into for PathBuf { /// This type supports a number of operations for inspecting a path, including /// breaking the path into its components (separated by `/` or `\`, depending on /// the platform), extracting the file name, determining whether the path is -/// absolute, and so on. More details about the overall approach can be found in -/// the module documentation. +/// absolute, and so on. /// /// This is an *unsized* type, meaning that it must always be used behind a -/// pointer like `&` or [`Box`]. +/// pointer like `&` or [`Box`]. For an owned version of this type, +/// see [`PathBuf`]. /// /// [`str`]: ../primitive.str.html /// [`Box`]: ../boxed/struct.Box.html +/// [`PathBuf`]: struct.PathBuf.html +/// +/// More details about the overall approach can be found in +/// the module documentation. 
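For context (not part of the patch), a minimal sketch of the borrowed/owned split the revised `Path` documentation points at:

```rust
use std::path::{Path, PathBuf};

fn main() {
    // `Path` is the borrowed, unsized view of a path...
    let p: &Path = Path::new("/tmp/foo.txt");
    assert_eq!(p.extension().and_then(|e| e.to_str()), Some("txt"));

    // ...while `PathBuf` is the owned counterpart, much like `String` for `str`.
    let owned: PathBuf = p.to_path_buf();
    assert_eq!(owned.as_path(), p);
}
```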
/// /// # Examples /// @@ -3511,10 +3391,11 @@ mod tests { #[test] pub fn test_compare() { - use hash::{Hash, Hasher, SipHasher}; + use hash::{Hash, Hasher}; + use collections::hash_map::DefaultHasher; fn hash(t: T) -> u64 { - let mut s = SipHasher::new_with_keys(0, 0); + let mut s = DefaultHasher::new(); t.hash(&mut s); s.finish() } diff --git a/src/libstd/primitive_docs.rs b/src/libstd/primitive_docs.rs index fdc84467015f9..54dde6681e188 100644 --- a/src/libstd/primitive_docs.rs +++ b/src/libstd/primitive_docs.rs @@ -229,7 +229,7 @@ mod prim_unit { } /// /// fn main() { /// unsafe { -/// let my_num: *mut i32 = libc::malloc(mem::size_of::() as libc::size_t) as *mut i32; +/// let my_num: *mut i32 = libc::malloc(mem::size_of::()) as *mut i32; /// if my_num.is_null() { /// panic!("failed to allocate memory"); /// } diff --git a/src/libstd/process.rs b/src/libstd/process.rs index f0c4443070074..9d21a76e81b9e 100644 --- a/src/libstd/process.rs +++ b/src/libstd/process.rs @@ -8,7 +8,25 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! Working with processes. +//! A module for working with processes. +//! +//! # Examples +//! +//! Basic usage where we try to execute the `cat` shell command: +//! +//! ```should_panic +//! use std::process::Command; +//! +//! let mut child = Command::new("/bin/cat") +//! .arg("file.txt") +//! .spawn() +//! .expect("failed to execute child"); +//! +//! let ecode = child.wait() +//! .expect("failed to wait on child"); +//! +//! assert!(ecode.success()); +//! ``` #![stable(feature = "process", since = "1.0.0")] @@ -807,7 +825,7 @@ pub fn exit(code: i32) -> ! { ::sys::os::exit(code) } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use io::prelude::*; diff --git a/src/libstd/rand/mod.rs b/src/libstd/rand/mod.rs index 3f14fcd239f1d..f48325218fb49 100644 --- a/src/libstd/rand/mod.rs +++ b/src/libstd/rand/mod.rs @@ -242,9 +242,10 @@ mod tests { } #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn test_os_rng_tasks() { - let mut txs = vec!(); + let mut txs = vec![]; for _ in 0..20 { let (tx, rx) = channel(); txs.push(tx); diff --git a/src/libstd/rt.rs b/src/libstd/rt.rs index e3de1efaa31e3..78d5aa597ba0d 100644 --- a/src/libstd/rt.rs +++ b/src/libstd/rt.rs @@ -51,7 +51,7 @@ fn lang_start(main: *const u8, argc: isize, argv: *const *const u8) -> isize { thread_info::set(main_guard, thread); // Store our args if necessary in a squirreled away location - sys_common::args::init(argc, argv); + sys::args::init(argc, argv); // Let's run some code! 
let res = panic::catch_unwind(mem::transmute::<_, fn()>(main)); diff --git a/src/libstd/rtdeps.rs b/src/libstd/rtdeps.rs index a11200873d500..5dc6ee2bc8c66 100644 --- a/src/libstd/rtdeps.rs +++ b/src/libstd/rtdeps.rs @@ -62,3 +62,7 @@ extern {} #[cfg(target_os = "ios")] #[link(name = "System")] extern {} + +#[cfg(target_os = "haiku")] +#[link(name = "network")] +extern {} diff --git a/src/libstd/sync/barrier.rs b/src/libstd/sync/barrier.rs index ac0f400379e3c..f46eab6848463 100644 --- a/src/libstd/sync/barrier.rs +++ b/src/libstd/sync/barrier.rs @@ -118,6 +118,7 @@ mod tests { use thread; #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn test_barrier() { const N: usize = 10; diff --git a/src/libstd/sync/condvar.rs b/src/libstd/sync/condvar.rs index 3db8b05b954c3..a983ae716a481 100644 --- a/src/libstd/sync/condvar.rs +++ b/src/libstd/sync/condvar.rs @@ -270,6 +270,7 @@ mod tests { } #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn notify_one() { let m = Arc::new(Mutex::new(())); let m2 = m.clone(); @@ -286,6 +287,7 @@ mod tests { } #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn notify_all() { const N: usize = 10; @@ -322,6 +324,7 @@ mod tests { } #[test] + #[cfg_attr(target_os = "emscripten", ignore)] fn wait_timeout_ms() { let m = Arc::new(Mutex::new(())); let m2 = m.clone(); @@ -343,6 +346,7 @@ mod tests { #[test] #[should_panic] + #[cfg_attr(target_os = "emscripten", ignore)] fn two_mutexes() { let m = Arc::new(Mutex::new(())); let m2 = m.clone(); diff --git a/src/libstd/sync/mpsc/mod.rs b/src/libstd/sync/mpsc/mod.rs index 3d9f81413dc73..fce640e7c7a2c 100644 --- a/src/libstd/sync/mpsc/mod.rs +++ b/src/libstd/sync/mpsc/mod.rs @@ -270,7 +270,6 @@ use error; use fmt; use mem; use cell::UnsafeCell; -use marker::Reflect; use time::{Duration, Instant}; #[unstable(feature = "mpsc_select", issue = "27800")] @@ -1163,7 +1162,7 @@ impl fmt::Display for SendError { } #[stable(feature = "rust1", since = "1.0.0")] -impl error::Error for SendError { +impl error::Error for SendError { fn description(&self) -> &str { "sending on a closed channel" } @@ -1198,7 +1197,7 @@ impl fmt::Display for TrySendError { } #[stable(feature = "rust1", since = "1.0.0")] -impl error::Error for TrySendError { +impl error::Error for TrySendError { fn description(&self) -> &str { match *self { @@ -1268,7 +1267,7 @@ impl error::Error for TryRecvError { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use env; use super::*; @@ -1940,9 +1939,16 @@ mod tests { // wait for the child thread to exit before we exit rx2.recv().unwrap(); } + + #[test] + fn issue_32114() { + let (tx, _) = channel(); + let _ = tx.send(123); + assert_eq!(tx.send(123), Err(SendError(123))); + } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod sync_tests { use env; use thread; diff --git a/src/libstd/sync/mpsc/mpsc_queue.rs b/src/libstd/sync/mpsc/mpsc_queue.rs index d926043fbbcd0..8d80f942ff75c 100644 --- a/src/libstd/sync/mpsc/mpsc_queue.rs +++ b/src/libstd/sync/mpsc/mpsc_queue.rs @@ -146,7 +146,7 @@ impl Drop for Queue { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use sync::mpsc::channel; use super::{Queue, Data, Empty, Inconsistent}; diff --git a/src/libstd/sync/mpsc/oneshot.rs b/src/libstd/sync/mpsc/oneshot.rs index 7389280b853db..767e9f96ac8e4 100644 --- a/src/libstd/sync/mpsc/oneshot.rs +++ b/src/libstd/sync/mpsc/oneshot.rs @@ -113,6 +113,8 @@ impl Packet { // Couldn't send the data, the port hung up first. 
Return the data // back up the stack. DISCONNECTED => { + self.state.swap(DISCONNECTED, Ordering::SeqCst); + self.upgrade = NothingSent; Err(self.data.take().unwrap()) } diff --git a/src/libstd/sync/mpsc/select.rs b/src/libstd/sync/mpsc/select.rs index 677544d335e7a..8b4da532af6f0 100644 --- a/src/libstd/sync/mpsc/select.rs +++ b/src/libstd/sync/mpsc/select.rs @@ -103,7 +103,7 @@ pub struct Handle<'rx, T:Send+'rx> { struct Packets { cur: *mut Handle<'static, ()> } #[doc(hidden)] -#[derive(PartialEq)] +#[derive(PartialEq, Eq)] pub enum StartResult { Installed, Abort, @@ -352,22 +352,20 @@ impl Iterator for Packets { } } -#[stable(feature = "mpsc_debug", since = "1.7.0")] impl fmt::Debug for Select { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Select {{ .. }}") } } -#[stable(feature = "mpsc_debug", since = "1.7.0")] impl<'rx, T:Send+'rx> fmt::Debug for Handle<'rx, T> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Handle {{ .. }}") } } -#[cfg(test)] #[allow(unused_imports)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use thread; use sync::mpsc::*; diff --git a/src/libstd/sync/mpsc/spsc_queue.rs b/src/libstd/sync/mpsc/spsc_queue.rs index 724d7b1be730d..5858e4b6ddb1f 100644 --- a/src/libstd/sync/mpsc/spsc_queue.rs +++ b/src/libstd/sync/mpsc/spsc_queue.rs @@ -231,7 +231,7 @@ impl Drop for Queue { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use sync::Arc; use super::Queue; diff --git a/src/libstd/sync/mutex.rs b/src/libstd/sync/mutex.rs index 098a3e44258c7..812724c7a167e 100644 --- a/src/libstd/sync/mutex.rs +++ b/src/libstd/sync/mutex.rs @@ -352,7 +352,7 @@ pub fn guard_poison<'a, T: ?Sized>(guard: &MutexGuard<'a, T>) -> &'a poison::Fla &guard.__lock.poison } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use sync::mpsc::channel; use sync::{Arc, Mutex, Condvar}; diff --git a/src/libstd/sync/once.rs b/src/libstd/sync/once.rs index 86d2986959c99..71e163321aea2 100644 --- a/src/libstd/sync/once.rs +++ b/src/libstd/sync/once.rs @@ -367,7 +367,7 @@ impl OnceState { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use panic; use sync::mpsc::channel; @@ -387,7 +387,7 @@ mod tests { #[test] fn stampede_once() { static O: Once = Once::new(); - static mut run: bool = false; + static mut RUN: bool = false; let (tx, rx) = channel(); for _ in 0..10 { @@ -396,10 +396,10 @@ mod tests { for _ in 0..4 { thread::yield_now() } unsafe { O.call_once(|| { - assert!(!run); - run = true; + assert!(!RUN); + RUN = true; }); - assert!(run); + assert!(RUN); } tx.send(()).unwrap(); }); @@ -407,10 +407,10 @@ mod tests { unsafe { O.call_once(|| { - assert!(!run); - run = true; + assert!(!RUN); + RUN = true; }); - assert!(run); + assert!(RUN); } for _ in 0..10 { diff --git a/src/libstd/sync/rwlock.rs b/src/libstd/sync/rwlock.rs index 7f053c6704b56..f08b764152144 100644 --- a/src/libstd/sync/rwlock.rs +++ b/src/libstd/sync/rwlock.rs @@ -136,6 +136,10 @@ impl RwLock { /// This function will return an error if the RwLock is poisoned. An RwLock /// is poisoned whenever a writer panics while holding an exclusive lock. /// The failure will occur immediately after the lock has been acquired. + /// + /// # Panics + /// + /// This function might panic when called if the lock is already held by the current thread. 
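As a hedged aside (not part of the patch), a minimal sketch of the usage the new `# Panics` notes warn about: each guard is scoped so the current thread never tries to re-acquire a lock it already holds.

```rust
use std::sync::RwLock;

fn main() {
    let lock = RwLock::new(5);
    {
        let r = lock.read().unwrap(); // shared read guard
        assert_eq!(*r, 5);
    } // read guard dropped here, releasing the lock
    {
        let mut w = lock.write().unwrap(); // exclusive write guard
        *w += 1;
    } // write guard dropped here
    assert_eq!(*lock.read().unwrap(), 6);
}
```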
#[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn read(&self) -> LockResult> { @@ -188,6 +192,10 @@ impl RwLock { /// This function will return an error if the RwLock is poisoned. An RwLock /// is poisoned whenever a writer panics while holding an exclusive lock. /// An error will be returned when the lock is acquired. + /// + /// # Panics + /// + /// This function might panic when called if the lock is already held by the current thread. #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn write(&self) -> LockResult> { @@ -380,7 +388,7 @@ impl<'a, T: ?Sized> Drop for RwLockWriteGuard<'a, T> { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { #![allow(deprecated)] // rand diff --git a/src/libstd/sys/common/args.rs b/src/libstd/sys/common/args.rs deleted file mode 100644 index fad2c277da417..0000000000000 --- a/src/libstd/sys/common/args.rs +++ /dev/null @@ -1,99 +0,0 @@ -// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Global storage for command line arguments -//! -//! The current incarnation of the Rust runtime expects for -//! the processes `argc` and `argv` arguments to be stored -//! in a globally-accessible location for use by the `os` module. -//! -//! Only valid to call on Linux. Mac and Windows use syscalls to -//! discover the command line arguments. -//! -//! FIXME #7756: Would be nice for this to not exist. - -#![allow(dead_code)] // different code on OSX/linux/etc - -/// One-time global initialization. -pub unsafe fn init(argc: isize, argv: *const *const u8) { imp::init(argc, argv) } - -/// One-time global cleanup. -pub unsafe fn cleanup() { imp::cleanup() } - -/// Make a clone of the global arguments. 
-pub fn clone() -> Option>> { imp::clone() } - -#[cfg(any(target_os = "linux", - target_os = "android", - target_os = "freebsd", - target_os = "dragonfly", - target_os = "bitrig", - target_os = "netbsd", - target_os = "openbsd", - target_os = "solaris", - target_os = "emscripten"))] -mod imp { - use libc::c_char; - use mem; - use ffi::CStr; - - use sys_common::mutex::Mutex; - - static mut GLOBAL_ARGS_PTR: usize = 0; - static LOCK: Mutex = Mutex::new(); - - pub unsafe fn init(argc: isize, argv: *const *const u8) { - let args = (0..argc).map(|i| { - CStr::from_ptr(*argv.offset(i) as *const c_char).to_bytes().to_vec() - }).collect(); - - LOCK.lock(); - let ptr = get_global_ptr(); - assert!((*ptr).is_none()); - (*ptr) = Some(box args); - LOCK.unlock(); - } - - pub unsafe fn cleanup() { - LOCK.lock(); - *get_global_ptr() = None; - LOCK.unlock(); - } - - pub fn clone() -> Option>> { - unsafe { - LOCK.lock(); - let ptr = get_global_ptr(); - let ret = (*ptr).as_ref().map(|s| (**s).clone()); - LOCK.unlock(); - return ret - } - } - - fn get_global_ptr() -> *mut Option>>> { - unsafe { mem::transmute(&GLOBAL_ARGS_PTR) } - } - -} - -#[cfg(any(target_os = "macos", - target_os = "ios", - target_os = "windows"))] -mod imp { - pub unsafe fn init(_argc: isize, _argv: *const *const u8) { - } - - pub fn cleanup() { - } - - pub fn clone() -> Option>> { - panic!() - } -} diff --git a/src/libstd/sys/common/io.rs b/src/libstd/sys/common/io.rs index 3cd70eddb858c..0483725dd83bc 100644 --- a/src/libstd/sys/common/io.rs +++ b/src/libstd/sys/common/io.rs @@ -51,6 +51,7 @@ pub unsafe fn read_to_end_uninitialized(r: &mut Read, buf: &mut Vec) -> io:: } #[cfg(test)] +#[allow(dead_code)] // not used on emscripten pub mod test { use path::{Path, PathBuf}; use env; @@ -165,6 +166,7 @@ mod tests { } #[bench] + #[cfg_attr(target_os = "emscripten", ignore)] fn bench_uninitialized(b: &mut ::test::Bencher) { b.iter(|| { let mut lr = repeat(1).take(10000000); diff --git a/src/libstd/sys/common/memchr.rs b/src/libstd/sys/common/memchr.rs new file mode 100644 index 0000000000000..3824a5fb5284c --- /dev/null +++ b/src/libstd/sys/common/memchr.rs @@ -0,0 +1,230 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +// +// Original implementation taken from rust-memchr +// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch + +#[allow(dead_code)] +pub mod fallback { + use cmp; + use mem; + + const LO_U64: u64 = 0x0101010101010101; + const HI_U64: u64 = 0x8080808080808080; + + // use truncation + const LO_USIZE: usize = LO_U64 as usize; + const HI_USIZE: usize = HI_U64 as usize; + + /// Return `true` if `x` contains any zero byte. + /// + /// From *Matters Computational*, J. Arndt + /// + /// "The idea is to subtract one from each of the bytes and then look for + /// bytes where the borrow propagated all the way to the most significant + /// bit." 
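To make the borrow trick above concrete, here is a self-contained check of the same predicate on `u64` words, duplicated purely for illustration; the real `usize` definitions follow in the patch.

```rust
const LO_U64: u64 = 0x0101010101010101;
const HI_U64: u64 = 0x8080808080808080;

// True if any byte of `x` is zero: subtracting 1 from a zero byte borrows and
// sets its high bit, while `& !x` discards bytes whose high bit was already
// set, avoiding false positives.
fn contains_zero_byte(x: u64) -> bool {
    x.wrapping_sub(LO_U64) & !x & HI_U64 != 0
}

fn main() {
    assert!(contains_zero_byte(0x6161_6100_6161_6161));  // one zero byte
    assert!(!contains_zero_byte(0x6161_6161_6161_6161)); // no zero bytes
    // memchr XORs each word with the repeated needle first, so a match
    // becomes a zero byte that this predicate can spot.
    let needle = 0x6161_6161_6161_6161u64;        // b'a' repeated
    let haystack_word = 0x6261_6163_6461_6161u64; // contains b'a' bytes
    assert!(contains_zero_byte(haystack_word ^ needle));
}
```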
+ #[inline] + fn contains_zero_byte(x: usize) -> bool { + x.wrapping_sub(LO_USIZE) & !x & HI_USIZE != 0 + } + + #[cfg(target_pointer_width = "32")] + #[inline] + fn repeat_byte(b: u8) -> usize { + let mut rep = (b as usize) << 8 | b as usize; + rep = rep << 16 | rep; + rep + } + + #[cfg(target_pointer_width = "64")] + #[inline] + fn repeat_byte(b: u8) -> usize { + let mut rep = (b as usize) << 8 | b as usize; + rep = rep << 16 | rep; + rep = rep << 32 | rep; + rep + } + + /// Return the first index matching the byte `a` in `text`. + pub fn memchr(x: u8, text: &[u8]) -> Option { + // Scan for a single byte value by reading two `usize` words at a time. + // + // Split `text` in three parts + // - unaligned initial part, before the first word aligned address in text + // - body, scan by 2 words at a time + // - the last remaining part, < 2 word size + let len = text.len(); + let ptr = text.as_ptr(); + let usize_bytes = mem::size_of::(); + + // search up to an aligned boundary + let align = (ptr as usize) & (usize_bytes- 1); + let mut offset; + if align > 0 { + offset = cmp::min(usize_bytes - align, len); + if let Some(index) = text[..offset].iter().position(|elt| *elt == x) { + return Some(index); + } + } else { + offset = 0; + } + + // search the body of the text + let repeated_x = repeat_byte(x); + + if len >= 2 * usize_bytes { + while offset <= len - 2 * usize_bytes { + unsafe { + let u = *(ptr.offset(offset as isize) as *const usize); + let v = *(ptr.offset((offset + usize_bytes) as isize) as *const usize); + + // break if there is a matching byte + let zu = contains_zero_byte(u ^ repeated_x); + let zv = contains_zero_byte(v ^ repeated_x); + if zu || zv { + break; + } + } + offset += usize_bytes * 2; + } + } + + // find the byte after the point the body loop stopped + text[offset..].iter().position(|elt| *elt == x).map(|i| offset + i) + } + + /// Return the last index matching the byte `a` in `text`. + pub fn memrchr(x: u8, text: &[u8]) -> Option { + // Scan for a single byte value by reading two `usize` words at a time. 
+ // + // Split `text` in three parts + // - unaligned tail, after the last word aligned address in text + // - body, scan by 2 words at a time + // - the first remaining bytes, < 2 word size + let len = text.len(); + let ptr = text.as_ptr(); + let usize_bytes = mem::size_of::(); + + // search to an aligned boundary + let end_align = (ptr as usize + len) & (usize_bytes - 1); + let mut offset; + if end_align > 0 { + offset = if end_align >= len { 0 } else { len - end_align }; + if let Some(index) = text[offset..].iter().rposition(|elt| *elt == x) { + return Some(offset + index); + } + } else { + offset = len; + } + + // search the body of the text + let repeated_x = repeat_byte(x); + + while offset >= 2 * usize_bytes { + unsafe { + let u = *(ptr.offset(offset as isize - 2 * usize_bytes as isize) as *const usize); + let v = *(ptr.offset(offset as isize - usize_bytes as isize) as *const usize); + + // break if there is a matching byte + let zu = contains_zero_byte(u ^ repeated_x); + let zv = contains_zero_byte(v ^ repeated_x); + if zu || zv { + break; + } + } + offset -= 2 * usize_bytes; + } + + // find the byte before the point the body loop stopped + text[..offset].iter().rposition(|elt| *elt == x) + } + + // test fallback implementations on all platforms + #[test] + fn matches_one() { + assert_eq!(Some(0), memchr(b'a', b"a")); + } + + #[test] + fn matches_begin() { + assert_eq!(Some(0), memchr(b'a', b"aaaa")); + } + + #[test] + fn matches_end() { + assert_eq!(Some(4), memchr(b'z', b"aaaaz")); + } + + #[test] + fn matches_nul() { + assert_eq!(Some(4), memchr(b'\x00', b"aaaa\x00")); + } + + #[test] + fn matches_past_nul() { + assert_eq!(Some(5), memchr(b'z', b"aaaa\x00z")); + } + + #[test] + fn no_match_empty() { + assert_eq!(None, memchr(b'a', b"")); + } + + #[test] + fn no_match() { + assert_eq!(None, memchr(b'a', b"xyz")); + } + + #[test] + fn matches_one_reversed() { + assert_eq!(Some(0), memrchr(b'a', b"a")); + } + + #[test] + fn matches_begin_reversed() { + assert_eq!(Some(3), memrchr(b'a', b"aaaa")); + } + + #[test] + fn matches_end_reversed() { + assert_eq!(Some(0), memrchr(b'z', b"zaaaa")); + } + + #[test] + fn matches_nul_reversed() { + assert_eq!(Some(4), memrchr(b'\x00', b"aaaa\x00")); + } + + #[test] + fn matches_past_nul_reversed() { + assert_eq!(Some(0), memrchr(b'z', b"z\x00aaaa")); + } + + #[test] + fn no_match_empty_reversed() { + assert_eq!(None, memrchr(b'a', b"")); + } + + #[test] + fn no_match_reversed() { + assert_eq!(None, memrchr(b'a', b"xyz")); + } + + #[test] + fn each_alignment_reversed() { + let mut data = [1u8; 64]; + let needle = 2; + let pos = 40; + data[pos] = needle; + for start in 0..16 { + assert_eq!(Some(pos - start), memrchr(needle, &data[start..])); + } + } +} diff --git a/src/libstd/sys/common/mod.rs b/src/libstd/sys/common/mod.rs index d1ca676510714..2845f895f1869 100644 --- a/src/libstd/sys/common/mod.rs +++ b/src/libstd/sys/common/mod.rs @@ -25,12 +25,12 @@ macro_rules! 
rtassert { }) } -pub mod args; pub mod at_exit_imp; #[cfg(any(not(cargobuild), feature = "backtrace"))] pub mod backtrace; pub mod condvar; pub mod io; +pub mod memchr; pub mod mutex; pub mod net; pub mod poison; @@ -91,7 +91,7 @@ pub fn at_exit(f: F) -> Result<(), ()> { pub fn cleanup() { static CLEANUP: Once = Once::new(); CLEANUP.call_once(|| unsafe { - args::cleanup(); + sys::args::cleanup(); sys::stack_overflow::cleanup(); at_exit_imp::cleanup(); }); diff --git a/src/libstd/sys/common/net.rs b/src/libstd/sys/common/net.rs index a777cfe35e56d..10ad61f4c800c 100644 --- a/src/libstd/sys/common/net.rs +++ b/src/libstd/sys/common/net.rs @@ -24,24 +24,35 @@ use time::Duration; #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "openbsd", target_os = "netbsd", - target_os = "solaris"))] + target_os = "solaris", target_os = "haiku"))] use sys::net::netc::IPV6_JOIN_GROUP as IPV6_ADD_MEMBERSHIP; #[cfg(not(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "openbsd", target_os = "netbsd", - target_os = "solaris")))] + target_os = "solaris", target_os = "haiku")))] use sys::net::netc::IPV6_ADD_MEMBERSHIP; #[cfg(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "openbsd", target_os = "netbsd", - target_os = "solaris"))] + target_os = "solaris", target_os = "haiku"))] use sys::net::netc::IPV6_LEAVE_GROUP as IPV6_DROP_MEMBERSHIP; #[cfg(not(any(target_os = "dragonfly", target_os = "freebsd", target_os = "ios", target_os = "macos", target_os = "openbsd", target_os = "netbsd", - target_os = "solaris")))] + target_os = "solaris", target_os = "haiku")))] use sys::net::netc::IPV6_DROP_MEMBERSHIP; +#[cfg(any(target_os = "linux", target_os = "android", + target_os = "dragonfly", target_os = "freebsd", + target_os = "openbsd", target_os = "netbsd", + target_os = "haiku", target_os = "bitrig"))] +use libc::MSG_NOSIGNAL; +#[cfg(not(any(target_os = "linux", target_os = "android", + target_os = "dragonfly", target_os = "freebsd", + target_os = "openbsd", target_os = "netbsd", + target_os = "haiku", target_os = "bitrig")))] +const MSG_NOSIGNAL: c_int = 0x0; + //////////////////////////////////////////////////////////////////////////////// // sockaddr and misc bindings //////////////////////////////////////////////////////////////////////////////// @@ -225,7 +236,7 @@ impl TcpStream { c::send(*self.inner.as_inner(), buf.as_ptr() as *const c_void, len, - 0) + MSG_NOSIGNAL) })?; Ok(ret as usize) } @@ -449,7 +460,7 @@ impl UdpSocket { let ret = cvt(unsafe { c::sendto(*self.inner.as_inner(), buf.as_ptr() as *const c_void, len, - 0, dstp, dstlen) + MSG_NOSIGNAL, dstp, dstlen) })?; Ok(ret as usize) } @@ -573,7 +584,7 @@ impl UdpSocket { c::send(*self.inner.as_inner(), buf.as_ptr() as *const c_void, len, - 0) + MSG_NOSIGNAL) })?; Ok(ret as usize) } diff --git a/src/libstd/sys/common/poison.rs b/src/libstd/sys/common/poison.rs index 55212bf35d698..bdc727f1dfcfe 100644 --- a/src/libstd/sys/common/poison.rs +++ b/src/libstd/sys/common/poison.rs @@ -10,7 +10,6 @@ use error::{Error}; use fmt; -use marker::Reflect; use sync::atomic::{AtomicBool, Ordering}; use thread; @@ -117,7 +116,7 @@ impl fmt::Display for PoisonError { } #[stable(feature = "rust1", since = "1.0.0")] -impl Error for PoisonError { +impl Error for PoisonError { fn description(&self) -> &str { "poisoned lock: another task failed inside" } @@ -174,7 +173,7 @@ impl fmt::Display for TryLockError { } 
#[stable(feature = "rust1", since = "1.0.0")] -impl Error for TryLockError { +impl Error for TryLockError { fn description(&self) -> &str { match *self { TryLockError::Poisoned(ref p) => p.description(), diff --git a/src/libstd/sys/common/remutex.rs b/src/libstd/sys/common/remutex.rs index cbdeaad7f6bd3..4d0407ccf6c89 100644 --- a/src/libstd/sys/common/remutex.rs +++ b/src/libstd/sys/common/remutex.rs @@ -156,7 +156,7 @@ impl<'a, T> Drop for ReentrantMutexGuard<'a, T> { } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use sys_common::remutex::{ReentrantMutex, ReentrantMutexGuard}; use cell::RefCell; diff --git a/src/libstd/sys/common/wtf8.rs b/src/libstd/sys/common/wtf8.rs index 8d357aa78c9e9..0a94ff1e95823 100644 --- a/src/libstd/sys/common/wtf8.rs +++ b/src/libstd/sys/common/wtf8.rs @@ -206,10 +206,12 @@ impl Wtf8Buf { /// Copied from String::push /// This does **not** include the WTF-8 concatenation check. fn push_code_point_unchecked(&mut self, code_point: CodePoint) { - let bytes = unsafe { - char::from_u32_unchecked(code_point.value).encode_utf8() + let c = unsafe { + char::from_u32_unchecked(code_point.value) }; - self.bytes.extend_from_slice(bytes.as_slice()); + let mut bytes = [0; 4]; + let bytes = c.encode_utf8(&mut bytes).as_bytes(); + self.bytes.extend_from_slice(bytes) } #[inline] @@ -738,15 +740,16 @@ impl<'a> Iterator for EncodeWide<'a> { return Some(tmp); } + let mut buf = [0; 2]; self.code_points.next().map(|code_point| { - let n = unsafe { - char::from_u32_unchecked(code_point.value).encode_utf16() + let c = unsafe { + char::from_u32_unchecked(code_point.value) }; - let n = n.as_slice(); - if n.len() == 2 { - self.extra = n[1]; + let n = c.encode_utf16(&mut buf).len(); + if n == 2 { + self.extra = buf[1]; } - n[0] + buf[0] }) } diff --git a/src/libstd/sys/unix/android.rs b/src/libstd/sys/unix/android.rs index abbe3fc1846bd..10436723a81d0 100644 --- a/src/libstd/sys/unix/android.rs +++ b/src/libstd/sys/unix/android.rs @@ -28,10 +28,11 @@ #![cfg(target_os = "android")] -use libc::{c_int, sighandler_t}; +use libc::{c_int, c_void, sighandler_t, size_t, ssize_t}; +use libc::{ftruncate, pread, pwrite}; use io; -use sys::cvt_r; +use super::{cvt, cvt_r}; // The `log2` and `log2f` functions apparently appeared in android-18, or at // least you can see they're not present in the android-17 header [1] and they @@ -96,13 +97,10 @@ pub unsafe fn signal(signum: c_int, handler: sighandler_t) -> sighandler_t { // // If it doesn't we just fall back to `ftruncate`, generating an error for // too-large values. 
+#[cfg(target_pointer_width = "32")] pub fn ftruncate64(fd: c_int, size: u64) -> io::Result<()> { weak!(fn ftruncate64(c_int, i64) -> c_int); - extern { - fn ftruncate(fd: c_int, off: i32) -> c_int; - } - unsafe { match ftruncate64.get() { Some(f) => cvt_r(|| f(fd, size as i64)).map(|_| ()), @@ -117,3 +115,56 @@ pub fn ftruncate64(fd: c_int, size: u64) -> io::Result<()> { } } } + +#[cfg(target_pointer_width = "64")] +pub fn ftruncate64(fd: c_int, size: u64) -> io::Result<()> { + unsafe { + cvt_r(|| ftruncate(fd, size as i64)).map(|_| ()) + } +} + +#[cfg(target_pointer_width = "32")] +pub unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: size_t, offset: i64) + -> io::Result +{ + use convert::TryInto; + weak!(fn pread64(c_int, *mut c_void, size_t, i64) -> ssize_t); + pread64.get().map(|f| cvt(f(fd, buf, count, offset))).unwrap_or_else(|| { + if let Ok(o) = offset.try_into() { + cvt(pread(fd, buf, count, o)) + } else { + Err(io::Error::new(io::ErrorKind::InvalidInput, + "cannot pread >2GB")) + } + }) +} + +#[cfg(target_pointer_width = "32")] +pub unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: size_t, offset: i64) + -> io::Result +{ + use convert::TryInto; + weak!(fn pwrite64(c_int, *const c_void, size_t, i64) -> ssize_t); + pwrite64.get().map(|f| cvt(f(fd, buf, count, offset))).unwrap_or_else(|| { + if let Ok(o) = offset.try_into() { + cvt(pwrite(fd, buf, count, o)) + } else { + Err(io::Error::new(io::ErrorKind::InvalidInput, + "cannot pwrite >2GB")) + } + }) +} + +#[cfg(target_pointer_width = "64")] +pub unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: size_t, offset: i64) + -> io::Result +{ + cvt(pread(fd, buf, count, offset)) +} + +#[cfg(target_pointer_width = "64")] +pub unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: size_t, offset: i64) + -> io::Result +{ + cvt(pwrite(fd, buf, count, offset)) +} diff --git a/src/libstd/sys/unix/args.rs b/src/libstd/sys/unix/args.rs new file mode 100644 index 0000000000000..c04fd863674d4 --- /dev/null +++ b/src/libstd/sys/unix/args.rs @@ -0,0 +1,212 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Global initialization and retreival of command line arguments. +//! +//! On some platforms these are stored during runtime startup, +//! and on some they are retrieved from the system on demand. + +#![allow(dead_code)] // runtime init functions not used during testing + +use ffi::OsString; +use marker::PhantomData; +use vec; + +/// One-time global initialization. +pub unsafe fn init(argc: isize, argv: *const *const u8) { imp::init(argc, argv) } + +/// One-time global cleanup. 
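
On 32-bit Android the new `cvt_pread64`/`cvt_pwrite64` helpers only fall back to the plain 32-bit `pread`/`pwrite` when the 64-bit offset actually fits. A hedged sketch of just that `TryInto` guard; `checked_offset` is an illustrative name, not part of the patch:

```rust
use std::convert::TryInto;

// Accept the offset only if it fits the legacy 32-bit off_t.
fn checked_offset(offset: i64) -> Result<i32, &'static str> {
    offset
        .try_into()
        .map_err(|_| "cannot pread/pwrite past 2GB without pread64/pwrite64")
}

fn main() {
    assert_eq!(checked_offset(4096), Ok(4096));
    assert!(checked_offset(1_i64 << 40).is_err());
}
```
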
+pub unsafe fn cleanup() { imp::cleanup() } + +/// Returns the command line arguments +pub fn args() -> Args { + imp::args() +} + +pub struct Args { + iter: vec::IntoIter, + _dont_send_or_sync_me: PhantomData<*mut ()>, +} + +impl Iterator for Args { + type Item = OsString; + fn next(&mut self) -> Option { self.iter.next() } + fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } +} + +impl ExactSizeIterator for Args { + fn len(&self) -> usize { self.iter.len() } +} + +impl DoubleEndedIterator for Args { + fn next_back(&mut self) -> Option { self.iter.next_back() } +} + +#[cfg(any(target_os = "linux", + target_os = "android", + target_os = "freebsd", + target_os = "dragonfly", + target_os = "bitrig", + target_os = "netbsd", + target_os = "openbsd", + target_os = "solaris", + target_os = "emscripten", + target_os = "haiku", + target_os = "fuchsia"))] +mod imp { + use os::unix::prelude::*; + use mem; + use ffi::{CStr, OsString}; + use marker::PhantomData; + use libc; + use super::Args; + + use sys_common::mutex::Mutex; + + static mut GLOBAL_ARGS_PTR: usize = 0; + static LOCK: Mutex = Mutex::new(); + + pub unsafe fn init(argc: isize, argv: *const *const u8) { + let args = (0..argc).map(|i| { + CStr::from_ptr(*argv.offset(i) as *const libc::c_char).to_bytes().to_vec() + }).collect(); + + LOCK.lock(); + let ptr = get_global_ptr(); + assert!((*ptr).is_none()); + (*ptr) = Some(box args); + LOCK.unlock(); + } + + pub unsafe fn cleanup() { + LOCK.lock(); + *get_global_ptr() = None; + LOCK.unlock(); + } + + pub fn args() -> Args { + let bytes = clone().unwrap_or(Vec::new()); + let v: Vec = bytes.into_iter().map(|v| { + OsStringExt::from_vec(v) + }).collect(); + Args { iter: v.into_iter(), _dont_send_or_sync_me: PhantomData } + } + + fn clone() -> Option>> { + unsafe { + LOCK.lock(); + let ptr = get_global_ptr(); + let ret = (*ptr).as_ref().map(|s| (**s).clone()); + LOCK.unlock(); + return ret + } + } + + fn get_global_ptr() -> *mut Option>>> { + unsafe { mem::transmute(&GLOBAL_ARGS_PTR) } + } + +} + +#[cfg(any(target_os = "macos", + target_os = "ios"))] +mod imp { + use ffi::CStr; + use marker::PhantomData; + use libc; + use super::Args; + + pub unsafe fn init(_argc: isize, _argv: *const *const u8) { + } + + pub fn cleanup() { + } + + #[cfg(target_os = "macos")] + pub fn args() -> Args { + use os::unix::prelude::*; + extern { + // These functions are in crt_externs.h. + fn _NSGetArgc() -> *mut libc::c_int; + fn _NSGetArgv() -> *mut *mut *mut libc::c_char; + } + + let vec = unsafe { + let (argc, argv) = (*_NSGetArgc() as isize, + *_NSGetArgv() as *const *const libc::c_char); + (0.. argc as isize).map(|i| { + let bytes = CStr::from_ptr(*argv.offset(i)).to_bytes().to_vec(); + OsStringExt::from_vec(bytes) + }).collect::>() + }; + Args { + iter: vec.into_iter(), + _dont_send_or_sync_me: PhantomData, + } + } + + // As _NSGetArgc and _NSGetArgv aren't mentioned in iOS docs + // and use underscores in their names - they're most probably + // are considered private and therefore should be avoided + // Here is another way to get arguments using Objective C + // runtime + // + // In general it looks like: + // res = Vec::new() + // let args = [[NSProcessInfo processInfo] arguments] + // for i in (0..[args count]) + // res.push([args objectAtIndex:i]) + // res + #[cfg(target_os = "ios")] + pub fn args() -> Args { + use ffi::OsString; + use mem; + use str; + + extern { + fn sel_registerName(name: *const libc::c_uchar) -> Sel; + fn objc_msgSend(obj: NsId, sel: Sel, ...) 
-> NsId; + fn objc_getClass(class_name: *const libc::c_uchar) -> NsId; + } + + #[link(name = "Foundation", kind = "framework")] + #[link(name = "objc")] + #[cfg(not(cargobuild))] + extern {} + + type Sel = *const libc::c_void; + type NsId = *const libc::c_void; + + let mut res = Vec::new(); + + unsafe { + let process_info_sel = sel_registerName("processInfo\0".as_ptr()); + let arguments_sel = sel_registerName("arguments\0".as_ptr()); + let utf8_sel = sel_registerName("UTF8String\0".as_ptr()); + let count_sel = sel_registerName("count\0".as_ptr()); + let object_at_sel = sel_registerName("objectAtIndex:\0".as_ptr()); + + let klass = objc_getClass("NSProcessInfo\0".as_ptr()); + let info = objc_msgSend(klass, process_info_sel); + let args = objc_msgSend(info, arguments_sel); + + let cnt: usize = mem::transmute(objc_msgSend(args, count_sel)); + for i in 0..cnt { + let tmp = objc_msgSend(args, object_at_sel, i); + let utf_c_str: *const libc::c_char = + mem::transmute(objc_msgSend(tmp, utf8_sel)); + let bytes = CStr::from_ptr(utf_c_str).to_bytes(); + res.push(OsString::from(str::from_utf8(bytes).unwrap())) + } + } + + Args { iter: res.into_iter(), _dont_send_or_sync_me: PhantomData } + } +} diff --git a/src/libstd/sys/unix/env.rs b/src/libstd/sys/unix/env.rs new file mode 100644 index 0000000000000..eff3a8c2a34a4 --- /dev/null +++ b/src/libstd/sys/unix/env.rs @@ -0,0 +1,184 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[cfg(target_os = "linux")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "linux"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} + +#[cfg(target_os = "macos")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "macos"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".dylib"; + pub const DLL_EXTENSION: &'static str = "dylib"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} + +#[cfg(target_os = "ios")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "ios"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".dylib"; + pub const DLL_EXTENSION: &'static str = "dylib"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} + +#[cfg(target_os = "freebsd")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "freebsd"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} + +#[cfg(target_os = "dragonfly")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "dragonfly"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: 
&'static str = ""; +} + +#[cfg(target_os = "bitrig")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "bitrig"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} + +#[cfg(target_os = "netbsd")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "netbsd"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} + +#[cfg(target_os = "openbsd")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "openbsd"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} + +#[cfg(target_os = "android")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "android"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} + +#[cfg(target_os = "solaris")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "solaris"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} + +#[cfg(all(target_os = "nacl", not(target_arch = "le32")))] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "nacl"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ".nexe"; + pub const EXE_EXTENSION: &'static str = "nexe"; +} +#[cfg(all(target_os = "nacl", target_arch = "le32"))] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "pnacl"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".pso"; + pub const DLL_EXTENSION: &'static str = "pso"; + pub const EXE_SUFFIX: &'static str = ".pexe"; + pub const EXE_EXTENSION: &'static str = "pexe"; +} + +#[cfg(target_os = "haiku")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "haiku"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} + +#[cfg(all(target_os = "emscripten", target_arch = "asmjs"))] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "emscripten"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ".js"; + pub const EXE_EXTENSION: &'static str = "js"; +} + +#[cfg(all(target_os = "emscripten", target_arch = "wasm32"))] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = 
"emscripten"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ".js"; + pub const EXE_EXTENSION: &'static str = "js"; +} + +#[cfg(target_os = "fuchsia")] +pub mod os { + pub const FAMILY: &'static str = "unix"; + pub const OS: &'static str = "fuchsia"; + pub const DLL_PREFIX: &'static str = "lib"; + pub const DLL_SUFFIX: &'static str = ".so"; + pub const DLL_EXTENSION: &'static str = "so"; + pub const EXE_SUFFIX: &'static str = ""; + pub const EXE_EXTENSION: &'static str = ""; +} diff --git a/src/libstd/sys/unix/ext/fs.rs b/src/libstd/sys/unix/ext/fs.rs index 77587918ac94b..fcfab05158841 100644 --- a/src/libstd/sys/unix/ext/fs.rs +++ b/src/libstd/sys/unix/ext/fs.rs @@ -20,6 +20,51 @@ use sys; use sys_common::{FromInner, AsInner, AsInnerMut}; use sys::platform::fs::MetadataExt as UnixMetadataExt; +/// Unix-specific extensions to `File` +#[unstable(feature = "file_offset", issue = "35918")] +pub trait FileExt { + /// Reads a number of bytes starting from a given offset. + /// + /// Returns the number of bytes read. + /// + /// The offset is relative to the start of the file and thus independent + /// from the current cursor. + /// + /// The current file cursor is not affected by this function. + /// + /// Note that similar to `File::read`, it is not an error to return with a + /// short read. + #[unstable(feature = "file_offset", issue = "35918")] + fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result; + + /// Writes a number of bytes starting from a given offset. + /// + /// Returns the number of bytes written. + /// + /// The offset is relative to the start of the file and thus independent + /// from the current cursor. + /// + /// The current file cursor is not affected by this function. + /// + /// When writing beyond the end of the file, the file is appropiately + /// extended and the intermediate bytes are initialized with the value 0. + /// + /// Note that similar to `File::write`, it is not an error to return a + /// short write. 
+ #[unstable(feature = "file_offset", issue = "35918")] + fn write_at(&self, buf: &[u8], offset: u64) -> io::Result; +} + +#[unstable(feature = "file_offset", issue = "35918")] +impl FileExt for fs::File { + fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { + self.as_inner().read_at(buf, offset) + } + fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { + self.as_inner().write_at(buf, offset) + } +} + /// Unix-specific extensions to `Permissions` #[stable(feature = "fs_ext", since = "1.1.0")] pub trait PermissionsExt { diff --git a/src/libstd/sys/unix/ext/mod.rs b/src/libstd/sys/unix/ext/mod.rs index 1be3d75d866dd..b2483f4e20937 100644 --- a/src/libstd/sys/unix/ext/mod.rs +++ b/src/libstd/sys/unix/ext/mod.rs @@ -50,6 +50,8 @@ pub mod prelude { pub use super::fs::{PermissionsExt, OpenOptionsExt, MetadataExt, FileTypeExt}; #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] pub use super::fs::DirEntryExt; + #[doc(no_inline)] #[unstable(feature = "file_offset", issue = "35918")] + pub use super::fs::FileExt; #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] pub use super::thread::JoinHandleExt; #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libstd/sys/unix/ext/net.rs b/src/libstd/sys/unix/ext/net.rs index 3f93fce193561..80f53da1cefe0 100644 --- a/src/libstd/sys/unix/ext/net.rs +++ b/src/libstd/sys/unix/ext/net.rs @@ -28,6 +28,17 @@ use sys::cvt; use sys::net::Socket; use sys_common::{AsInner, FromInner, IntoInner}; +#[cfg(any(target_os = "linux", target_os = "android", + target_os = "dragonfly", target_os = "freebsd", + target_os = "openbsd", target_os = "netbsd", + target_os = "haiku", target_os = "bitrig"))] +use libc::MSG_NOSIGNAL; +#[cfg(not(any(target_os = "linux", target_os = "android", + target_os = "dragonfly", target_os = "freebsd", + target_os = "openbsd", target_os = "netbsd", + target_os = "haiku", target_os = "bitrig")))] +const MSG_NOSIGNAL: libc::c_int = 0x0; + fn sun_path_offset() -> usize { unsafe { // Work with an actual instance of the type since using a null pointer is UB @@ -690,7 +701,7 @@ impl UnixDatagram { let count = cvt(libc::sendto(*d.0.as_inner(), buf.as_ptr() as *const _, buf.len(), - 0, + MSG_NOSIGNAL, &addr as *const _ as *const _, len))?; Ok(count as usize) @@ -786,7 +797,7 @@ impl IntoRawFd for UnixDatagram { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod test { use thread; use io; diff --git a/src/libstd/sys/unix/ext/process.rs b/src/libstd/sys/unix/ext/process.rs index 5bd92f2eb574d..3a7c59d4e6d09 100644 --- a/src/libstd/sys/unix/ext/process.rs +++ b/src/libstd/sys/unix/ext/process.rs @@ -161,21 +161,21 @@ impl AsRawFd for process::ChildStderr { } } -#[stable(feature = "process_extensions", since = "1.2.0")] +#[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawFd for process::ChildStdin { fn into_raw_fd(self) -> RawFd { self.into_inner().into_fd().into_raw() } } -#[stable(feature = "process_extensions", since = "1.2.0")] +#[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawFd for process::ChildStdout { fn into_raw_fd(self) -> RawFd { self.into_inner().into_fd().into_raw() } } -#[stable(feature = "process_extensions", since = "1.2.0")] +#[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawFd for process::ChildStderr { fn into_raw_fd(self) -> RawFd { self.into_inner().into_fd().into_raw() diff --git a/src/libstd/sys/unix/fd.rs b/src/libstd/sys/unix/fd.rs index b2b1f16f20a9a..41bb96fed16cb 100644 --- 
a/src/libstd/sys/unix/fd.rs +++ b/src/libstd/sys/unix/fd.rs @@ -11,7 +11,7 @@ #![unstable(reason = "not public", issue = "0", feature = "fd")] use io::{self, Read}; -use libc::{self, c_int, size_t, c_void}; +use libc::{self, c_int, c_void}; use mem; use sync::atomic::{AtomicBool, Ordering}; use sys::cvt; @@ -40,7 +40,7 @@ impl FileDesc { let ret = cvt(unsafe { libc::read(self.fd, buf.as_mut_ptr() as *mut c_void, - buf.len() as size_t) + buf.len()) })?; Ok(ret as usize) } @@ -50,23 +50,77 @@ impl FileDesc { (&mut me).read_to_end(buf) } + pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { + #[cfg(target_os = "android")] + use super::android::cvt_pread64; + + #[cfg(not(target_os = "android"))] + unsafe fn cvt_pread64(fd: c_int, buf: *mut c_void, count: usize, offset: i64) + -> io::Result + { + #[cfg(any(target_os = "linux", target_os = "emscripten"))] + use libc::pread64; + #[cfg(not(any(target_os = "linux", target_os = "emscripten")))] + use libc::pread as pread64; + cvt(pread64(fd, buf, count, offset)) + } + + unsafe { + cvt_pread64(self.fd, + buf.as_mut_ptr() as *mut c_void, + buf.len(), + offset as i64) + .map(|n| n as usize) + } + } + pub fn write(&self, buf: &[u8]) -> io::Result { let ret = cvt(unsafe { libc::write(self.fd, buf.as_ptr() as *const c_void, - buf.len() as size_t) + buf.len()) })?; Ok(ret as usize) } - #[cfg(not(any(target_env = "newlib", target_os = "solaris", target_os = "emscripten")))] + pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { + #[cfg(target_os = "android")] + use super::android::cvt_pwrite64; + + #[cfg(not(target_os = "android"))] + unsafe fn cvt_pwrite64(fd: c_int, buf: *const c_void, count: usize, offset: i64) + -> io::Result + { + #[cfg(any(target_os = "linux", target_os = "emscripten"))] + use libc::pwrite64; + #[cfg(not(any(target_os = "linux", target_os = "emscripten")))] + use libc::pwrite as pwrite64; + cvt(pwrite64(fd, buf, count, offset)) + } + + unsafe { + cvt_pwrite64(self.fd, + buf.as_ptr() as *const c_void, + buf.len(), + offset as i64) + .map(|n| n as usize) + } + } + + #[cfg(not(any(target_env = "newlib", + target_os = "solaris", + target_os = "emscripten", + target_os = "haiku")))] pub fn set_cloexec(&self) -> io::Result<()> { unsafe { cvt(libc::ioctl(self.fd, libc::FIOCLEX))?; Ok(()) } } - #[cfg(any(target_env = "newlib", target_os = "solaris", target_os = "emscripten"))] + #[cfg(any(target_env = "newlib", + target_os = "solaris", + target_os = "emscripten", + target_os = "haiku"))] pub fn set_cloexec(&self) -> io::Result<()> { unsafe { let previous = cvt(libc::fcntl(self.fd, libc::F_GETFD))?; @@ -104,9 +158,9 @@ impl FileDesc { // resolve so we at least compile this. // // [1]: http://comments.gmane.org/gmane.linux.lib.musl.general/2963 - #[cfg(target_os = "android")] + #[cfg(any(target_os = "android", target_os = "haiku"))] use libc::F_DUPFD as F_DUPFD_CLOEXEC; - #[cfg(not(target_os = "android"))] + #[cfg(not(any(target_os = "android", target_os="haiku")))] use libc::F_DUPFD_CLOEXEC; let make_filedesc = |fd| { diff --git a/src/libstd/sys/unix/fs.rs b/src/libstd/sys/unix/fs.rs index e6fe3eb112a60..0b43fd2ac8c4d 100644 --- a/src/libstd/sys/unix/fs.rs +++ b/src/libstd/sys/unix/fs.rs @@ -193,6 +193,14 @@ impl FromInner for FilePermissions { } } +impl fmt::Debug for ReadDir { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + // This will only be called from std::fs::ReadDir, which will add a "ReadDir()" frame. 
+ // Thus the result will be e g 'ReadDir("/home")' + fmt::Debug::fmt(&*self.root, f) + } +} + impl Iterator for ReadDir { type Item = io::Result; @@ -279,7 +287,12 @@ impl DirEntry { stat(&self.path()).map(|m| m.file_type()) } - #[cfg(not(target_os = "solaris"))] + #[cfg(target_os = "haiku")] + pub fn file_type(&self) -> io::Result { + lstat(&self.path()).map(|m| m.file_type()) + } + + #[cfg(not(any(target_os = "solaris", target_os = "haiku")))] pub fn file_type(&self) -> io::Result { match self.entry.d_type { libc::DT_CHR => Ok(FileType { mode: libc::S_IFCHR }), @@ -298,7 +311,9 @@ impl DirEntry { target_os = "linux", target_os = "emscripten", target_os = "android", - target_os = "solaris"))] + target_os = "solaris", + target_os = "haiku", + target_os = "fuchsia"))] pub fn ino(&self) -> u64 { self.entry.d_ino as u64 } @@ -327,7 +342,9 @@ impl DirEntry { } #[cfg(any(target_os = "android", target_os = "linux", - target_os = "emscripten"))] + target_os = "emscripten", + target_os = "haiku", + target_os = "fuchsia"))] fn name_bytes(&self) -> &[u8] { unsafe { CStr::from_ptr(self.entry.d_name.as_ptr()).to_bytes() @@ -476,10 +493,18 @@ impl File { self.0.read_to_end(buf) } + pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { + self.0.read_at(buf, offset) + } + pub fn write(&self, buf: &[u8]) -> io::Result { self.0.write(buf) } + pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { + self.0.write_at(buf, offset) + } + pub fn flush(&self) -> io::Result<()> { Ok(()) } pub fn seek(&self, pos: SeekFrom) -> io::Result { @@ -662,7 +687,7 @@ pub fn readlink(p: &Path) -> io::Result { loop { let buf_read = cvt(unsafe { - libc::readlink(p, buf.as_mut_ptr() as *mut _, buf.capacity() as libc::size_t) + libc::readlink(p, buf.as_mut_ptr() as *mut _, buf.capacity()) })? as usize; unsafe { buf.set_len(buf_read); } diff --git a/src/libstd/sys/unix/memchr.rs b/src/libstd/sys/unix/memchr.rs new file mode 100644 index 0000000000000..aed04703ea117 --- /dev/null +++ b/src/libstd/sys/unix/memchr.rs @@ -0,0 +1,57 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +// +// Original implementation taken from rust-memchr +// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch + +pub fn memchr(needle: u8, haystack: &[u8]) -> Option { + use libc; + + let p = unsafe { + libc::memchr( + haystack.as_ptr() as *const libc::c_void, + needle as libc::c_int, + haystack.len()) + }; + if p.is_null() { + None + } else { + Some(p as usize - (haystack.as_ptr() as usize)) + } +} + +pub fn memrchr(needle: u8, haystack: &[u8]) -> Option { + + #[cfg(target_os = "linux")] + fn memrchr_specific(needle: u8, haystack: &[u8]) -> Option { + use libc; + + // GNU's memrchr() will - unlike memchr() - error if haystack is empty. 
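
For the `Debug` impl added to `ReadDir` in the `fs.rs` hunk a little further up: the public `std::fs::ReadDir` wrapper adds the `ReadDir(..)` frame, so user code simply sees the opened path. A small consumer-side example, assuming a toolchain where that impl has landed:

```rust
use std::fs;

fn main() -> std::io::Result<()> {
    let rd = fs::read_dir(".")?;
    println!("{:?}", rd); // e.g. ReadDir(".")
    for entry in rd {
        let entry = entry?;
        println!("{:?}: {:?}", entry.file_name(), entry.file_type()?);
    }
    Ok(())
}
```
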
+ if haystack.is_empty() {return None} + let p = unsafe { + libc::memrchr( + haystack.as_ptr() as *const libc::c_void, + needle as libc::c_int, + haystack.len()) + }; + if p.is_null() { + None + } else { + Some(p as usize - (haystack.as_ptr() as usize)) + } + } + + #[cfg(not(target_os = "linux"))] + fn memrchr_specific(needle: u8, haystack: &[u8]) -> Option { + ::sys_common::memchr::fallback::memrchr(needle, haystack) + } + + memrchr_specific(needle, haystack) +} diff --git a/src/libstd/sys/unix/mod.rs b/src/libstd/sys/unix/mod.rs index 23687e10e476d..66bc9d4a4915f 100644 --- a/src/libstd/sys/unix/mod.rs +++ b/src/libstd/sys/unix/mod.rs @@ -17,6 +17,7 @@ use libc; #[cfg(target_os = "bitrig")] pub use os::bitrig as platform; #[cfg(target_os = "dragonfly")] pub use os::dragonfly as platform; #[cfg(target_os = "freebsd")] pub use os::freebsd as platform; +#[cfg(target_os = "haiku")] pub use os::haiku as platform; #[cfg(target_os = "ios")] pub use os::ios as platform; #[cfg(target_os = "linux")] pub use os::linux as platform; #[cfg(target_os = "macos")] pub use os::macos as platform; @@ -25,21 +26,26 @@ use libc; #[cfg(target_os = "openbsd")] pub use os::openbsd as platform; #[cfg(target_os = "solaris")] pub use os::solaris as platform; #[cfg(target_os = "emscripten")] pub use os::emscripten as platform; +#[cfg(target_os = "fuchsia")] pub use os::fuchsia as platform; #[macro_use] pub mod weak; +pub mod args; pub mod android; #[cfg(any(not(cargobuild), feature = "backtrace"))] pub mod backtrace; pub mod condvar; +pub mod env; pub mod ext; pub mod fd; pub mod fs; +pub mod memchr; pub mod mutex; pub mod net; pub mod os; pub mod os_str; +pub mod path; pub mod pipe; pub mod process; pub mod rand; @@ -78,16 +84,16 @@ pub fn init() { unsafe { libc::write(libc::STDERR_FILENO, msg.as_ptr() as *const libc::c_void, - msg.len() as libc::size_t); + msg.len()); intrinsics::abort(); } } - #[cfg(not(any(target_os = "nacl", target_os = "emscripten")))] + #[cfg(not(any(target_os = "nacl", target_os = "emscripten", target_os="fuchsia")))] unsafe fn reset_sigpipe() { assert!(signal(libc::SIGPIPE, libc::SIG_IGN) != !0); } - #[cfg(any(target_os = "nacl", target_os = "emscripten"))] + #[cfg(any(target_os = "nacl", target_os = "emscripten", target_os="fuchsia"))] unsafe fn reset_sigpipe() {} } diff --git a/src/libstd/sys/unix/net.rs b/src/libstd/sys/unix/net.rs index f124ea651ec2c..ad287bbec3889 100644 --- a/src/libstd/sys/unix/net.rs +++ b/src/libstd/sys/unix/net.rs @@ -10,7 +10,7 @@ use ffi::CStr; use io; -use libc::{self, c_int, size_t, sockaddr, socklen_t}; +use libc::{self, c_int, size_t, sockaddr, socklen_t, EAI_SYSTEM}; use net::{SocketAddr, Shutdown}; use str; use sys::fd::FileDesc; @@ -33,12 +33,25 @@ use libc::SOCK_CLOEXEC; #[cfg(not(target_os = "linux"))] const SOCK_CLOEXEC: c_int = 0; +// Another conditional contant for name resolution: Macos et iOS use +// SO_NOSIGPIPE as a setsockopt flag to disable SIGPIPE emission on socket. +// Other platforms do otherwise. 
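
Only Linux/glibc exposes a native `memrchr(3)`; every other platform goes through the shared fallback. A safe, minimal sketch of what that reverse search computes (the real fallback is optimized; this is just the specification):

```rust
// Last index of `needle` in `haystack`, scanning from the end.
fn memrchr_fallback(needle: u8, haystack: &[u8]) -> Option<usize> {
    haystack.iter().rposition(|&b| b == needle)
}

fn main() {
    assert_eq!(memrchr_fallback(b'o', b"hello world"), Some(7));
    assert_eq!(memrchr_fallback(b'z', b"hello world"), None);
    // The glibc special case noted above: an empty haystack must yield
    // None rather than being handed to memrchr(3).
    assert_eq!(memrchr_fallback(b'a', b""), None);
}
```
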
+#[cfg(target_vendor = "apple")] +use libc::SO_NOSIGPIPE; +#[cfg(not(target_vendor = "apple"))] +const SO_NOSIGPIPE: c_int = 0; + pub struct Socket(FileDesc); pub fn init() {} pub fn cvt_gai(err: c_int) -> io::Result<()> { - if err == 0 { return Ok(()) } + if err == 0 { + return Ok(()) + } + if err == EAI_SYSTEM { + return Err(io::Error::last_os_error()) + } let detail = unsafe { str::from_utf8(CStr::from_ptr(libc::gai_strerror(err)).to_bytes()).unwrap() @@ -76,7 +89,11 @@ impl Socket { let fd = cvt(libc::socket(fam, ty, 0))?; let fd = FileDesc::new(fd); fd.set_cloexec()?; - Ok(Socket(fd)) + let socket = Socket(fd); + if cfg!(target_vendor = "apple") { + setsockopt(&socket, libc::SOL_SOCKET, SO_NOSIGPIPE, 1)?; + } + Ok(socket) } } diff --git a/src/libstd/sys/unix/os.rs b/src/libstd/sys/unix/os.rs index 82606d2c728ea..e591f25cac157 100644 --- a/src/libstd/sys/unix/os.rs +++ b/src/libstd/sys/unix/os.rs @@ -38,7 +38,7 @@ static ENV_LOCK: Mutex = Mutex::new(); extern { #[cfg(not(target_os = "dragonfly"))] - #[cfg_attr(any(target_os = "linux", target_os = "emscripten"), + #[cfg_attr(any(target_os = "linux", target_os = "emscripten", target_os = "fuchsia"), link_name = "__errno_location")] #[cfg_attr(any(target_os = "bitrig", target_os = "netbsd", @@ -51,6 +51,7 @@ extern { target_os = "ios", target_os = "freebsd"), link_name = "__error")] + #[cfg_attr(target_os = "haiku", link_name = "_errnop")] fn errno_location() -> *mut c_int; } @@ -93,7 +94,7 @@ pub fn error_string(errno: i32) -> String { let p = buf.as_mut_ptr(); unsafe { - if strerror_r(errno as c_int, p, buf.len() as libc::size_t) < 0 { + if strerror_r(errno as c_int, p, buf.len()) < 0 { panic!("strerror_r failure"); } @@ -107,7 +108,7 @@ pub fn getcwd() -> io::Result { loop { unsafe { let ptr = buf.as_mut_ptr() as *mut libc::c_char; - if !libc::getcwd(ptr, buf.capacity() as libc::size_t).is_null() { + if !libc::getcwd(ptr, buf.capacity()).is_null() { let len = CStr::from_ptr(buf.as_ptr() as *const libc::c_char).to_bytes().len(); buf.set_len(len); buf.shrink_to_fit(); @@ -199,21 +200,20 @@ pub fn current_exe() -> io::Result { libc::KERN_PROC as c_int, libc::KERN_PROC_PATHNAME as c_int, -1 as c_int]; - let mut sz: libc::size_t = 0; + let mut sz = 0; cvt(libc::sysctl(mib.as_mut_ptr(), mib.len() as ::libc::c_uint, - ptr::null_mut(), &mut sz, ptr::null_mut(), - 0 as libc::size_t))?; + ptr::null_mut(), &mut sz, ptr::null_mut(), 0))?; if sz == 0 { return Err(io::Error::last_os_error()) } - let mut v: Vec = Vec::with_capacity(sz as usize); + let mut v: Vec = Vec::with_capacity(sz); cvt(libc::sysctl(mib.as_mut_ptr(), mib.len() as ::libc::c_uint, v.as_mut_ptr() as *mut libc::c_void, &mut sz, - ptr::null_mut(), 0 as libc::size_t))?; + ptr::null_mut(), 0))?; if sz == 0 { return Err(io::Error::last_os_error()); } - v.set_len(sz as usize - 1); // chop off trailing NUL + v.set_len(sz - 1); // chop off trailing NUL Ok(PathBuf::from(OsString::from_vec(v))) } } @@ -303,123 +303,53 @@ pub fn current_exe() -> io::Result { } } -pub struct Args { - iter: vec::IntoIter, - _dont_send_or_sync_me: PhantomData<*mut ()>, -} - -impl Iterator for Args { - type Item = OsString; - fn next(&mut self) -> Option { self.iter.next() } - fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { self.iter.len() } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { self.iter.next_back() } -} - -/// Returns the command line arguments -/// -/// Returns a list of the command 
line arguments. -#[cfg(target_os = "macos")] -pub fn args() -> Args { - extern { - // These functions are in crt_externs.h. - fn _NSGetArgc() -> *mut c_int; - fn _NSGetArgv() -> *mut *mut *mut c_char; - } - - let vec = unsafe { - let (argc, argv) = (*_NSGetArgc() as isize, - *_NSGetArgv() as *const *const c_char); - (0.. argc as isize).map(|i| { - let bytes = CStr::from_ptr(*argv.offset(i)).to_bytes().to_vec(); - OsStringExt::from_vec(bytes) - }).collect::>() - }; - Args { - iter: vec.into_iter(), - _dont_send_or_sync_me: PhantomData, - } -} - -// As _NSGetArgc and _NSGetArgv aren't mentioned in iOS docs -// and use underscores in their names - they're most probably -// are considered private and therefore should be avoided -// Here is another way to get arguments using Objective C -// runtime -// -// In general it looks like: -// res = Vec::new() -// let args = [[NSProcessInfo processInfo] arguments] -// for i in (0..[args count]) -// res.push([args objectAtIndex:i]) -// res -#[cfg(target_os = "ios")] -pub fn args() -> Args { - use mem; - - extern { - fn sel_registerName(name: *const libc::c_uchar) -> Sel; - fn objc_msgSend(obj: NsId, sel: Sel, ...) -> NsId; - fn objc_getClass(class_name: *const libc::c_uchar) -> NsId; +#[cfg(target_os = "haiku")] +pub fn current_exe() -> io::Result { + // Use Haiku's image info functions + #[repr(C)] + struct image_info { + id: i32, + type_: i32, + sequence: i32, + init_order: i32, + init_routine: *mut libc::c_void, // function pointer + term_routine: *mut libc::c_void, // function pointer + device: libc::dev_t, + node: libc::ino_t, + name: [libc::c_char; 1024], // MAXPATHLEN + text: *mut libc::c_void, + data: *mut libc::c_void, + text_size: i32, + data_size: i32, + api_version: i32, + abi: i32, } - #[link(name = "Foundation", kind = "framework")] - #[link(name = "objc")] - #[cfg(not(cargobuild))] - extern {} - - type Sel = *const libc::c_void; - type NsId = *const libc::c_void; - - let mut res = Vec::new(); - unsafe { - let process_info_sel = sel_registerName("processInfo\0".as_ptr()); - let arguments_sel = sel_registerName("arguments\0".as_ptr()); - let utf8_sel = sel_registerName("UTF8String\0".as_ptr()); - let count_sel = sel_registerName("count\0".as_ptr()); - let object_at_sel = sel_registerName("objectAtIndex:\0".as_ptr()); - - let klass = objc_getClass("NSProcessInfo\0".as_ptr()); - let info = objc_msgSend(klass, process_info_sel); - let args = objc_msgSend(info, arguments_sel); - - let cnt: usize = mem::transmute(objc_msgSend(args, count_sel)); - for i in 0..cnt { - let tmp = objc_msgSend(args, object_at_sel, i); - let utf_c_str: *const libc::c_char = - mem::transmute(objc_msgSend(tmp, utf8_sel)); - let bytes = CStr::from_ptr(utf_c_str).to_bytes(); - res.push(OsString::from(str::from_utf8(bytes).unwrap())) + extern { + fn _get_next_image_info(team_id: i32, cookie: *mut i32, + info: *mut image_info, size: i32) -> i32; } - } - Args { iter: res.into_iter(), _dont_send_or_sync_me: PhantomData } + let mut info: image_info = mem::zeroed(); + let mut cookie: i32 = 0; + // the executable can be found at team id 0 + let result = _get_next_image_info(0, &mut cookie, &mut info, + mem::size_of::() as i32); + if result != 0 { + use io::ErrorKind; + Err(io::Error::new(ErrorKind::Other, "Error getting executable path")) + } else { + let name = CStr::from_ptr(info.name.as_ptr()).to_bytes(); + Ok(PathBuf::from(OsStr::from_bytes(name))) + } + } } -#[cfg(any(target_os = "linux", - target_os = "android", - target_os = "freebsd", - target_os = "dragonfly", - 
target_os = "bitrig", - target_os = "netbsd", - target_os = "openbsd", - target_os = "solaris", - target_os = "nacl", - target_os = "emscripten"))] -pub fn args() -> Args { - use sys_common; - let bytes = sys_common::args::clone().unwrap_or(Vec::new()); - let v: Vec = bytes.into_iter().map(|v| { - OsStringExt::from_vec(v) - }).collect(); - Args { iter: v.into_iter(), _dont_send_or_sync_me: PhantomData } +#[cfg(target_os = "fuchsia")] +pub fn current_exe() -> io::Result { + use io::ErrorKind; + Err(io::Error::new(ErrorKind::Other, "Not yet implemented on fuchsia")) } pub struct Env { @@ -563,7 +493,7 @@ pub fn home_dir() -> Option { buf: &mut Vec) -> Option<()> { let mut result = ptr::null_mut(); match libc::getpwuid_r(me, passwd, buf.as_mut_ptr(), - buf.capacity() as libc::size_t, + buf.capacity(), &mut result) { 0 if !result.is_null() => Some(()), _ => None @@ -576,7 +506,7 @@ pub fn home_dir() -> Option { // getpwuid_r semantics is different on Illumos/Solaris: // http://illumos.org/man/3c/getpwuid_r let result = libc::getpwuid_r(me, passwd, buf.as_mut_ptr(), - buf.capacity() as libc::size_t); + buf.capacity()); if result.is_null() { None } else { Some(()) } } diff --git a/src/libstd/sys/unix/path.rs b/src/libstd/sys/unix/path.rs new file mode 100644 index 0000000000000..bf9af7a4353a8 --- /dev/null +++ b/src/libstd/sys/unix/path.rs @@ -0,0 +1,29 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use path::Prefix; +use ffi::OsStr; + +#[inline] +pub fn is_sep_byte(b: u8) -> bool { + b == b'/' +} + +#[inline] +pub fn is_verbatim_sep(b: u8) -> bool { + b == b'/' +} + +pub fn parse_prefix(_: &OsStr) -> Option { + None +} + +pub const MAIN_SEP_STR: &'static str = "/"; +pub const MAIN_SEP: char = '/'; diff --git a/src/libstd/sys/unix/process.rs b/src/libstd/sys/unix/process.rs index 50014f51f6cf4..dafc11d9cc8e9 100644 --- a/src/libstd/sys/unix/process.rs +++ b/src/libstd/sys/unix/process.rs @@ -369,7 +369,7 @@ impl Command { } // NaCl has no signal support. - if cfg!(not(target_os = "nacl")) { + if cfg!(not(any(target_os = "nacl", target_os = "emscripten"))) { // Reset signal handling so the child process starts in a // standardized state. libstd ignores SIGPIPE, and signal-handling // libraries often set a mask. 
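
The various `current_exe` implementations above (the sysctl path on the BSDs, `_get_next_image_info` on Haiku, an explicit error on Fuchsia for now) all sit behind one public call:

```rust
use std::env;

fn main() {
    match env::current_exe() {
        Ok(path) => println!("running from {}", path.display()),
        Err(e) => eprintln!("current_exe is not available here: {}", e),
    }
}
```
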
Child processes inherit ignored @@ -589,7 +589,7 @@ impl Process { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use super::*; diff --git a/src/libstd/sys/unix/rand.rs b/src/libstd/sys/unix/rand.rs index 6b50ca9bcdf6d..3aebb8c18ec86 100644 --- a/src/libstd/sys/unix/rand.rs +++ b/src/libstd/sys/unix/rand.rs @@ -27,7 +27,8 @@ fn next_u64(mut fill_buf: &mut FnMut(&mut [u8])) -> u64 { #[cfg(all(unix, not(target_os = "ios"), not(target_os = "openbsd"), - not(target_os = "freebsd")))] + not(target_os = "freebsd"), + not(target_os = "fuchsia")))] mod imp { use self::OsRngInner::*; use super::{next_u32, next_u64}; @@ -97,8 +98,8 @@ mod imp { // full entropy pool let reader = File::open("/dev/urandom").expect("Unable to open /dev/urandom"); let mut reader_rng = ReaderRng::new(reader); - reader_rng.fill_bytes(& mut v[read..]); - read += v.len() as usize; + reader_rng.fill_bytes(&mut v[read..]); + read += v.len(); } else { panic!("unexpected getrandom error: {}", err); } @@ -281,7 +282,7 @@ mod imp { } fn fill_bytes(&mut self, v: &mut [u8]) { let ret = unsafe { - SecRandomCopyBytes(kSecRandomDefault, v.len() as size_t, + SecRandomCopyBytes(kSecRandomDefault, v.len(), v.as_mut_ptr()) }; if ret == -1 { @@ -339,3 +340,54 @@ mod imp { } } } + +#[cfg(target_os = "fuchsia")] +mod imp { + use super::{next_u32, next_u64}; + + use io; + use rand::Rng; + + #[link(name = "magenta")] + extern { + fn mx_cprng_draw(buffer: *mut u8, len: usize) -> isize; + } + + fn getrandom(buf: &mut [u8]) -> isize { + unsafe { mx_cprng_draw(buf.as_mut_ptr(), buf.len()) } + } + + pub struct OsRng { + // dummy field to ensure that this struct cannot be constructed outside + // of this module + _dummy: (), + } + + impl OsRng { + /// Create a new `OsRng`. + pub fn new() -> io::Result { + Ok(OsRng { _dummy: () }) + } + } + + impl Rng for OsRng { + fn next_u32(&mut self) -> u32 { + next_u32(&mut |v| self.fill_bytes(v)) + } + fn next_u64(&mut self) -> u64 { + next_u64(&mut |v| self.fill_bytes(v)) + } + fn fill_bytes(&mut self, v: &mut [u8]) { + let mut buf = v; + while !buf.is_empty() { + let ret = getrandom(buf); + if ret < 0 { + panic!("kernel mx_cprng_draw call failed! (returned {}, buf.len() {})", + ret, buf.len()); + } + let move_buf = buf; + buf = &mut move_buf[(ret as usize)..]; + } + } + } +} diff --git a/src/libstd/sys/unix/stdio.rs b/src/libstd/sys/unix/stdio.rs index 972bdbc38186b..947ba2cc75232 100644 --- a/src/libstd/sys/unix/stdio.rs +++ b/src/libstd/sys/unix/stdio.rs @@ -65,3 +65,5 @@ impl io::Write for Stderr { } fn flush(&mut self) -> io::Result<()> { Ok(()) } } + +pub const EBADF_ERR: i32 = ::libc::EBADF as i32; diff --git a/src/libstd/sys/unix/thread.rs b/src/libstd/sys/unix/thread.rs index 5db7086e42752..1642baa34d636 100644 --- a/src/libstd/sys/unix/thread.rs +++ b/src/libstd/sys/unix/thread.rs @@ -29,6 +29,20 @@ pub struct Thread { unsafe impl Send for Thread {} unsafe impl Sync for Thread {} +// The pthread_attr_setstacksize symbol doesn't exist in the emscripten libc, +// so we have to not link to it to satisfy emcc's ERROR_ON_UNDEFINED_SYMBOLS. 
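
The Fuchsia `OsRng::fill_bytes` above loops until the kernel has produced every requested byte, since `mx_cprng_draw` may return short. A generic sketch of that loop with a stand-in source (`fill_all` and `draw` are illustrative names, not part of the patch):

```rust
// Keep asking a source that may return fewer bytes than requested until
// the whole buffer is written; `draw` reports how many bytes it produced.
fn fill_all(buf: &mut [u8], mut draw: impl FnMut(&mut [u8]) -> usize) {
    let mut filled = 0;
    while filled < buf.len() {
        let n = draw(&mut buf[filled..]);
        assert!(n > 0, "entropy source made no progress");
        filled += n;
    }
}

fn main() {
    let mut out = [0u8; 16];
    let mut next = 0u8;
    // Fake source that hands out at most 5 bytes per call.
    fill_all(&mut out, |chunk| {
        let n = chunk.len().min(5);
        for b in &mut chunk[..n] {
            next = next.wrapping_add(1);
            *b = next;
        }
        n
    });
    assert_eq!(out[15], 16);
}
```
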
+#[cfg(not(target_os = "emscripten"))] +unsafe fn pthread_attr_setstacksize(attr: *mut libc::pthread_attr_t, + stack_size: libc::size_t) -> libc::c_int { + libc::pthread_attr_setstacksize(attr, stack_size) +} + +#[cfg(target_os = "emscripten")] +unsafe fn pthread_attr_setstacksize(_attr: *mut libc::pthread_attr_t, + _stack_size: libc::size_t) -> libc::c_int { + panic!() +} + impl Thread { pub unsafe fn new<'a>(stack: usize, p: Box) -> io::Result { @@ -38,8 +52,8 @@ impl Thread { assert_eq!(libc::pthread_attr_init(&mut attr), 0); let stack_size = cmp::max(stack, min_stack_size(&attr)); - match libc::pthread_attr_setstacksize(&mut attr, - stack_size as libc::size_t) { + match pthread_attr_setstacksize(&mut attr, + stack_size) { 0 => {} n => { assert_eq!(n, libc::EINVAL); @@ -50,7 +64,6 @@ impl Thread { let page_size = os::page_size(); let stack_size = (stack_size + page_size - 1) & (-(page_size as isize - 1) as usize - 1); - let stack_size = stack_size as libc::size_t; assert_eq!(libc::pthread_attr_setstacksize(&mut attr, stack_size), 0); } @@ -115,9 +128,16 @@ impl Thread { name.as_ptr() as *mut libc::c_void); } } - #[cfg(any(target_env = "newlib", target_os = "solaris", target_os = "emscripten"))] + #[cfg(any(target_env = "newlib", + target_os = "solaris", + target_os = "haiku", + target_os = "emscripten"))] + pub fn set_name(_name: &CStr) { + // Newlib, Illumos, Haiku, and Emscripten have no way to set a thread name. + } + #[cfg(target_os = "fuchsia")] pub fn set_name(_name: &CStr) { - // Newlib, Illumos and Emscripten have no way to set a thread name. + // FIXME: determine whether Fuchsia has a way to set a thread name. } pub fn sleep(dur: Duration) { @@ -247,12 +267,8 @@ pub mod guard { // Rellocate the last page of the stack. // This ensures SIGBUS will be raised on // stack overflow. 
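
The stack-size expression above is the usual round-up-to-a-page-multiple trick; the negated form it uses is equivalent to masking with `!(page_size - 1)`, which reads more directly:

```rust
fn round_up_to_page(size: usize, page_size: usize) -> usize {
    assert!(page_size.is_power_of_two());
    (size + page_size - 1) & !(page_size - 1)
}

fn main() {
    assert_eq!(round_up_to_page(1, 4096), 4096);
    assert_eq!(round_up_to_page(4096, 4096), 4096);
    assert_eq!(round_up_to_page(4097, 4096), 8192);
}
```
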
- let result = mmap(stackaddr, - psize as libc::size_t, - PROT_NONE, - MAP_PRIVATE | MAP_ANON | MAP_FIXED, - -1, - 0); + let result = mmap(stackaddr, psize, PROT_NONE, + MAP_PRIVATE | MAP_ANON | MAP_FIXED, -1, 0); if result != stackaddr || result == MAP_FAILED { panic!("failed to allocate a guard page"); @@ -276,8 +292,8 @@ pub mod guard { #[cfg(target_os = "macos")] pub unsafe fn current() -> Option { - Some((libc::pthread_get_stackaddr_np(libc::pthread_self()) as libc::size_t - - libc::pthread_get_stacksize_np(libc::pthread_self())) as usize) + Some((libc::pthread_get_stackaddr_np(libc::pthread_self()) as usize - + libc::pthread_get_stacksize_np(libc::pthread_self()))) } #[cfg(any(target_os = "openbsd", target_os = "bitrig"))] @@ -289,10 +305,10 @@ pub mod guard { let extra = if cfg!(target_os = "bitrig") {3} else {1} * os::page_size(); Some(if libc::pthread_main_np() == 1 { // main thread - current_stack.ss_sp as usize - current_stack.ss_size as usize + extra + current_stack.ss_sp as usize - current_stack.ss_size + extra } else { // new thread - current_stack.ss_sp as usize - current_stack.ss_size as usize + current_stack.ss_sp as usize - current_stack.ss_size }) } @@ -318,11 +334,11 @@ pub mod guard { &mut size), 0); ret = if cfg!(target_os = "freebsd") { - Some(stackaddr as usize - guardsize as usize) + Some(stackaddr as usize - guardsize) } else if cfg!(target_os = "netbsd") { Some(stackaddr as usize) } else { - Some(stackaddr as usize + guardsize as usize) + Some(stackaddr as usize + guardsize) }; } assert_eq!(libc::pthread_attr_destroy(&mut attr), 0); @@ -341,8 +357,8 @@ fn min_stack_size(attr: *const libc::pthread_attr_t) -> usize { weak!(fn __pthread_get_minstack(*const libc::pthread_attr_t) -> libc::size_t); match __pthread_get_minstack.get() { - None => libc::PTHREAD_STACK_MIN as usize, - Some(f) => unsafe { f(attr) as usize }, + None => libc::PTHREAD_STACK_MIN, + Some(f) => unsafe { f(attr) }, } } @@ -351,7 +367,7 @@ fn min_stack_size(attr: *const libc::pthread_attr_t) -> usize { #[cfg(all(not(target_os = "linux"), not(target_os = "netbsd")))] fn min_stack_size(_: *const libc::pthread_attr_t) -> usize { - libc::PTHREAD_STACK_MIN as usize + libc::PTHREAD_STACK_MIN } #[cfg(target_os = "netbsd")] diff --git a/src/libstd/sys/windows/args.rs b/src/libstd/sys/windows/args.rs new file mode 100644 index 0000000000000..aa61f9adb824d --- /dev/null +++ b/src/libstd/sys/windows/args.rs @@ -0,0 +1,76 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![allow(dead_code)] // runtime init functions not used during testing + +use os::windows::prelude::*; +use sys::c; +use slice; +use ops::Range; +use ffi::OsString; +use libc::{c_int, c_void}; + +pub unsafe fn init(_argc: isize, _argv: *const *const u8) { } + +pub unsafe fn cleanup() { } + +pub fn args() -> Args { + unsafe { + let mut nArgs: c_int = 0; + let lpCmdLine = c::GetCommandLineW(); + let szArgList = c::CommandLineToArgvW(lpCmdLine, &mut nArgs); + + // szArcList can be NULL if CommandLinToArgvW failed, + // but in that case nArgs is 0 so we won't actually + // try to read a null pointer + Args { cur: szArgList, range: 0..(nArgs as isize) } + } +} + +pub struct Args { + range: Range, + cur: *mut *mut u16, +} + +unsafe fn os_string_from_ptr(ptr: *mut u16) -> OsString { + let mut len = 0; + while *ptr.offset(len) != 0 { len += 1; } + + // Push it onto the list. + let ptr = ptr as *const u16; + let buf = slice::from_raw_parts(ptr, len as usize); + OsStringExt::from_wide(buf) +} + +impl Iterator for Args { + type Item = OsString; + fn next(&mut self) -> Option { + self.range.next().map(|i| unsafe { os_string_from_ptr(*self.cur.offset(i)) } ) + } + fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } +} + +impl DoubleEndedIterator for Args { + fn next_back(&mut self) -> Option { + self.range.next_back().map(|i| unsafe { os_string_from_ptr(*self.cur.offset(i)) } ) + } +} + +impl ExactSizeIterator for Args { + fn len(&self) -> usize { self.range.len() } +} + +impl Drop for Args { + fn drop(&mut self) { + // self.cur can be null if CommandLineToArgvW previously failed, + // but LocalFree ignores NULL pointers + unsafe { c::LocalFree(self.cur as *mut c_void); } + } +} diff --git a/src/libstd/sys/windows/env.rs b/src/libstd/sys/windows/env.rs new file mode 100644 index 0000000000000..e6d74895774ce --- /dev/null +++ b/src/libstd/sys/windows/env.rs @@ -0,0 +1,19 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub mod os { + pub const FAMILY: &'static str = "windows"; + pub const OS: &'static str = "windows"; + pub const DLL_PREFIX: &'static str = ""; + pub const DLL_SUFFIX: &'static str = ".dll"; + pub const DLL_EXTENSION: &'static str = "dll"; + pub const EXE_SUFFIX: &'static str = ".exe"; + pub const EXE_EXTENSION: &'static str = "exe"; +} diff --git a/src/libstd/sys/windows/ext/fs.rs b/src/libstd/sys/windows/ext/fs.rs index 4388a0bdff2cf..1e2b8bf38fa71 100644 --- a/src/libstd/sys/windows/ext/fs.rs +++ b/src/libstd/sys/windows/ext/fs.rs @@ -12,12 +12,61 @@ #![stable(feature = "rust1", since = "1.0.0")] -use fs::{OpenOptions, Metadata}; +use fs::{self, OpenOptions, Metadata}; use io; use path::Path; use sys; use sys_common::{AsInnerMut, AsInner}; +/// Windows-specific extensions to `File` +#[unstable(feature = "file_offset", issue = "35918")] +pub trait FileExt { + /// Seeks to a given position and reads a number of bytes. + /// + /// Returns the number of bytes read. + /// + /// The offset is relative to the start of the file and thus independent + /// from the current cursor. The current cursor **is** affected by this + /// function, it is set to the end of the read. + /// + /// Reading beyond the end of the file will always return with a length of + /// 0. 
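
Each argv entry returned by `CommandLineToArgvW` in the Windows `args.rs` above is a NUL-terminated UTF-16 string; `os_string_from_ptr` measures it and then converts with `OsStringExt::from_wide`. A small cross-platform sketch of just the length walk:

```rust
fn wide_len(ptr: *const u16) -> usize {
    let mut len = 0;
    // Safety: the caller must pass a pointer to a NUL-terminated buffer.
    unsafe {
        while *ptr.add(len) != 0 {
            len += 1;
        }
    }
    len
}

fn main() {
    let buf: [u16; 3] = [0x0068, 0x0069, 0]; // "hi\0" as UTF-16 code units
    assert_eq!(wide_len(buf.as_ptr()), 2);
}
```
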
+ /// + /// Note that similar to `File::read`, it is not an error to return with a + /// short read. When returning from such a short read, the file pointer is + /// still updated. + #[unstable(feature = "file_offset", issue = "35918")] + fn seek_read(&self, buf: &mut [u8], offset: u64) -> io::Result; + + /// Seeks to a given position and writes a number of bytes. + /// + /// Returns the number of bytes written. + /// + /// The offset is relative to the start of the file and thus independent + /// from the current cursor. The current cursor **is** affected by this + /// function, it is set to the end of the write. + /// + /// When writing beyond the end of the file, the file is appropiately + /// extended and the intermediate bytes are left uninitialized. + /// + /// Note that similar to `File::write`, it is not an error to return a + /// short write. When returning from such a short write, the file pointer + /// is still updated. + #[unstable(feature = "file_offset", issue = "35918")] + fn seek_write(&self, buf: &[u8], offset: u64) -> io::Result; +} + +#[unstable(feature = "file_offset", issue = "35918")] +impl FileExt for fs::File { + fn seek_read(&self, buf: &mut [u8], offset: u64) -> io::Result { + self.as_inner().read_at(buf, offset) + } + + fn seek_write(&self, buf: &[u8], offset: u64) -> io::Result { + self.as_inner().write_at(buf, offset) + } +} + /// Windows-specific extensions to `OpenOptions` #[stable(feature = "open_options_ext", since = "1.10.0")] pub trait OpenOptionsExt { diff --git a/src/libstd/sys/windows/ext/mod.rs b/src/libstd/sys/windows/ext/mod.rs index c3578fdfdb16b..932bb5e956405 100644 --- a/src/libstd/sys/windows/ext/mod.rs +++ b/src/libstd/sys/windows/ext/mod.rs @@ -36,4 +36,6 @@ pub mod prelude { pub use super::ffi::{OsStrExt, OsStringExt}; #[doc(no_inline)] #[stable(feature = "rust1", since = "1.0.0")] pub use super::fs::{OpenOptionsExt, MetadataExt}; + #[doc(no_inline)] #[unstable(feature = "file_offset", issue = "35918")] + pub use super::fs::FileExt; } diff --git a/src/libstd/sys/windows/ext/process.rs b/src/libstd/sys/windows/ext/process.rs index 98166bf8cda09..bce32959a23c3 100644 --- a/src/libstd/sys/windows/ext/process.rs +++ b/src/libstd/sys/windows/ext/process.rs @@ -33,7 +33,7 @@ impl AsRawHandle for process::Child { } } -#[stable(feature = "process_extensions", since = "1.2.0")] +#[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawHandle for process::Child { fn into_raw_handle(self) -> RawHandle { self.into_inner().into_handle().into_raw() as *mut _ @@ -61,21 +61,21 @@ impl AsRawHandle for process::ChildStderr { } } -#[stable(feature = "process_extensions", since = "1.2.0")] +#[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawHandle for process::ChildStdin { fn into_raw_handle(self) -> RawHandle { self.into_inner().into_handle().into_raw() as *mut _ } } -#[stable(feature = "process_extensions", since = "1.2.0")] +#[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawHandle for process::ChildStdout { fn into_raw_handle(self) -> RawHandle { self.into_inner().into_handle().into_raw() as *mut _ } } -#[stable(feature = "process_extensions", since = "1.2.0")] +#[stable(feature = "into_raw_os", since = "1.4.0")] impl IntoRawHandle for process::ChildStderr { fn into_raw_handle(self) -> RawHandle { self.into_inner().into_handle().into_raw() as *mut _ @@ -91,7 +91,7 @@ pub trait ExitStatusExt { fn from_raw(raw: u32) -> Self; } -#[stable(feature = "rust1", since = "1.0.0")] +#[stable(feature = "exit_status_from", since = 
"1.12.0")] impl ExitStatusExt for process::ExitStatus { fn from_raw(raw: u32) -> Self { process::ExitStatus::from_inner(From::from(raw)) diff --git a/src/libstd/sys/windows/fs.rs b/src/libstd/sys/windows/fs.rs index 90a16853d56dd..98fd15f863ba1 100644 --- a/src/libstd/sys/windows/fs.rs +++ b/src/libstd/sys/windows/fs.rs @@ -81,6 +81,14 @@ pub struct FilePermissions { attrs: c::DWORD } pub struct DirBuilder; +impl fmt::Debug for ReadDir { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + // This will only be called from std::fs::ReadDir, which will add a "ReadDir()" frame. + // Thus the result will be e g 'ReadDir("C:\")' + fmt::Debug::fmt(&*self.root, f) + } +} + impl Iterator for ReadDir { type Item = io::Result; fn next(&mut self) -> Option> { @@ -311,6 +319,10 @@ impl File { self.handle.read(buf) } + pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { + self.handle.read_at(buf, offset) + } + pub fn read_to_end(&self, buf: &mut Vec) -> io::Result { self.handle.read_to_end(buf) } @@ -319,6 +331,10 @@ impl File { self.handle.write(buf) } + pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { + self.handle.write_at(buf, offset) + } + pub fn flush(&self) -> io::Result<()> { Ok(()) } pub fn seek(&self, pos: SeekFrom) -> io::Result { diff --git a/src/libstd/sys/windows/handle.rs b/src/libstd/sys/windows/handle.rs index 97e746ee34576..10b86ba44bc79 100644 --- a/src/libstd/sys/windows/handle.rs +++ b/src/libstd/sys/windows/handle.rs @@ -104,6 +104,23 @@ impl RawHandle { } } + pub fn read_at(&self, buf: &mut [u8], offset: u64) -> io::Result { + let mut read = 0; + let len = cmp::min(buf.len(), ::max_value() as usize) as c::DWORD; + let res = unsafe { + let mut overlapped: c::OVERLAPPED = mem::zeroed(); + overlapped.Offset = offset as u32; + overlapped.OffsetHigh = (offset >> 32) as u32; + cvt(c::ReadFile(self.0, buf.as_mut_ptr() as c::LPVOID, + len, &mut read, &mut overlapped)) + }; + match res { + Ok(_) => Ok(read as usize), + Err(ref e) if e.raw_os_error() == Some(c::ERROR_HANDLE_EOF as i32) => Ok(0), + Err(e) => Err(e), + } + } + pub unsafe fn read_overlapped(&self, buf: &mut [u8], overlapped: *mut c::OVERLAPPED) @@ -174,6 +191,19 @@ impl RawHandle { Ok(amt as usize) } + pub fn write_at(&self, buf: &[u8], offset: u64) -> io::Result { + let mut written = 0; + let len = cmp::min(buf.len(), ::max_value() as usize) as c::DWORD; + unsafe { + let mut overlapped: c::OVERLAPPED = mem::zeroed(); + overlapped.Offset = offset as u32; + overlapped.OffsetHigh = (offset >> 32) as u32; + cvt(c::WriteFile(self.0, buf.as_ptr() as c::LPVOID, + len, &mut written, &mut overlapped))?; + } + Ok(written as usize) + } + pub fn duplicate(&self, access: c::DWORD, inherit: bool, options: c::DWORD) -> io::Result { let mut ret = 0 as c::HANDLE; diff --git a/src/libstd/sys/windows/memchr.rs b/src/libstd/sys/windows/memchr.rs new file mode 100644 index 0000000000000..5a5386acaa531 --- /dev/null +++ b/src/libstd/sys/windows/memchr.rs @@ -0,0 +1,15 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+// +// Original implementation taken from rust-memchr +// Copyright 2015 Andrew Gallant, bluss and Nicolas Koch + +// Fallback memchr is fastest on windows +pub use sys_common::memchr::fallback::{memchr, memrchr}; diff --git a/src/libstd/sys/windows/mod.rs b/src/libstd/sys/windows/mod.rs index 9741a704e8fe5..9cd6e6ca1761d 100644 --- a/src/libstd/sys/windows/mod.rs +++ b/src/libstd/sys/windows/mod.rs @@ -18,17 +18,21 @@ use time::Duration; #[macro_use] pub mod compat; +pub mod args; pub mod backtrace; pub mod c; pub mod condvar; pub mod dynamic_lib; +pub mod env; pub mod ext; pub mod fs; pub mod handle; +pub mod memchr; pub mod mutex; pub mod net; pub mod os; pub mod os_str; +pub mod path; pub mod pipe; pub mod process; pub mod rand; diff --git a/src/libstd/sys/windows/os.rs b/src/libstd/sys/windows/os.rs index 260fc3c4db62e..7e28dd1e259c8 100644 --- a/src/libstd/sys/windows/os.rs +++ b/src/libstd/sys/windows/os.rs @@ -18,8 +18,6 @@ use error::Error as StdError; use ffi::{OsString, OsStr}; use fmt; use io; -use libc::{c_int, c_void}; -use ops::Range; use os::windows::ffi::EncodeWide; use path::{self, PathBuf}; use ptr; @@ -272,60 +270,6 @@ pub fn unsetenv(n: &OsStr) -> io::Result<()> { }).map(|_| ()) } -pub struct Args { - range: Range, - cur: *mut *mut u16, -} - -unsafe fn os_string_from_ptr(ptr: *mut u16) -> OsString { - let mut len = 0; - while *ptr.offset(len) != 0 { len += 1; } - - // Push it onto the list. - let ptr = ptr as *const u16; - let buf = slice::from_raw_parts(ptr, len as usize); - OsStringExt::from_wide(buf) -} - -impl Iterator for Args { - type Item = OsString; - fn next(&mut self) -> Option { - self.range.next().map(|i| unsafe { os_string_from_ptr(*self.cur.offset(i)) } ) - } - fn size_hint(&self) -> (usize, Option) { self.range.size_hint() } -} - -impl DoubleEndedIterator for Args { - fn next_back(&mut self) -> Option { - self.range.next_back().map(|i| unsafe { os_string_from_ptr(*self.cur.offset(i)) } ) - } -} - -impl ExactSizeIterator for Args { - fn len(&self) -> usize { self.range.len() } -} - -impl Drop for Args { - fn drop(&mut self) { - // self.cur can be null if CommandLineToArgvW previously failed, - // but LocalFree ignores NULL pointers - unsafe { c::LocalFree(self.cur as *mut c_void); } - } -} - -pub fn args() -> Args { - unsafe { - let mut nArgs: c_int = 0; - let lpCmdLine = c::GetCommandLineW(); - let szArgList = c::CommandLineToArgvW(lpCmdLine, &mut nArgs); - - // szArcList can be NULL if CommandLinToArgvW failed, - // but in that case nArgs is 0 so we won't actually - // try to read a null pointer - Args { cur: szArgList, range: 0..(nArgs as isize) } - } -} - pub fn temp_dir() -> PathBuf { super::fill_utf16_buf(|buf, sz| unsafe { c::GetTempPathW(sz, buf) diff --git a/src/libstd/sys/windows/path.rs b/src/libstd/sys/windows/path.rs new file mode 100644 index 0000000000000..2b47808451bc2 --- /dev/null +++ b/src/libstd/sys/windows/path.rs @@ -0,0 +1,108 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
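
`RawHandle::read_at`/`write_at` in the `handle.rs` hunk above pass the 64-bit offset through the two 32-bit `Offset`/`OffsetHigh` fields of `OVERLAPPED`; the split, and its inverse, is just:

```rust
fn split_offset(offset: u64) -> (u32, u32) {
    (offset as u32, (offset >> 32) as u32) // (Offset, OffsetHigh)
}

fn join_offset(lo: u32, hi: u32) -> u64 {
    ((hi as u64) << 32) | lo as u64
}

fn main() {
    let off = 0x0000_0001_2345_6789u64;
    let (lo, hi) = split_offset(off);
    assert_eq!((lo, hi), (0x2345_6789, 0x1));
    assert_eq!(join_offset(lo, hi), off);
}
```
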
+ +use ascii::*; + +use path::Prefix; +use ffi::OsStr; +use mem; + +fn os_str_as_u8_slice(s: &OsStr) -> &[u8] { + unsafe { mem::transmute(s) } +} +unsafe fn u8_slice_as_os_str(s: &[u8]) -> &OsStr { + mem::transmute(s) +} + +#[inline] +pub fn is_sep_byte(b: u8) -> bool { + b == b'/' || b == b'\\' +} + +#[inline] +pub fn is_verbatim_sep(b: u8) -> bool { + b == b'\\' +} + +pub fn parse_prefix<'a>(path: &'a OsStr) -> Option { + use path::Prefix::*; + unsafe { + // The unsafety here stems from converting between &OsStr and &[u8] + // and back. This is safe to do because (1) we only look at ASCII + // contents of the encoding and (2) new &OsStr values are produced + // only from ASCII-bounded slices of existing &OsStr values. + let mut path = os_str_as_u8_slice(path); + + if path.starts_with(br"\\") { + // \\ + path = &path[2..]; + if path.starts_with(br"?\") { + // \\?\ + path = &path[2..]; + if path.starts_with(br"UNC\") { + // \\?\UNC\server\share + path = &path[4..]; + let (server, share) = match parse_two_comps(path, is_verbatim_sep) { + Some((server, share)) => + (u8_slice_as_os_str(server), u8_slice_as_os_str(share)), + None => (u8_slice_as_os_str(path), u8_slice_as_os_str(&[])), + }; + return Some(VerbatimUNC(server, share)); + } else { + // \\?\path + let idx = path.iter().position(|&b| b == b'\\'); + if idx == Some(2) && path[1] == b':' { + let c = path[0]; + if c.is_ascii() && (c as char).is_alphabetic() { + // \\?\C:\ path + return Some(VerbatimDisk(c.to_ascii_uppercase())); + } + } + let slice = &path[..idx.unwrap_or(path.len())]; + return Some(Verbatim(u8_slice_as_os_str(slice))); + } + } else if path.starts_with(b".\\") { + // \\.\path + path = &path[2..]; + let pos = path.iter().position(|&b| b == b'\\'); + let slice = &path[..pos.unwrap_or(path.len())]; + return Some(DeviceNS(u8_slice_as_os_str(slice))); + } + match parse_two_comps(path, is_sep_byte) { + Some((server, share)) if !server.is_empty() && !share.is_empty() => { + // \\server\share + return Some(UNC(u8_slice_as_os_str(server), u8_slice_as_os_str(share))); + } + _ => (), + } + } else if path.get(1) == Some(& b':') { + // C: + let c = path[0]; + if c.is_ascii() && (c as char).is_alphabetic() { + return Some(Disk(c.to_ascii_uppercase())); + } + } + return None; + } + + fn parse_two_comps(mut path: &[u8], f: fn(u8) -> bool) -> Option<(&[u8], &[u8])> { + let first = match path.iter().position(|x| f(*x)) { + None => return None, + Some(x) => &path[..x], + }; + path = &path[(first.len() + 1)..]; + let idx = path.iter().position(|x| f(*x)); + let second = &path[..idx.unwrap_or(path.len())]; + Some((first, second)) + } +} + +pub const MAIN_SEP_STR: &'static str = "\\"; +pub const MAIN_SEP: char = '\\'; diff --git a/src/libstd/sys/windows/stdio.rs b/src/libstd/sys/windows/stdio.rs index 01249f05f6202..5f097d2631d95 100644 --- a/src/libstd/sys/windows/stdio.rs +++ b/src/libstd/sys/windows/stdio.rs @@ -205,3 +205,5 @@ impl Output { fn invalid_encoding() -> io::Error { io::Error::new(io::ErrorKind::InvalidData, "text was not valid unicode") } + +pub const EBADF_ERR: i32 = ::sys::c::ERROR_INVALID_HANDLE as i32; diff --git a/src/libstd/thread/local.rs b/src/libstd/thread/local.rs index c44dee49f14a6..54d3f7930456c 100644 --- a/src/libstd/thread/local.rs +++ b/src/libstd/thread/local.rs @@ -358,36 +358,8 @@ pub mod elf { } } - // Since what appears to be glibc 2.18 this symbol has been shipped which - // GCC and clang both use to invoke destructors in thread_local globals, so - // let's do the same! 
- // - // Note, however, that we run on lots older linuxes, as well as cross - // compiling from a newer linux to an older linux, so we also have a - // fallback implementation to use as well. - // - // Due to rust-lang/rust#18804, make sure this is not generic! - #[cfg(target_os = "linux")] - unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern fn(*mut u8)) { - use mem; - use libc; - use sys_common::thread_local as os; - - extern { - #[linkage = "extern_weak"] - static __dso_handle: *mut u8; - #[linkage = "extern_weak"] - static __cxa_thread_atexit_impl: *const libc::c_void; - } - if !__cxa_thread_atexit_impl.is_null() { - type F = unsafe extern fn(dtor: unsafe extern fn(*mut u8), - arg: *mut u8, - dso_handle: *mut u8) -> libc::c_int; - mem::transmute::<*const libc::c_void, F>(__cxa_thread_atexit_impl) - (dtor, t, &__dso_handle as *const _ as *mut _); - return - } - + #[cfg(any(target_os = "linux", target_os = "fuchsia"))] + unsafe fn register_dtor_fallback(t: *mut u8, dtor: unsafe extern fn(*mut u8)) { // The fallback implementation uses a vanilla OS-based TLS key to track // the list of destructors that need to be run for this thread. The key // then has its own destructor which runs all the other destructors. @@ -397,6 +369,8 @@ pub mod elf { // *should* be the case that this loop always terminates because we // provide the guarantee that a TLS key cannot be set after it is // flagged for destruction. + use sys_common::thread_local as os; + static DTORS: os::StaticKey = os::StaticKey::new(Some(run_dtors)); type List = Vec<(*mut u8, unsafe extern fn(*mut u8))>; if DTORS.get().is_null() { @@ -418,6 +392,37 @@ pub mod elf { } } + // Since what appears to be glibc 2.18 this symbol has been shipped which + // GCC and clang both use to invoke destructors in thread_local globals, so + // let's do the same! + // + // Note, however, that we run on lots older linuxes, as well as cross + // compiling from a newer linux to an older linux, so we also have a + // fallback implementation to use as well. + // + // Due to rust-lang/rust#18804, make sure this is not generic! + #[cfg(target_os = "linux")] + unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern fn(*mut u8)) { + use mem; + use libc; + + extern { + #[linkage = "extern_weak"] + static __dso_handle: *mut u8; + #[linkage = "extern_weak"] + static __cxa_thread_atexit_impl: *const libc::c_void; + } + if !__cxa_thread_atexit_impl.is_null() { + type F = unsafe extern fn(dtor: unsafe extern fn(*mut u8), + arg: *mut u8, + dso_handle: *mut u8) -> libc::c_int; + mem::transmute::<*const libc::c_void, F>(__cxa_thread_atexit_impl) + (dtor, t, &__dso_handle as *const _ as *mut _); + return + } + register_dtor_fallback(t, dtor); + } + // OSX's analog of the above linux function is this _tlv_atexit function. // The disassembly of thread_local globals in C++ (at least produced by // clang) will have this show up in the output. @@ -430,6 +435,13 @@ pub mod elf { _tlv_atexit(dtor, t); } + // Just use the thread_local fallback implementation, at least until there's + // a more direct implementation. 
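The refactoring above only changes how per-thread destructors get scheduled: the shared `register_dtor_fallback` is split out so that Fuchsia can reuse the OS-key path, while Linux still prefers `__cxa_thread_atexit_impl` when it is present. The user-visible behaviour, destructors of `thread_local!` values running at thread exit, is unchanged. A minimal sketch of that behaviour with the stable `thread_local!` macro follows; the `Guard` type and its message are illustrative only.

```rust
use std::cell::RefCell;
use std::thread;

// Hypothetical type standing in for any per-thread state that needs cleanup.
struct Guard(&'static str);

impl Drop for Guard {
    fn drop(&mut self) {
        println!("thread-local destructor ran for `{}`", self.0);
    }
}

thread_local!(static GUARD: RefCell<Option<Guard>> = RefCell::new(None));

fn main() {
    thread::spawn(|| {
        // First access initializes the slot; the runtime registers a
        // destructor for this thread (via __cxa_thread_atexit_impl or the
        // OS-key fallback, depending on the platform).
        GUARD.with(|g| *g.borrow_mut() = Some(Guard("worker")));
    }).join().unwrap();
    // By the time join() returns, the worker thread has dropped its Guard.
}
```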
+ #[cfg(target_os = "fuchsia")] + unsafe fn register_dtor(t: *mut u8, dtor: unsafe extern fn(*mut u8)) { + register_dtor_fallback(t, dtor); + } + pub unsafe extern fn destroy_value(ptr: *mut u8) { let ptr = ptr as *mut Key; // Right before we run the user destructor be sure to flag the @@ -524,7 +536,7 @@ pub mod os { } } -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use sync::mpsc::{channel, Sender}; use cell::{Cell, UnsafeCell}; diff --git a/src/libstd/thread/mod.rs b/src/libstd/thread/mod.rs index d8e021bb04ff9..150482e4af458 100644 --- a/src/libstd/thread/mod.rs +++ b/src/libstd/thread/mod.rs @@ -151,7 +151,7 @@ //! //! [`Cell`]: ../cell/struct.Cell.html //! [`RefCell`]: ../cell/struct.RefCell.html -//! [`thread_local!`]: ../macro.thread_local!.html +//! [`thread_local!`]: ../macro.thread_local.html //! [`with`]: struct.LocalKey.html#method.with #![stable(feature = "rust1", since = "1.0.0")] @@ -166,6 +166,7 @@ use panicking; use str; use sync::{Mutex, Condvar, Arc}; use sys::thread as imp; +use sys_common::mutex; use sys_common::thread_info; use sys_common::util; use sys_common::{AsInner, IntoInner}; @@ -524,6 +525,45 @@ pub fn park_timeout(dur: Duration) { *guard = false; } +//////////////////////////////////////////////////////////////////////////////// +// ThreadId +//////////////////////////////////////////////////////////////////////////////// + +/// A unique identifier for a running thread. +/// +/// A `ThreadId` is an opaque object that has a unique value for each thread +/// that creates one. `ThreadId`s do not correspond to a thread's system- +/// designated identifier. +#[unstable(feature = "thread_id", issue = "21507")] +#[derive(Eq, PartialEq, Copy, Clone)] +pub struct ThreadId(u64); + +impl ThreadId { + // Generate a new unique thread ID. + fn new() -> ThreadId { + static GUARD: mutex::Mutex = mutex::Mutex::new(); + static mut COUNTER: u64 = 0; + + unsafe { + GUARD.lock(); + + // If we somehow use up all our bits, panic so that we're not + // covering up subtle bugs of IDs being reused. + if COUNTER == ::u64::MAX { + GUARD.unlock(); + panic!("failed to generate unique thread ID: bitspace exhausted"); + } + + let id = COUNTER; + COUNTER += 1; + + GUARD.unlock(); + + ThreadId(id) + } + } +} + //////////////////////////////////////////////////////////////////////////////// // Thread //////////////////////////////////////////////////////////////////////////////// @@ -531,6 +571,7 @@ pub fn park_timeout(dur: Duration) { /// The internal representation of a `Thread` handle struct Inner { name: Option, // Guaranteed to be UTF-8 + id: ThreadId, lock: Mutex, // true when there is a buffered unpark cvar: Condvar, } @@ -551,6 +592,7 @@ impl Thread { Thread { inner: Arc::new(Inner { name: cname, + id: ThreadId::new(), lock: Mutex::new(false), cvar: Condvar::new(), }) @@ -569,6 +611,12 @@ impl Thread { } } + /// Gets the thread's unique identifier. + #[unstable(feature = "thread_id", issue = "21507")] + pub fn id(&self) -> ThreadId { + self.inner.id + } + /// Gets the thread's name. 
/// /// # Examples @@ -741,7 +789,7 @@ fn _assert_sync_and_send() { // Tests //////////////////////////////////////////////////////////////////////////////// -#[cfg(test)] +#[cfg(all(test, not(target_os = "emscripten")))] mod tests { use any::Any; use sync::mpsc::{channel, Sender}; @@ -977,6 +1025,17 @@ mod tests { thread::sleep(Duration::from_millis(2)); } + #[test] + fn test_thread_id_equal() { + assert!(thread::current().id() == thread::current().id()); + } + + #[test] + fn test_thread_id_not_equal() { + let spawned_id = thread::spawn(|| thread::current().id()).join().unwrap(); + assert!(thread::current().id() != spawned_id); + } + // NOTE: the corresponding test for stderr is in run-pass/thread-stderr, due // to the test harness apparently interfering with stderr configuration. } diff --git a/src/libstd/time/duration.rs b/src/libstd/time/duration.rs index 246c57ab23871..10a0c567e142f 100644 --- a/src/libstd/time/duration.rs +++ b/src/libstd/time/duration.rs @@ -83,7 +83,7 @@ impl Duration { /// Returns the number of whole seconds represented by this duration. /// - /// The extra precision represented by this duration is ignored (e.g. extra + /// The extra precision represented by this duration is ignored (i.e. extra /// nanoseconds are not represented in the returned value). #[stable(feature = "duration", since = "1.3.0")] #[inline] @@ -93,7 +93,7 @@ impl Duration { /// /// This method does **not** return the length of the duration when /// represented by nanoseconds. The returned number always represents a - /// fractional portion of a second (e.g. it is less than one billion). + /// fractional portion of a second (i.e. it is less than one billion). #[stable(feature = "duration", since = "1.3.0")] #[inline] pub fn subsec_nanos(&self) -> u32 { self.nanos } diff --git a/src/libstd/time/mod.rs b/src/libstd/time/mod.rs index 154f603c84f16..6854f1e14fa13 100644 --- a/src/libstd/time/mod.rs +++ b/src/libstd/time/mod.rs @@ -118,7 +118,7 @@ pub struct Instant(time::Instant); pub struct SystemTime(time::SystemTime); /// An error returned from the `duration_since` method on `SystemTime`, -/// used to learn about why how far in the opposite direction a timestamp lies. +/// used to learn how far in the opposite direction a system time lies. #[derive(Clone, Debug)] #[stable(feature = "time2", since = "1.8.0")] pub struct SystemTimeError(Duration); diff --git a/src/libsyntax/abi.rs b/src/libsyntax/abi.rs index 64a71133a8c02..a39cac8db9917 100644 --- a/src/libsyntax/abi.rs +++ b/src/libsyntax/abi.rs @@ -24,6 +24,7 @@ pub enum Os { Netbsd, Openbsd, NaCl, + Haiku, Solaris, } @@ -32,7 +33,7 @@ pub enum Abi { // NB: This ordering MUST match the AbiDatas array below. // (This is ensured by the test indices_are_correct().) - // Single platform ABIs come first (`for_arch()` relies on this) + // Single platform ABIs Cdecl, Stdcall, Fastcall, @@ -41,7 +42,7 @@ pub enum Abi { Win64, SysV64, - // Multiplatform ABIs second + // Multiplatform / generic ABIs Rust, C, System, @@ -64,41 +65,31 @@ pub enum Architecture { pub struct AbiData { abi: Abi, - // Name of this ABI as we like it called. + /// Name of this ABI as we like it called. name: &'static str, -} -#[derive(Copy, Clone)] -pub enum AbiArchitecture { - /// Not a real ABI (e.g., intrinsic) - Rust, - /// An ABI that specifies cross-platform defaults (e.g., "C") - All, - /// Multiple architectures (bitset) - Archs(u32) + /// A generic ABI is supported on all platforms. 
+ generic: bool, } #[allow(non_upper_case_globals)] const AbiDatas: &'static [AbiData] = &[ // Platform-specific ABIs - AbiData {abi: Abi::Cdecl, name: "cdecl" }, - AbiData {abi: Abi::Stdcall, name: "stdcall" }, - AbiData {abi: Abi::Fastcall, name: "fastcall" }, - AbiData {abi: Abi::Vectorcall, name: "vectorcall"}, - AbiData {abi: Abi::Aapcs, name: "aapcs" }, - AbiData {abi: Abi::Win64, name: "win64" }, - AbiData {abi: Abi::SysV64, name: "sysv64" }, + AbiData {abi: Abi::Cdecl, name: "cdecl", generic: false }, + AbiData {abi: Abi::Stdcall, name: "stdcall", generic: false }, + AbiData {abi: Abi::Fastcall, name: "fastcall", generic: false }, + AbiData {abi: Abi::Vectorcall, name: "vectorcall", generic: false}, + AbiData {abi: Abi::Aapcs, name: "aapcs", generic: false }, + AbiData {abi: Abi::Win64, name: "win64", generic: false }, + AbiData {abi: Abi::SysV64, name: "sysv64", generic: false }, // Cross-platform ABIs - // - // NB: Do not adjust this ordering without - // adjusting the indices below. - AbiData {abi: Abi::Rust, name: "Rust" }, - AbiData {abi: Abi::C, name: "C" }, - AbiData {abi: Abi::System, name: "system" }, - AbiData {abi: Abi::RustIntrinsic, name: "rust-intrinsic" }, - AbiData {abi: Abi::RustCall, name: "rust-call" }, - AbiData {abi: Abi::PlatformIntrinsic, name: "platform-intrinsic" } + AbiData {abi: Abi::Rust, name: "Rust", generic: true }, + AbiData {abi: Abi::C, name: "C", generic: true }, + AbiData {abi: Abi::System, name: "system", generic: true }, + AbiData {abi: Abi::RustIntrinsic, name: "rust-intrinsic", generic: true }, + AbiData {abi: Abi::RustCall, name: "rust-call", generic: true }, + AbiData {abi: Abi::PlatformIntrinsic, name: "platform-intrinsic", generic: true }, ]; /// Returns the ABI with the given name (if any). @@ -124,6 +115,10 @@ impl Abi { pub fn name(&self) -> &'static str { self.data().name } + + pub fn generic(&self) -> bool { + self.data().generic + } } impl fmt::Display for Abi { @@ -146,6 +141,7 @@ impl fmt::Display for Os { Os::Netbsd => "netbsd".fmt(f), Os::Openbsd => "openbsd".fmt(f), Os::NaCl => "nacl".fmt(f), + Os::Haiku => "haiku".fmt(f), Os::Solaris => "solaris".fmt(f), } } diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 40c8ba93bd5d9..f077ead1f8e07 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -27,7 +27,9 @@ use tokenstream::{TokenTree}; use std::fmt; use std::rc::Rc; -use serialize::{Encodable, Decodable, Encoder, Decoder}; +use std::u32; + +use serialize::{self, Encodable, Decodable, Encoder, Decoder}; /// A name is a part of an identifier, representing a string or gensym. It's /// the result of interning. @@ -69,7 +71,7 @@ impl Encodable for Name { impl Decodable for Name { fn decode(d: &mut D) -> Result { - Ok(token::intern(&d.read_str()?[..])) + Ok(token::intern(&d.read_str()?)) } } @@ -119,6 +121,7 @@ impl fmt::Debug for Lifetime { /// A lifetime definition, e.g. 
`'a: 'b+'c+'d` #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct LifetimeDef { + pub attrs: ThinVec, pub lifetime: Lifetime, pub bounds: Vec } @@ -158,12 +161,12 @@ impl Path { Path { span: s, global: false, - segments: vec!( + segments: vec![ PathSegment { identifier: identifier, parameters: PathParameters::none() } - ), + ], } } } @@ -298,17 +301,53 @@ pub struct ParenthesizedParameterData { pub output: Option>, } -pub type CrateNum = u32; +#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Hash, Debug)] +pub struct NodeId(u32); + +impl NodeId { + pub fn new(x: usize) -> NodeId { + assert!(x < (u32::MAX as usize)); + NodeId(x as u32) + } + + pub fn from_u32(x: u32) -> NodeId { + NodeId(x) + } + + pub fn as_usize(&self) -> usize { + self.0 as usize + } + + pub fn as_u32(&self) -> u32 { + self.0 + } +} + +impl fmt::Display for NodeId { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Display::fmt(&self.0, f) + } +} -pub type NodeId = u32; +impl serialize::UseSpecializedEncodable for NodeId { + fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { + s.emit_u32(self.0) + } +} + +impl serialize::UseSpecializedDecodable for NodeId { + fn default_decode(d: &mut D) -> Result { + d.read_u32().map(NodeId) + } +} /// Node id used to represent the root of the crate. -pub const CRATE_NODE_ID: NodeId = 0; +pub const CRATE_NODE_ID: NodeId = NodeId(0); /// When parsing and doing expansions, we initially give all AST nodes this AST /// node value. Then later, in the renumber pass, we renumber them to have /// small, positive ids. -pub const DUMMY_NODE_ID: NodeId = !0; +pub const DUMMY_NODE_ID: NodeId = NodeId(!0); /// The AST represents all type param bounds as types. /// typeck::collect::compute_bounds matches these against @@ -332,6 +371,7 @@ pub type TyParamBounds = P<[TyParamBound]>; #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct TyParam { + pub attrs: ThinVec, pub ident: Ident, pub id: NodeId, pub bounds: TyParamBounds, @@ -437,7 +477,6 @@ pub type CrateConfig = Vec>; pub struct Crate { pub module: Mod, pub attrs: Vec, - pub config: CrateConfig, pub span: Span, pub exported_macros: Vec, } @@ -555,7 +594,7 @@ impl Pat { PatKind::Box(ref s) | PatKind::Ref(ref s, _) => { s.walk(it) } - PatKind::Vec(ref before, ref slice, ref after) => { + PatKind::Slice(ref before, ref slice, ref after) => { before.iter().all(|p| p.walk(it)) && slice.iter().all(|p| p.walk(it)) && after.iter().all(|p| p.walk(it)) @@ -631,8 +670,8 @@ pub enum PatKind { /// A range pattern, e.g. 
`1...2` Range(P, P), /// `[a, b, ..i, y, z]` is represented as: - /// `PatKind::Vec(box [a, b], Some(i), box [y, z])` - Vec(Vec>, Option>, Vec>), + /// `PatKind::Slice(box [a, b], Some(i), box [y, z])` + Slice(Vec>, Option>, Vec>), /// A macro pattern; pre-expansion Mac(Mac), } @@ -860,6 +899,7 @@ pub struct Field { pub ident: SpannedIdent, pub expr: P, pub span: Span, + pub is_shorthand: bool, } pub type SpannedIdent = Spanned; @@ -1393,10 +1433,10 @@ pub struct BareFnTy { /// The different kinds of types recognized by the compiler #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum TyKind { - /// A variable-length array (`[T]`) - Vec(P), + /// A variable-length slice (`[T]`) + Slice(P), /// A fixed length array (`[T; n]`) - FixedLengthVec(P, P), + Array(P, P), /// A raw pointer (`*const T` or `*mut T`) Ptr(MutTy), /// A reference (`&'a T` or `&'a mut T`) @@ -1972,8 +2012,6 @@ pub struct MacroDef { pub id: NodeId, pub span: Span, pub imported_from: Option, - pub export: bool, - pub use_locally: bool, pub allow_internal_unstable: bool, pub body: Vec, } diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index 81ee96459fd64..0335f210347a2 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -501,10 +501,7 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool { } /// Tests if a cfg-pattern matches the cfg set -pub fn cfg_matches(cfgs: &[P], cfg: &ast::MetaItem, - sess: &ParseSess, - features: Option<&Features>) - -> bool { +pub fn cfg_matches(cfg: &ast::MetaItem, sess: &ParseSess, features: Option<&Features>) -> bool { match cfg.node { ast::MetaItemKind::List(ref pred, ref mis) => { for mi in mis.iter() { @@ -518,10 +515,10 @@ pub fn cfg_matches(cfgs: &[P], cfg: &ast::MetaItem, // that they won't fail with the loop above. match &pred[..] 
{ "any" => mis.iter().any(|mi| { - cfg_matches(cfgs, mi.meta_item().unwrap(), sess, features) + cfg_matches(mi.meta_item().unwrap(), sess, features) }), "all" => mis.iter().all(|mi| { - cfg_matches(cfgs, mi.meta_item().unwrap(), sess, features) + cfg_matches(mi.meta_item().unwrap(), sess, features) }), "not" => { if mis.len() != 1 { @@ -529,7 +526,7 @@ pub fn cfg_matches(cfgs: &[P], cfg: &ast::MetaItem, return false; } - !cfg_matches(cfgs, mis[0].meta_item().unwrap(), sess, features) + !cfg_matches(mis[0].meta_item().unwrap(), sess, features) }, p => { span_err!(sess.span_diagnostic, cfg.span, E0537, "invalid predicate `{}`", p); @@ -541,7 +538,7 @@ pub fn cfg_matches(cfgs: &[P], cfg: &ast::MetaItem, if let (Some(feats), Some(gated_cfg)) = (features, GatedCfg::gate(cfg)) { gated_cfg.check_and_emit(sess, feats); } - contains(cfgs, cfg) + contains(&sess.config, cfg) } } } @@ -895,7 +892,7 @@ pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec "packed" => Some(ReprPacked), "simd" => Some(ReprSimd), _ => match int_type_of_word(word) { - Some(ity) => Some(ReprInt(item.span, ity)), + Some(ity) => Some(ReprInt(ity)), None => { // Not a word we recognize span_err!(diagnostic, item.span, E0552, @@ -939,7 +936,7 @@ fn int_type_of_word(s: &str) -> Option { #[derive(PartialEq, Debug, RustcEncodable, RustcDecodable, Copy, Clone)] pub enum ReprAttr { ReprAny, - ReprInt(Span, IntType), + ReprInt(IntType), ReprExtern, ReprPacked, ReprSimd, @@ -949,7 +946,7 @@ impl ReprAttr { pub fn is_ffi_safe(&self) -> bool { match *self { ReprAny => false, - ReprInt(_sp, ity) => ity.is_ffi_safe(), + ReprInt(ity) => ity.is_ffi_safe(), ReprExtern => true, ReprPacked => false, ReprSimd => true, diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index cd6f2874954b8..49012ad036a9a 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -364,6 +364,46 @@ impl CodeMap { } } + /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If + /// there are gaps between lhs and rhs, the resulting union will cross these gaps. 
+ /// For this to work, the spans have to be: + /// * the expn_id of both spans much match + /// * the lhs span needs to end on the same line the rhs span begins + /// * the lhs span must start at or before the rhs span + pub fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { + use std::cmp; + + // make sure we're at the same expansion id + if sp_lhs.expn_id != sp_rhs.expn_id { + return None; + } + + let lhs_end = match self.lookup_line(sp_lhs.hi) { + Ok(x) => x, + Err(_) => return None + }; + let rhs_begin = match self.lookup_line(sp_rhs.lo) { + Ok(x) => x, + Err(_) => return None + }; + + // if we must cross lines to merge, don't merge + if lhs_end.line != rhs_begin.line { + return None; + } + + // ensure these follow the expected order and we don't overlap + if (sp_lhs.lo <= sp_rhs.lo) && (sp_lhs.hi <= sp_rhs.lo) { + Some(Span { + lo: cmp::min(sp_lhs.lo, sp_rhs.lo), + hi: cmp::max(sp_lhs.hi, sp_rhs.hi), + expn_id: sp_lhs.expn_id, + }) + } else { + None + } + } + pub fn span_to_string(&self, sp: Span) -> String { if sp == COMMAND_LINE_SP { return "".to_string(); @@ -764,7 +804,7 @@ impl CodeMap { } pub fn macro_backtrace(&self, span: Span) -> Vec { - let mut last_span = DUMMY_SP; + let mut prev_span = DUMMY_SP; let mut span = span; let mut result = vec![]; loop { @@ -787,14 +827,14 @@ impl CodeMap { None => break, Some((call_site, macro_decl_name, def_site_span)) => { // Don't print recursive invocations - if !call_site.source_equal(&last_span) { + if !call_site.source_equal(&prev_span) { result.push(MacroBacktrace { call_site: call_site, macro_decl_name: macro_decl_name, def_site_span: def_site_span, }); } - last_span = span; + prev_span = span; span = call_site; } } @@ -819,6 +859,9 @@ impl CodeMapper for CodeMap { fn macro_backtrace(&self, span: Span) -> Vec { self.macro_backtrace(span) } + fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option { + self.merge_spans(sp_lhs, sp_rhs) + } } // _____________________________________________________________________________ @@ -1072,6 +1115,40 @@ mod tests { blork.rs:1:1: 1:12\n `first line.`\n"); } + /// Test merging two spans on the same line + #[test] + fn span_merging() { + let cm = CodeMap::new(); + let inputtext = "bbbb BB bb CCC\n"; + let selection1 = " ~~ \n"; + let selection2 = " ~~~\n"; + cm.new_filemap_and_lines("blork.rs", None, inputtext); + let span1 = span_from_selection(inputtext, selection1); + let span2 = span_from_selection(inputtext, selection2); + + if let Some(sp) = cm.merge_spans(span1, span2) { + let sstr = cm.span_to_expanded_string(sp); + assert_eq!(sstr, "blork.rs:1:6: 1:15\n`BB bb CCC`\n"); + } + else { + assert!(false); + } + } + + /// Test failing to merge two spans on different lines + #[test] + fn span_merging_fail() { + let cm = CodeMap::new(); + let inputtext = "bbbb BB\ncc CCC\n"; + let selection1 = " ~~\n \n"; + let selection2 = " \n ~~~\n"; + cm.new_filemap_and_lines("blork.rs", None, inputtext); + let span1 = span_from_selection(inputtext, selection1); + let span2 = span_from_selection(inputtext, selection2); + + assert!(cm.merge_spans(span1, span2).is_none()); + } + /// Returns the span corresponding to the `n`th occurrence of /// `substring` in `source_text`. trait CodeMapExtension { diff --git a/src/libsyntax/config.rs b/src/libsyntax/config.rs index 3f5b294cc0443..946257a16d5ac 100644 --- a/src/libsyntax/config.rs +++ b/src/libsyntax/config.rs @@ -9,7 +9,7 @@ // except according to those terms. 
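The new `merge_spans` helper and its tests above encode a narrow rule: two spans merge only when they come from the same expansion, sit on the same source line, and appear in order without overlapping. As a rough standalone model of that comparison (using a simplified stand-in for `syntax_pos::Span`, and leaving out the same-line check that `lookup_line` performs in the real method):

```rust
use std::cmp;

// Simplified stand-in for syntax_pos::Span: byte positions plus an expansion id.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span {
    lo: u32,
    hi: u32,
    expn_id: u32,
}

// Mirrors the ordering/overlap check above; the real method additionally
// refuses to merge spans whose endpoints fall on different source lines.
fn merge_spans(lhs: Span, rhs: Span) -> Option<Span> {
    if lhs.expn_id != rhs.expn_id {
        return None;
    }
    if lhs.lo <= rhs.lo && lhs.hi <= rhs.lo {
        Some(Span {
            lo: cmp::min(lhs.lo, rhs.lo),
            hi: cmp::max(lhs.hi, rhs.hi),
            expn_id: lhs.expn_id,
        })
    } else {
        None
    }
}

fn main() {
    let lhs = Span { lo: 5, hi: 7, expn_id: 0 };
    let rhs = Span { lo: 11, hi: 14, expn_id: 0 };
    assert_eq!(merge_spans(lhs, rhs), Some(Span { lo: 5, hi: 14, expn_id: 0 }));
    // Out-of-order (or overlapping) spans are rejected.
    assert_eq!(merge_spans(rhs, lhs), None);
}
```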
use attr::HasAttrs; -use feature_gate::{emit_feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features, GateIssue}; +use feature_gate::{feature_err, EXPLAIN_STMT_ATTR_SYNTAX, Features, get_features, GateIssue}; use {fold, attr}; use ast; use codemap::{Spanned, respan}; @@ -20,7 +20,6 @@ use util::small_vector::SmallVector; /// A folder that strips out items that do not belong in the current configuration. pub struct StripUnconfigured<'a> { - pub config: &'a ast::CrateConfig, pub should_test: bool, pub sess: &'a ParseSess, pub features: Option<&'a Features>, @@ -32,7 +31,6 @@ pub fn features(mut krate: ast::Crate, sess: &ParseSess, should_test: bool) let features; { let mut strip_unconfigured = StripUnconfigured { - config: &krate.config.clone(), should_test: should_test, sess: sess, features: None, @@ -107,7 +105,7 @@ impl<'a> StripUnconfigured<'a> { use attr::cfg_matches; match (cfg.meta_item(), mi.meta_item()) { (Some(cfg), Some(mi)) => - if cfg_matches(self.config, &cfg, self.sess, self.features) { + if cfg_matches(&cfg, self.sess, self.features) { self.process_cfg_attr(respan(mi.span, ast::Attribute_ { id: attr::mk_attr_id(), style: attr.node.style, @@ -126,7 +124,7 @@ impl<'a> StripUnconfigured<'a> { } // Determine if a node with the given attributes should be included in this configuation. - fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool { + pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool { attrs.iter().all(|attr| { // When not compiling with --test we should not compile the #[test] functions if !self.should_test && is_test_or_bench(attr) { @@ -148,20 +146,24 @@ impl<'a> StripUnconfigured<'a> { return true; } - attr::cfg_matches(self.config, mis[0].meta_item().unwrap(), self.sess, self.features) + attr::cfg_matches(mis[0].meta_item().unwrap(), self.sess, self.features) }) } // Visit attributes on expression and statements (but not attributes on items in blocks). - fn visit_stmt_or_expr_attrs(&mut self, attrs: &[ast::Attribute]) { + fn visit_expr_attrs(&mut self, attrs: &[ast::Attribute]) { // flag the offending attributes for attr in attrs.iter() { if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) { - emit_feature_err(&self.sess.span_diagnostic, - "stmt_expr_attributes", - attr.span, - GateIssue::Language, - EXPLAIN_STMT_ATTR_SYNTAX); + let mut err = feature_err(&self.sess, + "stmt_expr_attributes", + attr.span, + GateIssue::Language, + EXPLAIN_STMT_ATTR_SYNTAX); + if attr.node.is_sugared_doc { + err.help("`///` is for documentation comments. For a plain comment, use `//`."); + } + err.emit(); } } } @@ -227,7 +229,7 @@ impl<'a> StripUnconfigured<'a> { } pub fn configure_expr(&mut self, expr: P) -> P { - self.visit_stmt_or_expr_attrs(expr.attrs()); + self.visit_expr_attrs(expr.attrs()); // If an expr is valid to cfg away it will have been removed by the // outer stmt or expression folder before descending in here. 
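The `in_cfg`/`cfg_matches` plumbing above decides whether a node survives configuration stripping; `cfg_matches` no longer takes a `cfgs` slice explicitly because the crate configuration is now read from the `ParseSess` (`sess.config`). The predicates it evaluates are the ordinary `any`/`all`/`not` combinators available in user code, for example:

```rust
// The same predicate grammar that cfg_matches evaluates, written as normal
// user-facing attributes and the cfg! macro.
#[cfg(all(unix, not(target_os = "macos")))]
fn platform_note() -> &'static str {
    "non-macOS Unix"
}

#[cfg(not(all(unix, not(target_os = "macos"))))]
fn platform_note() -> &'static str {
    "macOS, Windows, or something else"
}

fn main() {
    // cfg! evaluates to a boolean at compile time using the same rules.
    if cfg!(any(debug_assertions, test)) {
        println!("unoptimized or test configuration");
    }
    println!("{}", platform_note());
}
```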
@@ -245,7 +247,6 @@ impl<'a> StripUnconfigured<'a> { } pub fn configure_stmt(&mut self, stmt: ast::Stmt) -> Option { - self.visit_stmt_or_expr_attrs(stmt.attrs()); self.configure(stmt) } } @@ -303,6 +304,6 @@ fn is_cfg(attr: &ast::Attribute) -> bool { attr.check_name("cfg") } -fn is_test_or_bench(attr: &ast::Attribute) -> bool { +pub fn is_test_or_bench(attr: &ast::Attribute) -> bool { attr.check_name("test") || attr.check_name("bench") } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 4e50299e836b3..81c8e0bdb8262 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -215,7 +215,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, let ty = ecx.ty( span, - ast::TyKind::FixedLengthVec( + ast::TyKind::Array( ecx.ty( span, ast::TyKind::Tup(vec![ty_str.clone(), ty_str]) diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index fb4816d3847ed..cc097ab0efadc 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -8,30 +8,26 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -pub use self::SyntaxExtension::*; +pub use self::SyntaxExtension::{MultiDecorator, MultiModifier, NormalTT, IdentTT}; use ast::{self, Attribute, Name, PatKind}; use attr::HasAttrs; use codemap::{self, CodeMap, ExpnInfo, Spanned, respan}; use syntax_pos::{Span, ExpnId, NO_EXPANSION}; use errors::DiagnosticBuilder; -use ext::expand::{self, Invocation, Expansion}; +use ext::expand::{self, Expansion}; use ext::hygiene::Mark; -use ext::tt::macro_rules; -use parse; -use parse::parser; +use fold::{self, Folder}; +use parse::{self, parser}; use parse::token; use parse::token::{InternedString, str_to_ident}; use ptr::P; -use std_inject; use util::small_vector::SmallVector; -use fold::Folder; -use feature_gate; -use std::collections::HashMap; use std::path::PathBuf; use std::rc::Rc; -use tokenstream; +use std::default::Default; +use tokenstream::{self, TokenStream}; #[derive(Debug,Clone)] @@ -146,6 +142,50 @@ impl Into> for Annotatable { } } +pub trait ProcMacro { + fn expand<'cx>(&self, + ecx: &'cx mut ExtCtxt, + span: Span, + ts: TokenStream) + -> TokenStream; +} + +impl ProcMacro for F + where F: Fn(TokenStream) -> TokenStream +{ + fn expand<'cx>(&self, + _ecx: &'cx mut ExtCtxt, + _span: Span, + ts: TokenStream) + -> TokenStream { + // FIXME setup implicit context in TLS before calling self. + (*self)(ts) + } +} + +pub trait AttrProcMacro { + fn expand<'cx>(&self, + ecx: &'cx mut ExtCtxt, + span: Span, + annotation: TokenStream, + annotated: TokenStream) + -> TokenStream; +} + +impl AttrProcMacro for F + where F: Fn(TokenStream, TokenStream) -> TokenStream +{ + fn expand<'cx>(&self, + _ecx: &'cx mut ExtCtxt, + _span: Span, + annotation: TokenStream, + annotated: TokenStream) + -> TokenStream { + // FIXME setup implicit context in TLS before calling self. + (*self)(annotation, annotated) + } +} + /// Represents a thing that maps token trees to Macro Results pub trait TTMacroExpander { fn expand<'cx>(&self, @@ -177,7 +217,8 @@ pub trait IdentMacroExpander { cx: &'cx mut ExtCtxt, sp: Span, ident: ast::Ident, - token_tree: Vec ) + token_tree: Vec, + attrs: Vec) -> Box; } @@ -193,7 +234,8 @@ impl IdentMacroExpander for F cx: &'cx mut ExtCtxt, sp: Span, ident: ast::Ident, - token_tree: Vec ) + token_tree: Vec, + _attrs: Vec) -> Box { (*self)(cx, sp, ident, token_tree) @@ -437,11 +479,22 @@ pub enum SyntaxExtension { /// based upon it. 
/// /// `#[derive(...)]` is a `MultiItemDecorator`. - MultiDecorator(Box), + /// + /// Prefer ProcMacro or MultiModifier since they are more flexible. + MultiDecorator(Box), /// A syntax extension that is attached to an item and modifies it - /// in-place. More flexible version than Modifier. - MultiModifier(Box), + /// in-place. Also allows decoration, i.e., creating new items. + MultiModifier(Box), + + /// A function-like procedural macro. TokenStream -> TokenStream. + ProcMacro(Box), + + /// An attribute-like procedural macro. TokenStream, TokenStream -> TokenStream. + /// The first TokenSteam is the attribute, the second is the annotated item. + /// Allows modification of the input items and adding new items, similar to + /// MultiModifier, but uses TokenStreams, rather than AST nodes. + AttrProcMacro(Box), /// A normal, function-like syntax extension. /// @@ -449,50 +502,54 @@ pub enum SyntaxExtension { /// /// The `bool` dictates whether the contents of the macro can /// directly use `#[unstable]` things (true == yes). - NormalTT(Box, Option, bool), + NormalTT(Box, Option, bool), /// A function-like syntax extension that has an extra ident before /// the block. /// - IdentTT(Box, Option, bool), + IdentTT(Box, Option, bool), - /// Represents `macro_rules!` itself. - MacroRulesTT, + CustomDerive(Box), } pub type NamedSyntaxExtension = (Name, SyntaxExtension); pub trait Resolver { - fn load_crate(&mut self, extern_crate: &ast::Item, allows_macros: bool) -> Vec; fn next_node_id(&mut self) -> ast::NodeId; + fn get_module_scope(&mut self, id: ast::NodeId) -> Mark; fn visit_expansion(&mut self, mark: Mark, expansion: &Expansion); - fn add_macro(&mut self, scope: Mark, ident: ast::Ident, ext: Rc); + fn add_macro(&mut self, scope: Mark, def: ast::MacroDef, export: bool); + fn add_ext(&mut self, ident: ast::Ident, ext: Rc); fn add_expansions_at_stmt(&mut self, id: ast::NodeId, macros: Vec); fn find_attr_invoc(&mut self, attrs: &mut Vec) -> Option; - fn resolve_invoc(&mut self, invoc: &Invocation) -> Option>; + fn resolve_macro(&mut self, scope: Mark, path: &ast::Path, force: bool) + -> Result, Determinacy>; } -pub enum LoadedMacro { - Def(ast::MacroDef), - CustomDerive(String, Box), +#[derive(Copy, Clone, Debug)] +pub enum Determinacy { + Determined, + Undetermined, } pub struct DummyResolver; impl Resolver for DummyResolver { - fn load_crate(&mut self, _extern_crate: &ast::Item, _allows_macros: bool) -> Vec { - Vec::new() - } fn next_node_id(&mut self) -> ast::NodeId { ast::DUMMY_NODE_ID } + fn get_module_scope(&mut self, _id: ast::NodeId) -> Mark { Mark::root() } fn visit_expansion(&mut self, _invoc: Mark, _expansion: &Expansion) {} - fn add_macro(&mut self, _scope: Mark, _ident: ast::Ident, _ext: Rc) {} + fn add_macro(&mut self, _scope: Mark, _def: ast::MacroDef, _export: bool) {} + fn add_ext(&mut self, _ident: ast::Ident, _ext: Rc) {} fn add_expansions_at_stmt(&mut self, _id: ast::NodeId, _macros: Vec) {} fn find_attr_invoc(&mut self, _attrs: &mut Vec) -> Option { None } - fn resolve_invoc(&mut self, _invoc: &Invocation) -> Option> { None } + fn resolve_macro(&mut self, _scope: Mark, _path: &ast::Path, _force: bool) + -> Result, Determinacy> { + Err(Determinacy::Determined) + } } #[derive(Clone)] @@ -507,7 +564,9 @@ pub struct ExpansionData { pub depth: usize, pub backtrace: ExpnId, pub module: Rc, - pub in_block: bool, + + // True if non-inline modules without a `#[path]` are forbidden at the root of this expansion. 
+ pub no_noninline_mod: bool, } /// One of these is made during expansion and incrementally updated as we go; @@ -515,34 +574,30 @@ pub struct ExpansionData { /// -> expn_info of their expansion context stored into their span. pub struct ExtCtxt<'a> { pub parse_sess: &'a parse::ParseSess, - pub cfg: ast::CrateConfig, pub ecfg: expand::ExpansionConfig<'a>, pub crate_root: Option<&'static str>, pub resolver: &'a mut Resolver, - pub exported_macros: Vec, - pub derive_modes: HashMap>, + pub resolve_err_count: usize, pub current_expansion: ExpansionData, } impl<'a> ExtCtxt<'a> { - pub fn new(parse_sess: &'a parse::ParseSess, cfg: ast::CrateConfig, + pub fn new(parse_sess: &'a parse::ParseSess, ecfg: expand::ExpansionConfig<'a>, resolver: &'a mut Resolver) -> ExtCtxt<'a> { ExtCtxt { parse_sess: parse_sess, - cfg: cfg, ecfg: ecfg, crate_root: None, - exported_macros: Vec::new(), resolver: resolver, - derive_modes: HashMap::new(), + resolve_err_count: 0, current_expansion: ExpansionData { mark: Mark::root(), depth: 0, backtrace: NO_EXPANSION, module: Rc::new(ModuleData { mod_path: Vec::new(), directory: PathBuf::new() }), - in_block: false, + no_noninline_mod: false, }, } } @@ -560,11 +615,11 @@ impl<'a> ExtCtxt<'a> { pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> { - parse::tts_to_parser(self.parse_sess, tts.to_vec(), self.cfg()) + parse::tts_to_parser(self.parse_sess, tts.to_vec()) } pub fn codemap(&self) -> &'a CodeMap { self.parse_sess.codemap() } pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess } - pub fn cfg(&self) -> ast::CrateConfig { self.cfg.clone() } + pub fn cfg(&self) -> &ast::CrateConfig { &self.parse_sess.config } pub fn call_site(&self) -> Span { self.codemap().with_expn_info(self.backtrace(), |ei| match ei { Some(expn_info) => expn_info.call_site, @@ -613,31 +668,6 @@ impl<'a> ExtCtxt<'a> { } pub fn bt_pop(&mut self) {} - pub fn insert_macro(&mut self, def: ast::MacroDef) { - if def.export { - self.exported_macros.push(def.clone()); - } - if def.use_locally { - let ext = macro_rules::compile(self, &def); - self.resolver.add_macro(self.current_expansion.mark, def.ident, Rc::new(ext)); - } - } - - pub fn insert_custom_derive(&mut self, name: &str, ext: Box, sp: Span) { - if !self.ecfg.enable_rustc_macro() { - feature_gate::emit_feature_err(&self.parse_sess.span_diagnostic, - "rustc_macro", - sp, - feature_gate::GateIssue::Language, - "loading custom derive macro crates \ - is experimentally supported"); - } - let name = token::intern_and_get_ident(name); - if self.derive_modes.insert(name.clone(), ext).is_some() { - self.span_err(sp, &format!("cannot shadow existing derive mode `{}`", name)); - } - } - pub fn struct_span_warn(&self, sp: Span, msg: &str) @@ -712,28 +742,6 @@ impl<'a> ExtCtxt<'a> { pub fn name_of(&self, st: &str) -> ast::Name { token::intern(st) } - - pub fn initialize(&mut self, user_exts: Vec, krate: &ast::Crate) { - if std_inject::no_core(&krate) { - self.crate_root = None; - } else if std_inject::no_std(&krate) { - self.crate_root = Some("core"); - } else { - self.crate_root = Some("std"); - } - - for (name, extension) in user_exts { - let ident = ast::Ident::with_empty_ctxt(name); - self.resolver.add_macro(Mark::root(), ident, Rc::new(extension)); - } - - let mut module = ModuleData { - mod_path: vec![token::str_to_ident(&self.ecfg.crate_name)], - directory: PathBuf::from(self.parse_sess.codemap().span_to_filename(krate.span)), - }; - module.directory.pop(); - self.current_expansion.module = 
Rc::new(module); - } } /// Extract a string literal from the macro expanded version of `expr`, @@ -818,3 +826,17 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt, } Some(es) } + +pub struct ChangeSpan { + pub span: Span +} + +impl Folder for ChangeSpan { + fn new_span(&mut self, _sp: Span) -> Span { + self.span + } + + fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { + fold::noop_fold_mac(mac, self) + } +} diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index b81d95a6998c3..37bd83be7b4d4 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -73,6 +73,7 @@ pub trait AstBuilder { fn typaram(&self, span: Span, id: ast::Ident, + attrs: Vec, bounds: ast::TyParamBounds, default: Option>) -> ast::TyParam; @@ -83,6 +84,7 @@ pub trait AstBuilder { fn lifetime_def(&self, span: Span, name: ast::Name, + attrs: Vec, bounds: Vec) -> ast::LifetimeDef; @@ -96,7 +98,7 @@ pub trait AstBuilder { ident: ast::Ident, typ: P, ex: P) - -> P; + -> ast::Stmt; fn stmt_let_type_only(&self, span: Span, ty: P) -> ast::Stmt; fn stmt_item(&self, sp: Span, item: P) -> ast::Stmt; @@ -310,7 +312,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.path_all(span, false, strs, Vec::new(), Vec::new(), Vec::new()) } fn path_ident(&self, span: Span, id: ast::Ident) -> ast::Path { - self.path(span, vec!(id)) + self.path(span, vec![id]) } fn path_global(&self, span: Span, strs: Vec ) -> ast::Path { self.path_all(span, true, strs, Vec::new(), Vec::new(), Vec::new()) @@ -441,7 +443,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { true, self.std_path(&["option", "Option"]), Vec::new(), - vec!( ty ), + vec![ ty ], Vec::new())) } @@ -452,11 +454,13 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn typaram(&self, span: Span, id: ast::Ident, + attrs: Vec, bounds: ast::TyParamBounds, default: Option>) -> ast::TyParam { ast::TyParam { ident: id, id: ast::DUMMY_NODE_ID, + attrs: attrs.into(), bounds: bounds, default: default, span: span @@ -473,7 +477,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn ty_vars_global(&self, ty_params: &P<[ast::TyParam]>) -> Vec> { ty_params .iter() - .map(|p| self.ty_path(self.path_global(DUMMY_SP, vec!(p.ident)))) + .map(|p| self.ty_path(self.path_global(DUMMY_SP, vec![p.ident]))) .collect() } @@ -503,9 +507,11 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn lifetime_def(&self, span: Span, name: ast::Name, + attrs: Vec, bounds: Vec) -> ast::LifetimeDef { ast::LifetimeDef { + attrs: attrs.into(), lifetime: self.lifetime(span, name), bounds: bounds } @@ -556,7 +562,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { ident: ast::Ident, typ: P, ex: P) - -> P { + -> ast::Stmt { let pat = if mutbl { let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Mutable); self.pat_ident_binding_mode(sp, ident, binding_mode) @@ -571,11 +577,11 @@ impl<'a> AstBuilder for ExtCtxt<'a> { span: sp, attrs: ast::ThinVec::new(), }); - P(ast::Stmt { + ast::Stmt { id: ast::DUMMY_NODE_ID, node: ast::StmtKind::Local(local), span: sp, - }) + } } // Generate `let _: Type;`, usually used for type assertions. 
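In the `AstBuilder` changes above, the typed-`let` statement builder now returns an `ast::Stmt` by value rather than wrapping it in `P(...)`, and the comment notes that `stmt_let_type_only` exists to generate `let _: Type;` statements for type assertions. The same idiom is occasionally useful in hand-written code; a tiny sketch (the `parse_port` helper is hypothetical):

```rust
fn parse_port(s: &str) -> Result<u16, std::num::ParseIntError> {
    s.parse()
}

fn main() {
    // `let _: Type = expr;` asserts the expression's type without binding it;
    // if parse_port ever changes its error type, this line stops compiling.
    let _: Result<u16, std::num::ParseIntError> = parse_port("8080");

    println!("{:?}", parse_port("8080"));
}
```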
@@ -707,7 +713,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr(b.span, ast::ExprKind::Block(b)) } fn field_imm(&self, span: Span, name: Ident, e: P) -> ast::Field { - ast::Field { ident: respan(span, name), expr: e, span: span } + ast::Field { ident: respan(span, name), expr: e, span: span, is_shorthand: false } } fn expr_struct(&self, span: Span, path: ast::Path, fields: Vec) -> P { self.expr(span, ast::ExprKind::Struct(path, fields, None)) @@ -764,7 +770,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_some(&self, sp: Span, expr: P) -> P { let some = self.std_path(&["option", "Option", "Some"]); - self.expr_call_global(sp, some, vec!(expr)) + self.expr_call_global(sp, some, vec![expr]) } fn expr_none(&self, sp: Span) -> P { @@ -788,14 +794,14 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let expr_file = self.expr_str(span, token::intern_and_get_ident(&loc.file.name)); let expr_line = self.expr_u32(span, loc.line as u32); - let expr_file_line_tuple = self.expr_tuple(span, vec!(expr_file, expr_line)); + let expr_file_line_tuple = self.expr_tuple(span, vec![expr_file, expr_line]); let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple); self.expr_call_global( span, self.std_path(&["rt", "begin_panic"]), - vec!( + vec![ self.expr_str(span, msg), - expr_file_line_ptr)) + expr_file_line_ptr]) } fn expr_unreachable(&self, span: Span) -> P { @@ -806,12 +812,12 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn expr_ok(&self, sp: Span, expr: P) -> P { let ok = self.std_path(&["result", "Result", "Ok"]); - self.expr_call_global(sp, ok, vec!(expr)) + self.expr_call_global(sp, ok, vec![expr]) } fn expr_err(&self, sp: Span, expr: P) -> P { let err = self.std_path(&["result", "Result", "Err"]); - self.expr_call_global(sp, err, vec!(expr)) + self.expr_call_global(sp, err, vec![expr]) } fn expr_try(&self, sp: Span, head: P) -> P { @@ -830,17 +836,17 @@ impl<'a> AstBuilder for ExtCtxt<'a> { // Err(__try_var) (pattern and expression resp.) let err_pat = self.pat_tuple_struct(sp, err_path.clone(), vec![binding_pat]); let err_inner_expr = self.expr_call(sp, self.expr_path(err_path), - vec!(binding_expr.clone())); + vec![binding_expr.clone()]); // return Err(__try_var) let err_expr = self.expr(sp, ast::ExprKind::Ret(Some(err_inner_expr))); // Ok(__try_var) => __try_var - let ok_arm = self.arm(sp, vec!(ok_pat), binding_expr); + let ok_arm = self.arm(sp, vec![ok_pat], binding_expr); // Err(__try_var) => return Err(__try_var) - let err_arm = self.arm(sp, vec!(err_pat), err_expr); + let err_arm = self.arm(sp, vec![err_pat], err_expr); // match head { Ok() => ..., Err() => ... 
} - self.expr_match(sp, head, vec!(ok_arm, err_arm)) + self.expr_match(sp, head, vec![ok_arm, err_arm]) } @@ -906,7 +912,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { fn arm(&self, _span: Span, pats: Vec>, expr: P) -> ast::Arm { ast::Arm { - attrs: vec!(), + attrs: vec![], pats: pats, guard: None, body: expr @@ -914,7 +920,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn arm_unreachable(&self, span: Span) -> ast::Arm { - self.arm(span, vec!(self.pat_wild(span)), self.expr_unreachable(span)) + self.arm(span, vec![self.pat_wild(span)], self.expr_unreachable(span)) } fn expr_match(&self, span: Span, arg: P, arms: Vec) -> P { @@ -964,7 +970,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { } fn lambda1(&self, span: Span, blk: P, ident: ast::Ident) -> P { - self.lambda(span, vec!(ident), blk) + self.lambda(span, vec![ident], blk) } fn lambda_expr(&self, span: Span, ids: Vec, diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 62e299684b760..e3b23e239f917 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -8,22 +8,26 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use ast::{Block, Crate, Ident, Mac_, PatKind}; -use ast::{MacStmtStyle, StmtKind, ItemKind}; +use ast::{Block, Ident, Mac_, PatKind}; +use ast::{Name, MacStmtStyle, StmtKind, ItemKind}; use ast; use ext::hygiene::Mark; -use ext::placeholders::{self, placeholder, PlaceholderExpander}; +use ext::placeholders::{placeholder, PlaceholderExpander}; use attr::{self, HasAttrs}; use codemap::{ExpnInfo, NameAndSpan, MacroBang, MacroAttribute}; use syntax_pos::{self, Span, ExpnId}; -use config::StripUnconfigured; +use config::{is_test_or_bench, StripUnconfigured}; use ext::base::*; use feature_gate::{self, Features}; use fold; use fold::*; -use parse::token::{intern, keywords}; +use parse::{ParseSess, PResult, lexer}; +use parse::parser::Parser; +use parse::token::{self, intern, keywords}; +use print::pprust; use ptr::P; -use tokenstream::TokenTree; +use std_inject; +use tokenstream::{TokenTree, TokenStream}; use util::small_vector::SmallVector; use visit::Visitor; @@ -35,12 +39,12 @@ macro_rules! expansions { ($($kind:ident: $ty:ty [$($vec:ident, $ty_elt:ty)*], $kind_name:expr, .$make:ident, $(.$fold:ident)* $(lift .$fold_elt:ident)*, $(.$visit:ident)* $(lift .$visit_elt:ident)*;)*) => { - #[derive(Copy, Clone)] + #[derive(Copy, Clone, PartialEq, Eq)] pub enum ExpansionKind { OptExpr, $( $kind, )* } pub enum Expansion { OptExpr(Option>), $( $kind($ty), )* } impl ExpansionKind { - fn name(self) -> &'static str { + pub fn name(self) -> &'static str { match self { ExpansionKind::OptExpr => "expression", $( ExpansionKind::$kind => $kind_name, )* @@ -103,6 +107,12 @@ macro_rules! expansions { self.expand(Expansion::$kind(SmallVector::one(node))).$make() })*)* } + + impl<'a> MacResult for ::ext::tt::macro_rules::ParserAnyMacro<'a> { + $(fn $make(self: Box<::ext::tt::macro_rules::ParserAnyMacro<'a>>) -> Option<$ty> { + Some(self.make(ExpansionKind::$kind).$make()) + })* + } } } @@ -165,50 +175,43 @@ impl Invocation { InvocationKind::Attr { ref attr, .. } => attr.span, } } - - pub fn mark(&self) -> Mark { - self.expansion_data.mark - } } pub struct MacroExpander<'a, 'b:'a> { pub cx: &'a mut ExtCtxt<'b>, - pub single_step: bool, - pub keep_macs: bool, monotonic: bool, // c.f. 
`cx.monotonic_expander()` } impl<'a, 'b> MacroExpander<'a, 'b> { pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self { - MacroExpander { - cx: cx, - monotonic: monotonic, - single_step: false, - keep_macs: false, - } + MacroExpander { cx: cx, monotonic: monotonic } } - fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { - let err_count = self.cx.parse_sess.span_diagnostic.err_count(); - - let mut krate_item = placeholder(ExpansionKind::Items, ast::DUMMY_NODE_ID) - .make_items().pop().unwrap().unwrap(); - krate_item.node = ast::ItemKind::Mod(krate.module); - let krate_item = Expansion::Items(SmallVector::one(P(krate_item))); - - krate.module = match self.expand(krate_item).make_items().pop().unwrap().unwrap().node { - ast::ItemKind::Mod(module) => module, + pub fn expand_crate(&mut self, mut krate: ast::Crate) -> ast::Crate { + self.cx.crate_root = std_inject::injected_crate_name(&krate); + let mut module = ModuleData { + mod_path: vec![token::str_to_ident(&self.cx.ecfg.crate_name)], + directory: PathBuf::from(self.cx.codemap().span_to_filename(krate.span)), + }; + module.directory.pop(); + self.cx.current_expansion.module = Rc::new(module); + + let krate_item = Expansion::Items(SmallVector::one(P(ast::Item { + attrs: krate.attrs, + span: krate.span, + node: ast::ItemKind::Mod(krate.module), + ident: keywords::Invalid.ident(), + id: ast::DUMMY_NODE_ID, + vis: ast::Visibility::Public, + }))); + + match self.expand(krate_item).make_items().pop().unwrap().unwrap() { + ast::Item { attrs, node: ast::ItemKind::Mod(module), .. } => { + krate.attrs = attrs; + krate.module = module; + }, _ => unreachable!(), }; - krate.exported_macros = mem::replace(&mut self.cx.exported_macros, Vec::new()); - - for def in &mut krate.exported_macros { - def.id = self.cx.resolver.next_node_id() - } - - if self.cx.parse_sess.span_diagnostic.err_count() > err_count { - self.cx.parse_sess.span_diagnostic.abort_if_errors(); - } krate } @@ -221,24 +224,58 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let (expansion, mut invocations) = self.collect_invocations(expansion); invocations.reverse(); - let mut expansions = vec![vec![(0, expansion)]]; - while let Some(invoc) = invocations.pop() { + let mut expansions = Vec::new(); + let mut undetermined_invocations = Vec::new(); + let (mut progress, mut force) = (false, !self.monotonic); + loop { + let invoc = if let Some(invoc) = invocations.pop() { + invoc + } else if undetermined_invocations.is_empty() { + break + } else { + invocations = mem::replace(&mut undetermined_invocations, Vec::new()); + force = !mem::replace(&mut progress, false); + continue + }; + + let scope = + if self.monotonic { invoc.expansion_data.mark } else { orig_expansion_data.mark }; + let resolution = match invoc.kind { + InvocationKind::Bang { ref mac, .. } => { + self.cx.resolver.resolve_macro(scope, &mac.node.path, force) + } + InvocationKind::Attr { ref attr, .. } => { + let ident = ast::Ident::with_empty_ctxt(intern(&*attr.name())); + let path = ast::Path::from_ident(attr.span, ident); + self.cx.resolver.resolve_macro(scope, &path, force) + } + }; + let ext = match resolution { + Ok(ext) => Some(ext), + Err(Determinacy::Determined) => None, + Err(Determinacy::Undetermined) => { + undetermined_invocations.push(invoc); + continue + } + }; + + progress = true; let ExpansionData { depth, mark, .. 
} = invoc.expansion_data; self.cx.current_expansion = invoc.expansion_data.clone(); - let expansion = match self.cx.resolver.resolve_invoc(&invoc) { + self.cx.current_expansion.mark = scope; + let expansion = match ext { Some(ext) => self.expand_invoc(invoc, ext), None => invoc.expansion_kind.dummy(invoc.span()), }; - self.cx.current_expansion.depth = depth + 1; let (expansion, new_invocations) = self.collect_invocations(expansion); - if expansions.len() == depth { + if expansions.len() < depth { expansions.push(Vec::new()); } - expansions[depth].push((mark.as_u32(), expansion)); - if !self.single_step { + expansions[depth - 1].push((mark.as_u32(), expansion)); + if !self.cx.ecfg.single_step { invocations.extend(new_invocations.into_iter().rev()); } } @@ -248,20 +285,17 @@ impl<'a, 'b> MacroExpander<'a, 'b> { let mut placeholder_expander = PlaceholderExpander::new(self.cx, self.monotonic); while let Some(expansions) = expansions.pop() { for (mark, expansion) in expansions.into_iter().rev() { - let expansion = expansion.fold_with(&mut placeholder_expander); - placeholder_expander.add(mark, expansion); + placeholder_expander.add(ast::NodeId::from_u32(mark), expansion); } } - placeholder_expander.remove(0) + expansion.fold_with(&mut placeholder_expander) } fn collect_invocations(&mut self, expansion: Expansion) -> (Expansion, Vec) { - let crate_config = mem::replace(&mut self.cx.cfg, Vec::new()); let result = { let mut collector = InvocationCollector { cfg: StripUnconfigured { - config: &crate_config, should_test: self.cx.ecfg.should_test, sess: self.cx.parse_sess, features: self.cx.ecfg.features, @@ -272,10 +306,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; (expansion.fold_with(&mut collector), collector.invocations) }; - self.cx.cfg = crate_config; - let mark = self.cx.current_expansion.mark; - self.cx.resolver.visit_expansion(mark, &result.0); + if self.monotonic { + let err_count = self.cx.parse_sess.span_diagnostic.err_count(); + let mark = self.cx.current_expansion.mark; + self.cx.resolver.visit_expansion(mark, &result.0); + self.cx.resolve_err_count += self.cx.parse_sess.span_diagnostic.err_count() - err_count; + } + result } @@ -294,10 +332,11 @@ impl<'a, 'b> MacroExpander<'a, 'b> { }; attr::mark_used(&attr); + let name = intern(&attr.name()); self.cx.bt_push(ExpnInfo { call_site: attr.span, callee: NameAndSpan { - format: MacroAttribute(intern(&attr.name())), + format: MacroAttribute(name), span: Some(attr.span), allow_internal_unstable: false, } @@ -315,13 +354,28 @@ impl<'a, 'b> MacroExpander<'a, 'b> { items.push(item); kind.expect_from_annotatables(items) } - _ => unreachable!(), + SyntaxExtension::AttrProcMacro(ref mac) => { + let attr_toks = TokenStream::from_tts(tts_for_attr(&attr, &self.cx.parse_sess)); + let item_toks = TokenStream::from_tts(tts_for_item(&item, &self.cx.parse_sess)); + + let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks); + self.parse_expansion(tok_result, kind, name, attr.span) + } + SyntaxExtension::CustomDerive(_) => { + self.cx.span_err(attr.span, &format!("`{}` is a derive mode", name)); + kind.dummy(attr.span) + } + _ => { + let msg = &format!("macro `{}` may not be used in attributes", name); + self.cx.span_err(attr.span, &msg); + kind.dummy(attr.span) + } } } /// Expand a macro invocation. Returns the result of expansion. 
fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc) -> Expansion { - let (mark, kind) = (invoc.mark(), invoc.expansion_kind); + let (mark, kind) = (invoc.expansion_data.mark, invoc.expansion_kind); let (attrs, mac, ident, span) = match invoc.kind { InvocationKind::Bang { attrs, mac, ident, span } => (attrs, mac, ident, span), _ => unreachable!(), @@ -331,7 +385,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> { // Detect use of feature-gated or invalid attributes on macro invoations // since they will not be detected after macro expansion. for attr in attrs.iter() { - feature_gate::check_attribute(&attr, &self.cx.parse_sess.span_diagnostic, + feature_gate::check_attribute(&attr, &self.cx.parse_sess, &self.cx.parse_sess.codemap(), &self.cx.ecfg.features.unwrap()); } @@ -381,53 +435,43 @@ impl<'a, 'b> MacroExpander<'a, 'b> { } }); - kind.make_from(expander.expand(self.cx, span, ident, marked_tts)) + kind.make_from(expander.expand(self.cx, span, ident, marked_tts, attrs)) } - MacroRulesTT => { - if ident.name == keywords::Invalid.name() { - self.cx.span_err(path.span, - &format!("macro {}! expects an ident argument", extname)); + MultiDecorator(..) | MultiModifier(..) | SyntaxExtension::AttrProcMacro(..) => { + self.cx.span_err(path.span, + &format!("`{}` can only be used in attributes", extname)); + return kind.dummy(span); + } + + SyntaxExtension::CustomDerive(..) => { + self.cx.span_err(path.span, &format!("`{}` is a derive mode", extname)); + return kind.dummy(span); + } + + SyntaxExtension::ProcMacro(ref expandfun) => { + if ident.name != keywords::Invalid.name() { + let msg = + format!("macro {}! expects no ident argument, given '{}'", extname, ident); + self.cx.span_err(path.span, &msg); return kind.dummy(span); - }; + } self.cx.bt_push(ExpnInfo { call_site: span, callee: NameAndSpan { format: MacroBang(extname), + // FIXME procedural macros do not have proper span info + // yet, when they do, we should use it here. span: None, - // `macro_rules!` doesn't directly allow unstable - // (this is orthogonal to whether the macro it creates allows it) + // FIXME probably want to follow macro_rules macros here. allow_internal_unstable: false, - } + }, }); - let def = ast::MacroDef { - ident: ident, - id: ast::DUMMY_NODE_ID, - span: span, - imported_from: None, - use_locally: true, - body: marked_tts, - export: attr::contains_name(&attrs, "macro_export"), - allow_internal_unstable: attr::contains_name(&attrs, "allow_internal_unstable"), - attrs: attrs, - }; - - self.cx.insert_macro(def.clone()); - - // If keep_macs is true, expands to a MacEager::items instead. - if self.keep_macs { - Some(placeholders::reconstructed_macro_rules(&def, &path)) - } else { - Some(placeholders::macro_scope_placeholder()) - } - } - - MultiDecorator(..) | MultiModifier(..) 
=> { - self.cx.span_err(path.span, - &format!("`{}` can only be used in attributes", extname)); - return kind.dummy(span); + let toks = TokenStream::from_tts(marked_tts); + let tok_result = expandfun.expand(self.cx, span, toks); + Some(self.parse_expansion(tok_result, kind, extname, span)) } }; @@ -445,6 +489,75 @@ impl<'a, 'b> MacroExpander<'a, 'b> { expn_id: Some(self.cx.backtrace()), }) } + + fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, name: Name, span: Span) + -> Expansion { + let mut parser = self.cx.new_parser_from_tts(&toks.to_tts()); + let expansion = match parser.parse_expansion(kind, false) { + Ok(expansion) => expansion, + Err(mut err) => { + err.emit(); + return kind.dummy(span); + } + }; + parser.ensure_complete_parse(name, kind.name(), span); + // FIXME better span info + expansion.fold_with(&mut ChangeSpan { span: span }) + } +} + +impl<'a> Parser<'a> { + pub fn parse_expansion(&mut self, kind: ExpansionKind, macro_legacy_warnings: bool) + -> PResult<'a, Expansion> { + Ok(match kind { + ExpansionKind::Items => { + let mut items = SmallVector::zero(); + while let Some(item) = self.parse_item()? { + items.push(item); + } + Expansion::Items(items) + } + ExpansionKind::TraitItems => { + let mut items = SmallVector::zero(); + while self.token != token::Eof { + items.push(self.parse_trait_item()?); + } + Expansion::TraitItems(items) + } + ExpansionKind::ImplItems => { + let mut items = SmallVector::zero(); + while self.token != token::Eof { + items.push(self.parse_impl_item()?); + } + Expansion::ImplItems(items) + } + ExpansionKind::Stmts => { + let mut stmts = SmallVector::zero(); + while self.token != token::Eof { + if let Some(stmt) = self.parse_full_stmt(macro_legacy_warnings)? { + stmts.push(stmt); + } + } + Expansion::Stmts(stmts) + } + ExpansionKind::Expr => Expansion::Expr(self.parse_expr()?), + ExpansionKind::OptExpr => Expansion::OptExpr(Some(self.parse_expr()?)), + ExpansionKind::Ty => Expansion::Ty(self.parse_ty()?), + ExpansionKind::Pat => Expansion::Pat(self.parse_pat()?), + }) + } + + pub fn ensure_complete_parse(&mut self, macro_name: ast::Name, kind_name: &str, span: Span) { + if self.token != token::Eof { + let msg = format!("macro expansion ignores token `{}` and any following", + self.this_token_to_string()); + let mut err = self.diagnostic().struct_span_err(self.span, &msg); + let msg = format!("caused by the macro expansion here; the usage \ + of `{}!` is likely invalid in {} context", + macro_name, kind_name); + err.span_note(span, &msg).emit(); + } + } } struct InvocationCollector<'a, 'b: 'a> { @@ -469,9 +582,13 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { self.invocations.push(Invocation { kind: kind, expansion_kind: expansion_kind, - expansion_data: ExpansionData { mark: mark, ..self.cx.current_expansion.clone() }, + expansion_data: ExpansionData { + mark: mark, + depth: self.cx.current_expansion.depth + 1, + ..self.cx.current_expansion.clone() + }, }); - placeholder(expansion_kind, mark.as_u32()) + placeholder(expansion_kind, ast::NodeId::from_u32(mark.as_u32())) } fn collect_bang( @@ -500,6 +617,36 @@ impl<'a, 'b> InvocationCollector<'a, 'b> { } } +// These are pretty nasty. Ideally, we would keep the tokens around, linked from +// the AST. However, we don't so we need to create new ones. Since the item might +// have come from a macro expansion (possibly only in part), we can't use the +// existing codemap. 
+// +// Therefore, we must use the pretty printer (yuck) to turn the AST node into a +// string, which we then re-tokenise (double yuck), but first we have to patch +// the pretty-printed string on to the end of the existing codemap (infinity-yuck). +fn tts_for_item(item: &Annotatable, parse_sess: &ParseSess) -> Vec { + let text = match *item { + Annotatable::Item(ref i) => pprust::item_to_string(i), + Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti), + Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii), + }; + string_to_tts(text, parse_sess) +} + +fn tts_for_attr(attr: &ast::Attribute, parse_sess: &ParseSess) -> Vec { + string_to_tts(pprust::attr_to_string(attr), parse_sess) +} + +fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec { + let filemap = parse_sess.codemap() + .new_filemap(String::from(""), None, text); + + let lexer = lexer::StringReader::new(&parse_sess.span_diagnostic, filemap); + let mut parser = Parser::new(parse_sess, Box::new(lexer)); + panictry!(parser.parse_all_token_trees()) +} + impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { fn fold_expr(&mut self, expr: P) -> P { let mut expr = self.cfg.configure_expr(expr).unwrap(); @@ -568,16 +715,16 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { } fn fold_block(&mut self, block: P) -> P { - let orig_in_block = mem::replace(&mut self.cx.current_expansion.in_block, true); + let no_noninline_mod = mem::replace(&mut self.cx.current_expansion.no_noninline_mod, true); let result = noop_fold_block(block, self); - self.cx.current_expansion.in_block = orig_in_block; + self.cx.current_expansion.no_noninline_mod = no_noninline_mod; result } fn fold_item(&mut self, item: P) -> SmallVector> { let item = configure!(self, item); - let (item, attr) = self.classify_item(item); + let (mut item, attr) = self.classify_item(item); if let Some(attr) = attr { let item = Annotatable::Item(fully_configure!(self, item, noop_fold_item)); return self.collect_attr(attr, item, ExpansionKind::Items).make_items(); @@ -609,6 +756,7 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { return noop_fold_item(item, self); } + let orig_no_noninline_mod = self.cx.current_expansion.no_noninline_mod; let mut module = (*self.cx.current_expansion.module).clone(); module.mod_path.push(item.ident); @@ -618,11 +766,14 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { let inline_module = item.span.contains(inner) || inner == syntax_pos::DUMMY_SP; if inline_module { - module.directory.push(&*{ - ::attr::first_attr_value_str_by_name(&item.attrs, "path") - .unwrap_or(item.ident.name.as_str()) - }); + if let Some(path) = attr::first_attr_value_str_by_name(&item.attrs, "path") { + self.cx.current_expansion.no_noninline_mod = false; + module.directory.push(&*path); + } else { + module.directory.push(&*item.ident.name.as_str()); + } } else { + self.cx.current_expansion.no_noninline_mod = false; module.directory = PathBuf::from(self.cx.parse_sess.codemap().span_to_filename(inner)); module.directory.pop(); @@ -632,22 +783,16 @@ impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { mem::replace(&mut self.cx.current_expansion.module, Rc::new(module)); let result = noop_fold_item(item, self); self.cx.current_expansion.module = orig_module; + self.cx.current_expansion.no_noninline_mod = orig_no_noninline_mod; return result; } - ast::ItemKind::ExternCrate(..) => { - // We need to error on `#[macro_use] extern crate` when it isn't at the - // crate root, because `$crate` won't work properly. 
- let is_crate_root = self.cx.current_expansion.module.mod_path.len() == 1; - for def in self.cx.resolver.load_crate(&*item, is_crate_root) { - match def { - LoadedMacro::Def(def) => self.cx.insert_macro(def), - LoadedMacro::CustomDerive(name, ext) => { - self.cx.insert_custom_derive(&name, ext, item.span); - } - } + // Ensure that test functions are accessible from the test harness. + ast::ItemKind::Fn(..) if self.cx.ecfg.should_test => { + if item.attrs.iter().any(|attr| is_test_or_bench(attr)) { + item = item.map(|mut item| { item.vis = ast::Visibility::Public; item }); } noop_fold_item(item, self) - }, + } _ => noop_fold_item(item, self), } } @@ -726,6 +871,8 @@ pub struct ExpansionConfig<'feat> { pub recursion_limit: usize, pub trace_mac: bool, pub should_test: bool, // If false, strip `#[test]` nodes + pub single_step: bool, + pub keep_macs: bool, } macro_rules! feature_tests { @@ -749,6 +896,8 @@ impl<'feat> ExpansionConfig<'feat> { recursion_limit: 64, trace_mac: false, should_test: false, + single_step: false, + keep_macs: false, } } @@ -761,26 +910,10 @@ impl<'feat> ExpansionConfig<'feat> { fn enable_allow_internal_unstable = allow_internal_unstable, fn enable_custom_derive = custom_derive, fn enable_pushpop_unsafe = pushpop_unsafe, - fn enable_rustc_macro = rustc_macro, + fn enable_proc_macro = proc_macro, } } -pub fn expand_crate(cx: &mut ExtCtxt, - user_exts: Vec, - c: Crate) -> Crate { - cx.initialize(user_exts, &c); - cx.monotonic_expander().expand_crate(c) -} - -// Expands crate using supplied MacroExpander - allows for -// non-standard expansion behaviour (e.g. step-wise). -pub fn expand_crate_with_expander(expander: &mut MacroExpander, - user_exts: Vec, - c: Crate) -> Crate { - expander.cx.initialize(user_exts, &c); - expander.expand_crate(c) -} - // A Marker adds the given mark to the syntax context and // sets spans' `expn_id` to the given expn_id (unless it is `None`). struct Marker { mark: Mark, expn_id: Option } @@ -803,6 +936,6 @@ impl Folder for Marker { } // apply a given mark to the given token trees. Used prior to expansion of a macro. -fn mark_tts(tts: &[TokenTree], m: Mark) -> Vec { +pub fn mark_tts(tts: &[TokenTree], m: Mark) -> Vec { noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None}) } diff --git a/src/libsyntax/ext/hygiene.rs b/src/libsyntax/ext/hygiene.rs index 34126fac4ac78..0fd72277cca9f 100644 --- a/src/libsyntax/ext/hygiene.rs +++ b/src/libsyntax/ext/hygiene.rs @@ -15,6 +15,7 @@ //! and definition contexts*. J. Funct. Program. 22, 2 (March 2012), 181-216. //! 
DOI=10.1017/S0956796812000093 http://dx.doi.org/10.1017/S0956796812000093 +use ast::NodeId; use std::cell::RefCell; use std::collections::HashMap; use std::fmt; @@ -46,6 +47,10 @@ impl Mark { Mark(0) } + pub fn from_placeholder_id(id: NodeId) -> Self { + Mark(id.as_u32()) + } + pub fn as_u32(&self) -> u32 { self.0 } diff --git a/src/libsyntax/ext/placeholders.rs b/src/libsyntax/ext/placeholders.rs index 47f366a88768e..e323dd2f62327 100644 --- a/src/libsyntax/ext/placeholders.rs +++ b/src/libsyntax/ext/placeholders.rs @@ -13,7 +13,7 @@ use codemap::{DUMMY_SP, dummy_spanned}; use ext::base::ExtCtxt; use ext::expand::{Expansion, ExpansionKind}; use fold::*; -use parse::token::keywords; +use parse::token::{intern, keywords}; use ptr::P; use util::move_map::MoveMap; use util::small_vector::SmallVector; @@ -88,10 +88,11 @@ impl<'a, 'b> PlaceholderExpander<'a, 'b> { } pub fn add(&mut self, id: ast::NodeId, expansion: Expansion) { + let expansion = expansion.fold_with(self); self.expansions.insert(id, expansion); } - pub fn remove(&mut self, id: ast::NodeId) -> Expansion { + fn remove(&mut self, id: ast::NodeId) -> Expansion { self.expansions.remove(&id).unwrap() } } @@ -214,7 +215,7 @@ impl<'a, 'b> Folder for PlaceholderExpander<'a, 'b> { } } -pub fn reconstructed_macro_rules(def: &ast::MacroDef, path: &ast::Path) -> Expansion { +pub fn reconstructed_macro_rules(def: &ast::MacroDef) -> Expansion { Expansion::Items(SmallVector::one(P(ast::Item { ident: def.ident, attrs: def.attrs.clone(), @@ -222,7 +223,14 @@ pub fn reconstructed_macro_rules(def: &ast::MacroDef, path: &ast::Path) -> Expan node: ast::ItemKind::Mac(ast::Mac { span: def.span, node: ast::Mac_ { - path: path.clone(), + path: ast::Path { + span: DUMMY_SP, + global: false, + segments: vec![ast::PathSegment { + identifier: ast::Ident::with_empty_ctxt(intern("macro_rules")), + parameters: ast::PathParameters::none(), + }], + }, tts: def.body.clone(), } }), diff --git a/src/libsyntax/ext/proc_macro_shim.rs b/src/libsyntax/ext/proc_macro_shim.rs index fa37e9b54e457..dc3a01f41bc9d 100644 --- a/src/libsyntax/ext/proc_macro_shim.rs +++ b/src/libsyntax/ext/proc_macro_shim.rs @@ -24,7 +24,9 @@ use ext::base::*; /// Take a `ExtCtxt`, `Span`, and `TokenStream`, and produce a Macro Result that parses /// the TokenStream as a block and returns it as an `Expr`. 
-pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt, sp: Span, output: TokenStream) +pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt, + sp: Span, + output: TokenStream) -> Box { let parser = cx.new_parser_from_tts(&output.to_tts()); @@ -60,7 +62,7 @@ pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt, sp: Span, output: TokenStr } pub mod prelude { - pub use ext::proc_macro_shim::build_block_emitter; + pub use super::build_block_emitter; pub use ast::Ident; pub use codemap::{DUMMY_SP, Span}; pub use ext::base::{ExtCtxt, MacResult}; diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index b70e270df54d2..f21360755bc26 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -46,7 +46,7 @@ pub mod rt { impl ToTokens for TokenTree { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec!(self.clone()) + vec![self.clone()] } } @@ -331,7 +331,6 @@ pub mod rt { panictry!(parse::parse_item_from_source_str( "".to_string(), s, - self.cfg(), self.parse_sess())).expect("parse error") } @@ -339,7 +338,6 @@ pub mod rt { panictry!(parse::parse_stmt_from_source_str( "".to_string(), s, - self.cfg(), self.parse_sess())).expect("parse error") } @@ -347,7 +345,6 @@ pub mod rt { panictry!(parse::parse_expr_from_source_str( "".to_string(), s, - self.cfg(), self.parse_sess())) } @@ -355,7 +352,6 @@ pub mod rt { panictry!(parse::parse_tts_from_source_str( "".to_string(), s, - self.cfg(), self.parse_sess())) } } @@ -420,7 +416,7 @@ pub fn expand_quote_expr<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_expr_panic", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_expr_panic", vec![], tts); base::MacEager::expr(expanded) } @@ -428,7 +424,7 @@ pub fn expand_quote_item<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_item_panic", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_item_panic", vec![], tts); base::MacEager::expr(expanded) } @@ -436,7 +432,7 @@ pub fn expand_quote_pat<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_pat_panic", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_pat_panic", vec![], tts); base::MacEager::expr(expanded) } @@ -444,7 +440,7 @@ pub fn expand_quote_arm(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_arm_panic", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_arm_panic", vec![], tts); base::MacEager::expr(expanded) } @@ -452,7 +448,7 @@ pub fn expand_quote_ty(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_ty_panic", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_ty_panic", vec![], tts); base::MacEager::expr(expanded) } @@ -460,7 +456,7 @@ pub fn expand_quote_stmt(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_stmt_panic", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_stmt_panic", vec![], tts); base::MacEager::expr(expanded) } @@ -469,7 +465,7 @@ pub fn expand_quote_attr(cx: &mut ExtCtxt, tts: &[TokenTree]) -> Box { let expanded = expand_parse_call(cx, sp, "parse_attribute_panic", - vec!(cx.expr_bool(sp, true)), tts); + vec![cx.expr_bool(sp, true)], tts); base::MacEager::expr(expanded) } @@ -478,7 +474,7 @@ pub fn expand_quote_arg(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> 
Box { - let expanded = expand_parse_call(cx, sp, "parse_arg_panic", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_arg_panic", vec![], tts); base::MacEager::expr(expanded) } @@ -486,7 +482,7 @@ pub fn expand_quote_block(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_block_panic", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_block_panic", vec![], tts); base::MacEager::expr(expanded) } @@ -494,7 +490,7 @@ pub fn expand_quote_meta_item(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let expanded = expand_parse_call(cx, sp, "parse_meta_item_panic", vec!(), tts); + let expanded = expand_parse_call(cx, sp, "parse_meta_item_panic", vec![], tts); base::MacEager::expr(expanded) } @@ -503,7 +499,7 @@ pub fn expand_quote_path(cx: &mut ExtCtxt, tts: &[TokenTree]) -> Box { let mode = mk_parser_path(cx, sp, &["PathStyle", "Type"]); - let expanded = expand_parse_call(cx, sp, "parse_path_panic", vec!(mode), tts); + let expanded = expand_parse_call(cx, sp, "parse_path_panic", vec![mode], tts); base::MacEager::expr(expanded) } @@ -535,7 +531,7 @@ fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("ident_of"), - vec!(e_str)) + vec![e_str]) } // Lift a name to the expr that evaluates to that name @@ -544,16 +540,16 @@ fn mk_name(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> P { cx.expr_method_call(sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("name_of"), - vec!(e_str)) + vec![e_str]) } fn mk_tt_path(cx: &ExtCtxt, sp: Span, name: &str) -> P { - let idents = vec!(id_ext("syntax"), id_ext("tokenstream"), id_ext("TokenTree"), id_ext(name)); + let idents = vec![id_ext("syntax"), id_ext("tokenstream"), id_ext("TokenTree"), id_ext(name)]; cx.expr_path(cx.path_global(sp, idents)) } fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> P { - let idents = vec!(id_ext("syntax"), id_ext("parse"), id_ext("token"), id_ext(name)); + let idents = vec![id_ext("syntax"), id_ext("parse"), id_ext("token"), id_ext(name)]; cx.expr_path(cx.path_global(sp, idents)) } @@ -603,11 +599,11 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { } match *tok { token::BinOp(binop) => { - return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec!(mk_binop(cx, sp, binop))); + return cx.expr_call(sp, mk_token_path(cx, sp, "BinOp"), vec![mk_binop(cx, sp, binop)]); } token::BinOpEq(binop) => { return cx.expr_call(sp, mk_token_path(cx, sp, "BinOpEq"), - vec!(mk_binop(cx, sp, binop))); + vec![mk_binop(cx, sp, binop)]); } token::OpenDelim(delim) => { @@ -657,13 +653,13 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { token::Lifetime(ident) => { return cx.expr_call(sp, mk_token_path(cx, sp, "Lifetime"), - vec!(mk_ident(cx, sp, ident))); + vec![mk_ident(cx, sp, ident)]); } token::DocComment(ident) => { return cx.expr_call(sp, mk_token_path(cx, sp, "DocComment"), - vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))); + vec![mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))]); } token::MatchNt(name, kind) => { @@ -718,7 +714,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec Vec { let mut seq = vec![]; @@ -741,13 +737,13 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec { statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter() @@ -800,13 +796,13 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec Vec { let stmt_let_tt = cx.stmt_let(sp, 
true, id_ext("tt"), cx.expr_vec_ng(sp)); - vec!(stmt_let_sp, stmt_let_tt) + vec![stmt_let_sp, stmt_let_tt] } fn statements_mk_tts(cx: &ExtCtxt, tts: &[TokenTree], matcher: bool) -> Vec { @@ -920,10 +916,6 @@ fn expand_parse_call(cx: &ExtCtxt, tts: &[TokenTree]) -> P { let (cx_expr, tts_expr) = expand_tts(cx, sp, tts); - let cfg_call = || cx.expr_method_call( - sp, cx.expr_ident(sp, id_ext("ext_cx")), - id_ext("cfg"), Vec::new()); - let parse_sess_call = || cx.expr_method_call( sp, cx.expr_ident(sp, id_ext("ext_cx")), id_ext("parse_sess"), Vec::new()); @@ -931,7 +923,7 @@ fn expand_parse_call(cx: &ExtCtxt, let new_parser_call = cx.expr_call(sp, cx.expr_ident(sp, id_ext("new_parser_from_tts")), - vec!(parse_sess_call(), cfg_call(), tts_expr)); + vec![parse_sess_call(), tts_expr]); let path = vec![id_ext("syntax"), id_ext("ext"), id_ext("quote"), id_ext(parse_method)]; let mut args = vec![cx.expr_mut_addr_of(sp, new_parser_call)]; diff --git a/src/libsyntax/ext/source_util.rs b/src/libsyntax/ext/source_util.rs index e75e41d0c2d4b..ec48cae3f7652 100644 --- a/src/libsyntax/ext/source_util.rs +++ b/src/libsyntax/ext/source_util.rs @@ -92,15 +92,8 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::T None => return DummyResult::expr(sp), }; // The file will be added to the code map by the parser - let p = - parse::new_sub_parser_from_file(cx.parse_sess(), - cx.cfg(), - &res_rel_file(cx, - sp, - Path::new(&file)), - true, - None, - sp); + let path = res_rel_file(cx, sp, Path::new(&file)); + let p = parse::new_sub_parser_from_file(cx.parse_sess(), &path, true, None, sp); struct ExpandResult<'a> { p: parse::parser::Parser<'a>, diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index b0696a986e3c0..7e3fe3285695c 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -78,7 +78,6 @@ pub use self::NamedMatch::*; pub use self::ParseResult::*; use self::TokenTreeOrTokenTreeVec::*; -use ast; use ast::Ident; use syntax_pos::{self, BytePos, mk_sp, Span}; use codemap::Spanned; @@ -92,6 +91,7 @@ use parse::token; use print::pprust; use ptr::P; use tokenstream::{self, TokenTree}; +use util::small_vector::SmallVector; use std::mem; use std::rc::Rc; @@ -104,7 +104,7 @@ use std::collections::hash_map::Entry::{Vacant, Occupied}; #[derive(Clone)] enum TokenTreeOrTokenTreeVec { Tt(tokenstream::TokenTree), - TtSeq(Rc>), + TtSeq(Vec), } impl TokenTreeOrTokenTreeVec { @@ -161,7 +161,7 @@ pub fn count_names(ms: &[TokenTree]) -> usize { }) } -pub fn initial_matcher_pos(ms: Rc>, sep: Option, lo: BytePos) +pub fn initial_matcher_pos(ms: Vec, sep: Option, lo: BytePos) -> Box { let match_idx_hi = count_names(&ms[..]); let matches: Vec<_> = (0..match_idx_hi).map(|_| Vec::new()).collect(); @@ -251,14 +251,22 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc]) pub enum ParseResult { Success(T), - /// Arm failed to match - Failure(syntax_pos::Span, String), + /// Arm failed to match. If the second parameter is `token::Eof`, it + /// indicates an unexpected end of macro invocation. Otherwise, it + /// indicates that no rules expected the given token. + Failure(syntax_pos::Span, Token), /// Fatal error (malformed macro?). Abort compilation. 
Error(syntax_pos::Span, String) } +pub fn parse_failure_msg(tok: Token) -> String { + match tok { + token::Eof => "unexpected end of macro invocation".to_string(), + _ => format!("no rules expected the token `{}`", pprust::token_to_string(&tok)), + } +} + pub type NamedParseResult = ParseResult>>; -pub type PositionalParseResult = ParseResult>>; /// Perform a token equality check, ignoring syntax context (that is, an /// unhygienic comparison) @@ -271,17 +279,10 @@ pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool { } } -pub fn parse(sess: &ParseSess, - cfg: ast::CrateConfig, - mut rdr: TtReader, - ms: &[TokenTree]) - -> NamedParseResult { - let mut cur_eis = Vec::new(); - cur_eis.push(initial_matcher_pos(Rc::new(ms.iter() - .cloned() - .collect()), - None, - rdr.peek().sp.lo)); +pub fn parse(sess: &ParseSess, mut rdr: TtReader, ms: &[TokenTree]) -> NamedParseResult { + let mut cur_eis = SmallVector::one(initial_matcher_pos(ms.to_owned(), + None, + rdr.peek().sp.lo)); loop { let mut bb_eis = Vec::new(); // black-box parsed by parser.rs @@ -425,8 +426,8 @@ pub fn parse(sess: &ParseSess, cur_eis.push(ei); } TokenTree::Token(_, ref t) => { - let mut ei_t = ei.clone(); if token_name_eq(t,&tok) { + let mut ei_t = ei.clone(); ei_t.idx += 1; next_eis.push(ei_t); } @@ -446,7 +447,7 @@ pub fn parse(sess: &ParseSess, } else if eof_eis.len() > 1 { return Error(sp, "ambiguity: multiple successful parses".to_string()); } else { - return Failure(sp, "unexpected end of macro invocation".to_string()); + return Failure(sp, token::Eof); } } else { if (!bb_eis.is_empty() && !next_eis.is_empty()) @@ -467,8 +468,7 @@ pub fn parse(sess: &ParseSess, } )) } else if bb_eis.is_empty() && next_eis.is_empty() { - return Failure(sp, format!("no rules expected the token `{}`", - pprust::token_to_string(&tok))); + return Failure(sp, tok); } else if !next_eis.is_empty() { /* Now process the next token */ while !next_eis.is_empty() { @@ -476,24 +476,21 @@ pub fn parse(sess: &ParseSess, } rdr.next_token(); } else /* bb_eis.len() == 1 */ { - let mut rust_parser = Parser::new(sess, cfg.clone(), Box::new(rdr.clone())); - - let mut ei = bb_eis.pop().unwrap(); - match ei.top_elts.get_tt(ei.idx) { - TokenTree::Token(span, MatchNt(_, ident)) => { + rdr.next_tok = { + let mut rust_parser = Parser::new(sess, Box::new(&mut rdr)); + let mut ei = bb_eis.pop().unwrap(); + if let TokenTree::Token(span, MatchNt(_, ident)) = ei.top_elts.get_tt(ei.idx) { let match_cur = ei.match_cur; (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( parse_nt(&mut rust_parser, span, &ident.name.as_str())))); ei.idx += 1; ei.match_cur += 1; + } else { + unreachable!() } - _ => panic!() - } - cur_eis.push(ei); - - for _ in 0..rust_parser.tokens_consumed { - let _ = rdr.next_token(); - } + cur_eis.push(ei); + Some(TokenAndSpan { tok: rust_parser.token, sp: rust_parser.span }) + }; } } diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 51ef45b97be6f..431e757368c03 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -8,30 +8,29 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
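// (Editor's aside, illustrative only; not part of this patch.) `Failure` now
// carries the offending `Token` rather than a pre-rendered `String`, and the
// message is produced later by `parse_failure_msg`. For a hypothetical macro:
//
//     macro_rules! pair { ($a:expr, $b:expr) => { ($a, $b) } }
//     let p = pair!(1; 2); // error: no rules expected the token `;`
//
// Only the furthest-failing arm's token is kept (see `best_fail_tok` below),
// so the string is formatted exactly once, at the report site.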
-use ast; +use {ast, attr}; use syntax_pos::{Span, DUMMY_SP}; -use ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension}; -use ext::base::{NormalTT, TTMacroExpander}; +use ext::base::{DummyResult, ExtCtxt, MacEager, MacResult, SyntaxExtension}; +use ext::base::{IdentMacroExpander, NormalTT, TTMacroExpander}; +use ext::expand::{Expansion, ExpansionKind}; +use ext::placeholders; use ext::tt::macro_parser::{Success, Error, Failure}; use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; -use ext::tt::macro_parser::parse; +use ext::tt::macro_parser::{parse, parse_failure_msg}; +use parse::ParseSess; use parse::lexer::new_tt_reader; use parse::parser::{Parser, Restrictions}; use parse::token::{self, gensym_ident, NtTT, Token}; use parse::token::Token::*; use print; -use ptr::P; use tokenstream::{self, TokenTree}; -use util::small_vector::SmallVector; - -use std::cell::RefCell; use std::collections::{HashMap}; use std::collections::hash_map::{Entry}; use std::rc::Rc; -struct ParserAnyMacro<'a> { - parser: RefCell>, +pub struct ParserAnyMacro<'a> { + parser: Parser<'a>, /// Span of the expansion site of the macro this parser is for site_span: Span, @@ -40,115 +39,25 @@ struct ParserAnyMacro<'a> { } impl<'a> ParserAnyMacro<'a> { - /// Make sure we don't have any tokens left to parse, so we don't - /// silently drop anything. `allow_semi` is so that "optional" - /// semicolons at the end of normal expressions aren't complained - /// about e.g. the semicolon in `macro_rules! kapow { () => { - /// panic!(); } }` doesn't get picked up by .parse_expr(), but it's - /// allowed to be there. - fn ensure_complete_parse(&self, allow_semi: bool, context: &str) { - let mut parser = self.parser.borrow_mut(); - if allow_semi && parser.token == token::Semi { + pub fn make(mut self: Box>, kind: ExpansionKind) -> Expansion { + let ParserAnyMacro { site_span, macro_ident, ref mut parser } = *self; + let expansion = panictry!(parser.parse_expansion(kind, true)); + + // We allow semicolons at the end of expressions -- e.g. the semicolon in + // `macro_rules! m { () => { panic!(); } }` isn't parsed by `.parse_expr()`, + // but `m!()` is allowed in expression positions (c.f. issue #34706). 
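// (Editor's illustration, not part of this patch.) The user-visible effect of
// consuming that optional semicolon: an expression-position invocation whose
// rule body ends in `;` still parses, e.g.
//
//     macro_rules! kapow { () => { panic!(); } }
//
//     fn main() {
//         let x: u32 = if true { 1 } else { kapow!() };
//         println!("{}", x);
//     }
//
// Here `kapow!()` expands to `panic!();`; the trailing `Semi` is bumped past
// so the expansion is still accepted as an expression (issue #34706).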
+ if kind == ExpansionKind::Expr && parser.token == token::Semi { parser.bump(); } - if parser.token != token::Eof { - let token_str = parser.this_token_to_string(); - let msg = format!("macro expansion ignores token `{}` and any \ - following", - token_str); - let span = parser.span; - let mut err = parser.diagnostic().struct_span_err(span, &msg[..]); - let msg = format!("caused by the macro expansion here; the usage \ - of `{}!` is likely invalid in {} context", - self.macro_ident, context); - err.span_note(self.site_span, &msg[..]) - .emit(); - } - } -} - -impl<'a> MacResult for ParserAnyMacro<'a> { - fn make_expr(self: Box>) -> Option> { - let ret = panictry!(self.parser.borrow_mut().parse_expr()); - self.ensure_complete_parse(true, "expression"); - Some(ret) - } - fn make_pat(self: Box>) -> Option> { - let ret = panictry!(self.parser.borrow_mut().parse_pat()); - self.ensure_complete_parse(false, "pattern"); - Some(ret) - } - fn make_items(self: Box>) -> Option>> { - let mut ret = SmallVector::zero(); - while let Some(item) = panictry!(self.parser.borrow_mut().parse_item()) { - ret.push(item); - } - self.ensure_complete_parse(false, "item"); - Some(ret) - } - - fn make_impl_items(self: Box>) - -> Option> { - let mut ret = SmallVector::zero(); - loop { - let mut parser = self.parser.borrow_mut(); - match parser.token { - token::Eof => break, - _ => ret.push(panictry!(parser.parse_impl_item())) - } - } - self.ensure_complete_parse(false, "item"); - Some(ret) - } - - fn make_trait_items(self: Box>) - -> Option> { - let mut ret = SmallVector::zero(); - loop { - let mut parser = self.parser.borrow_mut(); - match parser.token { - token::Eof => break, - _ => ret.push(panictry!(parser.parse_trait_item())) - } - } - self.ensure_complete_parse(false, "item"); - Some(ret) - } - - - fn make_stmts(self: Box>) - -> Option> { - let mut ret = SmallVector::zero(); - loop { - let mut parser = self.parser.borrow_mut(); - match parser.token { - token::Eof => break, - _ => match parser.parse_full_stmt(true) { - Ok(maybe_stmt) => match maybe_stmt { - Some(stmt) => ret.push(stmt), - None => (), - }, - Err(mut e) => { - e.emit(); - break; - } - } - } - } - self.ensure_complete_parse(false, "statement"); - Some(ret) - } - fn make_ty(self: Box>) -> Option> { - let ret = panictry!(self.parser.borrow_mut().parse_ty()); - self.ensure_complete_parse(false, "type"); - Some(ret) + // Make sure we don't have any tokens left to parse so we don't silently drop anything. + parser.ensure_complete_parse(macro_ident.name, kind.name(), site_span); + expansion } } struct MacroRulesMacroExpander { name: ast::Ident, - imported_from: Option, lhses: Vec, rhses: Vec, valid: bool, @@ -166,7 +75,6 @@ impl TTMacroExpander for MacroRulesMacroExpander { generic_extension(cx, sp, self.name, - self.imported_from, arg, &self.lhses, &self.rhses) @@ -177,7 +85,6 @@ impl TTMacroExpander for MacroRulesMacroExpander { fn generic_extension<'cx>(cx: &'cx ExtCtxt, sp: Span, name: ast::Ident, - imported_from: Option, arg: &[TokenTree], lhses: &[TokenTree], rhses: &[TokenTree]) @@ -190,7 +97,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, // Which arm's failure should we report? 
(the one furthest along) let mut best_fail_spot = DUMMY_SP; - let mut best_fail_msg = "internal error: ran no matchers".to_string(); + let mut best_fail_tok = None; for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers let lhs_tt = match *lhs { @@ -206,13 +113,11 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, _ => cx.span_bug(sp, "malformed macro rhs"), }; // rhs has holes ( `$id` and `$(...)` that need filled) - let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic, - Some(named_matches), - imported_from, - rhs); - let mut p = Parser::new(cx.parse_sess(), cx.cfg(), Box::new(trncbr)); + let trncbr = + new_tt_reader(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs); + let mut p = Parser::new(cx.parse_sess(), Box::new(trncbr)); p.directory = cx.current_expansion.module.directory.clone(); - p.restrictions = match cx.current_expansion.in_block { + p.restrictions = match cx.current_expansion.no_noninline_mod { true => Restrictions::NO_NONINLINE_MOD, false => Restrictions::empty(), }; @@ -220,7 +125,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, // Let the context choose how to interpret the result. // Weird, but useful for X-macros. return Box::new(ParserAnyMacro { - parser: RefCell::new(p), + parser: p, // Pass along the original expansion site and the name of the macro // so we can print a useful error message if the parse of the expanded @@ -229,9 +134,9 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, macro_ident: name }) } - Failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo { + Failure(sp, tok) => if sp.lo >= best_fail_spot.lo { best_fail_spot = sp; - best_fail_msg = (*msg).clone(); + best_fail_tok = Some(tok); }, Error(err_sp, ref msg) => { cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]) @@ -239,7 +144,40 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, } } - cx.span_fatal(best_fail_spot.substitute_dummy(sp), &best_fail_msg[..]); + let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers")); + cx.span_fatal(best_fail_spot.substitute_dummy(sp), &best_fail_msg); +} + +pub struct MacroRulesExpander; +impl IdentMacroExpander for MacroRulesExpander { + fn expand(&self, + cx: &mut ExtCtxt, + span: Span, + ident: ast::Ident, + tts: Vec, + attrs: Vec) + -> Box { + let export = attr::contains_name(&attrs, "macro_export"); + let def = ast::MacroDef { + ident: ident, + id: ast::DUMMY_NODE_ID, + span: span, + imported_from: None, + body: tts, + allow_internal_unstable: attr::contains_name(&attrs, "allow_internal_unstable"), + attrs: attrs, + }; + + // If keep_macs is true, expands to a MacEager::items instead. + let result = if cx.ecfg.keep_macs { + MacEager::items(placeholders::reconstructed_macro_rules(&def).make_items()) + } else { + MacEager::items(placeholders::macro_scope_placeholder().make_items()) + }; + + cx.resolver.add_macro(cx.current_expansion.mark, def, export); + result + } } // Note that macro-by-example's input is also matched against a token tree: @@ -248,9 +186,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt, // Holy self-referential! /// Converts a `macro_rules!` invocation into a syntax extension. -pub fn compile<'cx>(cx: &'cx mut ExtCtxt, - def: &ast::MacroDef) -> SyntaxExtension { - +pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension { let lhs_nm = gensym_ident("lhs"); let rhs_nm = gensym_ident("rhs"); @@ -282,19 +218,16 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, ]; // Parse the macro_rules! 
invocation (`none` is for no interpolations): - let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic, - None, - None, - def.body.clone()); - - let argument_map = match parse(cx.parse_sess(), - cx.cfg(), - arg_reader, - &argument_gram) { + let arg_reader = new_tt_reader(&sess.span_diagnostic, None, def.body.clone()); + + let argument_map = match parse(sess, arg_reader, &argument_gram) { Success(m) => m, - Failure(sp, str) | Error(sp, str) => { - panic!(cx.parse_sess().span_diagnostic - .span_fatal(sp.substitute_dummy(def.span), &str[..])); + Failure(sp, tok) => { + let s = parse_failure_msg(tok); + panic!(sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s)); + } + Error(sp, s) => { + panic!(sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s)); } }; @@ -305,32 +238,36 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, MatchedSeq(ref s, _) => { s.iter().map(|m| match **m { MatchedNonterminal(NtTT(ref tt)) => { - valid &= check_lhs_nt_follows(cx, tt); + valid &= check_lhs_nt_follows(sess, tt); (**tt).clone() } - _ => cx.span_bug(def.span, "wrong-structured lhs") - }).collect() + _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") + }).collect::>() } - _ => cx.span_bug(def.span, "wrong-structured lhs") + _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs") }; let rhses = match **argument_map.get(&rhs_nm).unwrap() { MatchedSeq(ref s, _) => { s.iter().map(|m| match **m { MatchedNonterminal(NtTT(ref tt)) => (**tt).clone(), - _ => cx.span_bug(def.span, "wrong-structured rhs") + _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs") }).collect() } - _ => cx.span_bug(def.span, "wrong-structured rhs") + _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs") }; for rhs in &rhses { - valid &= check_rhs(cx, rhs); + valid &= check_rhs(sess, rhs); + } + + // don't abort iteration early, so that errors for multiple lhses can be reported + for lhs in &lhses { + valid &= check_lhs_no_empty_seq(sess, &[lhs.clone()]) } let exp: Box<_> = Box::new(MacroRulesMacroExpander { name: def.ident, - imported_from: def.imported_from, lhses: lhses, rhses: rhses, valid: valid, @@ -339,14 +276,14 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, NormalTT(exp, Some(def.span), def.allow_internal_unstable) } -fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &TokenTree) -> bool { +fn check_lhs_nt_follows(sess: &ParseSess, lhs: &TokenTree) -> bool { // lhs is going to be like TokenTree::Delimited(...), where the // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens. match lhs { - &TokenTree::Delimited(_, ref tts) => check_matcher(cx, &tts.tts), + &TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts), _ => { - cx.span_err(lhs.get_span(), "invalid macro matcher; matchers must \ - be contained in balanced delimiters"); + let msg = "invalid macro matcher; matchers must be contained in balanced delimiters"; + sess.span_diagnostic.span_err(lhs.get_span(), msg); false } } @@ -354,20 +291,52 @@ fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &TokenTree) -> bool { // after parsing/expansion. we can report every error in every macro this way. } -fn check_rhs(cx: &mut ExtCtxt, rhs: &TokenTree) -> bool { +/// Check that the lhs contains no repetition which could match an empty token +/// tree, because then the matcher would hang indefinitely. 
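// (Editor's illustration, not part of this patch.) A definition such as
//
//     macro_rules! broken { ($()*) => {}; }
//
// contains a repetition that can match the empty token tree; without this
// check the matcher would spin forever at the first invocation, whereas with
// it the definition itself is rejected with
// "repetition matches empty token tree".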
+fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[TokenTree]) -> bool { + for tt in tts { + match *tt { + TokenTree::Token(_, _) => (), + TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) { + return false; + }, + TokenTree::Sequence(span, ref seq) => { + if seq.separator.is_none() { + if seq.tts.iter().all(|seq_tt| { + match *seq_tt { + TokenTree::Sequence(_, ref sub_seq) => + sub_seq.op == tokenstream::KleeneOp::ZeroOrMore, + _ => false, + } + }) { + sess.span_diagnostic.span_err(span, "repetition matches empty token tree"); + return false; + } + } + if !check_lhs_no_empty_seq(sess, &seq.tts) { + return false; + } + } + } + } + + true +} + +fn check_rhs(sess: &ParseSess, rhs: &TokenTree) -> bool { match *rhs { TokenTree::Delimited(..) => return true, - _ => cx.span_err(rhs.get_span(), "macro rhs must be delimited") + _ => sess.span_diagnostic.span_err(rhs.get_span(), "macro rhs must be delimited") } false } -fn check_matcher(cx: &mut ExtCtxt, matcher: &[TokenTree]) -> bool { +fn check_matcher(sess: &ParseSess, matcher: &[TokenTree]) -> bool { let first_sets = FirstSets::new(matcher); let empty_suffix = TokenSet::empty(); - let err = cx.parse_sess.span_diagnostic.err_count(); - check_matcher_core(cx, &first_sets, matcher, &empty_suffix); - err == cx.parse_sess.span_diagnostic.err_count() + let err = sess.span_diagnostic.err_count(); + check_matcher_core(sess, &first_sets, matcher, &empty_suffix); + err == sess.span_diagnostic.err_count() } // The FirstSets for a matcher is a mapping from subsequences in the @@ -605,7 +574,7 @@ impl TokenSet { // // Requires that `first_sets` is pre-computed for `matcher`; // see `FirstSets::new`. -fn check_matcher_core(cx: &mut ExtCtxt, +fn check_matcher_core(sess: &ParseSess, first_sets: &FirstSets, matcher: &[TokenTree], follow: &TokenSet) -> TokenSet { @@ -637,7 +606,8 @@ fn check_matcher_core(cx: &mut ExtCtxt, TokenTree::Token(sp, ref tok) => { let can_be_followed_by_any; if let Err(bad_frag) = has_legal_fragment_specifier(tok) { - cx.struct_span_err(sp, &format!("invalid fragment specifier `{}`", bad_frag)) + let msg = format!("invalid fragment specifier `{}`", bad_frag); + sess.span_diagnostic.struct_span_err(sp, &msg) .help("valid fragment specifiers are `ident`, `block`, \ `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \ and `item`") @@ -662,7 +632,7 @@ fn check_matcher_core(cx: &mut ExtCtxt, } TokenTree::Delimited(_, ref d) => { let my_suffix = TokenSet::singleton((d.close_span, Token::CloseDelim(d.delim))); - check_matcher_core(cx, first_sets, &d.tts, &my_suffix); + check_matcher_core(sess, first_sets, &d.tts, &my_suffix); // don't track non NT tokens last.replace_with_irrelevant(); @@ -694,7 +664,7 @@ fn check_matcher_core(cx: &mut ExtCtxt, // At this point, `suffix_first` is built, and // `my_suffix` is some TokenSet that we can use // for checking the interior of `seq_rep`. 
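// (Editor's aside, illustrative only; not part of this patch.) The FOLLOW-set
// errors emitted by `check_matcher_core` now go through `sess` directly. A
// definition like
//
//     macro_rules! m { ($e:expr + $rest:tt) => {}; }
//
// is still rejected because an `expr` fragment may only be followed by
// `=>`, `,` or `;`, producing the "`$e:expr` is followed by `+`, which is
// not allowed for `expr` fragments" error built just below.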
- let next = check_matcher_core(cx, first_sets, &seq_rep.tts, my_suffix); + let next = check_matcher_core(sess, first_sets, &seq_rep.tts, my_suffix); if next.maybe_empty { last.add_all(&next); } else { @@ -714,9 +684,9 @@ fn check_matcher_core(cx: &mut ExtCtxt, 'each_last: for &(_sp, ref t) in &last.tokens { if let MatchNt(ref name, ref frag_spec) = *t { for &(sp, ref next_token) in &suffix_first.tokens { - match is_in_follow(cx, next_token, &frag_spec.name.as_str()) { + match is_in_follow(next_token, &frag_spec.name.as_str()) { Err((msg, help)) => { - cx.struct_span_err(sp, &msg).help(help).emit(); + sess.span_diagnostic.struct_span_err(sp, &msg).help(help).emit(); // don't bother reporting every source of // conflict for a particular element of `last`. continue 'each_last; @@ -731,7 +701,7 @@ fn check_matcher_core(cx: &mut ExtCtxt, "may be" }; - cx.span_err( + sess.span_diagnostic.span_err( sp, &format!("`${name}:{frag}` {may_be} followed by `{next}`, which \ is not allowed for `{frag}` fragments", @@ -788,7 +758,7 @@ fn frag_can_be_followed_by_any(frag: &str) -> bool { /// break macros that were relying on that binary operator as a /// separator. // when changing this do not forget to update doc/book/macros.md! -fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result { +fn is_in_follow(tok: &Token, frag: &str) -> Result { if let &CloseDelim(_) = tok { // closing a token tree can never be matched by any fragment; // iow, we always require that `(` and `)` match, etc. diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index 939425378def6..8a6a8e53a3e4c 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -10,14 +10,15 @@ use self::LockstepIterSize::*; use ast::Ident; -use syntax_pos::{Span, DUMMY_SP}; use errors::{Handler, DiagnosticBuilder}; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use parse::token::{DocComment, MatchNt, SubstNt}; -use parse::token::{Token, Interpolated, NtIdent, NtTT, SpecialMacroVar}; +use parse::token::{Token, Interpolated, NtIdent, NtTT}; use parse::token; use parse::lexer::TokenAndSpan; +use syntax_pos::{Span, DUMMY_SP}; use tokenstream::{self, TokenTree}; +use util::small_vector::SmallVector; use std::rc::Rc; use std::ops::Add; @@ -36,18 +37,16 @@ struct TtFrame { pub struct TtReader<'a> { pub sp_diag: &'a Handler, /// the unzipped tree: - stack: Vec, + stack: SmallVector, /* for MBE-style macro transcription */ interpolations: HashMap>, - imported_from: Option, - // Some => return imported_from as the next token - crate_name_next: Option, repeat_idx: Vec, repeat_len: Vec, /* cached: */ pub cur_tok: Token, pub cur_span: Span, + pub next_tok: Option, /// Transform doc comments. Only useful in macro invocations pub desugar_doc_comments: bool, pub fatal_errs: Vec>, @@ -58,10 +57,9 @@ pub struct TtReader<'a> { /// (and should) be None. pub fn new_tt_reader(sp_diag: &Handler, interp: Option>>, - imported_from: Option, src: Vec) -> TtReader { - new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false) + new_tt_reader_with_doc_flag(sp_diag, interp, src, false) } /// The extra `desugar_doc_comments` flag enables reading doc comments @@ -72,13 +70,12 @@ pub fn new_tt_reader(sp_diag: &Handler, /// (and should) be None. 
pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler, interp: Option>>, - imported_from: Option, src: Vec, desugar_doc_comments: bool) -> TtReader { let mut r = TtReader { sp_diag: sp_diag, - stack: vec!(TtFrame { + stack: SmallVector::one(TtFrame { forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition { tts: src, // doesn't matter. This merely holds the root unzipping. @@ -92,14 +89,13 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler, None => HashMap::new(), Some(x) => x, }, - imported_from: imported_from, - crate_name_next: None, repeat_idx: Vec::new(), repeat_len: Vec::new(), desugar_doc_comments: desugar_doc_comments, /* dummy values, never read: */ cur_tok: token::Eof, cur_span: DUMMY_SP, + next_tok: None, fatal_errs: Vec::new(), }; tt_next_token(&mut r); /* get cur_tok and cur_span set up */ @@ -178,20 +174,15 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { /// Return the next token from the TtReader. /// EFFECT: advances the reader's token field pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { + if let Some(tok) = r.next_tok.take() { + return tok; + } // FIXME(pcwalton): Bad copy? let ret_val = TokenAndSpan { tok: r.cur_tok.clone(), sp: r.cur_span.clone(), }; loop { - match r.crate_name_next.take() { - None => (), - Some(sp) => { - r.cur_span = sp; - r.cur_tok = token::Ident(r.imported_from.unwrap()); - return ret_val; - }, - } let should_pop = match r.stack.last() { None => { assert_eq!(ret_val.tok, token::Eof); @@ -341,18 +332,6 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { sep: None }); } - TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => { - r.stack.last_mut().unwrap().idx += 1; - - if r.imported_from.is_some() { - r.cur_span = sp; - r.cur_tok = token::ModSep; - r.crate_name_next = Some(sp); - return ret_val; - } - - // otherwise emit nothing and proceed to the next token - } TokenTree::Token(sp, tok) => { r.cur_span = sp; r.cur_tok = tok; diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index 27b97a0ad665b..129e4a8233803 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -30,12 +30,13 @@ use ast::{self, NodeId, PatKind}; use attr; use codemap::{CodeMap, Spanned}; use syntax_pos::Span; -use errors::Handler; +use errors::{DiagnosticBuilder, Handler}; use visit::{self, FnKind, Visitor}; use parse::ParseSess; use parse::token::InternedString; use std::ascii::AsciiExt; +use std::env; macro_rules! setter { ($field: ident) => {{ @@ -166,6 +167,9 @@ declare_features! ( // RFC 1238 (active, dropck_parametricity, "1.3.0", Some(28498)), + // Allows using the may_dangle attribute; RFC 1327 + (active, dropck_eyepatch, "1.10.0", Some(34761)), + // Allows the use of custom attributes; RFC 572 (active, custom_attribute, "1.0.0", Some(29642)), @@ -252,9 +256,6 @@ declare_features! ( // a...b and ...b (active, inclusive_range_syntax, "1.7.0", Some(28237)), - // `expr?` - (active, question_mark, "1.9.0", Some(31436)), - // impl specialization (RFC 1210) (active, specialization, "1.7.0", Some(31844)), @@ -291,7 +292,7 @@ declare_features! ( (active, item_like_imports, "1.13.0", Some(35120)), // Macros 1.1 - (active, rustc_macro, "1.13.0", Some(35900)), + (active, proc_macro, "1.13.0", Some(35900)), // Allows untagged unions `union U { ... }` (active, untagged_unions, "1.13.0", Some(32836)), @@ -302,6 +303,12 @@ declare_features! 
( // Used to identify the `compiler_builtins` crate // rustc internal (active, compiler_builtins, "1.13.0", None), + + // Allows attributes on lifetime/type formal parameters in generics (RFC 1327) + (active, generic_param_attrs, "1.11.0", Some(34761)), + + // Allows field shorthands (`x` meaning `x: x`) in struct literal expressions. + (active, field_init_shorthand, "1.14.0", Some(37340)), ); declare_features! ( @@ -344,6 +351,8 @@ declare_features! ( (accepted, while_let, "1.0.0", None), // Allows `#[deprecated]` attribute (accepted, deprecated, "1.9.0", Some(29935)), + // `expr?` + (accepted, question_mark, "1.14.0", Some(31436)), ); // (changing above list without updating src/doc/reference.md makes @cmr sad) @@ -365,17 +374,34 @@ pub enum AttributeType { pub enum AttributeGate { /// Is gated by a given feature gate, reason /// and function to check if enabled - Gated(&'static str, &'static str, fn(&Features) -> bool), + Gated(Stability, &'static str, &'static str, fn(&Features) -> bool), /// Ungated attribute, can be used on all release channels Ungated, } +impl AttributeGate { + fn is_deprecated(&self) -> bool { + match *self { + Gated(Stability::Deprecated(_), ..) => true, + _ => false, + } + } +} + +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum Stability { + Unstable, + // Argument is tracking issue link. + Deprecated(&'static str), +} + // fn() is not Debug impl ::std::fmt::Debug for AttributeGate { fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { match *self { - Gated(ref name, ref expl, _) => write!(fmt, "Gated({}, {})", name, expl), + Gated(ref stab, ref name, ref expl, _) => + write!(fmt, "Gated({:?}, {}, {})", stab, name, expl), Ungated => write!(fmt, "Ungated") } } @@ -390,6 +416,10 @@ macro_rules! cfg_fn { }} } +pub fn deprecated_attributes() -> Vec<&'static (&'static str, AttributeType, AttributeGate)> { + KNOWN_ATTRIBUTES.iter().filter(|a| a.2.is_deprecated()).collect() +} + // Attributes that have a special meaning to rustc or rustdoc pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGate)] = &[ // Normal attributes @@ -426,7 +456,8 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat ("macro_escape", Normal, Ungated), // RFC #1445. 
- ("structural_match", Whitelisted, Gated("structural_match", + ("structural_match", Whitelisted, Gated(Stability::Unstable, + "structural_match", "the semantics of constant patterns is \ not yet settled", cfg_fn!(structural_match))), @@ -434,140 +465,181 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat // Not used any more, but we can't feature gate it ("no_stack_check", Normal, Ungated), - ("plugin", CrateLevel, Gated("plugin", + ("plugin", CrateLevel, Gated(Stability::Unstable, + "plugin", "compiler plugins are experimental \ and possibly buggy", cfg_fn!(plugin))), ("no_std", CrateLevel, Ungated), - ("no_core", CrateLevel, Gated("no_core", + ("no_core", CrateLevel, Gated(Stability::Unstable, + "no_core", "no_core is experimental", cfg_fn!(no_core))), - ("lang", Normal, Gated("lang_items", + ("lang", Normal, Gated(Stability::Unstable, + "lang_items", "language items are subject to change", cfg_fn!(lang_items))), - ("linkage", Whitelisted, Gated("linkage", + ("linkage", Whitelisted, Gated(Stability::Unstable, + "linkage", "the `linkage` attribute is experimental \ and not portable across platforms", cfg_fn!(linkage))), - ("thread_local", Whitelisted, Gated("thread_local", + ("thread_local", Whitelisted, Gated(Stability::Unstable, + "thread_local", "`#[thread_local]` is an experimental feature, and does \ not currently handle destructors. There is no \ corresponding `#[task_local]` mapping to the task \ model", cfg_fn!(thread_local))), - ("rustc_on_unimplemented", Normal, Gated("on_unimplemented", + ("rustc_on_unimplemented", Normal, Gated(Stability::Unstable, + "on_unimplemented", "the `#[rustc_on_unimplemented]` attribute \ is an experimental feature", cfg_fn!(on_unimplemented))), - ("allocator", Whitelisted, Gated("allocator", + ("allocator", Whitelisted, Gated(Stability::Unstable, + "allocator", "the `#[allocator]` attribute is an experimental feature", cfg_fn!(allocator))), - ("needs_allocator", Normal, Gated("needs_allocator", + ("needs_allocator", Normal, Gated(Stability::Unstable, + "needs_allocator", "the `#[needs_allocator]` \ attribute is an experimental \ feature", cfg_fn!(needs_allocator))), - ("panic_runtime", Whitelisted, Gated("panic_runtime", + ("panic_runtime", Whitelisted, Gated(Stability::Unstable, + "panic_runtime", "the `#[panic_runtime]` attribute is \ an experimental feature", cfg_fn!(panic_runtime))), - ("needs_panic_runtime", Whitelisted, Gated("needs_panic_runtime", + ("needs_panic_runtime", Whitelisted, Gated(Stability::Unstable, + "needs_panic_runtime", "the `#[needs_panic_runtime]` \ attribute is an experimental \ feature", cfg_fn!(needs_panic_runtime))), - ("rustc_variance", Normal, Gated("rustc_attrs", + ("rustc_variance", Normal, Gated(Stability::Unstable, + "rustc_attrs", "the `#[rustc_variance]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_error", Whitelisted, Gated("rustc_attrs", + ("rustc_error", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "the `#[rustc_error]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_if_this_changed", Whitelisted, Gated("rustc_attrs", + ("rustc_if_this_changed", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "the `#[rustc_if_this_changed]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_then_this_would_need", Whitelisted, Gated("rustc_attrs", + ("rustc_then_this_would_need", Whitelisted, 
Gated(Stability::Unstable, + "rustc_attrs", "the `#[rustc_if_this_changed]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_dirty", Whitelisted, Gated("rustc_attrs", + ("rustc_dirty", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "the `#[rustc_dirty]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_clean", Whitelisted, Gated("rustc_attrs", + ("rustc_clean", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "the `#[rustc_clean]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_partition_reused", Whitelisted, Gated("rustc_attrs", + ("rustc_metadata_dirty", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", + "the `#[rustc_metadata_dirty]` attribute \ + is just used for rustc unit tests \ + and will never be stable", + cfg_fn!(rustc_attrs))), + ("rustc_metadata_clean", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", + "the `#[rustc_metadata_clean]` attribute \ + is just used for rustc unit tests \ + and will never be stable", + cfg_fn!(rustc_attrs))), + ("rustc_partition_reused", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "this attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_partition_translated", Whitelisted, Gated("rustc_attrs", + ("rustc_partition_translated", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "this attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_symbol_name", Whitelisted, Gated("rustc_attrs", + ("rustc_symbol_name", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "internal rustc attributes will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_item_path", Whitelisted, Gated("rustc_attrs", + ("rustc_item_path", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "internal rustc attributes will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_move_fragments", Normal, Gated("rustc_attrs", + ("rustc_move_fragments", Normal, Gated(Stability::Unstable, + "rustc_attrs", "the `#[rustc_move_fragments]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_mir", Whitelisted, Gated("rustc_attrs", + ("rustc_mir", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "the `#[rustc_mir]` attribute \ is just used for rustc unit tests \ and will never be stable", cfg_fn!(rustc_attrs))), - ("rustc_inherit_overflow_checks", Whitelisted, Gated("rustc_attrs", + ("rustc_inherit_overflow_checks", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "the `#[rustc_inherit_overflow_checks]` \ attribute is just used to control \ overflow checking behavior of several \ libcore functions that are inlined \ across crates and will never be stable", cfg_fn!(rustc_attrs))), - ("compiler_builtins", Whitelisted, Gated("compiler_builtins", + ("compiler_builtins", Whitelisted, Gated(Stability::Unstable, + "compiler_builtins", "the `#[compiler_builtins]` attribute is used to \ identify the `compiler_builtins` crate which \ contains compiler-rt intrinsics and will never be \ stable", cfg_fn!(compiler_builtins))), - ("allow_internal_unstable", Normal, Gated("allow_internal_unstable", + ("allow_internal_unstable", Normal, Gated(Stability::Unstable, + "allow_internal_unstable", EXPLAIN_ALLOW_INTERNAL_UNSTABLE, cfg_fn!(allow_internal_unstable))), - ("fundamental", Whitelisted, 
Gated("fundamental", + ("fundamental", Whitelisted, Gated(Stability::Unstable, + "fundamental", "the `#[fundamental]` attribute \ is an experimental feature", cfg_fn!(fundamental))), - ("linked_from", Normal, Gated("linked_from", + ("linked_from", Normal, Gated(Stability::Unstable, + "linked_from", "the `#[linked_from]` attribute \ is an experimental feature", cfg_fn!(linked_from))), - ("rustc_macro_derive", Normal, Gated("rustc_macro", - "the `#[rustc_macro_derive]` attribute \ - is an experimental feature", - cfg_fn!(rustc_macro))), + ("proc_macro_derive", Normal, Gated(Stability::Unstable, + "proc_macro", + "the `#[proc_macro_derive]` attribute \ + is an experimental feature", + cfg_fn!(proc_macro))), - ("rustc_copy_clone_marker", Whitelisted, Gated("rustc_attrs", + ("rustc_copy_clone_marker", Whitelisted, Gated(Stability::Unstable, + "rustc_attrs", "internal implementation detail", cfg_fn!(rustc_attrs))), @@ -577,7 +649,8 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat // FIXME: #14406 these are processed in trans, which happens after the // lint pass ("cold", Whitelisted, Ungated), - ("naked", Whitelisted, Gated("naked_functions", + ("naked", Whitelisted, Gated(Stability::Unstable, + "naked_functions", "the `#[naked]` attribute \ is an experimental feature", cfg_fn!(naked_functions))), @@ -588,26 +661,38 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat ("link_section", Whitelisted, Ungated), ("no_builtins", Whitelisted, Ungated), ("no_mangle", Whitelisted, Ungated), - ("no_debug", Whitelisted, Gated("no_debug", - "the `#[no_debug]` attribute \ - is an experimental feature", - cfg_fn!(no_debug))), - ("omit_gdb_pretty_printer_section", Whitelisted, Gated("omit_gdb_pretty_printer_section", + ("no_debug", Whitelisted, Gated( + Stability::Deprecated("https://github.com/rust-lang/rust/issues/29721"), + "no_debug", + "the `#[no_debug]` attribute is an experimental feature", + cfg_fn!(no_debug))), + ("omit_gdb_pretty_printer_section", Whitelisted, Gated(Stability::Unstable, + "omit_gdb_pretty_printer_section", "the `#[omit_gdb_pretty_printer_section]` \ attribute is just used for the Rust test \ suite", cfg_fn!(omit_gdb_pretty_printer_section))), ("unsafe_destructor_blind_to_params", Normal, - Gated("dropck_parametricity", + Gated(Stability::Unstable, + "dropck_parametricity", "unsafe_destructor_blind_to_params has unstable semantics \ and may be removed in the future", cfg_fn!(dropck_parametricity))), - ("unwind", Whitelisted, Gated("unwind_attributes", "#[unwind] is experimental", + ("may_dangle", + Normal, + Gated(Stability::Unstable, + "dropck_eyepatch", + "may_dangle has unstable semantics and may be removed in the future", + cfg_fn!(dropck_eyepatch))), + ("unwind", Whitelisted, Gated(Stability::Unstable, + "unwind_attributes", + "#[unwind] is experimental", cfg_fn!(unwind_attributes))), // used in resolve - ("prelude_import", Whitelisted, Gated("prelude_import", + ("prelude_import", Whitelisted, Gated(Stability::Unstable, + "prelude_import", "`#[prelude_import]` is for use by rustc only", cfg_fn!(prelude_import))), @@ -619,10 +704,12 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat ("unstable", Whitelisted, Ungated), ("deprecated", Normal, Ungated), - ("rustc_paren_sugar", Normal, Gated("unboxed_closures", + ("rustc_paren_sugar", Normal, Gated(Stability::Unstable, + "unboxed_closures", "unboxed_closures are still evolving", cfg_fn!(unboxed_closures))), - ("rustc_reflect_like", 
Whitelisted, Gated("reflect", + ("rustc_reflect_like", Whitelisted, Gated(Stability::Unstable, + "reflect", "defining reflective traits is still evolving", cfg_fn!(reflect))), @@ -644,7 +731,7 @@ const GATED_CFGS: &'static [(&'static str, &'static str, fn(&Features) -> bool)] ("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)), ("target_thread_local", "cfg_target_thread_local", cfg_fn!(cfg_target_thread_local)), ("target_has_atomic", "cfg_target_has_atomic", cfg_fn!(cfg_target_has_atomic)), - ("rustc_macro", "rustc_macro", cfg_fn!(rustc_macro)), + ("proc_macro", "proc_macro", cfg_fn!(proc_macro)), ]; #[derive(Debug, Eq, PartialEq)] @@ -669,16 +756,15 @@ impl GatedCfg { pub fn check_and_emit(&self, sess: &ParseSess, features: &Features) { let (cfg, feature, has_feature) = GATED_CFGS[self.index]; if !has_feature(features) && !sess.codemap().span_allows_unstable(self.span) { - let diagnostic = &sess.span_diagnostic; let explain = format!("`cfg({})` is experimental and subject to change", cfg); - emit_feature_err(diagnostic, feature, self.span, GateIssue::Language, &explain); + emit_feature_err(sess, feature, self.span, GateIssue::Language, &explain); } } } struct Context<'a> { features: &'a Features, - span_handler: &'a Handler, + parse_sess: &'a ParseSess, cm: &'a CodeMap, plugin_attributes: &'a [(String, AttributeType)], } @@ -689,7 +775,7 @@ macro_rules! gate_feature_fn { let has_feature: bool = has_feature(&$cx.features); debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); if !has_feature && !cx.cm.span_allows_unstable(span) { - emit_feature_err(cx.span_handler, name, span, GateIssue::Language, explain); + emit_feature_err(cx.parse_sess, name, span, GateIssue::Language, explain); } }} } @@ -706,7 +792,7 @@ impl<'a> Context<'a> { let name = &*attr.name(); for &(n, ty, ref gateage) in KNOWN_ATTRIBUTES { if n == name { - if let &Gated(ref name, ref desc, ref has_feature) = gateage { + if let &Gated(_, ref name, ref desc, ref has_feature) = gateage { gate_feature_fn!(self, has_feature, attr.span, name, desc); } debug!("check_attribute: {:?} is known, {:?}, {:?}", name, ty, gateage); @@ -746,10 +832,10 @@ impl<'a> Context<'a> { } } -pub fn check_attribute(attr: &ast::Attribute, handler: &Handler, +pub fn check_attribute(attr: &ast::Attribute, parse_sess: &ParseSess, cm: &CodeMap, features: &Features) { let cx = Context { - features: features, span_handler: handler, + features: features, parse_sess: parse_sess, cm: cm, plugin_attributes: &[] }; cx.check_attribute(attr, true); @@ -778,8 +864,15 @@ pub enum GateIssue { Library(Option) } -pub fn emit_feature_err(diag: &Handler, feature: &str, span: Span, issue: GateIssue, +pub fn emit_feature_err(sess: &ParseSess, feature: &str, span: Span, issue: GateIssue, explain: &str) { + feature_err(sess, feature, span, issue, explain).emit(); +} + +pub fn feature_err<'a>(sess: &'a ParseSess, feature: &str, span: Span, issue: GateIssue, + explain: &str) -> DiagnosticBuilder<'a> { + let diag = &sess.span_diagnostic; + let issue = match issue { GateIssue::Language => find_lang_feature_issue(feature), GateIssue::Library(lib) => lib, @@ -792,14 +885,13 @@ pub fn emit_feature_err(diag: &Handler, feature: &str, span: Span, issue: GateIs }; // #23973: do not suggest `#![feature(...)]` if we are in beta/stable - if option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some() { - err.emit(); - return; + if sess.unstable_features.is_nightly_build() { + err.help(&format!("add #![feature({})] to the \ + crate attributes to 
enable", + feature)); } - err.help(&format!("add #![feature({})] to the \ - crate attributes to enable", - feature)); - err.emit(); + + err } const EXPLAIN_BOX_SYNTAX: &'static str = @@ -823,7 +915,12 @@ pub const EXPLAIN_ALLOW_INTERNAL_UNSTABLE: &'static str = "allow_internal_unstable side-steps feature gating and stability checks"; pub const EXPLAIN_CUSTOM_DERIVE: &'static str = - "`#[derive]` for custom traits is not stable enough for use and is subject to change"; + "`#[derive]` for custom traits is not stable enough for use. It is deprecated and will \ + be removed in v1.15"; + +pub const EXPLAIN_DEPR_CUSTOM_DERIVE: &'static str = + "`#[derive]` for custom traits is deprecated and will be removed in v1.15. Prefer using \ + procedural macro custom derive"; pub const EXPLAIN_DERIVE_UNDERSCORE: &'static str = "attributes of the form `#[derive_*]` are reserved for the compiler"; @@ -952,9 +1049,10 @@ impl<'a> Visitor for PostExpansionVisitor<'a> { if attr::contains_name(&i.attrs[..], "simd") { gate_feature_post!(&self, simd, i.span, "SIMD types are experimental and possibly buggy"); - self.context.span_handler.span_warn(i.span, - "the `#[simd]` attribute is deprecated, \ - use `#[repr(simd)]` instead"); + self.context.parse_sess.span_diagnostic.span_warn(i.span, + "the `#[simd]` attribute \ + is deprecated, use \ + `#[repr(simd)]` instead"); } for attr in &i.attrs { if attr.name() == "repr" { @@ -1057,12 +1155,17 @@ impl<'a> Visitor for PostExpansionVisitor<'a> { e.span, "inclusive range syntax is experimental"); } - ast::ExprKind::Try(..) => { - gate_feature_post!(&self, question_mark, e.span, "the `?` operator is not stable"); - } ast::ExprKind::InPlace(..) => { gate_feature_post!(&self, placement_in_syntax, e.span, EXPLAIN_PLACEMENT_IN); } + ast::ExprKind::Struct(_, ref fields, _) => { + for field in fields { + if field.is_shorthand { + gate_feature_post!(&self, field_init_shorthand, field.span, + "struct field shorthands are unstable"); + } + } + } _ => {} } visit::walk_expr(self, e); @@ -1070,14 +1173,14 @@ impl<'a> Visitor for PostExpansionVisitor<'a> { fn visit_pat(&mut self, pattern: &ast::Pat) { match pattern.node { - PatKind::Vec(_, Some(_), ref last) if !last.is_empty() => { + PatKind::Slice(_, Some(_), ref last) if !last.is_empty() => { gate_feature_post!(&self, advanced_slice_patterns, pattern.span, "multiple-element slice matches anywhere \ but at the end of a slice (e.g. \ `[0, ..xs, 0]`) are experimental") } - PatKind::Vec(..) => { + PatKind::Slice(..) 
=> { gate_feature_post!(&self, slice_patterns, pattern.span, "slice pattern syntax is experimental"); @@ -1208,6 +1311,24 @@ impl<'a> Visitor for PostExpansionVisitor<'a> { visit::walk_vis(self, vis) } + + fn visit_generics(&mut self, g: &ast::Generics) { + for t in &g.ty_params { + if !t.attrs.is_empty() { + gate_feature_post!(&self, generic_param_attrs, t.attrs[0].span, + "attributes on type parameter bindings are experimental"); + } + } + visit::walk_generics(self, g) + } + + fn visit_lifetime_def(&mut self, lifetime_def: &ast::LifetimeDef) { + if !lifetime_def.attrs.is_empty() { + gate_feature_post!(&self, generic_param_attrs, lifetime_def.attrs[0].span, + "attributes on lifetime bindings are experimental"); + } + visit::walk_lifetime_def(self, lifetime_def) + } } pub fn get_features(span_handler: &Handler, krate_attrs: &[ast::Attribute]) -> Features { @@ -1263,7 +1384,7 @@ pub fn check_crate(krate: &ast::Crate, maybe_stage_features(&sess.span_diagnostic, krate, unstable); let ctx = Context { features: features, - span_handler: &sess.span_diagnostic, + parse_sess: sess, cm: sess.codemap(), plugin_attributes: plugin_attributes, }; @@ -1275,7 +1396,7 @@ pub enum UnstableFeatures { /// Hard errors for unstable features are active, as on /// beta/stable channels. Disallow, - /// Allow features to me activated, as on nightly. + /// Allow features to be activated, as on nightly. Allow, /// Errors are bypassed for bootstrapping. This is required any time /// during the build that feature-related lints are set to warn or above @@ -1284,6 +1405,27 @@ pub enum UnstableFeatures { Cheat } +impl UnstableFeatures { + pub fn from_environment() -> UnstableFeatures { + // Whether this is a feature-staged build, i.e. on the beta or stable channel + let disable_unstable_features = option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_some(); + // Whether we should enable unstable features for bootstrapping + let bootstrap = env::var("RUSTC_BOOTSTRAP").is_ok(); + match (disable_unstable_features, bootstrap) { + (_, true) => UnstableFeatures::Cheat, + (true, _) => UnstableFeatures::Disallow, + (false, _) => UnstableFeatures::Allow + } + } + + pub fn is_nightly_build(&self) -> bool { + match *self { + UnstableFeatures::Allow | UnstableFeatures::Cheat => true, + _ => false, + } + } +} + fn maybe_stage_features(span_handler: &Handler, krate: &ast::Crate, unstable: UnstableFeatures) { let allow_features = match unstable { diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 9fb4d0203f41e..4bf6e55d6743b 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -356,7 +356,7 @@ pub fn noop_fold_ty(t: P, fld: &mut T) -> P { id: fld.new_id(id), node: match node { TyKind::Infer | TyKind::ImplicitSelf => node, - TyKind::Vec(ty) => TyKind::Vec(fld.fold_ty(ty)), + TyKind::Slice(ty) => TyKind::Slice(fld.fold_ty(ty)), TyKind::Ptr(mt) => TyKind::Ptr(fld.fold_mt(mt)), TyKind::Rptr(region, mt) => { TyKind::Rptr(fld.fold_opt_lifetime(region), fld.fold_mt(mt)) @@ -385,8 +385,8 @@ pub fn noop_fold_ty(t: P, fld: &mut T) -> P { TyKind::ObjectSum(fld.fold_ty(ty), fld.fold_bounds(bounds)) } - TyKind::FixedLengthVec(ty, e) => { - TyKind::FixedLengthVec(fld.fold_ty(ty), fld.fold_expr(e)) + TyKind::Array(ty, e) => { + TyKind::Array(fld.fold_ty(ty), fld.fold_expr(e)) } TyKind::Typeof(expr) => { TyKind::Typeof(fld.fold_expr(expr)) @@ -478,8 +478,8 @@ pub fn noop_fold_parenthesized_parameter_data(data: ParenthesizedPara pub fn noop_fold_local(l: P, fld: &mut T) -> P { l.map(|Local {id, pat, ty, init, span, attrs}| Local { 
id: fld.new_id(id), - ty: ty.map(|t| fld.fold_ty(t)), pat: fld.fold_pat(pat), + ty: ty.map(|t| fld.fold_ty(t)), init: init.map(|e| fld.fold_expr(e)), span: fld.new_span(span), attrs: fold_attrs(attrs.into(), fld).into(), @@ -662,8 +662,13 @@ pub fn noop_fold_ty_param_bound(tpb: TyParamBound, fld: &mut T) } pub fn noop_fold_ty_param(tp: TyParam, fld: &mut T) -> TyParam { - let TyParam {id, ident, bounds, default, span} = tp; + let TyParam {attrs, id, ident, bounds, default, span} = tp; + let attrs: Vec<_> = attrs.into(); TyParam { + attrs: attrs.into_iter() + .flat_map(|x| fld.fold_attribute(x).into_iter()) + .collect::>() + .into(), id: fld.new_id(id), ident: ident, bounds: fld.fold_bounds(bounds), @@ -687,7 +692,12 @@ pub fn noop_fold_lifetime(l: Lifetime, fld: &mut T) -> Lifetime { pub fn noop_fold_lifetime_def(l: LifetimeDef, fld: &mut T) -> LifetimeDef { + let attrs: Vec<_> = l.attrs.into(); LifetimeDef { + attrs: attrs.into_iter() + .flat_map(|x| fld.fold_attribute(x).into_iter()) + .collect::>() + .into(), lifetime: fld.fold_lifetime(l.lifetime), bounds: fld.fold_lifetimes(l.bounds), } @@ -813,11 +823,12 @@ pub fn noop_fold_struct_field(f: StructField, fld: &mut T) -> StructF } } -pub fn noop_fold_field(Field {ident, expr, span}: Field, folder: &mut T) -> Field { +pub fn noop_fold_field(f: Field, folder: &mut T) -> Field { Field { - ident: respan(ident.span, folder.fold_ident(ident.node)), - expr: folder.fold_expr(expr), - span: folder.new_span(span) + ident: respan(f.ident.span, folder.fold_ident(f.ident.node)), + expr: folder.fold_expr(f.expr), + span: folder.new_span(f.span), + is_shorthand: f.is_shorthand, } } @@ -860,14 +871,10 @@ pub fn noop_fold_item_kind(i: ItemKind, folder: &mut T) -> ItemKind { ItemKind::Const(folder.fold_ty(t), folder.fold_expr(e)) } ItemKind::Fn(decl, unsafety, constness, abi, generics, body) => { - ItemKind::Fn( - folder.fold_fn_decl(decl), - unsafety, - constness, - abi, - folder.fold_generics(generics), - folder.fold_block(body) - ) + let generics = folder.fold_generics(generics); + let decl = folder.fold_fn_decl(decl); + let body = folder.fold_block(body); + ItemKind::Fn(decl, unsafety, constness, abi, generics, body) } ItemKind::Mod(m) => ItemKind::Mod(folder.fold_mod(m)), ItemKind::ForeignMod(nm) => ItemKind::ForeignMod(folder.fold_foreign_mod(nm)), @@ -875,50 +882,35 @@ pub fn noop_fold_item_kind(i: ItemKind, folder: &mut T) -> ItemKind { ItemKind::Ty(folder.fold_ty(t), folder.fold_generics(generics)) } ItemKind::Enum(enum_definition, generics) => { - ItemKind::Enum( - ast::EnumDef { - variants: enum_definition.variants.move_map(|x| folder.fold_variant(x)), - }, - folder.fold_generics(generics)) + let generics = folder.fold_generics(generics); + let variants = enum_definition.variants.move_map(|x| folder.fold_variant(x)); + ItemKind::Enum(ast::EnumDef { variants: variants }, generics) } ItemKind::Struct(struct_def, generics) => { - let struct_def = folder.fold_variant_data(struct_def); - ItemKind::Struct(struct_def, folder.fold_generics(generics)) + let generics = folder.fold_generics(generics); + ItemKind::Struct(folder.fold_variant_data(struct_def), generics) } ItemKind::Union(struct_def, generics) => { - let struct_def = folder.fold_variant_data(struct_def); - ItemKind::Union(struct_def, folder.fold_generics(generics)) + let generics = folder.fold_generics(generics); + ItemKind::Union(folder.fold_variant_data(struct_def), generics) } ItemKind::DefaultImpl(unsafety, ref trait_ref) => { ItemKind::DefaultImpl(unsafety, 
folder.fold_trait_ref((*trait_ref).clone())) } - ItemKind::Impl(unsafety, polarity, generics, ifce, ty, impl_items) => { - let new_impl_items = impl_items.move_flat_map(|item| { - folder.fold_impl_item(item) - }); - let ifce = match ifce { - None => None, - Some(ref trait_ref) => { - Some(folder.fold_trait_ref((*trait_ref).clone())) - } - }; - ItemKind::Impl(unsafety, - polarity, - folder.fold_generics(generics), - ifce, - folder.fold_ty(ty), - new_impl_items) - } - ItemKind::Trait(unsafety, generics, bounds, items) => { - let bounds = folder.fold_bounds(bounds); - let items = items.move_flat_map(|item| { - folder.fold_trait_item(item) - }); - ItemKind::Trait(unsafety, - folder.fold_generics(generics), - bounds, - items) - } + ItemKind::Impl(unsafety, polarity, generics, ifce, ty, impl_items) => ItemKind::Impl( + unsafety, + polarity, + folder.fold_generics(generics), + ifce.map(|trait_ref| folder.fold_trait_ref(trait_ref.clone())), + folder.fold_ty(ty), + impl_items.move_flat_map(|item| folder.fold_impl_item(item)), + ), + ItemKind::Trait(unsafety, generics, bounds, items) => ItemKind::Trait( + unsafety, + folder.fold_generics(generics), + folder.fold_bounds(bounds), + items.move_flat_map(|item| folder.fold_trait_item(item)), + ), ItemKind::Mac(m) => ItemKind::Mac(folder.fold_mac(m)), } } @@ -954,9 +946,9 @@ pub fn noop_fold_impl_item(i: ImplItem, folder: &mut T) -> SmallVector { SmallVector::one(ImplItem { id: folder.new_id(i.id), + vis: folder.fold_vis(i.vis), ident: folder.fold_ident(i.ident), attrs: fold_attrs(i.attrs, folder), - vis: folder.fold_vis(i.vis), defaultness: i.defaultness, node: match i.node { ast::ImplItemKind::Const(ty, expr) => { @@ -980,10 +972,8 @@ pub fn noop_fold_mod(Mod {inner, items}: Mod, folder: &mut T) -> Mod } } -pub fn noop_fold_crate(Crate {module, attrs, config, mut exported_macros, span}: Crate, +pub fn noop_fold_crate(Crate {module, attrs, mut exported_macros, span}: Crate, folder: &mut T) -> Crate { - let config = folder.fold_meta_items(config); - let mut items = folder.fold_item(P(ast::Item { ident: keywords::Invalid.ident(), attrs: attrs, @@ -1017,7 +1007,6 @@ pub fn noop_fold_crate(Crate {module, attrs, config, mut exported_mac Crate { module: module, attrs: attrs, - config: config, exported_macros: exported_macros, span: span, } @@ -1031,15 +1020,12 @@ pub fn noop_fold_item(i: P, folder: &mut T) -> SmallVector(Item {id, ident, attrs, node, vis, span}: Item, folder: &mut T) -> Item { - let id = folder.new_id(id); - let node = folder.fold_item_kind(node); - Item { - id: id, + id: folder.new_id(id), + vis: folder.fold_vis(vis), ident: folder.fold_ident(ident), attrs: fold_attrs(attrs, folder), - node: node, - vis: folder.fold_vis(vis), + node: folder.fold_item_kind(node), span: folder.new_span(span) } } @@ -1047,6 +1033,7 @@ pub fn noop_fold_item_simple(Item {id, ident, attrs, node, vis, span} pub fn noop_fold_foreign_item(ni: ForeignItem, folder: &mut T) -> ForeignItem { ForeignItem { id: folder.new_id(ni.id), + vis: folder.fold_vis(ni.vis), ident: folder.fold_ident(ni.ident), attrs: fold_attrs(ni.attrs, folder), node: match ni.node { @@ -1057,7 +1044,6 @@ pub fn noop_fold_foreign_item(ni: ForeignItem, folder: &mut T) -> For ForeignItemKind::Static(folder.fold_ty(t), m) } }, - vis: folder.fold_vis(ni.vis), span: folder.new_span(ni.span) } } @@ -1114,8 +1100,8 @@ pub fn noop_fold_pat(p: P, folder: &mut T) -> P { PatKind::Range(e1, e2) => { PatKind::Range(folder.fold_expr(e1), folder.fold_expr(e2)) }, - PatKind::Vec(before, slice, after) => { - 
PatKind::Vec(before.move_map(|x| folder.fold_pat(x)), + PatKind::Slice(before, slice, after) => { + PatKind::Slice(before.move_map(|x| folder.fold_pat(x)), slice.map(|x| folder.fold_pat(x)), after.move_map(|x| folder.fold_pat(x))) } diff --git a/src/libsyntax/json.rs b/src/libsyntax/json.rs index a40c30b3e3397..a1c273baeea42 100644 --- a/src/libsyntax/json.rs +++ b/src/libsyntax/json.rs @@ -38,14 +38,24 @@ pub struct JsonEmitter { } impl JsonEmitter { + pub fn stderr(registry: Option, + code_map: Rc) -> JsonEmitter { + JsonEmitter { + dst: Box::new(io::stderr()), + registry: registry, + cm: code_map, + } + } + pub fn basic() -> JsonEmitter { JsonEmitter::stderr(None, Rc::new(CodeMap::new())) } - pub fn stderr(registry: Option, - code_map: Rc) -> JsonEmitter { + pub fn new(dst: Box, + registry: Option, + code_map: Rc) -> JsonEmitter { JsonEmitter { - dst: Box::new(io::stderr()), + dst: dst, registry: registry, cm: code_map, } diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index 4a2c9aff2d2b4..b86e508d89970 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -31,9 +31,12 @@ #![feature(staged_api)] #![feature(str_escape)] #![feature(unicode)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #![feature(rustc_diagnostic_macros)] +#![feature(specialization)] +#![feature(dotdot_in_tuple_patterns)] +extern crate core; extern crate serialize; extern crate term; extern crate libc; diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index a0defbc09dc04..3cb34fa3c91c5 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -125,8 +125,8 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Bracket))?; let meta_item = self.parse_meta_item()?; - let hi = self.last_span.hi; self.expect(&token::CloseDelim(token::Bracket))?; + let hi = self.prev_span.hi; (mk_sp(lo, hi), meta_item, style) } @@ -231,16 +231,16 @@ impl<'a> Parser<'a> { token::Eq => { self.bump(); let lit = self.parse_unsuffixed_lit()?; - let hi = self.last_span.hi; + let hi = self.prev_span.hi; Ok(P(spanned(lo, hi, ast::MetaItemKind::NameValue(name, lit)))) } token::OpenDelim(token::Paren) => { let inner_items = self.parse_meta_seq()?; - let hi = self.last_span.hi; + let hi = self.prev_span.hi; Ok(P(spanned(lo, hi, ast::MetaItemKind::List(name, inner_items)))) } _ => { - let hi = self.last_span.hi; + let hi = self.prev_span.hi; Ok(P(spanned(lo, hi, ast::MetaItemKind::Word(name)))) } } @@ -253,14 +253,14 @@ impl<'a> Parser<'a> { match self.parse_unsuffixed_lit() { Ok(lit) => { - return Ok(spanned(lo, self.last_span.hi, ast::NestedMetaItemKind::Literal(lit))) + return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::Literal(lit))) } Err(ref mut err) => self.diagnostic().cancel(err) } match self.parse_meta_item() { Ok(mi) => { - return Ok(spanned(lo, self.last_span.hi, ast::NestedMetaItemKind::MetaItem(mi))) + return Ok(spanned(lo, self.prev_span.hi, ast::NestedMetaItemKind::MetaItem(mi))) } Err(ref mut err) => self.diagnostic().cancel(err) } diff --git a/src/libsyntax/parse/lexer/comments.rs b/src/libsyntax/parse/lexer/comments.rs index 5eb5605ea71a0..ba83a55ea7937 100644 --- a/src/libsyntax/parse/lexer/comments.rs +++ b/src/libsyntax/parse/lexer/comments.rs @@ -24,7 +24,7 @@ use str::char_at; use std::io::Read; use std::usize; -#[derive(Clone, Copy, PartialEq)] +#[derive(Clone, Copy, PartialEq, Debug)] pub enum CommentStyle { /// No code on either side of each line of the comment Isolated, @@ -149,25 +149,24 @@ fn 
push_blank_line_comment(rdr: &StringReader, comments: &mut Vec) { comments.push(Comment { style: BlankLine, lines: Vec::new(), - pos: rdr.last_pos, + pos: rdr.pos, }); } fn consume_whitespace_counting_blank_lines(rdr: &mut StringReader, comments: &mut Vec) { - while is_pattern_whitespace(rdr.curr) && !rdr.is_eof() { - if rdr.col == CharPos(0) && rdr.curr_is('\n') { + while is_pattern_whitespace(rdr.ch) && !rdr.is_eof() { + if rdr.ch_is('\n') { push_blank_line_comment(rdr, &mut *comments); } rdr.bump(); } } - fn read_shebang_comment(rdr: &mut StringReader, code_to_the_left: bool, comments: &mut Vec) { debug!(">>> shebang comment"); - let p = rdr.last_pos; + let p = rdr.pos; debug!("<<< shebang comment"); comments.push(Comment { style: if code_to_the_left { Trailing } else { Isolated }, @@ -180,9 +179,9 @@ fn read_line_comments(rdr: &mut StringReader, code_to_the_left: bool, comments: &mut Vec) { debug!(">>> line comments"); - let p = rdr.last_pos; + let p = rdr.pos; let mut lines: Vec = Vec::new(); - while rdr.curr_is('/') && rdr.nextch_is('/') { + while rdr.ch_is('/') && rdr.nextch_is('/') { let line = rdr.read_one_line_comment(); debug!("{}", line); // Doc comments are not put in comments. @@ -240,7 +239,7 @@ fn read_block_comment(rdr: &mut StringReader, code_to_the_left: bool, comments: &mut Vec) { debug!(">>> block comment"); - let p = rdr.last_pos; + let p = rdr.pos; let mut lines: Vec = Vec::new(); let col = rdr.col; rdr.bump(); @@ -249,9 +248,9 @@ fn read_block_comment(rdr: &mut StringReader, let mut curr_line = String::from("/*"); // doc-comments are not really comments, they are attributes - if (rdr.curr_is('*') && !rdr.nextch_is('*')) || rdr.curr_is('!') { - while !(rdr.curr_is('*') && rdr.nextch_is('/')) && !rdr.is_eof() { - curr_line.push(rdr.curr.unwrap()); + if (rdr.ch_is('*') && !rdr.nextch_is('*')) || rdr.ch_is('!') { + while !(rdr.ch_is('*') && rdr.nextch_is('/')) && !rdr.is_eof() { + curr_line.push(rdr.ch.unwrap()); rdr.bump(); } if !rdr.is_eof() { @@ -271,19 +270,19 @@ fn read_block_comment(rdr: &mut StringReader, if rdr.is_eof() { panic!(rdr.fatal("unterminated block comment")); } - if rdr.curr_is('\n') { + if rdr.ch_is('\n') { trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col); curr_line = String::new(); rdr.bump(); } else { - curr_line.push(rdr.curr.unwrap()); - if rdr.curr_is('/') && rdr.nextch_is('*') { + curr_line.push(rdr.ch.unwrap()); + if rdr.ch_is('/') && rdr.nextch_is('*') { rdr.bump(); rdr.bump(); curr_line.push('*'); level += 1; } else { - if rdr.curr_is('*') && rdr.nextch_is('/') { + if rdr.ch_is('*') && rdr.nextch_is('/') { rdr.bump(); rdr.bump(); curr_line.push('/'); @@ -305,7 +304,7 @@ fn read_block_comment(rdr: &mut StringReader, Isolated }; rdr.consume_non_eol_whitespace(); - if !rdr.is_eof() && !rdr.curr_is('\n') && lines.len() == 1 { + if !rdr.is_eof() && !rdr.ch_is('\n') && lines.len() == 1 { style = Mixed; } debug!("<<< block comment"); @@ -317,14 +316,22 @@ fn read_block_comment(rdr: &mut StringReader, } -fn consume_comment(rdr: &mut StringReader, code_to_the_left: bool, comments: &mut Vec) { +fn consume_comment(rdr: &mut StringReader, + comments: &mut Vec, + code_to_the_left: &mut bool, + anything_to_the_left: &mut bool) { debug!(">>> consume comment"); - if rdr.curr_is('/') && rdr.nextch_is('/') { - read_line_comments(rdr, code_to_the_left, comments); - } else if rdr.curr_is('/') && rdr.nextch_is('*') { - read_block_comment(rdr, code_to_the_left, comments); - } else if rdr.curr_is('#') && rdr.nextch_is('!') { - 
read_shebang_comment(rdr, code_to_the_left, comments); + if rdr.ch_is('/') && rdr.nextch_is('/') { + read_line_comments(rdr, *code_to_the_left, comments); + *code_to_the_left = false; + *anything_to_the_left = false; + } else if rdr.ch_is('/') && rdr.nextch_is('*') { + read_block_comment(rdr, *code_to_the_left, comments); + *anything_to_the_left = true; + } else if rdr.ch_is('#') && rdr.nextch_is('!') { + read_shebang_comment(rdr, *code_to_the_left, comments); + *code_to_the_left = false; + *anything_to_the_left = false; } else { panic!(); } @@ -352,24 +359,30 @@ pub fn gather_comments_and_literals(span_diagnostic: &errors::Handler, let mut comments: Vec = Vec::new(); let mut literals: Vec = Vec::new(); - let mut first_read: bool = true; + let mut code_to_the_left = false; // Only code + let mut anything_to_the_left = false; // Code or comments while !rdr.is_eof() { loop { - let mut code_to_the_left = !first_read; + // Eat all the whitespace and count blank lines. rdr.consume_non_eol_whitespace(); - if rdr.curr_is('\n') { - code_to_the_left = false; + if rdr.ch_is('\n') { + if anything_to_the_left { + rdr.bump(); // The line is not blank, do not count. + } consume_whitespace_counting_blank_lines(&mut rdr, &mut comments); + code_to_the_left = false; + anything_to_the_left = false; } - while rdr.peeking_at_comment() { - consume_comment(&mut rdr, code_to_the_left, &mut comments); - consume_whitespace_counting_blank_lines(&mut rdr, &mut comments); + // Eat one comment group + if rdr.peeking_at_comment() { + consume_comment(&mut rdr, &mut comments, + &mut code_to_the_left, &mut anything_to_the_left); + } else { + break } - break; } - - let bstart = rdr.last_pos; + let bstart = rdr.pos; rdr.next_token(); // discard, and look ahead; we're working with internal state let TokenAndSpan { tok, sp } = rdr.peek(); @@ -384,7 +397,8 @@ pub fn gather_comments_and_literals(span_diagnostic: &errors::Handler, } else { debug!("tok: {}", pprust::token_to_string(&tok)); } - first_read = false; + code_to_the_left = true; + anything_to_the_left = true; } (comments, literals) diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index 9e9ea09646088..5e20f6e419276 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -74,17 +74,29 @@ pub struct TokenAndSpan { pub sp: Span, } +impl Default for TokenAndSpan { + fn default() -> Self { + TokenAndSpan { tok: token::Underscore, sp: syntax_pos::DUMMY_SP } + } +} + pub struct StringReader<'a> { pub span_diagnostic: &'a Handler, /// The absolute offset within the codemap of the next character to read + pub next_pos: BytePos, + /// The absolute offset within the codemap of the current character pub pos: BytePos, - /// The absolute offset within the codemap of the last character read(curr) - pub last_pos: BytePos, /// The column of the next character to read pub col: CharPos, - /// The last character to be read - pub curr: Option, + /// The current character (which has been read from self.pos) + pub ch: Option, pub filemap: Rc, + /// If Some, stop reading the source at this position (inclusive). + pub terminator: Option, + /// Whether to record new-lines in filemap. This is only necessary the first + /// time a filemap is lexed. If part of a filemap is being re-lexed, this + /// should be set to false. 
+ pub save_new_lines: bool, // cached: pub peek_tok: token::Token, pub peek_span: Span, @@ -96,7 +108,14 @@ pub struct StringReader<'a> { impl<'a> Reader for StringReader<'a> { fn is_eof(&self) -> bool { - self.curr.is_none() + if self.ch.is_none() { + return true; + } + + match self.terminator { + Some(t) => self.next_pos > t, + None => false, + } } /// Return the next token. EFFECT: advances the string_reader. fn try_next_token(&mut self) -> Result { @@ -131,7 +150,7 @@ impl<'a> Reader for StringReader<'a> { impl<'a> Reader for TtReader<'a> { fn is_eof(&self) -> bool { - self.cur_tok == token::Eof + self.peek().tok == token::Eof } fn try_next_token(&mut self) -> Result { assert!(self.fatal_errs.is_empty()); @@ -152,18 +171,47 @@ impl<'a> Reader for TtReader<'a> { self.fatal_errs.clear(); } fn peek(&self) -> TokenAndSpan { - TokenAndSpan { + self.next_tok.clone().unwrap_or(TokenAndSpan { tok: self.cur_tok.clone(), sp: self.cur_span, - } + }) + } +} + +impl<'a, 'b> Reader for &'b mut TtReader<'a> { + fn is_eof(&self) -> bool { + (**self).is_eof() + } + fn try_next_token(&mut self) -> Result { + (**self).try_next_token() + } + fn fatal(&self, m: &str) -> FatalError { + (**self).fatal(m) + } + fn err(&self, m: &str) { + (**self).err(m) + } + fn emit_fatal_errors(&mut self) { + (**self).emit_fatal_errors() + } + fn peek(&self) -> TokenAndSpan { + (**self).peek() } } impl<'a> StringReader<'a> { - /// For comments.rs, which hackily pokes into pos and curr + /// For comments.rs, which hackily pokes into next_pos and ch pub fn new_raw<'b>(span_diagnostic: &'b Handler, filemap: Rc) -> StringReader<'b> { + let mut sr = StringReader::new_raw_internal(span_diagnostic, filemap); + sr.bump(); + sr + } + + fn new_raw_internal<'b>(span_diagnostic: &'b Handler, + filemap: Rc) + -> StringReader<'b> { if filemap.src.is_none() { span_diagnostic.bug(&format!("Cannot lex filemap \ without source: {}", @@ -172,21 +220,21 @@ impl<'a> StringReader<'a> { let source_text = (*filemap.src.as_ref().unwrap()).clone(); - let mut sr = StringReader { + StringReader { span_diagnostic: span_diagnostic, + next_pos: filemap.start_pos, pos: filemap.start_pos, - last_pos: filemap.start_pos, col: CharPos(0), - curr: Some('\n'), + ch: Some('\n'), filemap: filemap, + terminator: None, + save_new_lines: true, // dummy values; not read peek_tok: token::Eof, peek_span: syntax_pos::DUMMY_SP, source_text: source_text, fatal_errs: Vec::new(), - }; - sr.bump(); - sr + } } pub fn new<'b>(span_diagnostic: &'b Handler, @@ -200,8 +248,8 @@ impl<'a> StringReader<'a> { sr } - pub fn curr_is(&self, c: char) -> bool { - self.curr == Some(c) + pub fn ch_is(&self, c: char) -> bool { + self.ch == Some(c) } /// Report a fatal lexical error with a given span. @@ -296,9 +344,9 @@ impl<'a> StringReader<'a> { self.peek_tok = token::Eof; self.peek_span = syntax_pos::mk_sp(self.filemap.end_pos, self.filemap.end_pos); } else { - let start_bytepos = self.last_pos; + let start_bytepos = self.pos; self.peek_tok = self.next_token_inner()?; - self.peek_span = syntax_pos::mk_sp(start_bytepos, self.last_pos); + self.peek_span = syntax_pos::mk_sp(start_bytepos, self.pos); }; } } @@ -310,19 +358,19 @@ impl<'a> StringReader<'a> { } /// Calls `f` with a string slice of the source text spanning from `start` - /// up to but excluding `self.last_pos`, meaning the slice does not include - /// the character `self.curr`. + /// up to but excluding `self.pos`, meaning the slice does not include + /// the character `self.ch`. 
pub fn with_str_from(&self, start: BytePos, f: F) -> T where F: FnOnce(&str) -> T { - self.with_str_from_to(start, self.last_pos, f) + self.with_str_from_to(start, self.pos, f) } /// Create a Name from a given offset to the current offset, each /// adjusted 1 towards each other (assumes that on either side there is a /// single-byte delimiter). pub fn name_from(&self, start: BytePos) -> ast::Name { - debug!("taking an ident from {:?} to {:?}", start, self.last_pos); + debug!("taking an ident from {:?} to {:?}", start, self.pos); self.with_str_from(start, token::intern) } @@ -393,32 +441,35 @@ impl<'a> StringReader<'a> { /// Advance the StringReader by one character. If a newline is /// discovered, add it to the FileMap's list of line start offsets. pub fn bump(&mut self) { - self.last_pos = self.pos; - let current_byte_offset = self.byte_offset(self.pos).to_usize(); - if current_byte_offset < self.source_text.len() { - assert!(self.curr.is_some()); - let last_char = self.curr.unwrap(); - let ch = char_at(&self.source_text, current_byte_offset); - let next = current_byte_offset + ch.len_utf8(); - let byte_offset_diff = next - current_byte_offset; - self.pos = self.pos + Pos::from_usize(byte_offset_diff); - self.curr = Some(ch); - self.col = self.col + CharPos(1); - if last_char == '\n' { - self.filemap.next_line(self.last_pos); + let new_pos = self.next_pos; + let new_byte_offset = self.byte_offset(new_pos).to_usize(); + if new_byte_offset < self.source_text.len() { + let old_ch_is_newline = self.ch.unwrap() == '\n'; + let new_ch = char_at(&self.source_text, new_byte_offset); + let new_ch_len = new_ch.len_utf8(); + + self.ch = Some(new_ch); + self.pos = new_pos; + self.next_pos = new_pos + Pos::from_usize(new_ch_len); + if old_ch_is_newline { + if self.save_new_lines { + self.filemap.next_line(self.pos); + } self.col = CharPos(0); + } else { + self.col = self.col + CharPos(1); } - - if byte_offset_diff > 1 { - self.filemap.record_multibyte_char(self.last_pos, byte_offset_diff); + if new_ch_len > 1 { + self.filemap.record_multibyte_char(self.pos, new_ch_len); } } else { - self.curr = None; + self.ch = None; + self.pos = new_pos; } } pub fn nextch(&self) -> Option { - let offset = self.byte_offset(self.pos).to_usize(); + let offset = self.byte_offset(self.next_pos).to_usize(); if offset < self.source_text.len() { Some(char_at(&self.source_text, offset)) } else { @@ -431,7 +482,7 @@ impl<'a> StringReader<'a> { } pub fn nextnextch(&self) -> Option { - let offset = self.byte_offset(self.pos).to_usize(); + let offset = self.byte_offset(self.next_pos).to_usize(); let s = &self.source_text[..]; if offset >= s.len() { return None; @@ -450,11 +501,11 @@ impl<'a> StringReader<'a> { /// Eats *, if possible. fn scan_optional_raw_name(&mut self) -> Option { - if !ident_start(self.curr) { + if !ident_start(self.ch) { return None; } - let start = self.last_pos; - while ident_continue(self.curr) { + let start = self.pos; + while ident_continue(self.ch) { self.bump(); } @@ -467,41 +518,37 @@ impl<'a> StringReader<'a> { }) } - /// PRECONDITION: self.curr is not whitespace + /// PRECONDITION: self.ch is not whitespace /// Eats any kind of comment. 
fn scan_comment(&mut self) -> Option { - if let Some(c) = self.curr { + if let Some(c) = self.ch { if c.is_whitespace() { - self.span_diagnostic.span_err(syntax_pos::mk_sp(self.last_pos, self.last_pos), + self.span_diagnostic.span_err(syntax_pos::mk_sp(self.pos, self.pos), "called consume_any_line_comment, but there \ was whitespace"); } } - if self.curr_is('/') { + if self.ch_is('/') { match self.nextch() { Some('/') => { self.bump(); self.bump(); // line comments starting with "///" or "//!" are doc-comments - let doc_comment = self.curr_is('/') || self.curr_is('!'); - let start_bpos = if doc_comment { - self.pos - BytePos(3) - } else { - self.last_pos - BytePos(2) - }; + let doc_comment = self.ch_is('/') || self.ch_is('!'); + let start_bpos = self.pos - BytePos(2); while !self.is_eof() { - match self.curr.unwrap() { + match self.ch.unwrap() { '\n' => break, '\r' => { if self.nextch_is('\n') { // CRLF break; } else if doc_comment { - self.err_span_(self.last_pos, - self.pos, + self.err_span_(self.pos, + self.next_pos, "bare CR not allowed in doc-comment"); } } @@ -521,13 +568,13 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: syntax_pos::mk_sp(start_bpos, self.last_pos), + sp: syntax_pos::mk_sp(start_bpos, self.pos), }) }) } else { Some(TokenAndSpan { tok: token::Comment, - sp: syntax_pos::mk_sp(start_bpos, self.last_pos), + sp: syntax_pos::mk_sp(start_bpos, self.pos), }) }; } @@ -538,7 +585,7 @@ impl<'a> StringReader<'a> { } _ => None, } - } else if self.curr_is('#') { + } else if self.ch_is('#') { if self.nextch_is('!') { // Parse an inner attribute. @@ -550,17 +597,17 @@ impl<'a> StringReader<'a> { // we're at the beginning of the file... let cmap = CodeMap::new(); cmap.files.borrow_mut().push(self.filemap.clone()); - let loc = cmap.lookup_char_pos_adj(self.last_pos); + let loc = cmap.lookup_char_pos_adj(self.pos); debug!("Skipping a shebang"); if loc.line == 1 && loc.col == CharPos(0) { // FIXME: Add shebang "token", return it - let start = self.last_pos; - while !self.curr_is('\n') && !self.is_eof() { + let start = self.pos; + while !self.ch_is('\n') && !self.is_eof() { self.bump(); } return Some(TokenAndSpan { tok: token::Shebang(self.name_from(start)), - sp: syntax_pos::mk_sp(start, self.last_pos), + sp: syntax_pos::mk_sp(start, self.pos), }); } } @@ -573,7 +620,7 @@ impl<'a> StringReader<'a> { /// If there is whitespace, shebang, or a comment, scan it. Otherwise, /// return None. fn scan_whitespace_or_comment(&mut self) -> Option { - match self.curr.unwrap_or('\0') { + match self.ch.unwrap_or('\0') { // # to handle shebang at start of file -- this is the entry point // for skipping over all "junk" '/' | '#' => { @@ -582,13 +629,13 @@ impl<'a> StringReader<'a> { c }, c if is_pattern_whitespace(Some(c)) => { - let start_bpos = self.last_pos; - while is_pattern_whitespace(self.curr) { + let start_bpos = self.pos; + while is_pattern_whitespace(self.ch) { self.bump(); } let c = Some(TokenAndSpan { tok: token::Whitespace, - sp: syntax_pos::mk_sp(start_bpos, self.last_pos), + sp: syntax_pos::mk_sp(start_bpos, self.pos), }); debug!("scanning whitespace: {:?}", c); c @@ -600,8 +647,8 @@ impl<'a> StringReader<'a> { /// Might return a sugared-doc-attr fn scan_block_comment(&mut self) -> Option { // block comments starting with "/**" or "/*!" 
are doc-comments - let is_doc_comment = self.curr_is('*') || self.curr_is('!'); - let start_bpos = self.last_pos - BytePos(2); + let is_doc_comment = self.ch_is('*') || self.ch_is('!'); + let start_bpos = self.pos - BytePos(2); let mut level: isize = 1; let mut has_cr = false; @@ -612,10 +659,10 @@ impl<'a> StringReader<'a> { } else { "unterminated block comment" }; - let last_bpos = self.last_pos; + let last_bpos = self.pos; panic!(self.fatal_span_(start_bpos, last_bpos, msg)); } - let n = self.curr.unwrap(); + let n = self.ch.unwrap(); match n { '/' if self.nextch_is('*') => { level += 1; @@ -650,7 +697,7 @@ impl<'a> StringReader<'a> { Some(TokenAndSpan { tok: tok, - sp: syntax_pos::mk_sp(start_bpos, self.last_pos), + sp: syntax_pos::mk_sp(start_bpos, self.pos), }) }) } @@ -665,7 +712,7 @@ impl<'a> StringReader<'a> { assert!(real_radix <= scan_radix); let mut len = 0; loop { - let c = self.curr; + let c = self.ch; if c == Some('_') { debug!("skipping a _"); self.bump(); @@ -677,8 +724,8 @@ impl<'a> StringReader<'a> { // check that the hypothetical digit is actually // in range for the true radix if c.unwrap().to_digit(real_radix).is_none() { - self.err_span_(self.last_pos, - self.pos, + self.err_span_(self.pos, + self.next_pos, &format!("invalid digit for a base {} literal", real_radix)); } len += 1; @@ -693,12 +740,12 @@ impl<'a> StringReader<'a> { fn scan_number(&mut self, c: char) -> token::Lit { let num_digits; let mut base = 10; - let start_bpos = self.last_pos; + let start_bpos = self.pos; self.bump(); if c == '0' { - match self.curr.unwrap_or('\0') { + match self.ch.unwrap_or('\0') { 'b' => { self.bump(); base = 2; @@ -730,7 +777,7 @@ impl<'a> StringReader<'a> { if num_digits == 0 { self.err_span_(start_bpos, - self.last_pos, + self.pos, "no valid digits found for number"); return token::Integer(token::intern("0")); } @@ -738,26 +785,26 @@ impl<'a> StringReader<'a> { // might be a float, but don't be greedy if this is actually an // integer literal followed by field/method access or a range pattern // (`0..2` and `12.foo()`) - if self.curr_is('.') && !self.nextch_is('.') && + if self.ch_is('.') && !self.nextch_is('.') && !self.nextch() .unwrap_or('\0') .is_xid_start() { // might have stuff after the ., and if it does, it needs to start // with a number self.bump(); - if self.curr.unwrap_or('\0').is_digit(10) { + if self.ch.unwrap_or('\0').is_digit(10) { self.scan_digits(10, 10); self.scan_float_exponent(); } - let last_pos = self.last_pos; - self.check_float_base(start_bpos, last_pos, base); + let pos = self.pos; + self.check_float_base(start_bpos, pos, base); return token::Float(self.name_from(start_bpos)); } else { // it might be a float if it has an exponent - if self.curr_is('e') || self.curr_is('E') { + if self.ch_is('e') || self.ch_is('E') { self.scan_float_exponent(); - let last_pos = self.last_pos; - self.check_float_base(start_bpos, last_pos, base); + let pos = self.pos; + self.check_float_base(start_bpos, pos, base); return token::Float(self.name_from(start_bpos)); } // but we certainly have an integer! @@ -769,30 +816,30 @@ impl<'a> StringReader<'a> { /// error if too many or too few digits are encountered. 
fn scan_hex_digits(&mut self, n_digits: usize, delim: char, below_0x7f_only: bool) -> bool { debug!("scanning {} digits until {:?}", n_digits, delim); - let start_bpos = self.last_pos; + let start_bpos = self.pos; let mut accum_int = 0; let mut valid = true; for _ in 0..n_digits { if self.is_eof() { - let last_bpos = self.last_pos; + let last_bpos = self.pos; panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated numeric character escape")); } - if self.curr_is(delim) { - let last_bpos = self.last_pos; + if self.ch_is(delim) { + let last_bpos = self.pos; self.err_span_(start_bpos, last_bpos, "numeric character escape is too short"); valid = false; break; } - let c = self.curr.unwrap_or('\x00'); + let c = self.ch.unwrap_or('\x00'); accum_int *= 16; accum_int += c.to_digit(16).unwrap_or_else(|| { - self.err_span_char(self.last_pos, - self.pos, + self.err_span_char(self.pos, + self.next_pos, "invalid character in numeric character escape", c); @@ -804,7 +851,7 @@ impl<'a> StringReader<'a> { if below_0x7f_only && accum_int >= 0x80 { self.err_span_(start_bpos, - self.last_pos, + self.pos, "this form of character escape may only be used with characters in \ the range [\\x00-\\x7f]"); valid = false; @@ -813,7 +860,7 @@ impl<'a> StringReader<'a> { match char::from_u32(accum_int) { Some(_) => valid, None => { - let last_bpos = self.last_pos; + let last_bpos = self.pos; self.err_span_(start_bpos, last_bpos, "invalid numeric character escape"); false } @@ -834,8 +881,8 @@ impl<'a> StringReader<'a> { match first_source_char { '\\' => { // '\X' for some X must be a character constant: - let escaped = self.curr; - let escaped_pos = self.last_pos; + let escaped = self.ch; + let escaped_pos = self.pos; self.bump(); match escaped { None => {} // EOF here is an error that will be checked later. 
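// A minimal stand-alone sketch of the renamed lexer cursor model used throughout
// the StringReader hunks above and below: `pos` is the offset of the character
// currently held in `ch`, `next_pos` is the offset of the next character to read,
// and `ch_is` replaces the old `curr_is`. `Cursor` is a simplified stand-in for
// StringReader, not the real type; it only illustrates the bookkeeping, under
// that assumption.

struct Cursor<'a> {
    src: &'a str,
    /// Byte offset of the character currently in `ch` (previously `last_pos`).
    pos: usize,
    /// Byte offset of the next character to read (previously `pos`).
    next_pos: usize,
    /// The current character, already read from `pos` (previously `curr`).
    ch: Option<char>,
}

impl<'a> Cursor<'a> {
    fn new(src: &'a str) -> Cursor<'a> {
        let mut cur = Cursor { src: src, pos: 0, next_pos: 0, ch: None };
        cur.bump(); // load the first character, as new_raw() now does
        cur
    }

    /// Advance one character: `pos`/`ch` take over what `next_pos` pointed at.
    fn bump(&mut self) {
        self.pos = self.next_pos;
        match self.src[self.next_pos..].chars().next() {
            Some(c) => {
                self.next_pos += c.len_utf8();
                self.ch = Some(c);
            }
            None => self.ch = None,
        }
    }

    fn ch_is(&self, c: char) -> bool {
        self.ch == Some(c)
    }

    fn is_eof(&self) -> bool {
        self.ch.is_none()
    }
}

fn main() {
    let mut cur = Cursor::new("é=1");
    assert!(cur.ch_is('é') && cur.pos == 0 && cur.next_pos == 2); // 'é' is two bytes
    cur.bump();
    assert!(cur.ch_is('=') && cur.pos == 2 && cur.next_pos == 3);
    cur.bump(); // '1'
    cur.bump(); // past the end
    assert!(cur.is_eof());
}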
@@ -844,10 +891,10 @@ impl<'a> StringReader<'a> { 'n' | 'r' | 't' | '\\' | '\'' | '"' | '0' => true, 'x' => self.scan_byte_escape(delim, !ascii_only), 'u' => { - let valid = if self.curr_is('{') { + let valid = if self.ch_is('{') { self.scan_unicode_escape(delim) && !ascii_only } else { - let span = syntax_pos::mk_sp(start, self.last_pos); + let span = syntax_pos::mk_sp(start, self.pos); self.span_diagnostic .struct_span_err(span, "incorrect unicode escape sequence") .span_help(span, @@ -858,7 +905,7 @@ impl<'a> StringReader<'a> { }; if ascii_only { self.err_span_(start, - self.last_pos, + self.pos, "unicode escape sequences cannot be used as a \ byte or in a byte string"); } @@ -869,14 +916,14 @@ impl<'a> StringReader<'a> { self.consume_whitespace(); true } - '\r' if delim == '"' && self.curr_is('\n') => { + '\r' if delim == '"' && self.ch_is('\n') => { self.consume_whitespace(); true } c => { - let last_pos = self.last_pos; + let pos = self.pos; let mut err = self.struct_err_span_char(escaped_pos, - last_pos, + pos, if ascii_only { "unknown byte escape" } else { @@ -885,13 +932,13 @@ impl<'a> StringReader<'a> { }, c); if e == '\r' { - err.span_help(syntax_pos::mk_sp(escaped_pos, last_pos), + err.span_help(syntax_pos::mk_sp(escaped_pos, pos), "this is an isolated carriage return; consider \ checking your editor and version control \ settings"); } if (e == '{' || e == '}') && !ascii_only { - err.span_help(syntax_pos::mk_sp(escaped_pos, last_pos), + err.span_help(syntax_pos::mk_sp(escaped_pos, pos), "if used in a formatting string, curly braces \ are escaped with `{{` and `}}`"); } @@ -903,9 +950,9 @@ impl<'a> StringReader<'a> { } } '\t' | '\n' | '\r' | '\'' if delim == '\'' => { - let last_pos = self.last_pos; + let pos = self.pos; self.err_span_char(start, - last_pos, + pos, if ascii_only { "byte constant must be escaped" } else { @@ -915,21 +962,21 @@ impl<'a> StringReader<'a> { return false; } '\r' => { - if self.curr_is('\n') { + if self.ch_is('\n') { self.bump(); return true; } else { self.err_span_(start, - self.last_pos, + self.pos, "bare CR not allowed in string, use \\r instead"); return false; } } _ => { if ascii_only && first_source_char > '\x7F' { - let last_pos = self.last_pos; + let pos = self.pos; self.err_span_(start, - last_pos, + pos, "byte constant must be ASCII. Use a \\xHH escape for a \ non-ASCII byte"); return false; @@ -945,29 +992,29 @@ impl<'a> StringReader<'a> { /// will read at least one digit, and up to 6, and pass over the }. 
fn scan_unicode_escape(&mut self, delim: char) -> bool { self.bump(); // past the { - let start_bpos = self.last_pos; + let start_bpos = self.pos; let mut count = 0; let mut accum_int = 0; let mut valid = true; - while !self.curr_is('}') && count <= 6 { - let c = match self.curr { + while !self.ch_is('}') && count <= 6 { + let c = match self.ch { Some(c) => c, None => { panic!(self.fatal_span_(start_bpos, - self.last_pos, + self.pos, "unterminated unicode escape (found EOF)")); } }; accum_int *= 16; accum_int += c.to_digit(16).unwrap_or_else(|| { if c == delim { - panic!(self.fatal_span_(self.last_pos, - self.pos, + panic!(self.fatal_span_(self.pos, + self.next_pos, "unterminated unicode escape (needed a `}`)")); } else { - self.err_span_char(self.last_pos, - self.pos, + self.err_span_char(self.pos, + self.next_pos, "invalid character in unicode escape", c); } @@ -980,14 +1027,14 @@ impl<'a> StringReader<'a> { if count > 6 { self.err_span_(start_bpos, - self.last_pos, + self.pos, "overlong unicode escape (can have at most 6 hex digits)"); valid = false; } if valid && (char::from_u32(accum_int).is_none() || count == 0) { self.err_span_(start_bpos, - self.last_pos, + self.pos, "invalid unicode character escape"); valid = false; } @@ -998,14 +1045,14 @@ impl<'a> StringReader<'a> { /// Scan over a float exponent. fn scan_float_exponent(&mut self) { - if self.curr_is('e') || self.curr_is('E') { + if self.ch_is('e') || self.ch_is('E') { self.bump(); - if self.curr_is('-') || self.curr_is('+') { + if self.ch_is('-') || self.ch_is('+') { self.bump(); } if self.scan_digits(10, 10) == 0 { - self.err_span_(self.last_pos, - self.pos, + self.err_span_(self.pos, + self.next_pos, "expected at least one digit in exponent") } } @@ -1036,7 +1083,7 @@ impl<'a> StringReader<'a> { fn binop(&mut self, op: token::BinOpToken) -> token::Token { self.bump(); - if self.curr_is('=') { + if self.ch_is('=') { self.bump(); return token::BinOpEq(op); } else { @@ -1047,7 +1094,7 @@ impl<'a> StringReader<'a> { /// Return the next token from the string, advances the input past that /// token, and updates the interner fn next_token_inner(&mut self) -> Result { - let c = self.curr; + let c = self.ch; if ident_start(c) && match (c.unwrap(), self.nextch(), self.nextnextch()) { // Note: r as in r" or r#" is part of a raw string literal, @@ -1061,8 +1108,8 @@ impl<'a> StringReader<'a> { ('b', Some('r'), Some('#')) => false, _ => true, } { - let start = self.last_pos; - while ident_continue(self.curr) { + let start = self.pos; + while ident_continue(self.ch) { self.bump(); } @@ -1095,9 +1142,9 @@ impl<'a> StringReader<'a> { } '.' => { self.bump(); - return if self.curr_is('.') { + return if self.ch_is('.') { self.bump(); - if self.curr_is('.') { + if self.ch_is('.') { self.bump(); Ok(token::DotDotDot) } else { @@ -1149,7 +1196,7 @@ impl<'a> StringReader<'a> { } ':' => { self.bump(); - if self.curr_is(':') { + if self.ch_is(':') { self.bump(); return Ok(token::ModSep); } else { @@ -1165,10 +1212,10 @@ impl<'a> StringReader<'a> { // Multi-byte tokens. '=' => { self.bump(); - if self.curr_is('=') { + if self.ch_is('=') { self.bump(); return Ok(token::EqEq); - } else if self.curr_is('>') { + } else if self.ch_is('>') { self.bump(); return Ok(token::FatArrow); } else { @@ -1177,7 +1224,7 @@ impl<'a> StringReader<'a> { } '!' 
=> { self.bump(); - if self.curr_is('=') { + if self.ch_is('=') { self.bump(); return Ok(token::Ne); } else { @@ -1186,7 +1233,7 @@ impl<'a> StringReader<'a> { } '<' => { self.bump(); - match self.curr.unwrap_or('\x00') { + match self.ch.unwrap_or('\x00') { '=' => { self.bump(); return Ok(token::Le); @@ -1196,7 +1243,7 @@ impl<'a> StringReader<'a> { } '-' => { self.bump(); - match self.curr.unwrap_or('\x00') { + match self.ch.unwrap_or('\x00') { _ => { return Ok(token::LArrow); } @@ -1209,7 +1256,7 @@ impl<'a> StringReader<'a> { } '>' => { self.bump(); - match self.curr.unwrap_or('\x00') { + match self.ch.unwrap_or('\x00') { '=' => { self.bump(); return Ok(token::Ge); @@ -1224,25 +1271,25 @@ impl<'a> StringReader<'a> { } '\'' => { // Either a character constant 'a' OR a lifetime name 'abc - let start_with_quote = self.last_pos; + let start_with_quote = self.pos; self.bump(); - let start = self.last_pos; + let start = self.pos; // the eof will be picked up by the final `'` check below - let c2 = self.curr.unwrap_or('\x00'); + let c2 = self.ch.unwrap_or('\x00'); self.bump(); // If the character is an ident start not followed by another single // quote, then this is a lifetime name: - if ident_start(Some(c2)) && !self.curr_is('\'') { - while ident_continue(self.curr) { + if ident_start(Some(c2)) && !self.ch_is('\'') { + while ident_continue(self.ch) { self.bump(); } // lifetimes shouldn't end with a single quote // if we find one, then this is an invalid character literal - if self.curr_is('\'') { + if self.ch_is('\'') { panic!(self.fatal_span_verbose( - start_with_quote, self.pos, + start_with_quote, self.next_pos, String::from("character literal may only contain one codepoint"))); } @@ -1260,7 +1307,7 @@ impl<'a> StringReader<'a> { str_to_ident(lifetime_name) }); let keyword_checking_token = &token::Ident(keyword_checking_ident); - let last_bpos = self.last_pos; + let last_bpos = self.pos; if keyword_checking_token.is_any_keyword() && !keyword_checking_token.is_keyword(keywords::Static) { self.err_span_(start, last_bpos, "lifetimes cannot use keyword names"); @@ -1275,9 +1322,9 @@ impl<'a> StringReader<'a> { false, '\''); - if !self.curr_is('\'') { + if !self.ch_is('\'') { panic!(self.fatal_span_verbose( - start_with_quote, self.last_pos, + start_with_quote, self.pos, String::from("character literal may only contain one codepoint"))); } @@ -1286,13 +1333,13 @@ impl<'a> StringReader<'a> { } else { token::intern("0") }; - self.bump(); // advance curr past token + self.bump(); // advance ch past token let suffix = self.scan_optional_raw_name(); return Ok(token::Literal(token::Char(id), suffix)); } 'b' => { self.bump(); - let lit = match self.curr { + let lit = match self.ch { Some('\'') => self.scan_byte(), Some('"') => self.scan_byte_string(), Some('r') => self.scan_raw_byte_string(), @@ -1302,19 +1349,19 @@ impl<'a> StringReader<'a> { return Ok(token::Literal(lit, suffix)); } '"' => { - let start_bpos = self.last_pos; + let start_bpos = self.pos; let mut valid = true; self.bump(); - while !self.curr_is('"') { + while !self.ch_is('"') { if self.is_eof() { - let last_bpos = self.last_pos; + let last_bpos = self.pos; panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated double quote string")); } - let ch_start = self.last_pos; - let ch = self.curr.unwrap(); + let ch_start = self.pos; + let ch = self.ch.unwrap(); self.bump(); valid &= self.scan_char_or_byte(ch_start, ch, @@ -1333,20 +1380,20 @@ impl<'a> StringReader<'a> { return Ok(token::Literal(token::Str_(id), suffix)); } 'r' => { - 
let start_bpos = self.last_pos; + let start_bpos = self.pos; self.bump(); let mut hash_count = 0; - while self.curr_is('#') { + while self.ch_is('#') { self.bump(); hash_count += 1; } if self.is_eof() { - let last_bpos = self.last_pos; + let last_bpos = self.pos; panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string")); - } else if !self.curr_is('"') { - let last_bpos = self.last_pos; - let curr_char = self.curr.unwrap(); + } else if !self.ch_is('"') { + let last_bpos = self.pos; + let curr_char = self.ch.unwrap(); panic!(self.fatal_span_char(start_bpos, last_bpos, "found invalid character; only `#` is allowed \ @@ -1354,27 +1401,27 @@ impl<'a> StringReader<'a> { curr_char)); } self.bump(); - let content_start_bpos = self.last_pos; + let content_start_bpos = self.pos; let mut content_end_bpos; let mut valid = true; 'outer: loop { if self.is_eof() { - let last_bpos = self.last_pos; + let last_bpos = self.pos; panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string")); } - // if self.curr_is('"') { - // content_end_bpos = self.last_pos; + // if self.ch_is('"') { + // content_end_bpos = self.pos; // for _ in 0..hash_count { // self.bump(); - // if !self.curr_is('#') { + // if !self.ch_is('#') { // continue 'outer; - let c = self.curr.unwrap(); + let c = self.ch.unwrap(); match c { '"' => { - content_end_bpos = self.last_pos; + content_end_bpos = self.pos; for _ in 0..hash_count { self.bump(); - if !self.curr_is('#') { + if !self.ch_is('#') { continue 'outer; } } @@ -1382,7 +1429,7 @@ impl<'a> StringReader<'a> { } '\r' => { if !self.nextch_is('\n') { - let last_bpos = self.last_pos; + let last_bpos = self.pos; self.err_span_(start_bpos, last_bpos, "bare CR not allowed in raw string, use \\r \ @@ -1449,8 +1496,8 @@ impl<'a> StringReader<'a> { return Ok(self.binop(token::Percent)); } c => { - let last_bpos = self.last_pos; - let bpos = self.pos; + let last_bpos = self.pos; + let bpos = self.next_pos; let mut err = self.struct_fatal_span_char(last_bpos, bpos, "unknown start of token", @@ -1463,18 +1510,18 @@ impl<'a> StringReader<'a> { } fn consume_whitespace(&mut self) { - while is_pattern_whitespace(self.curr) && !self.is_eof() { + while is_pattern_whitespace(self.ch) && !self.is_eof() { self.bump(); } } fn read_to_eol(&mut self) -> String { let mut val = String::new(); - while !self.curr_is('\n') && !self.is_eof() { - val.push(self.curr.unwrap()); + while !self.ch_is('\n') && !self.is_eof() { + val.push(self.ch.unwrap()); self.bump(); } - if self.curr_is('\n') { + if self.ch_is('\n') { self.bump(); } return val; @@ -1488,23 +1535,23 @@ impl<'a> StringReader<'a> { } fn consume_non_eol_whitespace(&mut self) { - while is_pattern_whitespace(self.curr) && !self.curr_is('\n') && !self.is_eof() { + while is_pattern_whitespace(self.ch) && !self.ch_is('\n') && !self.is_eof() { self.bump(); } } fn peeking_at_comment(&self) -> bool { - (self.curr_is('/') && self.nextch_is('/')) || (self.curr_is('/') && self.nextch_is('*')) || + (self.ch_is('/') && self.nextch_is('/')) || (self.ch_is('/') && self.nextch_is('*')) || // consider shebangs comments, but not inner attributes - (self.curr_is('#') && self.nextch_is('!') && !self.nextnextch_is('[')) + (self.ch_is('#') && self.nextch_is('!') && !self.nextnextch_is('[')) } fn scan_byte(&mut self) -> token::Lit { self.bump(); - let start = self.last_pos; + let start = self.pos; // the eof will be picked up by the final `'` check below - let c2 = self.curr.unwrap_or('\x00'); + let c2 = self.ch.unwrap_or('\x00'); self.bump(); let 
valid = self.scan_char_or_byte(start, @@ -1512,13 +1559,13 @@ impl<'a> StringReader<'a> { // ascii_only = true, '\''); - if !self.curr_is('\'') { + if !self.ch_is('\'') { // Byte offsetting here is okay because the // character before position `start` are an // ascii single quote and ascii 'b'. - let last_pos = self.last_pos; + let pos = self.pos; panic!(self.fatal_span_verbose(start - BytePos(2), - last_pos, + pos, "unterminated byte constant".to_string())); } @@ -1527,7 +1574,7 @@ impl<'a> StringReader<'a> { } else { token::intern("?") }; - self.bump(); // advance curr past token + self.bump(); // advance ch past token return token::Byte(id); } @@ -1537,17 +1584,17 @@ impl<'a> StringReader<'a> { fn scan_byte_string(&mut self) -> token::Lit { self.bump(); - let start = self.last_pos; + let start = self.pos; let mut valid = true; - while !self.curr_is('"') { + while !self.ch_is('"') { if self.is_eof() { - let last_pos = self.last_pos; - panic!(self.fatal_span_(start, last_pos, "unterminated double quote byte string")); + let pos = self.pos; + panic!(self.fatal_span_(start, pos, "unterminated double quote byte string")); } - let ch_start = self.last_pos; - let ch = self.curr.unwrap(); + let ch_start = self.pos; + let ch = self.ch.unwrap(); self.bump(); valid &= self.scan_char_or_byte(ch_start, ch, @@ -1565,40 +1612,40 @@ impl<'a> StringReader<'a> { } fn scan_raw_byte_string(&mut self) -> token::Lit { - let start_bpos = self.last_pos; + let start_bpos = self.pos; self.bump(); let mut hash_count = 0; - while self.curr_is('#') { + while self.ch_is('#') { self.bump(); hash_count += 1; } if self.is_eof() { - let last_pos = self.last_pos; - panic!(self.fatal_span_(start_bpos, last_pos, "unterminated raw string")); - } else if !self.curr_is('"') { - let last_pos = self.last_pos; - let ch = self.curr.unwrap(); + let pos = self.pos; + panic!(self.fatal_span_(start_bpos, pos, "unterminated raw string")); + } else if !self.ch_is('"') { + let pos = self.pos; + let ch = self.ch.unwrap(); panic!(self.fatal_span_char(start_bpos, - last_pos, + pos, "found invalid character; only `#` is allowed in raw \ string delimitation", ch)); } self.bump(); - let content_start_bpos = self.last_pos; + let content_start_bpos = self.pos; let mut content_end_bpos; 'outer: loop { - match self.curr { + match self.ch { None => { - let last_pos = self.last_pos; - panic!(self.fatal_span_(start_bpos, last_pos, "unterminated raw string")) + let pos = self.pos; + panic!(self.fatal_span_(start_bpos, pos, "unterminated raw string")) } Some('"') => { - content_end_bpos = self.last_pos; + content_end_bpos = self.pos; for _ in 0..hash_count { self.bump(); - if !self.curr_is('#') { + if !self.ch_is('#') { continue 'outer; } } @@ -1606,8 +1653,8 @@ impl<'a> StringReader<'a> { } Some(c) => { if c > '\x7F' { - let last_pos = self.last_pos; - self.err_span_char(last_pos, last_pos, "raw byte string must be ASCII", c); + let pos = self.pos; + self.err_span_char(pos, pos, "raw byte string must be ASCII", c); } } } @@ -1721,7 +1768,7 @@ mod tests { assert_eq!(tok1, tok2); assert_eq!(string_reader.next_token().tok, token::Whitespace); // the 'main' id is already read: - assert_eq!(string_reader.last_pos.clone(), BytePos(28)); + assert_eq!(string_reader.pos.clone(), BytePos(28)); // read another token: let tok3 = string_reader.next_token(); let tok4 = TokenAndSpan { @@ -1734,7 +1781,7 @@ mod tests { }; assert_eq!(tok3, tok4); // the lparen is already read: - assert_eq!(string_reader.last_pos.clone(), BytePos(29)) + 
assert_eq!(string_reader.pos.clone(), BytePos(29)) } // check that the given reader produces the desired stream diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs index dab97d1d5a6ff..1e08b20b7e1f4 100644 --- a/src/libsyntax/parse/lexer/unicode_chars.rs +++ b/src/libsyntax/parse/lexer/unicode_chars.rs @@ -234,7 +234,7 @@ pub fn check_for_substitution<'a>(reader: &StringReader<'a>, .iter() .find(|&&(c, _, _)| c == ch) .map(|&(_, u_name, ascii_char)| { - let span = make_span(reader.last_pos, reader.pos); + let span = make_span(reader.pos, reader.next_pos); match ASCII_ARRAY.iter().find(|&&(c, _)| c == ascii_char) { Some(&(ascii_char, ascii_name)) => { let msg = diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index a1eceb6921c6d..12408c7d3c95b 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -10,10 +10,11 @@ //! The main parser interface -use ast; +use ast::{self, CrateConfig}; use codemap::CodeMap; use syntax_pos::{self, Span, FileMap}; use errors::{Handler, ColorConfig, DiagnosticBuilder}; +use feature_gate::UnstableFeatures; use parse::parser::Parser; use parse::token::InternedString; use ptr::P; @@ -42,13 +43,15 @@ pub mod obsolete; /// Info about a parsing session. pub struct ParseSess { pub span_diagnostic: Handler, // better be the same as the one in the reader! + pub unstable_features: UnstableFeatures, + pub config: CrateConfig, /// Used to determine and report recursive mod inclusions included_mod_stack: RefCell>, code_map: Rc, } impl ParseSess { - pub fn new() -> ParseSess { + pub fn new() -> Self { let cm = Rc::new(CodeMap::new()); let handler = Handler::with_tty_emitter(ColorConfig::Auto, true, @@ -60,6 +63,8 @@ impl ParseSess { pub fn with_span_handler(handler: Handler, code_map: Rc) -> ParseSess { ParseSess { span_diagnostic: handler, + unstable_features: UnstableFeatures::from_environment(), + config: Vec::new(), included_mod_stack: RefCell::new(vec![]), code_map: code_map } @@ -75,146 +80,90 @@ impl ParseSess { // uses a HOF to parse anything, and includes file and // source_str. 
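An aside on what the raw-string scanning hunks above implement: `scan_raw_string` and `scan_raw_byte_string` count the leading `#` characters (`hash_count`) and only treat a `"` as the closing delimiter when it is followed by the same number of `#`s, and raw *byte* strings are additionally required to be ASCII. A small illustrative sketch of that surface syntax in ordinary Rust (nothing here touches the lexer internals):

```rust
fn main() {
    // One `#` lets the literal contain a bare `"` without escaping.
    let with_quote = r#"she said "hi""#;
    // Two `#`s let the literal even contain the sequence `"#`.
    let with_hash = r##"ends with "# inside"##;
    // Raw byte strings follow the same delimiter-counting rule, but must be ASCII.
    let bytes: &[u8] = br#"ASCII only, "quoted""#;

    assert_eq!(with_quote, "she said \"hi\"");
    assert!(with_hash.contains("\"#"));
    assert_eq!(bytes[0], b'A');
}
```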
-pub fn parse_crate_from_file<'a>(input: &Path, - cfg: ast::CrateConfig, - sess: &'a ParseSess) - -> PResult<'a, ast::Crate> { - let mut parser = new_parser_from_file(sess, cfg, input); +pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> { + let mut parser = new_parser_from_file(sess, input); parser.parse_crate_mod() } -pub fn parse_crate_attrs_from_file<'a>(input: &Path, - cfg: ast::CrateConfig, - sess: &'a ParseSess) +pub fn parse_crate_attrs_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, Vec> { - let mut parser = new_parser_from_file(sess, cfg, input); + let mut parser = new_parser_from_file(sess, input); parser.parse_inner_attributes() } -pub fn parse_crate_from_source_str<'a>(name: String, - source: String, - cfg: ast::CrateConfig, - sess: &'a ParseSess) +pub fn parse_crate_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) -> PResult<'a, ast::Crate> { - let mut p = new_parser_from_source_str(sess, - cfg, - name, - source); - p.parse_crate_mod() + new_parser_from_source_str(sess, name, source).parse_crate_mod() } -pub fn parse_crate_attrs_from_source_str<'a>(name: String, - source: String, - cfg: ast::CrateConfig, - sess: &'a ParseSess) +pub fn parse_crate_attrs_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) -> PResult<'a, Vec> { - let mut p = new_parser_from_source_str(sess, - cfg, - name, - source); - p.parse_inner_attributes() + new_parser_from_source_str(sess, name, source).parse_inner_attributes() } -pub fn parse_expr_from_source_str<'a>(name: String, - source: String, - cfg: ast::CrateConfig, - sess: &'a ParseSess) +pub fn parse_expr_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) -> PResult<'a, P> { - let mut p = new_parser_from_source_str(sess, cfg, name, source); - p.parse_expr() + new_parser_from_source_str(sess, name, source).parse_expr() } /// Parses an item. /// /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and`Err` /// when a syntax error occurred. -pub fn parse_item_from_source_str<'a>(name: String, - source: String, - cfg: ast::CrateConfig, - sess: &'a ParseSess) +pub fn parse_item_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) -> PResult<'a, Option>> { - let mut p = new_parser_from_source_str(sess, cfg, name, source); - p.parse_item() + new_parser_from_source_str(sess, name, source).parse_item() } -pub fn parse_meta_from_source_str<'a>(name: String, - source: String, - cfg: ast::CrateConfig, - sess: &'a ParseSess) +pub fn parse_meta_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) -> PResult<'a, P> { - let mut p = new_parser_from_source_str(sess, cfg, name, source); - p.parse_meta_item() + new_parser_from_source_str(sess, name, source).parse_meta_item() } -pub fn parse_stmt_from_source_str<'a>(name: String, - source: String, - cfg: ast::CrateConfig, - sess: &'a ParseSess) +pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) -> PResult<'a, Option> { - let mut p = new_parser_from_source_str( - sess, - cfg, - name, - source - ); - p.parse_stmt() + new_parser_from_source_str(sess, name, source).parse_stmt() } // Warning: This parses with quote_depth > 0, which is not the default. 
-pub fn parse_tts_from_source_str<'a>(name: String, - source: String, - cfg: ast::CrateConfig, - sess: &'a ParseSess) +pub fn parse_tts_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) -> PResult<'a, Vec> { - let mut p = new_parser_from_source_str( - sess, - cfg, - name, - source - ); + let mut p = new_parser_from_source_str(sess, name, source); p.quote_depth += 1; // right now this is re-creating the token trees from ... token trees. p.parse_all_token_trees() } // Create a new parser from a source string -pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, - cfg: ast::CrateConfig, - name: String, - source: String) +pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String) -> Parser<'a> { - filemap_to_parser(sess, sess.codemap().new_filemap(name, None, source), cfg) + filemap_to_parser(sess, sess.codemap().new_filemap(name, None, source)) } /// Create a new parser, handling errors as appropriate /// if the file doesn't exist -pub fn new_parser_from_file<'a>(sess: &'a ParseSess, - cfg: ast::CrateConfig, - path: &Path) -> Parser<'a> { - filemap_to_parser(sess, file_to_filemap(sess, path, None), cfg) +pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> { + filemap_to_parser(sess, file_to_filemap(sess, path, None)) } /// Given a session, a crate config, a path, and a span, add /// the file at the given path to the codemap, and return a parser. /// On an error, use the given span as the source of the problem. pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess, - cfg: ast::CrateConfig, path: &Path, owns_directory: bool, module_name: Option, sp: Span) -> Parser<'a> { - let mut p = filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp)), cfg); + let mut p = filemap_to_parser(sess, file_to_filemap(sess, path, Some(sp))); p.owns_directory = owns_directory; p.root_module_name = module_name; p } /// Given a filemap and config, return a parser -pub fn filemap_to_parser<'a>(sess: &'a ParseSess, - filemap: Rc, - cfg: ast::CrateConfig) -> Parser<'a> { +pub fn filemap_to_parser<'a>(sess: &'a ParseSess, filemap: Rc, ) -> Parser<'a> { let end_pos = filemap.end_pos; - let mut parser = tts_to_parser(sess, filemap_to_tts(sess, filemap), cfg); + let mut parser = tts_to_parser(sess, filemap_to_tts(sess, filemap)); if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP { parser.span = syntax_pos::mk_sp(end_pos, end_pos); @@ -225,18 +174,13 @@ pub fn filemap_to_parser<'a>(sess: &'a ParseSess, // must preserve old name for now, because quote! from the *existing* // compiler expands into it -pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, - cfg: ast::CrateConfig, - tts: Vec) +pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec) -> Parser<'a> { - tts_to_parser(sess, tts, cfg) + tts_to_parser(sess, tts) } -pub fn new_parser_from_ts<'a>(sess: &'a ParseSess, - cfg: ast::CrateConfig, - ts: tokenstream::TokenStream) - -> Parser<'a> { - tts_to_parser(sess, ts.to_tts(), cfg) +pub fn new_parser_from_ts<'a>(sess: &'a ParseSess, ts: tokenstream::TokenStream) -> Parser<'a> { + tts_to_parser(sess, ts.to_tts()) } @@ -263,18 +207,15 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc) -> Vec { // it appears to me that the cfg doesn't matter here... indeed, // parsing tt's probably shouldn't require a parser at all. 
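The signature changes above all follow one refactoring: the crate configuration that used to be threaded through every entry point as `cfg: ast::CrateConfig` now lives on `ParseSess` itself (see the `config` field added to `ParseSess` earlier in this diff), so callers pass one argument less and can chain parser construction with the parse call. A minimal sketch of that pattern with hypothetical `Session`/`Parser` types, not the real libsyntax ones:

```rust
// Hypothetical stand-ins for ParseSess/Parser, for illustration only.
struct Session {
    config: Vec<String>, // previously passed separately to every entry point
}

struct Parser<'a> {
    sess: &'a Session,
    source: String,
}

// Old shape (schematically): new_parser(sess, cfg, source)
// New shape: the session already owns the config.
fn new_parser<'a>(sess: &'a Session, source: String) -> Parser<'a> {
    Parser { sess, source }
}

impl<'a> Parser<'a> {
    fn parse_len(&self) -> usize {
        // A trivial stand-in for real parsing, just so the example runs.
        self.source.len() + self.sess.config.len()
    }
}

fn main() {
    let sess = Session { config: vec!["unix".to_string()] };
    // Construction and parsing can now be chained in one expression,
    // mirroring `new_parser_from_source_str(sess, name, source).parse_expr()`.
    println!("{}", new_parser(&sess, "fn main() {}".to_string()).parse_len());
}
```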
- let cfg = Vec::new(); let srdr = lexer::StringReader::new(&sess.span_diagnostic, filemap); - let mut p1 = Parser::new(sess, cfg, Box::new(srdr)); + let mut p1 = Parser::new(sess, Box::new(srdr)); panictry!(p1.parse_all_token_trees()) } -/// Given tts and cfg, produce a parser -pub fn tts_to_parser<'a>(sess: &'a ParseSess, - tts: Vec, - cfg: ast::CrateConfig) -> Parser<'a> { - let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts); - let mut p = Parser::new(sess, cfg, Box::new(trdr)); +/// Given tts and the ParseSess, produce a parser +pub fn tts_to_parser<'a>(sess: &'a ParseSess, tts: Vec) -> Parser<'a> { + let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts); + let mut p = Parser::new(sess, Box::new(trdr)); p.check_unknown_macro_variable(); p } @@ -286,52 +227,37 @@ pub fn tts_to_parser<'a>(sess: &'a ParseSess, pub fn char_lit(lit: &str) -> (char, isize) { use std::char; - let mut chars = lit.chars(); - match (chars.next(), chars.next()) { - (Some(c), None) if c != '\\' => return (c, 1), - (Some('\\'), Some(c)) => match c { - '"' => return ('"', 2), - 'n' => return ('\n', 2), - 'r' => return ('\r', 2), - 't' => return ('\t', 2), - '\\' => return ('\\', 2), - '\'' => return ('\'', 2), - '0' => return ('\0', 2), - _ => {} - }, - _ => panic!("lexer accepted invalid char escape `{}`", lit) - }; - - fn esc(len: usize, lit: &str) -> Option<(char, isize)> { - u32::from_str_radix(&lit[2..len], 16).ok() - .and_then(char::from_u32) - .map(|x| (x, len as isize)) + // Handle non-escaped chars first. + if lit.as_bytes()[0] != b'\\' { + // If the first byte isn't '\\' it might part of a multi-byte char, so + // get the char with chars(). + let c = lit.chars().next().unwrap(); + return (c, 1); } - let unicode_escape = || -> Option<(char, isize)> { - if lit.as_bytes()[2] == b'{' { - let idx = lit.find('}').unwrap_or_else(|| { - panic!("lexer should have rejected a bad character escape {}", lit) - }); - - let subslice = &lit[3..idx]; - u32::from_str_radix(subslice, 16).ok() - .and_then(char::from_u32) - .map(|x| (x, subslice.chars().count() as isize + 4)) - } else { - esc(6, lit) + // Handle escaped chars. + match lit.as_bytes()[1] as char { + '"' => ('"', 2), + 'n' => ('\n', 2), + 'r' => ('\r', 2), + 't' => ('\t', 2), + '\\' => ('\\', 2), + '\'' => ('\'', 2), + '0' => ('\0', 2), + 'x' => { + let v = u32::from_str_radix(&lit[2..4], 16).unwrap(); + let c = char::from_u32(v).unwrap(); + (c, 4) } - }; - - // Unicode escapes - return match lit.as_bytes()[1] as char { - 'x' | 'X' => esc(4, lit), - 'u' => unicode_escape(), - 'U' => esc(10, lit), - _ => None, - }.unwrap_or_else(|| { - panic!("lexer should have rejected a bad character escape {}", lit) - }) + 'u' => { + assert!(lit.as_bytes()[2] == b'{'); + let idx = lit.find('}').unwrap(); + let v = u32::from_str_radix(&lit[3..idx], 16).unwrap(); + let c = char::from_u32(v).unwrap(); + (c, (idx + 1) as isize) + } + _ => panic!("lexer should have rejected a bad character escape {}", lit) + } } /// Parse a string representing a string literal into its final form. 
Does @@ -698,12 +624,12 @@ mod tests { node: ast::ExprKind::Path(None, ast::Path { span: sp(0, 1), global: false, - segments: vec!( + segments: vec![ ast::PathSegment { identifier: str_to_ident("a"), parameters: ast::PathParameters::none(), } - ), + ], }), span: sp(0, 1), attrs: ThinVec::new(), @@ -717,7 +643,7 @@ mod tests { node: ast::ExprKind::Path(None, ast::Path { span: sp(0, 6), global: true, - segments: vec!( + segments: vec![ ast::PathSegment { identifier: str_to_ident("a"), parameters: ast::PathParameters::none(), @@ -726,7 +652,7 @@ mod tests { identifier: str_to_ident("b"), parameters: ast::PathParameters::none(), } - ) + ] }), span: sp(0, 6), attrs: ThinVec::new(), @@ -837,12 +763,12 @@ mod tests { node:ast::ExprKind::Path(None, ast::Path{ span: sp(7, 8), global: false, - segments: vec!( + segments: vec![ ast::PathSegment { identifier: str_to_ident("d"), parameters: ast::PathParameters::none(), } - ), + ], }), span:sp(7,8), attrs: ThinVec::new(), @@ -860,12 +786,12 @@ mod tests { node: ast::ExprKind::Path(None, ast::Path { span:sp(0,1), global:false, - segments: vec!( + segments: vec![ ast::PathSegment { identifier: str_to_ident("b"), parameters: ast::PathParameters::none(), } - ), + ], }), span: sp(0,1), attrs: ThinVec::new()})), @@ -902,18 +828,18 @@ mod tests { attrs:Vec::new(), id: ast::DUMMY_NODE_ID, node: ast::ItemKind::Fn(P(ast::FnDecl { - inputs: vec!(ast::Arg{ + inputs: vec![ast::Arg{ ty: P(ast::Ty{id: ast::DUMMY_NODE_ID, node: ast::TyKind::Path(None, ast::Path{ span:sp(10,13), global:false, - segments: vec!( + segments: vec![ ast::PathSegment { identifier: str_to_ident("i32"), parameters: ast::PathParameters::none(), } - ), + ], }), span:sp(10,13) }), @@ -929,7 +855,7 @@ mod tests { span: sp(6,7) }), id: ast::DUMMY_NODE_ID - }), + }], output: ast::FunctionRetTy::Default(sp(15, 15)), variadic: false }), @@ -949,14 +875,14 @@ mod tests { span: syntax_pos::DUMMY_SP, }, P(ast::Block { - stmts: vec!(ast::Stmt { + stmts: vec![ast::Stmt { node: ast::StmtKind::Semi(P(ast::Expr{ id: ast::DUMMY_NODE_ID, node: ast::ExprKind::Path(None, ast::Path{ span:sp(17,18), global:false, - segments: vec!( + segments: vec![ ast::PathSegment { identifier: str_to_ident( @@ -964,12 +890,12 @@ mod tests { parameters: ast::PathParameters::none(), } - ), + ], }), span: sp(17,18), attrs: ThinVec::new()})), id: ast::DUMMY_NODE_ID, - span: sp(17,19)}), + span: sp(17,19)}], id: ast::DUMMY_NODE_ID, rules: ast::BlockCheckMode::Default, // no idea span: sp(15,21), @@ -1069,13 +995,13 @@ mod tests { let name = "".to_string(); let source = "/// doc comment\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess) + let item = parse_item_from_source_str(name.clone(), source, &sess) .unwrap().unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); assert_eq!(&doc[..], "/// doc comment"); let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name.clone(), source, Vec::new(), &sess) + let item = parse_item_from_source_str(name.clone(), source, &sess) .unwrap().unwrap(); let docs = item.attrs.iter().filter(|a| &*a.name() == "doc") .map(|a| a.value_str().unwrap().to_string()).collect::>(); @@ -1083,7 +1009,7 @@ mod tests { assert_eq!(&docs[..], b); let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string(); - let item = parse_item_from_source_str(name, source, Vec::new(), &sess).unwrap().unwrap(); + let item = parse_item_from_source_str(name, source, 
&sess).unwrap().unwrap(); let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap(); assert_eq!(&doc[..], "/** doc comment\n * with CRLF */"); } @@ -1092,7 +1018,7 @@ mod tests { fn ttdelim_span() { let sess = ParseSess::new(); let expr = parse::parse_expr_from_source_str("foo".to_string(), - "foo!( fn main() { body } )".to_string(), vec![], &sess).unwrap(); + "foo!( fn main() { body } )".to_string(), &sess).unwrap(); let tts = match expr.node { ast::ExprKind::Mac(ref mac) => mac.node.tts.clone(), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index d0936fd981b50..b80aa667be6a1 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -15,7 +15,7 @@ use ast::Unsafety; use ast::{Mod, Arg, Arm, Attribute, BindingMode, TraitItemKind}; use ast::Block; use ast::{BlockCheckMode, CaptureBy}; -use ast::{Constness, Crate, CrateConfig}; +use ast::{Constness, Crate}; use ast::Defaultness; use ast::EnumDef; use ast::{Expr, ExprKind, RangeLimits}; @@ -48,8 +48,7 @@ use parse::classify; use parse::common::SeqSep; use parse::lexer::{Reader, TokenAndSpan}; use parse::obsolete::ObsoleteSyntax; -use parse::token::{self, intern, MatchNt, SubstNt, SpecialVarNt, InternedString}; -use parse::token::{keywords, SpecialMacroVar}; +use parse::token::{self, intern, keywords, MatchNt, SubstNt, InternedString}; use parse::{new_sub_parser_from_file, ParseSess}; use util::parser::{AssocOp, Fixity}; use print::pprust; @@ -237,6 +236,31 @@ fn maybe_append(mut lhs: Vec, rhs: Option>) lhs } +#[derive(PartialEq)] +enum PrevTokenKind { + DocComment, + Comma, + Interpolated, + Eof, + Other, +} + +// Simple circular buffer used for keeping few next tokens. +#[derive(Default)] +struct LookaheadBuffer { + buffer: [TokenAndSpan; LOOKAHEAD_BUFFER_CAPACITY], + start: usize, + end: usize, +} + +const LOOKAHEAD_BUFFER_CAPACITY: usize = 8; + +impl LookaheadBuffer { + fn len(&self) -> usize { + (LOOKAHEAD_BUFFER_CAPACITY + self.end - self.start) % LOOKAHEAD_BUFFER_CAPACITY + } +} + /* ident is handled by common.rs */ pub struct Parser<'a> { @@ -245,16 +269,11 @@ pub struct Parser<'a> { pub token: token::Token, /// the span of the current token: pub span: Span, - /// the span of the prior token: - pub last_span: Span, - pub cfg: CrateConfig, - /// the previous token or None (only stashed sometimes). 
- pub last_token: Option>, - last_token_interpolated: bool, - last_token_eof: bool, - pub buffer: [TokenAndSpan; 4], - pub buffer_start: isize, - pub buffer_end: isize, + /// the span of the previous token: + pub prev_span: Span, + /// the previous token kind + prev_token_kind: PrevTokenKind, + lookahead_buffer: LookaheadBuffer, pub tokens_consumed: usize, pub restrictions: Restrictions, pub quote_depth: usize, // not (yet) related to the quasiquoter @@ -338,11 +357,7 @@ impl From> for LhsExpr { } impl<'a> Parser<'a> { - pub fn new(sess: &'a ParseSess, - cfg: ast::CrateConfig, - mut rdr: Box) - -> Parser<'a> - { + pub fn new(sess: &'a ParseSess, mut rdr: Box) -> Self { let tok0 = rdr.real_token(); let span = tok0.sp; let mut directory = match span { @@ -350,29 +365,15 @@ impl<'a> Parser<'a> { _ => PathBuf::from(sess.codemap().span_to_filename(span)), }; directory.pop(); - let placeholder = TokenAndSpan { - tok: token::Underscore, - sp: span, - }; Parser { reader: rdr, sess: sess, - cfg: cfg, token: tok0.tok, span: span, - last_span: span, - last_token: None, - last_token_interpolated: false, - last_token_eof: false, - buffer: [ - placeholder.clone(), - placeholder.clone(), - placeholder.clone(), - placeholder.clone(), - ], - buffer_start: 0, - buffer_end: 0, + prev_span: span, + prev_token_kind: PrevTokenKind::Other, + lookahead_buffer: Default::default(), tokens_consumed: 0, restrictions: Restrictions::empty(), quote_depth: 0, @@ -409,8 +410,7 @@ impl<'a> Parser<'a> { pub fn unexpected_last(&self, t: &token::Token) -> PResult<'a, T> { let token_str = Parser::token_to_string(t); - let last_span = self.last_span; - Err(self.span_fatal(last_span, &format!("unexpected token: `{}`", token_str))) + Err(self.span_fatal(self.prev_span, &format!("unexpected token: `{}`", token_str))) } pub fn unexpected(&mut self) -> PResult<'a, T> { @@ -500,8 +500,8 @@ impl<'a> Parser<'a> { expr: PResult<'a, P>) -> PResult<'a, (Span, P)> { expr.map(|e| { - if self.last_token_interpolated { - (self.last_span, e) + if self.prev_token_kind == PrevTokenKind::Interpolated { + (self.prev_span, e) } else { (e.span, e) } @@ -520,30 +520,23 @@ impl<'a> Parser<'a> { self.bug("ident interpolation not converted to real token"); } _ => { - let last_token = self.last_token.clone().map(|t| *t); - Err(match last_token { - Some(token::DocComment(_)) => self.span_fatal_help(self.last_span, + Err(if self.prev_token_kind == PrevTokenKind::DocComment { + self.span_fatal_help(self.prev_span, "found a documentation comment that doesn't document anything", "doc comments must come before what they document, maybe a comment was \ - intended with `//`?"), - _ => { + intended with `//`?") + } else { let mut err = self.fatal(&format!("expected identifier, found `{}`", self.this_token_to_string())); if self.token == token::Underscore { err.note("`_` is a wildcard pattern, not an identifier"); } err - } - }) + }) } } } - fn parse_ident_into_path(&mut self) -> PResult<'a, ast::Path> { - let ident = self.parse_ident()?; - Ok(ast::Path::from_ident(self.last_span, ident)) - } - /// Check if the next token is `tok`, and return `true` if so. /// /// This method will automatically add `tok` to `expected_tokens` if `tok` is not @@ -808,10 +801,12 @@ impl<'a> Parser<'a> { /// Eat and discard tokens until one of `kets` is encountered. Respects token trees, /// passes through any errors encountered. Used for error recovery. 
pub fn eat_to_tokens(&mut self, kets: &[&token::Token]) { + let handler = self.diagnostic(); + self.parse_seq_to_before_tokens(kets, SeqSep::none(), |p| p.parse_token_tree(), - |mut e| e.cancel()); + |mut e| handler.cancel(&mut e)); } /// Parse a sequence, including the closing delimiter. The function @@ -853,7 +848,7 @@ impl<'a> Parser<'a> { Fe: FnMut(DiagnosticBuilder) { let mut first: bool = true; - let mut v = vec!(); + let mut v = vec![]; while !kets.contains(&&self.token) { match sep.sep { Some(ref t) => { @@ -923,39 +918,29 @@ impl<'a> Parser<'a> { /// Advance the parser by one token pub fn bump(&mut self) { - if self.last_token_eof { + if self.prev_token_kind == PrevTokenKind::Eof { // Bumping after EOF is a bad sign, usually an infinite loop. self.bug("attempted to bump the parser past EOF (may be stuck in a loop)"); } - if self.token == token::Eof { - self.last_token_eof = true; - } + self.prev_span = self.span; - self.last_span = self.span; - // Stash token for error recovery (sometimes; clone is not necessarily cheap). - self.last_token = if self.token.is_ident() || - self.token.is_path() || - self.token.is_doc_comment() || - self.token == token::Comma { - Some(Box::new(self.token.clone())) - } else { - None + // Record last token kind for possible error recovery. + self.prev_token_kind = match self.token { + token::DocComment(..) => PrevTokenKind::DocComment, + token::Comma => PrevTokenKind::Comma, + token::Interpolated(..) => PrevTokenKind::Interpolated, + token::Eof => PrevTokenKind::Eof, + _ => PrevTokenKind::Other, }; - self.last_token_interpolated = self.token.is_interpolated(); - let next = if self.buffer_start == self.buffer_end { + + let next = if self.lookahead_buffer.start == self.lookahead_buffer.end { self.reader.real_token() } else { // Avoid token copies with `replace`. - let buffer_start = self.buffer_start as usize; - let next_index = (buffer_start + 1) & 3; - self.buffer_start = next_index as isize; - - let placeholder = TokenAndSpan { - tok: token::Underscore, - sp: self.span, - }; - mem::replace(&mut self.buffer[buffer_start], placeholder) + let old_start = self.lookahead_buffer.start; + self.lookahead_buffer.start = (old_start + 1) % LOOKAHEAD_BUFFER_CAPACITY; + mem::replace(&mut self.lookahead_buffer.buffer[old_start], Default::default()) }; self.span = next.sp; self.token = next.tok; @@ -978,32 +963,32 @@ impl<'a> Parser<'a> { next: token::Token, lo: BytePos, hi: BytePos) { - self.last_span = mk_sp(self.span.lo, lo); - // It would be incorrect to just stash current token, but fortunately - // for tokens currently using `bump_with`, last_token will be of no - // use anyway. - self.last_token = None; - self.last_token_interpolated = false; + self.prev_span = mk_sp(self.span.lo, lo); + // It would be incorrect to record the kind of the current token, but + // fortunately for tokens currently using `bump_with`, the + // prev_token_kind will be of no use anyway. 
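The old fixed array of four placeholder tokens becomes a `LookaheadBuffer`, a small ring buffer of capacity 8 whose length is computed modulo the capacity and whose tokens are taken out with `mem::replace` instead of being cloned. A self-contained sketch of the same circular-buffer arithmetic, using `String` in place of `TokenAndSpan` (the names here are illustrative, not the parser's):

```rust
use std::mem;

const CAPACITY: usize = 8;

// One slot is always left unused so that `start == end` unambiguously means "empty".
#[derive(Default)]
struct RingBuffer {
    buffer: [String; CAPACITY],
    start: usize,
    end: usize,
}

impl RingBuffer {
    fn len(&self) -> usize {
        // Same formula as LookaheadBuffer::len in the hunk above.
        (CAPACITY + self.end - self.start) % CAPACITY
    }

    fn push_back(&mut self, item: String) {
        assert!(self.len() < CAPACITY - 1, "buffer full");
        self.buffer[self.end] = item;
        self.end = (self.end + 1) % CAPACITY;
    }

    // Take the front element without cloning, leaving a default value in its slot.
    fn take_front(&mut self) -> String {
        assert!(self.len() > 0, "buffer empty");
        let old_start = self.start;
        self.start = (old_start + 1) % CAPACITY;
        mem::replace(&mut self.buffer[old_start], Default::default())
    }
}

fn main() {
    let mut buf = RingBuffer::default();
    buf.push_back("fn".to_string());
    buf.push_back("main".to_string());
    assert_eq!(buf.len(), 2);
    assert_eq!(buf.take_front(), "fn");
    assert_eq!(buf.len(), 1);
}
```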
+ self.prev_token_kind = PrevTokenKind::Other; self.span = mk_sp(lo, hi); self.token = next; self.expected_tokens.clear(); } - pub fn buffer_length(&mut self) -> isize { - if self.buffer_start <= self.buffer_end { - return self.buffer_end - self.buffer_start; - } - return (4 - self.buffer_start) + self.buffer_end; - } - pub fn look_ahead(&mut self, distance: usize, f: F) -> R where + pub fn look_ahead(&mut self, dist: usize, f: F) -> R where F: FnOnce(&token::Token) -> R, { - let dist = distance as isize; - while self.buffer_length() < dist { - self.buffer[self.buffer_end as usize] = self.reader.real_token(); - self.buffer_end = (self.buffer_end + 1) & 3; + if dist == 0 { + f(&self.token) + } else if dist < LOOKAHEAD_BUFFER_CAPACITY { + while self.lookahead_buffer.len() < dist { + self.lookahead_buffer.buffer[self.lookahead_buffer.end] = self.reader.real_token(); + self.lookahead_buffer.end = + (self.lookahead_buffer.end + 1) % LOOKAHEAD_BUFFER_CAPACITY; + } + let index = (self.lookahead_buffer.start + dist - 1) % LOOKAHEAD_BUFFER_CAPACITY; + f(&self.lookahead_buffer.buffer[index].tok) + } else { + self.bug("lookahead distance is too large"); } - f(&self.buffer[((self.buffer_start + dist - 1) & 3) as usize].tok) } pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> { self.sess.span_diagnostic.struct_span_fatal(self.span, m) @@ -1040,6 +1025,10 @@ impl<'a> Parser<'a> { self.sess.span_diagnostic.abort_if_errors(); } + fn cancel(&self, err: &mut DiagnosticBuilder) { + self.sess.span_diagnostic.cancel(err) + } + pub fn diagnostic(&self) -> &'a errors::Handler { &self.sess.span_diagnostic } @@ -1115,14 +1104,12 @@ impl<'a> Parser<'a> { let bounds = self.parse_ty_param_bounds(BoundParsingMode::Modified)?; if !bounds.iter().any(|b| if let TraitTyParamBound(..) = *b { true } else { false }) { - let last_span = self.last_span; - self.span_err(last_span, "at least one trait must be specified"); + self.span_err(self.prev_span, "at least one trait must be specified"); } Ok(ast::TyKind::ImplTrait(bounds)) } - pub fn parse_ty_path(&mut self) -> PResult<'a, TyKind> { Ok(TyKind::Path(None, self.parse_path(PathStyle::Type)?)) } @@ -1180,7 +1167,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; let (name, node) = if self.eat_keyword(keywords::Type) { - let TyParam {ident, bounds, default, ..} = self.parse_ty_param()?; + let TyParam {ident, bounds, default, ..} = self.parse_ty_param(vec![])?; self.expect(&token::Semi)?; (ident, TraitItemKind::Type(bounds, default)) } else if self.is_const_item() { @@ -1198,100 +1185,93 @@ impl<'a> Parser<'a> { None }; (ident, TraitItemKind::Const(ty, default)) - } else if !self.token.is_any_keyword() - && self.look_ahead(1, |t| *t == token::Not) - && (self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren)) - || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) { - // trait item macro. - // code copied from parse_macro_use_or_failure... abstraction! - let lo = self.span.lo; - let pth = self.parse_ident_into_path()?; - self.expect(&token::Not)?; + } else if self.token.is_path_start() { + // trait item macro. + // code copied from parse_macro_use_or_failure... abstraction! 
+ let lo = self.span.lo; + let pth = self.parse_path(PathStyle::Mod)?; + self.expect(&token::Not)?; - // eat a matched-delimiter token tree: - let delim = self.expect_open_delim()?; - let tts = self.parse_seq_to_end(&token::CloseDelim(delim), - SeqSep::none(), - |pp| pp.parse_token_tree())?; - let m_ = Mac_ { path: pth, tts: tts }; - let m: ast::Mac = codemap::Spanned { node: m_, - span: mk_sp(lo, - self.last_span.hi) }; - if delim != token::Brace { - self.expect(&token::Semi)? - } - (keywords::Invalid.ident(), ast::TraitItemKind::Macro(m)) - } else { - let (constness, unsafety, abi) = match self.parse_fn_front_matter() { - Ok(cua) => cua, - Err(e) => { - loop { - match self.token { - token::Eof => break, - token::CloseDelim(token::Brace) | - token::Semi => { - self.bump(); - break; - } - token::OpenDelim(token::Brace) => { - self.parse_token_tree()?; - break; - } - _ => self.bump() + // eat a matched-delimiter token tree: + let delim = self.expect_open_delim()?; + let tts = self.parse_seq_to_end(&token::CloseDelim(delim), + SeqSep::none(), + |pp| pp.parse_token_tree())?; + if delim != token::Brace { + self.expect(&token::Semi)? + } + + let mac = spanned(lo, self.prev_span.hi, Mac_ { path: pth, tts: tts }); + (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac)) + } else { + let (constness, unsafety, abi) = match self.parse_fn_front_matter() { + Ok(cua) => cua, + Err(e) => { + loop { + match self.token { + token::Eof => break, + token::CloseDelim(token::Brace) | + token::Semi => { + self.bump(); + break; + } + token::OpenDelim(token::Brace) => { + self.parse_token_tree()?; + break; } + _ => self.bump(), } - - return Err(e); } - }; - let ident = self.parse_ident()?; - let mut generics = self.parse_generics()?; + return Err(e); + } + }; - let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>|{ - // This is somewhat dubious; We don't want to allow - // argument names to be left off if there is a - // definition... - p.parse_arg_general(false) - })?; + let ident = self.parse_ident()?; + let mut generics = self.parse_generics()?; - generics.where_clause = self.parse_where_clause()?; - let sig = ast::MethodSig { - unsafety: unsafety, - constness: constness, - decl: d, - generics: generics, - abi: abi, - }; + let d = self.parse_fn_decl_with_self(|p: &mut Parser<'a>|{ + // This is somewhat dubious; We don't want to allow + // argument names to be left off if there is a + // definition... 
+ p.parse_arg_general(false) + })?; - let body = match self.token { - token::Semi => { - self.bump(); - debug!("parse_trait_methods(): parsing required method"); - None - } - token::OpenDelim(token::Brace) => { - debug!("parse_trait_methods(): parsing provided method"); - let (inner_attrs, body) = - self.parse_inner_attrs_and_block()?; - attrs.extend(inner_attrs.iter().cloned()); - Some(body) - } + generics.where_clause = self.parse_where_clause()?; + let sig = ast::MethodSig { + unsafety: unsafety, + constness: constness, + decl: d, + generics: generics, + abi: abi, + }; - _ => { - let token_str = self.this_token_to_string(); - return Err(self.fatal(&format!("expected `;` or `{{`, found `{}`", - token_str)[..])) - } - }; - (ident, ast::TraitItemKind::Method(sig, body)) + let body = match self.token { + token::Semi => { + self.bump(); + debug!("parse_trait_methods(): parsing required method"); + None + } + token::OpenDelim(token::Brace) => { + debug!("parse_trait_methods(): parsing provided method"); + let (inner_attrs, body) = self.parse_inner_attrs_and_block()?; + attrs.extend(inner_attrs.iter().cloned()); + Some(body) + } + _ => { + let token_str = self.this_token_to_string(); + return Err(self.fatal(&format!("expected `;` or `{{`, found `{}`", token_str))); + } }; + (ident, ast::TraitItemKind::Method(sig, body)) + }; + Ok(TraitItem { id: ast::DUMMY_NODE_ID, ident: name, attrs: attrs, node: node, - span: mk_sp(lo, self.last_span.hi), + span: mk_sp(lo, self.prev_span.hi), }) } @@ -1338,13 +1318,13 @@ impl<'a> Parser<'a> { // In type grammar, `+` is treated like a binary operator, // and hence both L and R side are required. if bounds.is_empty() { - let last_span = self.last_span; - self.span_err(last_span, + let prev_span = self.prev_span; + self.span_err(prev_span, "at least one type parameter bound \ must be specified"); } - let sp = mk_sp(lo, self.last_span.hi); + let sp = mk_sp(lo, self.prev_span.hi); let sum = ast::TyKind::ObjectSum(lhs, bounds); Ok(P(Ty {id: ast::DUMMY_NODE_ID, node: sum, span: sp})) } @@ -1394,8 +1374,8 @@ impl<'a> Parser<'a> { // Parse the `; e` in `[ i32; e ]` // where `e` is a const expression let t = match self.maybe_parse_fixed_length_of_vec()? 
{ - None => TyKind::Vec(t), - Some(suffix) => TyKind::FixedLengthVec(t, suffix) + None => TyKind::Slice(t), + Some(suffix) => TyKind::Array(t, suffix) }; self.expect(&token::CloseDelim(token::Bracket))?; t @@ -1426,9 +1406,8 @@ impl<'a> Parser<'a> { TyKind::Path(Some(qself), path) } else if self.token.is_path_start() { let path = self.parse_path(PathStyle::Type)?; - if self.check(&token::Not) { + if self.eat(&token::Not) { // MACRO INVOCATION - self.bump(); let delim = self.expect_open_delim()?; let tts = self.parse_seq_to_end(&token::CloseDelim(delim), SeqSep::none(), @@ -1447,7 +1426,7 @@ impl<'a> Parser<'a> { return Err(self.fatal(&msg)); }; - let sp = mk_sp(lo, self.last_span.hi); + let sp = mk_sp(lo, self.prev_span.hi); Ok(P(Ty {id: ast::DUMMY_NODE_ID, node: t, span: sp})) } @@ -1465,7 +1444,7 @@ impl<'a> Parser<'a> { } else if self.eat_keyword(keywords::Const) { Mutability::Immutable } else { - let span = self.last_span; + let span = self.prev_span; self.span_err(span, "expected mut or const in raw pointer type (use \ `*mut T` or `*const T` as appropriate)"); @@ -1508,7 +1487,7 @@ impl<'a> Parser<'a> { pat } else { debug!("parse_arg_general ident_to_pat"); - let sp = self.last_span; + let sp = self.prev_span; let spanned = Spanned { span: sp, node: keywords::Invalid.ident() }; P(Pat { id: ast::DUMMY_NODE_ID, @@ -1633,7 +1612,7 @@ impl<'a> Parser<'a> { let lit = self.parse_lit_token()?; lit }; - Ok(codemap::Spanned { node: lit, span: mk_sp(lo, self.last_span.hi) }) + Ok(codemap::Spanned { node: lit, span: mk_sp(lo, self.prev_span.hi) }) } /// matches '-' lit | lit @@ -1642,11 +1621,11 @@ impl<'a> Parser<'a> { let minus_present = self.eat(&token::BinOp(token::Minus)); let lo = self.span.lo; let literal = P(self.parse_lit()?); - let hi = self.last_span.hi; + let hi = self.prev_span.hi; let expr = self.mk_expr(lo, hi, ExprKind::Lit(literal), ThinVec::new()); if minus_present { - let minus_hi = self.last_span.hi; + let minus_hi = self.prev_span.hi; let unary = self.mk_unary(UnOp::Neg, expr); Ok(self.mk_expr(minus_lo, minus_hi, unary, ThinVec::new())) } else { @@ -1681,7 +1660,7 @@ impl<'a> Parser<'a> { /// `::F::a::` pub fn parse_qualified_path(&mut self, mode: PathStyle) -> PResult<'a, (QSelf, ast::Path)> { - let span = self.last_span; + let span = self.prev_span; let self_type = self.parse_ty_sum()?; let mut path = if self.eat_keyword(keywords::As) { self.parse_path(PathStyle::Type)? @@ -1714,7 +1693,7 @@ impl<'a> Parser<'a> { }; path.segments.extend(segments); - path.span.hi = self.last_span.hi; + path.span.hi = self.prev_span.hi; Ok((qself, path)) } @@ -1752,7 +1731,7 @@ impl<'a> Parser<'a> { }; // Assemble the span. - let span = mk_sp(lo, self.last_span.hi); + let span = mk_sp(lo, self.prev_span.hi); // Assemble the result. Ok(ast::Path { @@ -1772,6 +1751,17 @@ impl<'a> Parser<'a> { // First, parse an identifier. let identifier = self.parse_path_segment_ident()?; + if self.check(&token::ModSep) && self.look_ahead(1, |t| *t == token::Lt) { + self.bump(); + let prev_span = self.prev_span; + + let mut err = self.diagnostic().struct_span_err(prev_span, + "unexpected token: `::`"); + err.help( + "use `<...>` instead of `::<...>` if you meant to specify type arguments"); + err.emit(); + } + // Parse types, optionally. 
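The hunk above renames `TyKind::Vec` to `TyKind::Slice` and `TyKind::FixedLengthVec` to `TyKind::Array`, matching the surface types the two nodes represent: `maybe_parse_fixed_length_of_vec` returns `None` for `[T]` and `Some(length)` for `[T; N]`. For reference:

```rust
fn sum(xs: &[i32]) -> i32 {
    // `[i32]` with no length is a slice type (the node now called `TyKind::Slice`).
    xs.iter().sum()
}

fn main() {
    // `[i32; 3]` with a `; length` is an array type (now `TyKind::Array`).
    let fixed: [i32; 3] = [1, 2, 3];
    // An array coerces to a slice at the call site, so both forms meet here.
    println!("{}", sum(&fixed));
}
```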
let parameters = if self.eat_lt() { let (lifetimes, types, bindings) = self.parse_generic_values_after_lt()?; @@ -1782,7 +1772,7 @@ impl<'a> Parser<'a> { bindings: P::from_vec(bindings), }) } else if self.eat(&token::OpenDelim(token::Paren)) { - let lo = self.last_span.lo; + let lo = self.prev_span.lo; let inputs = self.parse_seq_to_end( &token::CloseDelim(token::Paren), @@ -1795,7 +1785,7 @@ impl<'a> Parser<'a> { None }; - let hi = self.last_span.hi; + let hi = self.prev_span.hi; ast::PathParameters::Parenthesized(ast::ParenthesizedParameterData { span: mk_sp(lo, hi), @@ -1919,10 +1909,22 @@ impl<'a> Parser<'a> { /// Parses `lifetime_defs = [ lifetime_defs { ',' lifetime_defs } ]` where `lifetime_def = /// lifetime [':' lifetimes]` - pub fn parse_lifetime_defs(&mut self) -> PResult<'a, Vec> { - + /// + /// If `followed_by_ty_params` is None, then we are in a context + /// where only lifetime parameters are allowed, and thus we should + /// error if we encounter attributes after the bound lifetimes. + /// + /// If `followed_by_ty_params` is Some(r), then there may be type + /// parameter bindings after the lifetimes, so we should pass + /// along the parsed attributes to be attached to the first such + /// type parmeter. + pub fn parse_lifetime_defs(&mut self, + followed_by_ty_params: Option<&mut Vec>) + -> PResult<'a, Vec> + { let mut res = Vec::new(); loop { + let attrs = self.parse_outer_attributes()?; match self.token { token::Lifetime(_) => { let lifetime = self.parse_lifetime()?; @@ -1932,11 +1934,20 @@ impl<'a> Parser<'a> { } else { Vec::new() }; - res.push(ast::LifetimeDef { lifetime: lifetime, + res.push(ast::LifetimeDef { attrs: attrs.into(), + lifetime: lifetime, bounds: bounds }); } _ => { + if let Some(recv) = followed_by_ty_params { + assert!(recv.is_empty()); + *recv = attrs; + } else { + let msg = "trailing attribute after lifetime parameters"; + return Err(self.fatal(msg)); + } + debug!("parse_lifetime_defs ret {:?}", res); return Ok(res); } } @@ -2001,17 +2012,30 @@ impl<'a> Parser<'a> { } } - /// Parse ident COLON expr + /// Parse ident (COLON expr)? pub fn parse_field(&mut self) -> PResult<'a, Field> { let lo = self.span.lo; - let i = self.parse_field_name()?; - let hi = self.last_span.hi; - self.expect(&token::Colon)?; - let e = self.parse_expr()?; + let hi; + + // Check if a colon exists one ahead. This means we're parsing a fieldname. + let (fieldname, expr, is_shorthand) = if self.look_ahead(1, |t| t == &token::Colon) { + let fieldname = self.parse_field_name()?; + self.bump(); + hi = self.prev_span.hi; + (fieldname, self.parse_expr()?, false) + } else { + let fieldname = self.parse_ident()?; + hi = self.prev_span.hi; + + // Mimic `x: x` for the `x` field shorthand. + let path = ast::Path::from_ident(mk_sp(lo, hi), fieldname); + (fieldname, self.mk_expr(lo, hi, ExprKind::Path(None, path), ThinVec::new()), true) + }; Ok(ast::Field { - ident: spanned(lo, hi, i), - span: mk_sp(lo, e.span.hi), - expr: e, + ident: spanned(lo, hi, fieldname), + span: mk_sp(lo, expr.span.hi), + expr: expr, + is_shorthand: is_shorthand, }) } @@ -2160,7 +2184,7 @@ impl<'a> Parser<'a> { } self.bump(); - hi = self.last_span.hi; + hi = self.prev_span.hi; return if es.len() == 1 && !trailing_comma { Ok(self.mk_expr(lo, hi, ExprKind::Paren(es.into_iter().nth(0).unwrap()), attrs)) } else { @@ -2200,16 +2224,16 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| Ok(p.parse_expr()?) 
)?; - let mut exprs = vec!(first_expr); + let mut exprs = vec![first_expr]; exprs.extend(remaining_exprs); ex = ExprKind::Vec(exprs); } else { // Vector with one element. self.expect(&token::CloseDelim(token::Bracket))?; - ex = ExprKind::Vec(vec!(first_expr)); + ex = ExprKind::Vec(vec![first_expr]); } } - hi = self.last_span.hi; + hi = self.prev_span.hi; } _ => { if self.eat_lt() { @@ -2219,18 +2243,18 @@ impl<'a> Parser<'a> { return Ok(self.mk_expr(lo, hi, ExprKind::Path(Some(qself), path), attrs)); } if self.eat_keyword(keywords::Move) { - let lo = self.last_span.lo; + let lo = self.prev_span.lo; return self.parse_lambda_expr(lo, CaptureBy::Value, attrs); } if self.eat_keyword(keywords::If) { return self.parse_if_expr(attrs); } if self.eat_keyword(keywords::For) { - let lo = self.last_span.lo; + let lo = self.prev_span.lo; return self.parse_for_expr(None, lo, attrs); } if self.eat_keyword(keywords::While) { - let lo = self.last_span.lo; + let lo = self.prev_span.lo; return self.parse_while_expr(None, lo, attrs); } if self.token.is_lifetime() { @@ -2251,7 +2275,7 @@ impl<'a> Parser<'a> { return Err(self.fatal("expected `while`, `for`, or `loop` after a label")) } if self.eat_keyword(keywords::Loop) { - let lo = self.last_span.lo; + let lo = self.prev_span.lo; return self.parse_loop_expr(None, lo, attrs); } if self.eat_keyword(keywords::Continue) { @@ -2265,7 +2289,7 @@ impl<'a> Parser<'a> { } else { ExprKind::Continue(None) }; - let hi = self.last_span.hi; + let hi = self.prev_span.hi; return Ok(self.mk_expr(lo, hi, ex, attrs)); } if self.eat_keyword(keywords::Match) { @@ -2295,7 +2319,7 @@ impl<'a> Parser<'a> { } else { ex = ExprKind::Break(None); } - hi = self.last_span.hi; + hi = self.prev_span.hi; } else if self.token.is_keyword(keywords::Let) { // Catch this syntax error here, instead of in `check_strict_keywords`, so // that we can explicitly mention that let is not to be used as an expression @@ -2306,21 +2330,14 @@ impl<'a> Parser<'a> { let pth = self.parse_path(PathStyle::Expr)?; // `!`, as an operator, is prefix, so we know this isn't that - if self.check(&token::Not) { + if self.eat(&token::Not) { // MACRO INVOCATION expression - self.bump(); - let delim = self.expect_open_delim()?; - let tts = self.parse_seq_to_end( - &token::CloseDelim(delim), - SeqSep::none(), - |p| p.parse_token_tree())?; - let hi = self.last_span.hi; - - return Ok(self.mk_mac_expr(lo, - hi, - Mac_ { path: pth, tts: tts }, - attrs)); + let tts = self.parse_seq_to_end(&token::CloseDelim(delim), + SeqSep::none(), + |p| p.parse_token_tree())?; + let hi = self.prev_span.hi; + return Ok(self.mk_mac_expr(lo, hi, Mac_ { path: pth, tts: tts }, attrs)); } if self.check(&token::OpenDelim(token::Brace)) { // This is a struct literal, unless we're prohibited @@ -2329,51 +2346,7 @@ impl<'a> Parser<'a> { Restrictions::RESTRICTION_NO_STRUCT_LITERAL ); if !prohibited { - // It's a struct literal. 
- self.bump(); - let mut fields = Vec::new(); - let mut base = None; - - attrs.extend(self.parse_inner_attributes()?); - - while self.token != token::CloseDelim(token::Brace) { - if self.eat(&token::DotDot) { - match self.parse_expr() { - Ok(e) => { - base = Some(e); - } - Err(mut e) => { - e.emit(); - self.recover_stmt(); - } - } - break; - } - - match self.parse_field() { - Ok(f) => fields.push(f), - Err(mut e) => { - e.emit(); - self.recover_stmt(); - break; - } - } - - match self.expect_one_of(&[token::Comma], - &[token::CloseDelim(token::Brace)]) { - Ok(()) => {} - Err(mut e) => { - e.emit(); - self.recover_stmt(); - break; - } - } - } - - hi = self.span.hi; - self.expect(&token::CloseDelim(token::Brace))?; - ex = ExprKind::Struct(pth, fields, base); - return Ok(self.mk_expr(lo, hi, ex, attrs)); + return self.parse_struct_expr(lo, pth, attrs); } } @@ -2386,7 +2359,7 @@ impl<'a> Parser<'a> { ex = ExprKind::Lit(P(lit)); } Err(mut err) => { - err.cancel(); + self.cancel(&mut err); let msg = format!("expected expression, found {}", self.this_token_descr()); return Err(self.fatal(&msg)); @@ -2399,6 +2372,53 @@ impl<'a> Parser<'a> { return Ok(self.mk_expr(lo, hi, ex, attrs)); } + fn parse_struct_expr(&mut self, lo: BytePos, pth: ast::Path, mut attrs: ThinVec) + -> PResult<'a, P> { + self.bump(); + let mut fields = Vec::new(); + let mut base = None; + + attrs.extend(self.parse_inner_attributes()?); + + while self.token != token::CloseDelim(token::Brace) { + if self.eat(&token::DotDot) { + match self.parse_expr() { + Ok(e) => { + base = Some(e); + } + Err(mut e) => { + e.emit(); + self.recover_stmt(); + } + } + break; + } + + match self.parse_field() { + Ok(f) => fields.push(f), + Err(mut e) => { + e.emit(); + self.recover_stmt(); + break; + } + } + + match self.expect_one_of(&[token::Comma], + &[token::CloseDelim(token::Brace)]) { + Ok(()) => {} + Err(mut e) => { + e.emit(); + self.recover_stmt(); + break; + } + } + } + + let hi = self.span.hi; + self.expect(&token::CloseDelim(token::Brace))?; + return Ok(self.mk_expr(lo, hi, ExprKind::Struct(pth, fields, base), attrs)); + } + fn parse_or_use_outer_attributes(&mut self, already_parsed_attrs: Option>) -> PResult<'a, ThinVec> { @@ -2481,8 +2501,8 @@ impl<'a> Parser<'a> { }; if !bindings.is_empty() { - let last_span = self.last_span; - self.span_err(last_span, "type bindings are only permitted on trait paths"); + let prev_span = self.prev_span; + self.span_err(prev_span, "type bindings are only permitted on trait paths"); } Ok(match self.token { @@ -2494,7 +2514,7 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| Ok(p.parse_expr()?) )?; - let hi = self.last_span.hi; + let hi = self.prev_span.hi; es.insert(0, self_value); let id = spanned(ident_span.lo, ident_span.hi, ident); @@ -2504,8 +2524,8 @@ impl<'a> Parser<'a> { // Field access. _ => { if !tys.is_empty() { - let last_span = self.last_span; - self.span_err(last_span, + let prev_span = self.prev_span; + self.span_err(prev_span, "field expressions may not \ have type parameters"); } @@ -2523,7 +2543,7 @@ impl<'a> Parser<'a> { loop { // expr? 
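The struct-literal parsing factored out into `parse_struct_expr`, together with the `is_shorthand` handling added to `parse_field` (a lone `x` field is parsed as `x: x`), covers literals like the ones below. Field-init shorthand was still feature-gated when this change landed; the sketch uses today's stable syntax:

```rust
#[derive(Debug, Clone)]
struct Config {
    retries: u32,
    verbose: bool,
    name: String,
}

fn main() {
    let defaults = Config { retries: 3, verbose: false, name: "default".to_string() };

    let retries = 5;
    let custom = Config {
        retries,           // shorthand: parsed as `retries: retries`
        ..defaults.clone() // functional-update base, the `..expr` branch above
    };

    println!("{:?}\n{:?}", defaults, custom);
}
```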
while self.eat(&token::Question) { - let hi = self.last_span.hi; + let hi = self.prev_span.hi; e = self.mk_expr(lo, hi, ExprKind::Try(e), ThinVec::new()); } @@ -2531,7 +2551,7 @@ impl<'a> Parser<'a> { if self.eat(&token::Dot) { match self.token { token::Ident(i) => { - let dot_pos = self.last_span.hi; + let dot_pos = self.prev_span.hi; hi = self.span.hi; self.bump(); @@ -2543,7 +2563,7 @@ impl<'a> Parser<'a> { // A tuple index may not have a suffix self.expect_no_suffix(sp, "tuple index", suf); - let dot = self.last_span.hi; + let dot = self.prev_span.hi; hi = self.span.hi; self.bump(); @@ -2555,16 +2575,16 @@ impl<'a> Parser<'a> { e = self.mk_expr(lo, hi, field, ThinVec::new()); } None => { - let last_span = self.last_span; - self.span_err(last_span, "invalid tuple or tuple struct index"); + let prev_span = self.prev_span; + self.span_err(prev_span, "invalid tuple or tuple struct index"); } } } token::Literal(token::Float(n), _suf) => { self.bump(); - let last_span = self.last_span; + let prev_span = self.prev_span; let fstr = n.as_str(); - let mut err = self.diagnostic().struct_span_err(last_span, + let mut err = self.diagnostic().struct_span_err(prev_span, &format!("unexpected token: `{}`", n.as_str())); if fstr.chars().all(|x| "0123456789.".contains(x)) { let float = match fstr.parse::().ok() { @@ -2583,7 +2603,7 @@ impl<'a> Parser<'a> { let actual = self.this_token_to_string(); self.span_err(self.span, &format!("unexpected token: `{}`", actual)); - let dot_pos = self.last_span.hi; + let dot_pos = self.prev_span.hi; e = self.parse_dot_suffix(keywords::Invalid.ident(), mk_sp(dot_pos, dot_pos), e, lo)?; @@ -2601,7 +2621,7 @@ impl<'a> Parser<'a> { SeqSep::trailing_allowed(token::Comma), |p| Ok(p.parse_expr()?) )?; - hi = self.last_span.hi; + hi = self.prev_span.hi; let nd = self.mk_call(e, es); e = self.mk_expr(lo, hi, nd, ThinVec::new()); @@ -2647,8 +2667,12 @@ impl<'a> Parser<'a> { num_captures: name_num }))); } else if self.token.is_keyword(keywords::Crate) { + let ident = match self.token { + token::Ident(id) => ast::Ident { name: token::intern("$crate"), ..id }, + _ => unreachable!(), + }; self.bump(); - return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar))); + return Ok(TokenTree::Token(sp, token::Ident(ident))); } else { sp = mk_sp(sp.lo, self.span.hi); self.parse_ident().unwrap_or_else(|mut e| { @@ -2944,8 +2968,8 @@ impl<'a> Parser<'a> { self.expected_tokens.push(TokenType::Operator); while let Some(op) = AssocOp::from_token(&self.token) { - let lhs_span = if self.last_token_interpolated { - self.last_span + let lhs_span = if self.prev_token_kind == PrevTokenKind::Interpolated { + self.prev_span } else { lhs.span }; @@ -3138,7 +3162,7 @@ impl<'a> Parser<'a> { if self.check_keyword(keywords::Let) { return self.parse_if_let_expr(attrs); } - let lo = self.last_span.lo; + let lo = self.prev_span.lo; let cond = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; let thn = self.parse_block()?; let mut els: Option> = None; @@ -3154,7 +3178,7 @@ impl<'a> Parser<'a> { /// Parse an 'if let' expression ('if' token already eaten) pub fn parse_if_let_expr(&mut self, attrs: ThinVec) -> PResult<'a, P> { - let lo = self.last_span.lo; + let lo = self.prev_span.lo; self.expect_keyword(keywords::Let)?; let pat = self.parse_pat()?; self.expect(&token::Eq)?; @@ -3177,7 +3201,7 @@ impl<'a> Parser<'a> { -> PResult<'a, P> { let decl = self.parse_fn_block_decl()?; - let decl_hi = self.last_span.hi; + let decl_hi = self.prev_span.hi; let body = match decl.output 
{ FunctionRetTy::Default(_) => { // If no explicit return type is given, parse any @@ -3230,7 +3254,7 @@ impl<'a> Parser<'a> { let (iattrs, loop_block) = self.parse_inner_attrs_and_block()?; attrs.extend(iattrs); - let hi = self.last_span.hi; + let hi = self.prev_span.hi; Ok(self.mk_expr(span_lo, hi, ExprKind::ForLoop(pat, expr, loop_block, opt_ident), @@ -3278,8 +3302,8 @@ impl<'a> Parser<'a> { // `match` token already eaten fn parse_match_expr(&mut self, mut attrs: ThinVec) -> PResult<'a, P> { - let match_span = self.last_span; - let lo = self.last_span.lo; + let match_span = self.prev_span; + let lo = self.prev_span.lo; let discriminant = self.parse_expr_res(Restrictions::RESTRICTION_NO_STRUCT_LITERAL, None)?; if let Err(mut e) = self.expect(&token::OpenDelim(token::Brace)) { @@ -3401,7 +3425,7 @@ impl<'a> Parser<'a> { } } else if ddpos.is_some() && self.eat(&token::DotDot) { // Emit a friendly error, ignore `..` and continue parsing - self.span_err(self.last_span, "`..` can only be used once per \ + self.span_err(self.prev_span, "`..` can only be used once per \ tuple or tuple struct pattern"); } else { fields.push(self.parse_pat()?); @@ -3512,7 +3536,7 @@ impl<'a> Parser<'a> { let is_ref = self.eat_keyword(keywords::Ref); let is_mut = self.eat_keyword(keywords::Mut); let fieldname = self.parse_ident()?; - hi = self.last_span.hi; + hi = self.prev_span.hi; let bind_type = match (is_ref, is_mut) { (true, true) => BindingMode::ByRef(Mutability::Mutable), @@ -3520,7 +3544,7 @@ impl<'a> Parser<'a> { (false, true) => BindingMode::ByValue(Mutability::Mutable), (false, false) => BindingMode::ByValue(Mutability::Immutable), }; - let fieldpath = codemap::Spanned{span:self.last_span, node:fieldname}; + let fieldpath = codemap::Spanned{span:self.prev_span, node:fieldname}; let fieldpat = P(ast::Pat{ id: ast::DUMMY_NODE_ID, node: PatKind::Ident(bind_type, fieldpath, None), @@ -3559,7 +3583,7 @@ impl<'a> Parser<'a> { // Parse an unqualified path (None, self.parse_path(PathStyle::Expr)?) }; - let hi = self.last_span.hi; + let hi = self.prev_span.hi; Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new())) } else { self.parse_pat_literal_maybe_minus() @@ -3573,39 +3597,37 @@ impl<'a> Parser<'a> { let lo = self.span.lo; let pat; match self.token { - token::Underscore => { - // Parse _ - self.bump(); - pat = PatKind::Wild; - } - token::BinOp(token::And) | token::AndAnd => { - // Parse &pat / &mut pat - self.expect_and()?; - let mutbl = self.parse_mutability()?; - if let token::Lifetime(ident) = self.token { - return Err(self.fatal(&format!("unexpected lifetime `{}` in pattern", ident))); + token::Underscore => { + // Parse _ + self.bump(); + pat = PatKind::Wild; + } + token::BinOp(token::And) | token::AndAnd => { + // Parse &pat / &mut pat + self.expect_and()?; + let mutbl = self.parse_mutability()?; + if let token::Lifetime(ident) = self.token { + return Err(self.fatal(&format!("unexpected lifetime `{}` in pattern", ident))); + } + let subpat = self.parse_pat()?; + pat = PatKind::Ref(subpat, mutbl); + } + token::OpenDelim(token::Paren) => { + // Parse (pat,pat,pat,...) as tuple pattern + self.bump(); + let (fields, ddpos) = self.parse_pat_tuple_elements(true)?; + self.expect(&token::CloseDelim(token::Paren))?; + pat = PatKind::Tuple(fields, ddpos); + } + token::OpenDelim(token::Bracket) => { + // Parse [pat,pat,...] 
as slice pattern + self.bump(); + let (before, slice, after) = self.parse_pat_vec_elements()?; + self.expect(&token::CloseDelim(token::Bracket))?; + pat = PatKind::Slice(before, slice, after); } - - let subpat = self.parse_pat()?; - pat = PatKind::Ref(subpat, mutbl); - } - token::OpenDelim(token::Paren) => { - // Parse (pat,pat,pat,...) as tuple pattern - self.bump(); - let (fields, ddpos) = self.parse_pat_tuple_elements(true)?; - self.expect(&token::CloseDelim(token::Paren))?; - pat = PatKind::Tuple(fields, ddpos); - } - token::OpenDelim(token::Bracket) => { - // Parse [pat,pat,...] as slice pattern - self.bump(); - let (before, slice, after) = self.parse_pat_vec_elements()?; - self.expect(&token::CloseDelim(token::Bracket))?; - pat = PatKind::Vec(before, slice, after); - } - _ => { // At this point, token != _, &, &&, (, [ - if self.eat_keyword(keywords::Mut) { + _ => if self.eat_keyword(keywords::Mut) { // Parse mut ident @ pat pat = self.parse_pat_ident(BindingMode::ByValue(Mutability::Mutable))?; } else if self.eat_keyword(keywords::Ref) { @@ -3616,53 +3638,49 @@ impl<'a> Parser<'a> { // Parse box pat let subpat = self.parse_pat()?; pat = PatKind::Box(subpat); + } else if self.token.is_ident() && !self.token.is_any_keyword() && + self.look_ahead(1, |t| match *t { + token::OpenDelim(token::Paren) | token::OpenDelim(token::Brace) | + token::DotDotDot | token::ModSep | token::Not => false, + _ => true, + }) { + // Parse ident @ pat + // This can give false positives and parse nullary enums, + // they are dealt with later in resolve + let binding_mode = BindingMode::ByValue(Mutability::Immutable); + pat = self.parse_pat_ident(binding_mode)?; } else if self.token.is_path_start() { // Parse pattern starting with a path - if self.token.is_ident() && self.look_ahead(1, |t| *t != token::DotDotDot && - *t != token::OpenDelim(token::Brace) && - *t != token::OpenDelim(token::Paren) && - *t != token::ModSep) { - // Plain idents have some extra abilities here compared to general paths - if self.look_ahead(1, |t| *t == token::Not) { + let (qself, path) = if self.eat_lt() { + // Parse a qualified path + let (qself, path) = self.parse_qualified_path(PathStyle::Expr)?; + (Some(qself), path) + } else { + // Parse an unqualified path + (None, self.parse_path(PathStyle::Expr)?) + }; + match self.token { + token::Not if qself.is_none() => { // Parse macro invocation - let path = self.parse_ident_into_path()?; self.bump(); let delim = self.expect_open_delim()?; - let tts = self.parse_seq_to_end( - &token::CloseDelim(delim), - SeqSep::none(), |p| p.parse_token_tree())?; - let mac = Mac_ { path: path, tts: tts }; - pat = PatKind::Mac(codemap::Spanned {node: mac, - span: mk_sp(lo, self.last_span.hi)}); - } else { - // Parse ident @ pat - // This can give false positives and parse nullary enums, - // they are dealt with later in resolve - let binding_mode = BindingMode::ByValue(Mutability::Immutable); - pat = self.parse_pat_ident(binding_mode)?; + let tts = self.parse_seq_to_end(&token::CloseDelim(delim), + SeqSep::none(), + |p| p.parse_token_tree())?; + let mac = spanned(lo, self.prev_span.hi, Mac_ { path: path, tts: tts }); + pat = PatKind::Mac(mac); } - } else { - let (qself, path) = if self.eat_lt() { - // Parse a qualified path - let (qself, path) = - self.parse_qualified_path(PathStyle::Expr)?; - (Some(qself), path) - } else { - // Parse an unqualified path - (None, self.parse_path(PathStyle::Expr)?) 
- }; - match self.token { - token::DotDotDot => { + token::DotDotDot => { // Parse range - let hi = self.last_span.hi; + let hi = self.prev_span.hi; let begin = self.mk_expr(lo, hi, ExprKind::Path(qself, path), ThinVec::new()); self.bump(); let end = self.parse_pat_range_end()?; pat = PatKind::Range(begin, end); - } - token::OpenDelim(token::Brace) => { - if qself.is_some() { + } + token::OpenDelim(token::Brace) => { + if qself.is_some() { return Err(self.fatal("unexpected `{` after qualified path")); } // Parse struct pattern @@ -3674,8 +3692,8 @@ impl<'a> Parser<'a> { }); self.bump(); pat = PatKind::Struct(path, fields, etc); - } - token::OpenDelim(token::Paren) => { + } + token::OpenDelim(token::Paren) => { if qself.is_some() { return Err(self.fatal("unexpected `(` after qualified path")); } @@ -3684,11 +3702,8 @@ impl<'a> Parser<'a> { let (fields, ddpos) = self.parse_pat_tuple_elements(false)?; self.expect(&token::CloseDelim(token::Paren))?; pat = PatKind::TupleStruct(path, fields, ddpos) - } - _ => { - pat = PatKind::Path(qself, path); - } } + _ => pat = PatKind::Path(qself, path), } } else { // Try to parse everything else as literal with optional minus @@ -3702,16 +3717,15 @@ impl<'a> Parser<'a> { } } Err(mut err) => { - err.cancel(); + self.cancel(&mut err); let msg = format!("expected pattern, found {}", self.this_token_descr()); return Err(self.fatal(&msg)); } } } - } } - let hi = self.last_span.hi; + let hi = self.prev_span.hi; Ok(P(ast::Pat { id: ast::DUMMY_NODE_ID, node: pat, @@ -3726,8 +3740,8 @@ impl<'a> Parser<'a> { binding_mode: ast::BindingMode) -> PResult<'a, PatKind> { let ident = self.parse_ident()?; - let last_span = self.last_span; - let name = codemap::Spanned{span: last_span, node: ident}; + let prev_span = self.prev_span; + let name = codemap::Spanned{span: prev_span, node: ident}; let sub = if self.eat(&token::At) { Some(self.parse_pat()?) } else { @@ -3741,9 +3755,8 @@ impl<'a> Parser<'a> { // binding mode then we do not end up here, because the lookahead // will direct us over to parse_enum_variant() if self.token == token::OpenDelim(token::Paren) { - let last_span = self.last_span; return Err(self.span_fatal( - last_span, + self.prev_span, "expected identifier, found enum pattern")) } @@ -3765,7 +3778,7 @@ impl<'a> Parser<'a> { pat: pat, init: init, id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, self.last_span.hi), + span: mk_sp(lo, self.prev_span.hi), attrs: attrs, })) } @@ -3780,7 +3793,7 @@ impl<'a> Parser<'a> { self.expect(&token::Colon)?; let ty = self.parse_ty_sum()?; Ok(StructField { - span: mk_sp(lo, self.last_span.hi), + span: mk_sp(lo, self.prev_span.hi), ident: Some(name), vis: vis, id: ast::DUMMY_NODE_ID, @@ -3798,7 +3811,7 @@ impl<'a> Parser<'a> { _ => "expected item after attributes", }; - self.span_err(self.last_span, message); + self.span_err(self.prev_span, message); } /// Parse a statement. This stops just before trailing semicolons on everything but items. 
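The reorganized `parse_pat` above handles wildcard, reference, tuple, and slice patterns (the node is renamed from `PatKind::Vec` to `PatKind::Slice`), `ident @ subpattern` bindings, qualified and unqualified path patterns, struct and tuple-struct patterns, ranges, and macro invocations. A short refresher on a few of those surface forms, written with today's `..=` syntax (the code above still matches `token::DotDotDot` for range patterns):

```rust
struct Point { x: i32, y: i32 }

fn describe(p: Point) -> String {
    match p {
        // Struct pattern mixing a literal field and a wildcard field.
        Point { x: 0, y: _ } => "on the y-axis".to_string(),
        // `ident @ subpattern` binds the value while also constraining it.
        Point { x, y: small @ 1..=9 } => format!("x={}, small y={}", x, small),
        // Plain struct pattern using shorthand field bindings.
        Point { x, y } => format!("x={}, y={}", x, y),
    }
}

fn main() {
    println!("{}", describe(Point { x: 0, y: 7 }));
    println!("{}", describe(Point { x: 2, y: 3 }));
    println!("{}", describe(Point { x: 4, y: 40 }));
}
```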
@@ -3868,15 +3881,22 @@ impl<'a> Parser<'a> { } } - fn parse_stmt_(&mut self, macro_expanded: bool) -> Option { - self.parse_stmt_without_recovery(macro_expanded).unwrap_or_else(|mut e| { + fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option { + self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| { e.emit(); self.recover_stmt_(SemiColonMode::Break); None }) } - fn parse_stmt_without_recovery(&mut self, macro_expanded: bool) -> PResult<'a, Option> { + fn is_union_item(&mut self) -> bool { + self.token.is_keyword(keywords::Union) && + self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword()) + } + + fn parse_stmt_without_recovery(&mut self, + macro_legacy_warnings: bool) + -> PResult<'a, Option> { maybe_whole!(Some deref self, NtStmt); let attrs = self.parse_outer_attributes()?; @@ -3886,18 +3906,35 @@ impl<'a> Parser<'a> { Stmt { id: ast::DUMMY_NODE_ID, node: StmtKind::Local(self.parse_local(attrs.into())?), - span: mk_sp(lo, self.last_span.hi), + span: mk_sp(lo, self.prev_span.hi), } - } else if self.token.is_ident() - && !self.token.is_any_keyword() - && self.look_ahead(1, |t| *t == token::Not) { - // it's a macro invocation: + // Starts like a simple path, but not a union item. + } else if self.token.is_path_start() && + !self.token.is_qpath_start() && + !self.is_union_item() { + let pth = self.parse_path(PathStyle::Expr)?; - // Potential trouble: if we allow macros with paths instead of - // idents, we'd need to look ahead past the whole path here... - let pth = self.parse_ident_into_path()?; - self.bump(); + if !self.eat(&token::Not) { + let expr = if self.check(&token::OpenDelim(token::Brace)) { + self.parse_struct_expr(lo, pth, ThinVec::new())? + } else { + let hi = self.prev_span.hi; + self.mk_expr(lo, hi, ExprKind::Path(None, pth), ThinVec::new()) + }; + let expr = self.with_res(Restrictions::RESTRICTION_STMT_EXPR, |this| { + let expr = this.parse_dot_or_call_expr_with(expr, lo, attrs.into())?; + this.parse_assoc_expr_with(0, LhsExpr::AlreadyParsed(expr)) + })?; + + return Ok(Some(Stmt { + id: ast::DUMMY_NODE_ID, + node: StmtKind::Expr(expr), + span: mk_sp(lo, self.prev_span.hi), + })); + } + + // it's a macro invocation let id = match self.token { token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier _ => self.parse_ident()?, @@ -3929,7 +3966,7 @@ impl<'a> Parser<'a> { SeqSep::none(), |p| p.parse_token_tree() )?; - let hi = self.last_span.hi; + let hi = self.prev_span.hi; let style = if delim == token::Brace { MacStmtStyle::Braces @@ -3946,7 +3983,7 @@ impl<'a> Parser<'a> { // We used to incorrectly stop parsing macro-expanded statements here. // If the next token will be an error anyway but could have parsed with the // earlier behavior, stop parsing here and emit a warning to avoid breakage. - else if macro_expanded && self.token.can_begin_expr() && match self.token { + else if macro_legacy_warnings && self.token.can_begin_expr() && match self.token { // These can continue an expression, so we can't stop parsing and warn. token::OpenDelim(token::Paren) | token::OpenDelim(token::Bracket) | token::BinOp(token::Minus) | token::BinOp(token::Star) | @@ -3974,8 +4011,7 @@ impl<'a> Parser<'a> { // Require a semicolon or braces. 
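The statement code above keeps three outcomes open after a leading path: a struct literal, an ordinary path expression continued by calls or operators, or a macro invocation. The three surface forms it must distinguish, written out with throwaway names:

#[derive(Debug)]
struct Point { x: i32, y: i32 }

fn origin() -> Point { Point { x: 0, y: 0 } }

macro_rules! trace { ($e:expr) => { println!("{:?}", $e) }; }

fn main() {
    Point { x: 1, y: 2 };  // path then `{` -> struct-literal statement
    origin().x;            // path then `(` -> call/field expression statement
    trace!(origin());      // path then `!` -> macro-invocation statement
}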
if style != MacStmtStyle::Braces { if !self.eat(&token::Semi) { - let last_span = self.last_span; - self.span_err(last_span, + self.span_err(self.prev_span, "macros that expand to items must \ either be surrounded with braces or \ followed by a semicolon"); @@ -4006,13 +4042,13 @@ impl<'a> Parser<'a> { None => { let unused_attrs = |attrs: &[_], s: &mut Self| { if attrs.len() > 0 { - let last_token = s.last_token.clone().map(|t| *t); - match last_token { - Some(token::DocComment(_)) => s.span_err_help(s.last_span, + if s.prev_token_kind == PrevTokenKind::DocComment { + s.span_err_help(s.prev_span, "found a documentation comment that doesn't document anything", "doc comments must come before what they document, maybe a \ - comment was intended with `//`?"), - _ => s.span_err(s.span, "expected statement after outer attribute"), + comment was intended with `//`?"); + } else { + s.span_err(s.span, "expected statement after outer attribute"); } } }; @@ -4070,13 +4106,13 @@ impl<'a> Parser<'a> { let mut stmt_span = stmt.span; // expand the span to include the semicolon, if it exists if self.eat(&token::Semi) { - stmt_span.hi = self.last_span.hi; + stmt_span.hi = self.prev_span.hi; } e.span_help(stmt_span, "try placing this code inside a block"); } Err(mut e) => { self.recover_stmt_(SemiColonMode::Break); - e.cancel(); + self.cancel(&mut e); } _ => () } @@ -4116,13 +4152,13 @@ impl<'a> Parser<'a> { stmts: stmts, id: ast::DUMMY_NODE_ID, rules: s, - span: mk_sp(lo, self.last_span.hi), + span: mk_sp(lo, self.prev_span.hi), })) } /// Parse a statement, including the trailing semicolon. - pub fn parse_full_stmt(&mut self, macro_expanded: bool) -> PResult<'a, Option> { - let mut stmt = match self.parse_stmt_(macro_expanded) { + pub fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option> { + let mut stmt = match self.parse_stmt_(macro_legacy_warnings) { Some(stmt) => stmt, None => return Ok(None), }; @@ -4142,7 +4178,7 @@ impl<'a> Parser<'a> { } StmtKind::Local(..) => { // We used to incorrectly allow a macro-expanded let statement to lack a semicolon. - if macro_expanded && self.token != token::Semi { + if macro_legacy_warnings && self.token != token::Semi { self.warn_missing_semicolon(); } else { self.expect_one_of(&[token::Semi], &[])?; @@ -4155,7 +4191,7 @@ impl<'a> Parser<'a> { stmt = stmt.add_trailing_semicolon(); } - stmt.span.hi = self.last_span.hi; + stmt.span.hi = self.prev_span.hi; Ok(Some(stmt)) } @@ -4188,7 +4224,7 @@ impl<'a> Parser<'a> { mode: BoundParsingMode) -> PResult<'a, TyParamBounds> { - let mut result = vec!(); + let mut result = vec![]; loop { let question_span = self.span; let ate_question = self.eat(&token::Question); @@ -4232,7 +4268,7 @@ impl<'a> Parser<'a> { } /// Matches typaram = IDENT (`?` unbound)? optbounds ( EQ ty )? - fn parse_ty_param(&mut self) -> PResult<'a, TyParam> { + fn parse_ty_param(&mut self, preceding_attrs: Vec) -> PResult<'a, TyParam> { let span = self.span; let ident = self.parse_ident()?; @@ -4246,6 +4282,7 @@ impl<'a> Parser<'a> { }; Ok(TyParam { + attrs: preceding_attrs.into(), ident: ident, id: ast::DUMMY_NODE_ID, bounds: bounds, @@ -4266,20 +4303,42 @@ impl<'a> Parser<'a> { let span_lo = self.span.lo; if self.eat(&token::Lt) { - let lifetime_defs = self.parse_lifetime_defs()?; + // Upon encountering attribute in generics list, we do not + // know if it is attached to lifetime or to type param. + // + // Solution: 1. eagerly parse attributes in tandem with + // lifetime defs, 2. 
store last set of parsed (and unused) + // attributes in `attrs`, and 3. pass in those attributes + // when parsing formal type param after lifetime defs. + let mut attrs = vec![]; + let lifetime_defs = self.parse_lifetime_defs(Some(&mut attrs))?; let mut seen_default = false; + let mut post_lifetime_attrs = Some(attrs); let ty_params = self.parse_seq_to_gt(Some(token::Comma), |p| { p.forbid_lifetime()?; - let ty_param = p.parse_ty_param()?; + // Move out of `post_lifetime_attrs` if present. O/w + // not first type param: parse attributes anew. + let attrs = match post_lifetime_attrs.as_mut() { + None => p.parse_outer_attributes()?, + Some(attrs) => mem::replace(attrs, vec![]), + }; + post_lifetime_attrs = None; + let ty_param = p.parse_ty_param(attrs)?; if ty_param.default.is_some() { seen_default = true; } else if seen_default { - let last_span = p.last_span; - p.span_err(last_span, + let prev_span = p.prev_span; + p.span_err(prev_span, "type parameters with a default must be trailing"); } Ok(ty_param) })?; + if let Some(attrs) = post_lifetime_attrs { + if !attrs.is_empty() { + self.span_err(attrs[0].span, + "trailing attribute after lifetime parameters"); + } + } Ok(ast::Generics { lifetimes: lifetime_defs, ty_params: ty_params, @@ -4287,7 +4346,7 @@ impl<'a> Parser<'a> { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), }, - span: mk_sp(span_lo, self.last_span.hi), + span: mk_sp(span_lo, self.prev_span.hi), }) } else { Ok(ast::Generics::default()) @@ -4302,9 +4361,7 @@ impl<'a> Parser<'a> { let missing_comma = !lifetimes.is_empty() && !self.token.is_like_gt() && - self.last_token - .as_ref().map_or(true, - |x| &**x != &token::Comma); + self.prev_token_kind != PrevTokenKind::Comma; if missing_comma { @@ -4317,7 +4374,7 @@ impl<'a> Parser<'a> { let span_hi = match self.parse_ty() { Ok(..) => self.span.hi, Err(ref mut err) => { - err.cancel(); + self.cancel(err); span_hi } }; @@ -4411,7 +4468,7 @@ impl<'a> Parser<'a> { let bounds = self.parse_lifetimes(token::BinOp(token::Plus))?; - let hi = self.last_span.hi; + let hi = self.prev_span.hi; let span = mk_sp(lo, hi); where_clause.predicates.push(ast::WherePredicate::RegionPredicate( @@ -4429,7 +4486,7 @@ impl<'a> Parser<'a> { let bound_lifetimes = if self.eat_keyword(keywords::For) { // Higher ranked constraint. 
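The `post_lifetime_attrs` handling above is a "hand the stashed value to the first consumer, then go back to parsing fresh" pattern built from an `Option` and `mem::replace`. A self-contained imitation of it, with strings standing in for attributes and numbered entries standing in for type parameters:

use std::mem;

// Stand-in for "parse a fresh outer-attribute list" on later parameters.
fn parse_fresh_attrs(i: usize) -> Vec<String> {
    vec![format!("attr_for_param_{}", i)]
}

fn main() {
    // Attributes that were consumed eagerly alongside the lifetime
    // parameters but actually belong to the first type parameter.
    let mut stashed: Option<Vec<String>> = Some(vec!["stashed_attr".to_string()]);

    let mut params = Vec::new();
    for i in 0..3 {
        // First iteration: move the stashed list out. Later iterations:
        // parse attributes as usual.
        let attrs = match stashed.as_mut() {
            Some(a) => mem::replace(a, Vec::new()),
            None => parse_fresh_attrs(i),
        };
        stashed = None;
        params.push((i, attrs));
    }

    assert_eq!(params[0].1, vec!["stashed_attr".to_string()]);
    assert_eq!(params[1].1, vec!["attr_for_param_1".to_string()]);
}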
self.expect(&token::Lt)?; - let lifetime_defs = self.parse_lifetime_defs()?; + let lifetime_defs = self.parse_lifetime_defs(None)?; self.expect_gt()?; lifetime_defs } else { @@ -4440,7 +4497,7 @@ impl<'a> Parser<'a> { if self.eat(&token::Colon) { let bounds = self.parse_ty_param_bounds(BoundParsingMode::Bare)?; - let hi = self.last_span.hi; + let hi = self.prev_span.hi; let span = mk_sp(lo, hi); if bounds.is_empty() { @@ -4460,7 +4517,7 @@ impl<'a> Parser<'a> { parsed_something = true; } else if self.eat(&token::Eq) { // let ty = try!(self.parse_ty()); - let hi = self.last_span.hi; + let hi = self.prev_span.hi; let span = mk_sp(lo, hi); // where_clause.predicates.push( // ast::WherePredicate::EqPredicate(ast::WhereEqPredicate { @@ -4475,8 +4532,8 @@ impl<'a> Parser<'a> { "equality constraints are not yet supported \ in where clauses (#20041)"); } else { - let last_span = self.last_span; - self.span_err(last_span, + let prev_span = self.prev_span; + self.span_err(prev_span, "unexpected token in `where` clause"); } } @@ -4488,8 +4545,8 @@ impl<'a> Parser<'a> { } if !parsed_something { - let last_span = self.last_span; - self.span_err(last_span, + let prev_span = self.prev_span; + self.span_err(prev_span, "a `where` clause must have at least one predicate \ in it"); } @@ -4562,9 +4619,13 @@ impl<'a> Parser<'a> { fn parse_self_arg(&mut self) -> PResult<'a, Option> { let expect_ident = |this: &mut Self| match this.token { // Preserve hygienic context. - token::Ident(ident) => { this.bump(); codemap::respan(this.last_span, ident) } + token::Ident(ident) => { this.bump(); codemap::respan(this.prev_span, ident) } _ => unreachable!() }; + let isolated_self = |this: &mut Self, n| { + this.look_ahead(n, |t| t.is_keyword(keywords::SelfValue)) && + this.look_ahead(n + 1, |t| t != &token::ModSep) + }; // Parse optional self parameter of a method. // Only a limited set of initial token sequences is considered self parameters, anything @@ -4577,22 +4638,22 @@ impl<'a> Parser<'a> { // &'lt self // &'lt mut self // ¬_self - if self.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) { + if isolated_self(self, 1) { self.bump(); (SelfKind::Region(None, Mutability::Immutable), expect_ident(self)) } else if self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) && - self.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) { + isolated_self(self, 2) { self.bump(); self.bump(); (SelfKind::Region(None, Mutability::Mutable), expect_ident(self)) } else if self.look_ahead(1, |t| t.is_lifetime()) && - self.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) { + isolated_self(self, 2) { self.bump(); let lt = self.parse_lifetime()?; (SelfKind::Region(Some(lt), Mutability::Immutable), expect_ident(self)) } else if self.look_ahead(1, |t| t.is_lifetime()) && self.look_ahead(2, |t| t.is_keyword(keywords::Mut)) && - self.look_ahead(3, |t| t.is_keyword(keywords::SelfValue)) { + isolated_self(self, 3) { self.bump(); let lt = self.parse_lifetime()?; self.bump(); @@ -4607,12 +4668,12 @@ impl<'a> Parser<'a> { // *mut self // *not_self // Emit special error for `self` cases. 
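The `isolated_self` helper above captures a single rule: `self` opens a self-parameter only when it is not immediately followed by `::`, because `self::...` is just a path. A toy version over string tokens (purely illustrative, not the real token type):

fn isolated_self(tokens: &[&str], n: usize) -> bool {
    // `self` used on its own, i.e. not as the first segment of a path.
    tokens.get(n) == Some(&"self") && tokens.get(n + 1) != Some(&"::")
}

fn main() {
    // `fn f(&self)` -- after `&`, `self` stands alone: a self-parameter.
    assert!(isolated_self(&["&", "self", ")"], 1));
    // `fn f(x: &self::Foo)` -- here `self` begins a path, not a receiver.
    assert!(!isolated_self(&["&", "self", "::", "Foo"], 1));
}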
- if self.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) { + if isolated_self(self, 1) { self.bump(); self.span_err(self.span, "cannot pass `self` by raw pointer"); (SelfKind::Value(Mutability::Immutable), expect_ident(self)) } else if self.look_ahead(1, |t| t.is_mutability()) && - self.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) { + isolated_self(self, 2) { self.bump(); self.bump(); self.span_err(self.span, "cannot pass `self` by raw pointer"); @@ -4622,7 +4683,7 @@ impl<'a> Parser<'a> { } } token::Ident(..) => { - if self.token.is_keyword(keywords::SelfValue) { + if isolated_self(self, 0) { // self // self: TYPE let eself_ident = expect_ident(self); @@ -4633,7 +4694,7 @@ impl<'a> Parser<'a> { (SelfKind::Value(Mutability::Immutable), eself_ident) } } else if self.token.is_keyword(keywords::Mut) && - self.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) { + isolated_self(self, 1) { // mut self // mut self: TYPE self.bump(); @@ -4651,7 +4712,7 @@ impl<'a> Parser<'a> { _ => return Ok(None), }; - let eself = codemap::respan(mk_sp(eself_lo, self.last_span.hi), eself); + let eself = codemap::respan(mk_sp(eself_lo, self.prev_span.hi), eself); Ok(Some(Arg::from_self(eself, eself_ident))) } @@ -4769,7 +4830,7 @@ impl<'a> Parser<'a> { ast::Unsafety, abi::Abi)> { let is_const_fn = self.eat_keyword(keywords::Const); - let const_span = self.last_span; + let const_span = self.prev_span; let unsafety = self.parse_unsafety()?; let (constness, unsafety, abi) = if is_const_fn { (respan(const_span, Constness::Const), unsafety, Abi::Rust) @@ -4779,7 +4840,7 @@ impl<'a> Parser<'a> { } else { Abi::Rust }; - (respan(self.last_span, Constness::NotConst), unsafety, abi) + (respan(self.prev_span, Constness::NotConst), unsafety, abi) }; self.expect_keyword(keywords::Fn)?; Ok((constness, unsafety, abi)) @@ -4816,7 +4877,7 @@ impl<'a> Parser<'a> { Ok(ImplItem { id: ast::DUMMY_NODE_ID, - span: mk_sp(lo, self.last_span.hi), + span: mk_sp(lo, self.prev_span.hi), ident: name, vis: vis, defaultness: defaultness, @@ -4853,17 +4914,14 @@ impl<'a> Parser<'a> { fn parse_impl_method(&mut self, vis: &Visibility) -> PResult<'a, (Ident, Vec, ast::ImplItemKind)> { // code copied from parse_macro_use_or_failure... abstraction! - if !self.token.is_any_keyword() - && self.look_ahead(1, |t| *t == token::Not) - && (self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren)) - || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) { + if self.token.is_path_start() { // method macro. - let last_span = self.last_span; - self.complain_if_pub_macro(&vis, last_span); + let prev_span = self.prev_span; + self.complain_if_pub_macro(&vis, prev_span); let lo = self.span.lo; - let pth = self.parse_ident_into_path()?; + let pth = self.parse_path(PathStyle::Mod)?; self.expect(&token::Not)?; // eat a matched-delimiter token tree: @@ -4871,14 +4929,12 @@ impl<'a> Parser<'a> { let tts = self.parse_seq_to_end(&token::CloseDelim(delim), SeqSep::none(), |p| p.parse_token_tree())?; - let m_ = Mac_ { path: pth, tts: tts }; - let m: ast::Mac = codemap::Spanned { node: m_, - span: mk_sp(lo, - self.last_span.hi) }; if delim != token::Brace { self.expect(&token::Semi)? 
} - Ok((keywords::Invalid.ident(), vec![], ast::ImplItemKind::Macro(m))) + + let mac = spanned(lo, self.prev_span.hi, Mac_ { path: pth, tts: tts }); + Ok((keywords::Invalid.ident(), vec![], ast::ImplItemKind::Macro(mac))) } else { let (constness, unsafety, abi) = self.parse_fn_front_matter()?; let ident = self.parse_ident()?; @@ -5002,7 +5058,7 @@ impl<'a> Parser<'a> { fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec> { if self.eat_keyword(keywords::For) { self.expect(&token::Lt)?; - let lifetime_defs = self.parse_lifetime_defs()?; + let lifetime_defs = self.parse_lifetime_defs(None)?; self.expect_gt()?; Ok(lifetime_defs) } else { @@ -5018,7 +5074,7 @@ impl<'a> Parser<'a> { Ok(ast::PolyTraitRef { bound_lifetimes: lifetime_defs, trait_ref: self.parse_trait_ref()?, - span: mk_sp(lo, self.last_span.hi), + span: mk_sp(lo, self.prev_span.hi), }) } @@ -5094,7 +5150,11 @@ impl<'a> Parser<'a> { let mut fields = Vec::new(); if self.eat(&token::OpenDelim(token::Brace)) { while self.token != token::CloseDelim(token::Brace) { - fields.push(self.parse_struct_decl_field()?); + fields.push(self.parse_struct_decl_field().map_err(|e| { + self.recover_stmt(); + self.eat(&token::CloseDelim(token::Brace)); + e + })?); } self.bump(); @@ -5184,7 +5244,7 @@ impl<'a> Parser<'a> { // If `allow_path` is false, just parse the `pub` in `pub(path)` (but still parse `pub(crate)`) fn parse_visibility(&mut self, allow_path: bool) -> PResult<'a, Visibility> { let pub_crate = |this: &mut Self| { - let span = this.last_span; + let span = this.prev_span; this.expect(&token::CloseDelim(token::Paren))?; Ok(Visibility::Crate(span)) }; @@ -5235,7 +5295,7 @@ impl<'a> Parser<'a> { let hi = if self.span == syntax_pos::DUMMY_SP { inner_lo } else { - self.last_span.hi + self.prev_span.hi }; Ok(ast::Mod { @@ -5260,39 +5320,51 @@ impl<'a> Parser<'a> { /// Parse a `mod { ... }` or `mod ;` item fn parse_item_mod(&mut self, outer_attrs: &[Attribute]) -> PResult<'a, ItemInfo> { - let outer_attrs = ::config::StripUnconfigured { - config: &self.cfg, - sess: self.sess, - should_test: false, // irrelevant - features: None, // don't perform gated feature checking - }.process_cfg_attrs(outer_attrs.to_owned()); + let (in_cfg, outer_attrs) = { + let mut strip_unconfigured = ::config::StripUnconfigured { + sess: self.sess, + should_test: false, // irrelevant + features: None, // don't perform gated feature checking + }; + let outer_attrs = strip_unconfigured.process_cfg_attrs(outer_attrs.to_owned()); + (strip_unconfigured.in_cfg(&outer_attrs), outer_attrs) + }; let id_span = self.span; let id = self.parse_ident()?; if self.check(&token::Semi) { self.bump(); - // This mod is in an external file. Let's go get it! - let (m, attrs) = self.eval_src_mod(id, &outer_attrs, id_span)?; - Ok((id, m, Some(attrs))) + if in_cfg { + // This mod is in an external file. Let's go get it! 
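For orientation, the module forms this branch distinguishes: an out-of-line `mod name;` is only read from disk when its `cfg` attributes are active, and a `#[path]` attribute overrides where the file is looked up. A small standalone illustration (file names are examples only; `cfg(any())` is simply an always-false predicate):

// Never active, so no `missing.rs` needs to exist on disk; the parser
// substitutes an empty placeholder module instead of loading the file.
#[cfg(any())]
#[path = "missing.rs"]
mod skipped;

// Inline modules carry their items with them and need no file lookup.
mod inline_mod {
    pub fn hello() -> &'static str { "from an inline module" }
}

fn main() {
    println!("{}", inline_mod::hello());
}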
+ let (m, attrs) = self.eval_src_mod(id, &outer_attrs, id_span)?; + Ok((id, m, Some(attrs))) + } else { + let placeholder = ast::Mod { inner: syntax_pos::DUMMY_SP, items: Vec::new() }; + Ok((id, ItemKind::Mod(placeholder), None)) + } } else { let directory = self.directory.clone(); - self.push_directory(id, &outer_attrs); + let restrictions = self.push_directory(id, &outer_attrs); self.expect(&token::OpenDelim(token::Brace))?; let mod_inner_lo = self.span.lo; let attrs = self.parse_inner_attributes()?; - let m = self.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo)?; + let m = self.with_res(restrictions, |this| { + this.parse_mod_items(&token::CloseDelim(token::Brace), mod_inner_lo) + })?; self.directory = directory; Ok((id, ItemKind::Mod(m), Some(attrs))) } } - fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) { - let default_path = self.id_to_interned_str(id); - let file_path = match ::attr::first_attr_value_str_by_name(attrs, "path") { - Some(d) => d, - None => default_path, - }; - self.directory.push(&*file_path) + fn push_directory(&mut self, id: Ident, attrs: &[Attribute]) -> Restrictions { + if let Some(path) = ::attr::first_attr_value_str_by_name(attrs, "path") { + self.directory.push(&*path); + self.restrictions - Restrictions::NO_NONINLINE_MOD + } else { + let default_path = self.id_to_interned_str(id); + self.directory.push(&*default_path); + self.restrictions + } } pub fn submod_path_from_attr(attrs: &[ast::Attribute], dir_path: &Path) -> Option { @@ -5417,12 +5489,7 @@ impl<'a> Parser<'a> { included_mod_stack.push(path.clone()); drop(included_mod_stack); - let mut p0 = new_sub_parser_from_file(self.sess, - self.cfg.clone(), - &path, - owns_directory, - Some(name), - id_sp); + let mut p0 = new_sub_parser_from_file(self.sess, &path, owns_directory, Some(name), id_sp); let mod_inner_lo = p0.span.lo; let mod_attrs = p0.parse_inner_attributes()?; let m0 = p0.parse_mod_items(&token::Eof, mod_inner_lo)?; @@ -5491,9 +5558,9 @@ impl<'a> Parser<'a> { }; self.expect(&token::Semi)?; - let last_span = self.last_span; + let prev_span = self.prev_span; Ok(self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, ItemKind::ExternCrate(maybe_path), visibility, @@ -5528,13 +5595,13 @@ impl<'a> Parser<'a> { } self.expect(&token::CloseDelim(token::Brace))?; - let last_span = self.last_span; + let prev_span = self.prev_span; let m = ast::ForeignMod { abi: abi, items: foreign_items }; Ok(self.mk_item(lo, - last_span.hi, + prev_span.hi, keywords::Invalid.ident(), ItemKind::ForeignMod(m), visibility, @@ -5587,7 +5654,7 @@ impl<'a> Parser<'a> { data: struct_def, disr_expr: disr_expr, }; - variants.push(spanned(vlo, self.last_span.hi, vr)); + variants.push(spanned(vlo, self.prev_span.hi, vr)); if !self.eat(&token::Comma) { break; } } @@ -5609,7 +5676,11 @@ impl<'a> Parser<'a> { generics.where_clause = self.parse_where_clause()?; self.expect(&token::OpenDelim(token::Brace))?; - let enum_definition = self.parse_enum_def(&generics)?; + let enum_definition = self.parse_enum_def(&generics).map_err(|e| { + self.recover_stmt(); + self.eat(&token::CloseDelim(token::Brace)); + e + })?; Ok((id, ItemKind::Enum(enum_definition, generics), None)) } @@ -5624,9 +5695,9 @@ impl<'a> Parser<'a> { match abi::lookup(&s.as_str()) { Some(abi) => Ok(Some(abi)), None => { - let last_span = self.last_span; + let prev_span = self.prev_span; self.span_err( - last_span, + prev_span, &format!("invalid ABI: expected one of [{}], \ found `{}`", abi::all_names().join(", "), @@ -5668,9 +5739,9 @@ impl<'a> 
Parser<'a> { let item_ = ItemKind::Use(self.parse_view_path()?); self.expect(&token::Semi)?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, keywords::Invalid.ident(), item_, visibility, @@ -5687,15 +5758,15 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Fn) { // EXTERN FUNCTION ITEM - let fn_span = self.last_span; + let fn_span = self.prev_span; let abi = opt_abi.unwrap_or(Abi::C); let (ident, item_, extra_attrs) = self.parse_item_fn(Unsafety::Normal, respan(fn_span, Constness::NotConst), abi)?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5716,9 +5787,9 @@ impl<'a> Parser<'a> { Mutability::Immutable }; let (ident, item_, extra_attrs) = self.parse_item_const(Some(m))?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5726,7 +5797,7 @@ impl<'a> Parser<'a> { return Ok(Some(item)); } if self.eat_keyword(keywords::Const) { - let const_span = self.last_span; + let const_span = self.prev_span; if self.check_keyword(keywords::Fn) || (self.check_keyword(keywords::Unsafe) && self.look_ahead(1, |t| t.is_keyword(keywords::Fn))) { @@ -5741,9 +5812,9 @@ impl<'a> Parser<'a> { self.parse_item_fn(unsafety, respan(const_span, Constness::Const), Abi::Rust)?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5753,15 +5824,15 @@ impl<'a> Parser<'a> { // CONST ITEM if self.eat_keyword(keywords::Mut) { - let last_span = self.last_span; - self.diagnostic().struct_span_err(last_span, "const globals cannot be mutable") + let prev_span = self.prev_span; + self.diagnostic().struct_span_err(prev_span, "const globals cannot be mutable") .help("did you mean to declare a static?") .emit(); } let (ident, item_, extra_attrs) = self.parse_item_const(None)?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5776,9 +5847,9 @@ impl<'a> Parser<'a> { self.expect_keyword(keywords::Trait)?; let (ident, item_, extra_attrs) = self.parse_item_trait(ast::Unsafety::Unsafe)?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5792,9 +5863,9 @@ impl<'a> Parser<'a> { self.expect_keyword(keywords::Unsafe)?; self.expect_keyword(keywords::Impl)?; let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Unsafe)?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5804,14 +5875,14 @@ impl<'a> Parser<'a> { if self.check_keyword(keywords::Fn) { // FUNCTION ITEM self.bump(); - let fn_span = self.last_span; + let fn_span = self.prev_span; let (ident, item_, extra_attrs) = self.parse_item_fn(Unsafety::Normal, respan(fn_span, Constness::NotConst), Abi::Rust)?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5828,14 +5899,14 @@ impl<'a> Parser<'a> { Abi::Rust }; self.expect_keyword(keywords::Fn)?; - let fn_span = self.last_span; + let fn_span = self.prev_span; let (ident, item_, extra_attrs) = 
self.parse_item_fn(Unsafety::Unsafe, respan(fn_span, Constness::NotConst), abi)?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5846,9 +5917,9 @@ impl<'a> Parser<'a> { // MODULE ITEM let (ident, item_, extra_attrs) = self.parse_item_mod(&attrs[..])?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5858,9 +5929,9 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Type) { // TYPE ITEM let (ident, item_, extra_attrs) = self.parse_item_type()?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5870,9 +5941,9 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Enum) { // ENUM ITEM let (ident, item_, extra_attrs) = self.parse_item_enum()?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5883,9 +5954,9 @@ impl<'a> Parser<'a> { // TRAIT ITEM let (ident, item_, extra_attrs) = self.parse_item_trait(ast::Unsafety::Normal)?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5895,9 +5966,9 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Impl) { // IMPL ITEM let (ident, item_, extra_attrs) = self.parse_item_impl(ast::Unsafety::Normal)?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5907,23 +5978,22 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Struct) { // STRUCT ITEM let (ident, item_, extra_attrs) = self.parse_item_struct()?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, maybe_append(attrs, extra_attrs)); return Ok(Some(item)); } - if self.check_keyword(keywords::Union) && - self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword()) { + if self.is_union_item() { // UNION ITEM self.bump(); let (ident, item_, extra_attrs) = self.parse_item_union()?; - let last_span = self.last_span; + let prev_span = self.prev_span; let item = self.mk_item(lo, - last_span.hi, + prev_span.hi, ident, item_, visibility, @@ -5966,20 +6036,16 @@ impl<'a> Parser<'a> { lo: BytePos, visibility: Visibility ) -> PResult<'a, Option>> { - if macros_allowed && !self.token.is_any_keyword() - && self.look_ahead(1, |t| *t == token::Not) - && (self.look_ahead(2, |t| t.is_ident()) - || self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren)) - || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) { + if macros_allowed && self.token.is_path_start() { // MACRO INVOCATION ITEM - let last_span = self.last_span; - self.complain_if_pub_macro(&visibility, last_span); + let prev_span = self.prev_span; + self.complain_if_pub_macro(&visibility, prev_span); let mac_lo = self.span.lo; // item macro. - let pth = self.parse_ident_into_path()?; + let pth = self.parse_path(PathStyle::Mod)?; self.expect(&token::Not)?; // a 'special' identifier (like what `macro_rules!` uses) @@ -5995,30 +6061,19 @@ impl<'a> Parser<'a> { let tts = self.parse_seq_to_end(&token::CloseDelim(delim), SeqSep::none(), |p| p.parse_token_tree())?; - // single-variant-enum... 
: - let m = Mac_ { path: pth, tts: tts }; - let m: ast::Mac = codemap::Spanned { node: m, - span: mk_sp(mac_lo, - self.last_span.hi) }; - if delim != token::Brace { if !self.eat(&token::Semi) { - let last_span = self.last_span; - self.span_err(last_span, + let prev_span = self.prev_span; + self.span_err(prev_span, "macros that expand to items must either \ be surrounded with braces or followed by \ a semicolon"); } } - let item_ = ItemKind::Mac(m); - let last_span = self.last_span; - let item = self.mk_item(lo, - last_span.hi, - id, - item_, - visibility, - attrs); + let hi = self.prev_span.hi; + let mac = spanned(mac_lo, hi, Mac_ { path: pth, tts: tts }); + let item = self.mk_item(lo, hi, id, ItemKind::Mac(mac), visibility, attrs); return Ok(Some(item)); } @@ -6026,8 +6081,8 @@ impl<'a> Parser<'a> { match visibility { Visibility::Inherited => {} _ => { - let last_span = self.last_span; - return Err(self.span_fatal(last_span, "unmatched visibility `pub`")); + let prev_span = self.prev_span; + return Err(self.span_fatal(prev_span, "unmatched visibility `pub`")); } } @@ -6058,7 +6113,7 @@ impl<'a> Parser<'a> { rename: rename, id: ast::DUMMY_NODE_ID }; - let hi = this.last_span.hi; + let hi = this.prev_span.hi; Ok(spanned(lo, hi, node)) }) } @@ -6078,15 +6133,20 @@ impl<'a> Parser<'a> { /// MOD_SEP? LBRACE item_seq RBRACE fn parse_view_path(&mut self) -> PResult<'a, P> { let lo = self.span.lo; - if self.check(&token::OpenDelim(token::Brace)) || self.is_import_coupler() { - // `{foo, bar}` or `::{foo, bar}` + if self.check(&token::OpenDelim(token::Brace)) || self.check(&token::BinOp(token::Star)) || + self.is_import_coupler() { + // `{foo, bar}`, `::{foo, bar}`, `*`, or `::*`. let prefix = ast::Path { global: self.eat(&token::ModSep), segments: Vec::new(), span: mk_sp(lo, self.span.hi), }; - let items = self.parse_path_list_items()?; - Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items)))) + let view_path_kind = if self.eat(&token::BinOp(token::Star)) { + ViewPathGlob(prefix) + } else { + ViewPathList(prefix, self.parse_path_list_items()?) + }; + Ok(P(spanned(lo, self.span.hi, view_path_kind))) } else { let prefix = self.parse_path(PathStyle::Mod)?; if self.is_import_coupler() { @@ -6103,7 +6163,7 @@ impl<'a> Parser<'a> { // `foo::bar` or `foo::bar as baz` let rename = self.parse_rename()?. 
unwrap_or(prefix.segments.last().unwrap().identifier); - Ok(P(spanned(lo, self.last_span.hi, ViewPathSimple(rename, prefix)))) + Ok(P(spanned(lo, self.prev_span.hi, ViewPathSimple(rename, prefix)))) } } } @@ -6123,7 +6183,6 @@ impl<'a> Parser<'a> { Ok(ast::Crate { attrs: self.parse_inner_attributes()?, module: self.parse_mod_items(&token::Eof, lo)?, - config: self.cfg.clone(), span: mk_sp(lo, self.span.lo), exported_macros: Vec::new(), }) @@ -6151,7 +6210,7 @@ impl<'a> Parser<'a> { pub fn parse_str(&mut self) -> PResult<'a, (InternedString, StrStyle)> { match self.parse_optional_str() { Some((s, style, suf)) => { - let sp = self.last_span; + let sp = self.prev_span; self.expect_no_suffix(sp, "string literal", suf); Ok((s, style)) } diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index ff01d37581544..4d0da660302ae 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -14,7 +14,7 @@ pub use self::DelimToken::*; pub use self::Lit::*; pub use self::Token::*; -use ast::{self, BinOpKind}; +use ast::{self}; use ptr::P; use util::interner::Interner; use tokenstream; @@ -52,21 +52,6 @@ pub enum DelimToken { NoDelim, } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] -pub enum SpecialMacroVar { - /// `$crate` will be filled in with the name of the crate a macro was - /// imported from, if any. - CrateMacroVar, -} - -impl SpecialMacroVar { - pub fn as_str(self) -> &'static str { - match self { - SpecialMacroVar::CrateMacroVar => "crate", - } - } -} - #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] pub enum Lit { Byte(ast::Name), @@ -148,8 +133,6 @@ pub enum Token { // In right-hand-sides of MBE macros: /// A syntactic variable that will be filled in by macro expansion. SubstNt(ast::Ident), - /// A macro variable with special meaning. - SpecialVarNt(SpecialMacroVar), // Junk. These carry no data because we don't really care about the data // they *would* carry, and don't really want to allocate a new ident for @@ -176,10 +159,8 @@ impl Token { /// Returns `true` if the token can appear at the start of an expression. pub fn can_begin_expr(&self) -> bool { match *self { - OpenDelim(_) => true, + OpenDelim(..) => true, Ident(..) => true, - Underscore => true, - Tilde => true, Literal(..) => true, Not => true, BinOp(Minus) => true, @@ -189,6 +170,7 @@ impl Token { OrOr => true, // in lambda syntax AndAnd => true, // double borrow DotDot | DotDotDot => true, // range notation + Lt | BinOp(Shl) => true, // associated path ModSep => true, Interpolated(NtExpr(..)) => true, Interpolated(NtIdent(..)) => true, @@ -253,34 +235,13 @@ impl Token { self.is_keyword(keywords::Const) } - pub fn is_path_start(&self) -> bool { - self == &ModSep || self == &Lt || self.is_path() || - self.is_path_segment_keyword() || self.is_ident() && !self.is_any_keyword() + pub fn is_qpath_start(&self) -> bool { + self == &Lt || self == &BinOp(Shl) } - /// Maps a token to its corresponding binary operator. 
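On `is_qpath_start` above: a qualified path can open with either a lone `<` or, when one qualified path is nested directly inside another, with two `<`s that the lexer has already glued into a single shift-left token, which is why `Shl` is accepted as well. Both spellings in ordinary code (trait and type names are made up):

trait Name { fn name() -> &'static str; }
trait HasAssoc { type Assoc; }

#[allow(dead_code)]
struct A;
#[allow(dead_code)]
struct B;

impl Name for A { fn name() -> &'static str { "A" } }
impl HasAssoc for B { type Assoc = A; }

fn main() {
    // Begins with a single `<` token.
    let one = <A as Name>::name();
    // Begins with `<<`, lexed as one `Shl` token, because the qualified
    // path is nested inside another qualified path.
    let two = <<B as HasAssoc>::Assoc as Name>::name();
    assert_eq!(one, two);
}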
- pub fn to_binop(&self) -> Option { - match *self { - BinOp(Star) => Some(BinOpKind::Mul), - BinOp(Slash) => Some(BinOpKind::Div), - BinOp(Percent) => Some(BinOpKind::Rem), - BinOp(Plus) => Some(BinOpKind::Add), - BinOp(Minus) => Some(BinOpKind::Sub), - BinOp(Shl) => Some(BinOpKind::Shl), - BinOp(Shr) => Some(BinOpKind::Shr), - BinOp(And) => Some(BinOpKind::BitAnd), - BinOp(Caret) => Some(BinOpKind::BitXor), - BinOp(Or) => Some(BinOpKind::BitOr), - Lt => Some(BinOpKind::Lt), - Le => Some(BinOpKind::Le), - Ge => Some(BinOpKind::Ge), - Gt => Some(BinOpKind::Gt), - EqEq => Some(BinOpKind::Eq), - Ne => Some(BinOpKind::Ne), - AndAnd => Some(BinOpKind::And), - OrOr => Some(BinOpKind::Or), - _ => None, - } + pub fn is_path_start(&self) -> bool { + self == &ModSep || self.is_qpath_start() || self.is_path() || + self.is_path_segment_keyword() || self.is_ident() && !self.is_any_keyword() } /// Returns `true` if the token is a given keyword, `kw`. @@ -503,27 +464,20 @@ pub fn clear_ident_interner() { /// somehow. #[derive(Clone, PartialEq, Hash, PartialOrd, Eq, Ord)] pub struct InternedString { - string: Rc, + string: Rc, } impl InternedString { #[inline] pub fn new(string: &'static str) -> InternedString { InternedString { - string: Rc::new(string.to_owned()), - } - } - - #[inline] - fn new_from_rc_str(string: Rc) -> InternedString { - InternedString { - string: string, + string: Rc::__from_str(string), } } #[inline] pub fn new_from_name(name: ast::Name) -> InternedString { - with_ident_interner(|interner| InternedString::new_from_rc_str(interner.get(name))) + with_ident_interner(|interner| InternedString { string: interner.get(name) }) } } @@ -591,7 +545,7 @@ impl PartialEq for str { impl Decodable for InternedString { fn decode(d: &mut D) -> Result { - Ok(intern(d.read_str()?.as_ref()).as_str()) + Ok(intern(&d.read_str()?).as_str()) } } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index 32b66da4d96ce..792239e721932 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -125,9 +125,8 @@ impl fmt::Display for Token { } } -fn buf_str(toks: &[Token], szs: &[isize], left: usize, right: usize, lim: usize) -> String { - let n = toks.len(); - assert_eq!(n, szs.len()); +fn buf_str(buf: &[BufEntry], left: usize, right: usize, lim: usize) -> String { + let n = buf.len(); let mut i = left; let mut l = lim; let mut s = String::from("["); @@ -136,7 +135,7 @@ fn buf_str(toks: &[Token], szs: &[isize], left: usize, right: usize, lim: usize) if i != left { s.push_str(", "); } - s.push_str(&format!("{}={}", szs[i], &toks[i])); + s.push_str(&format!("{}={}", buf[i].size, &buf[i].token)); i += 1; i %= n; } @@ -159,13 +158,9 @@ pub struct PrintStackElem { const SIZE_INFINITY: isize = 0xffff; pub fn mk_printer<'a>(out: Box, linewidth: usize) -> Printer<'a> { - // Yes 55, it makes the ring buffers big enough to never - // fall behind. + // Yes 55, it makes the ring buffers big enough to never fall behind. 
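The pretty-printer change above folds two ring buffers that were always indexed in lockstep (`token` and `size`) into one buffer of paired entries, so a slot can no longer be updated in one vector and forgotten in the other. A stripped-down sketch of that consolidation (the types here are simplified stand-ins):

#[derive(Clone)]
struct BufEntry {
    token: String, // stands in for the printer's Token
    size: isize,
}

struct RingBuf {
    buf: Vec<BufEntry>,
    right: usize,
}

impl RingBuf {
    fn new(n: usize) -> RingBuf {
        RingBuf { buf: vec![BufEntry { token: String::new(), size: 0 }; n], right: 0 }
    }

    // Both halves of a slot are written together, which is the point of
    // merging the two parallel vectors.
    fn push(&mut self, token: String, size: isize) {
        self.right = (self.right + 1) % self.buf.len();
        self.buf[self.right] = BufEntry { token, size };
    }
}

fn main() {
    let mut rb = RingBuf::new(8);
    rb.push("Begin".to_string(), -1);
    assert_eq!(rb.buf[1].token, "Begin");
    assert_eq!(rb.buf[1].size, -1);
}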
let n: usize = 55 * linewidth; debug!("mk_printer {}", linewidth); - let token = vec![Token::Eof; n]; - let size = vec![0; n]; - let scan_stack = VecDeque::with_capacity(n); Printer { out: out, buf_len: n, @@ -173,11 +168,10 @@ pub fn mk_printer<'a>(out: Box, linewidth: usize) -> Printer<'a> { space: linewidth as isize, left: 0, right: 0, - token: token, - size: size, + buf: vec![BufEntry { token: Token::Eof, size: 0 }; n], left_total: 0, right_total: 0, - scan_stack: scan_stack, + scan_stack: VecDeque::new(), print_stack: Vec::new(), pending_indentation: 0 } @@ -269,10 +263,8 @@ pub struct Printer<'a> { left: usize, /// Index of right side of input stream right: usize, - /// Ring-buffer stream goes through - token: Vec , - /// Ring-buffer of calculated sizes - size: Vec , + /// Ring-buffer of tokens and calculated sizes + buf: Vec, /// Running size of stream "...left" left_total: isize, /// Running size of stream "...right" @@ -283,20 +275,26 @@ pub struct Printer<'a> { /// Begin (if there is any) on top of it. Stuff is flushed off the /// bottom as it becomes irrelevant due to the primary ring-buffer /// advancing. - scan_stack: VecDeque , + scan_stack: VecDeque, /// Stack of blocks-in-progress being flushed by print print_stack: Vec , /// Buffered indentation to avoid writing trailing whitespace pending_indentation: isize, } +#[derive(Clone)] +struct BufEntry { + token: Token, + size: isize, +} + impl<'a> Printer<'a> { pub fn last_token(&mut self) -> Token { - self.token[self.right].clone() + self.buf[self.right].token.clone() } // be very careful with this! pub fn replace_last_token(&mut self, t: Token) { - self.token[self.right] = t; + self.buf[self.right].token = t; } pub fn pretty_print(&mut self, token: Token) -> io::Result<()> { debug!("pp Vec<{},{}>", self.left, self.right); @@ -318,8 +316,7 @@ impl<'a> Printer<'a> { } else { self.advance_right(); } debug!("pp Begin({})/buffer Vec<{},{}>", b.offset, self.left, self.right); - self.token[self.right] = token; - self.size[self.right] = -self.right_total; + self.buf[self.right] = BufEntry { token: token, size: -self.right_total }; let right = self.right; self.scan_push(right); Ok(()) @@ -331,8 +328,7 @@ impl<'a> Printer<'a> { } else { debug!("pp End/buffer Vec<{},{}>", self.left, self.right); self.advance_right(); - self.token[self.right] = token; - self.size[self.right] = -1; + self.buf[self.right] = BufEntry { token: token, size: -1 }; let right = self.right; self.scan_push(right); Ok(()) @@ -350,8 +346,7 @@ impl<'a> Printer<'a> { self.check_stack(0); let right = self.right; self.scan_push(right); - self.token[self.right] = token; - self.size[self.right] = -self.right_total; + self.buf[self.right] = BufEntry { token: token, size: -self.right_total }; self.right_total += b.blank_space; Ok(()) } @@ -364,8 +359,7 @@ impl<'a> Printer<'a> { debug!("pp String('{}')/buffer Vec<{},{}>", s, self.left, self.right); self.advance_right(); - self.token[self.right] = Token::String(s, len); - self.size[self.right] = len; + self.buf[self.right] = BufEntry { token: Token::String(s, len), size: len }; self.right_total += len; self.check_stream() } @@ -381,7 +375,7 @@ impl<'a> Printer<'a> { if Some(&self.left) == self.scan_stack.back() { debug!("setting {} to infinity and popping", self.left); let scanned = self.scan_pop_bottom(); - self.size[scanned] = SIZE_INFINITY; + self.buf[scanned].size = SIZE_INFINITY; } self.advance_left()?; if self.left != self.right { @@ -410,12 +404,12 @@ impl<'a> Printer<'a> { } pub fn advance_left(&mut self) -> 
io::Result<()> { debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right, - self.left, self.size[self.left]); + self.left, self.buf[self.left].size); - let mut left_size = self.size[self.left]; + let mut left_size = self.buf[self.left].size; while left_size >= 0 { - let left = self.token[self.left].clone(); + let left = self.buf[self.left].token.clone(); let len = match left { Token::Break(b) => b.blank_space, @@ -437,7 +431,7 @@ impl<'a> Printer<'a> { self.left += 1; self.left %= self.buf_len; - left_size = self.size[self.left]; + left_size = self.buf[self.left].size; } Ok(()) @@ -445,23 +439,23 @@ impl<'a> Printer<'a> { pub fn check_stack(&mut self, k: isize) { if !self.scan_stack.is_empty() { let x = self.scan_top(); - match self.token[x] { + match self.buf[x].token { Token::Begin(_) => { if k > 0 { let popped = self.scan_pop(); - self.size[popped] = self.size[x] + self.right_total; + self.buf[popped].size = self.buf[x].size + self.right_total; self.check_stack(k - 1); } } Token::End => { // paper says + not =, but that makes no sense. let popped = self.scan_pop(); - self.size[popped] = 1; + self.buf[popped].size = 1; self.check_stack(k + 1); } _ => { let popped = self.scan_pop(); - self.size[popped] = self.size[x] + self.right_total; + self.buf[popped].size = self.buf[x].size + self.right_total; if k > 0 { self.check_stack(k); } @@ -499,8 +493,7 @@ impl<'a> Printer<'a> { pub fn print(&mut self, token: Token, l: isize) -> io::Result<()> { debug!("print {} {} (remaining line space={})", token, l, self.space); - debug!("{}", buf_str(&self.token, - &self.size, + debug!("{}", buf_str(&self.buf, self.left, self.right, 6)); diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index 8563d27908db6..b0bd644674300 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -112,7 +112,7 @@ pub fn print_crate<'a>(cm: &'a CodeMap, out, ann, is_expanded); - if is_expanded && !std_inject::no_std(krate) { + if is_expanded && !std_inject::injected_crate_name(krate).is_none() { // We need to print `#![no_std]` (and its feature gate) so that // compiling pretty-printed source won't inject libstd again. 
// However we don't want these attributes in the AST because @@ -285,8 +285,6 @@ pub fn token_to_string(tok: &Token) -> String { token::Comment => "/* */".to_string(), token::Shebang(s) => format!("/* shebang: {}*/", s), - token::SpecialVarNt(var) => format!("${}", var.as_str()), - token::Interpolated(ref nt) => match *nt { token::NtExpr(ref e) => expr_to_string(&e), token::NtMeta(ref e) => meta_item_to_string(&e), @@ -545,15 +543,12 @@ pub trait PrintState<'a> { } fn maybe_print_comment(&mut self, pos: BytePos) -> io::Result<()> { - loop { - match self.next_comment() { - Some(ref cmnt) => { - if (*cmnt).pos < pos { - try!(self.print_comment(cmnt)); - self.cur_cmnt_and_lit().cur_cmnt += 1; - } else { break; } - } - _ => break + while let Some(ref cmnt) = self.next_comment() { + if cmnt.pos < pos { + try!(self.print_comment(cmnt)); + self.cur_cmnt_and_lit().cur_cmnt += 1; + } else { + break } } Ok(()) @@ -581,7 +576,9 @@ pub trait PrintState<'a> { Ok(()) } comments::Trailing => { - try!(word(self.writer(), " ")); + if !self.is_bol() { + try!(word(self.writer(), " ")); + } if cmnt.lines.len() == 1 { try!(word(self.writer(), &cmnt.lines[0])); hardbreak(self.writer()) @@ -972,7 +969,7 @@ impl<'a> State<'a> { try!(self.maybe_print_comment(ty.span.lo)); try!(self.ibox(0)); match ty.node { - ast::TyKind::Vec(ref ty) => { + ast::TyKind::Slice(ref ty) => { try!(word(&mut self.s, "[")); try!(self.print_type(&ty)); try!(word(&mut self.s, "]")); @@ -1039,7 +1036,7 @@ impl<'a> State<'a> { ast::TyKind::ImplTrait(ref bounds) => { try!(self.print_bounds("impl ", &bounds[..])); } - ast::TyKind::FixedLengthVec(ref ty, ref v) => { + ast::TyKind::Array(ref ty, ref v) => { try!(word(&mut self.s, "[")); try!(self.print_type(&ty)); try!(word(&mut self.s, "; ")); @@ -1361,6 +1358,7 @@ impl<'a> State<'a> { if comma { try!(self.word_space(",")) } + try!(self.print_outer_attributes_inline(&lifetime_def.attrs)); try!(self.print_lifetime_bounds(&lifetime_def.lifetime, &lifetime_def.bounds)); comma = true; } @@ -1715,6 +1713,7 @@ impl<'a> State<'a> { for (i, st) in blk.stmts.iter().enumerate() { match st.node { ast::StmtKind::Expr(ref expr) if i == blk.stmts.len() - 1 => { + try!(self.maybe_print_comment(st.span.lo)); try!(self.space_if_not_bol()); try!(self.print_expr_outer_attr_style(&expr, false)); try!(self.maybe_print_trailing_comment(expr.span, Some(blk.span.hi))); @@ -1894,8 +1893,10 @@ impl<'a> State<'a> { &fields[..], |s, field| { try!(s.ibox(INDENT_UNIT)); - try!(s.print_ident(field.ident.node)); - try!(s.word_space(":")); + if !field.is_shorthand { + try!(s.print_ident(field.ident.node)); + try!(s.word_space(":")); + } try!(s.print_expr(&field.expr)); s.end() }, @@ -2276,7 +2277,7 @@ impl<'a> State<'a> { Ok(()) })); - let mut options = vec!(); + let mut options = vec![]; if a.volatile { options.push("volatile"); } @@ -2450,13 +2451,10 @@ impl<'a> State<'a> { |s, ty| s.print_type(&ty))); try!(word(&mut self.s, ")")); - match data.output { - None => { } - Some(ref ty) => { - try!(self.space_if_not_bol()); - try!(self.word_space("->")); - try!(self.print_type(&ty)); - } + if let Some(ref ty) = data.output { + try!(self.space_if_not_bol()); + try!(self.word_space("->")); + try!(self.print_type(&ty)); } } } @@ -2573,7 +2571,7 @@ impl<'a> State<'a> { try!(word(&mut self.s, "...")); try!(self.print_expr(&end)); } - PatKind::Vec(ref before, ref slice, ref after) => { + PatKind::Slice(ref before, ref slice, ref after) => { try!(word(&mut self.s, "[")); try!(self.commasep(Inconsistent, &before[..], @@ -2604,6 +2602,7 
@@ impl<'a> State<'a> { } try!(self.cbox(INDENT_UNIT)); try!(self.ibox(0)); + try!(self.maybe_print_comment(arm.pats[0].span.lo)); try!(self.print_outer_attributes(&arm.attrs)); let mut first = true; for p in &arm.pats { @@ -2803,6 +2802,7 @@ impl<'a> State<'a> { try!(self.commasep(Inconsistent, &ints[..], |s, &idx| { if idx < generics.lifetimes.len() { let lifetime_def = &generics.lifetimes[idx]; + try!(s.print_outer_attributes_inline(&lifetime_def.attrs)); s.print_lifetime_bounds(&lifetime_def.lifetime, &lifetime_def.bounds) } else { let idx = idx - generics.lifetimes.len(); @@ -2816,6 +2816,7 @@ impl<'a> State<'a> { } pub fn print_ty_param(&mut self, param: &ast::TyParam) -> io::Result<()> { + try!(self.print_outer_attributes_inline(¶m.attrs)); try!(self.print_ident(param.ident)); try!(self.print_bounds(":", ¶m.bounds)); match param.default { @@ -3007,15 +3008,11 @@ impl<'a> State<'a> { _ => return Ok(()) }; if let Some(ref cmnt) = self.next_comment() { - if (*cmnt).style != comments::Trailing { return Ok(()) } + if cmnt.style != comments::Trailing { return Ok(()) } let span_line = cm.lookup_char_pos(span.hi); - let comment_line = cm.lookup_char_pos((*cmnt).pos); - let mut next = (*cmnt).pos + BytePos(1); - if let Some(p) = next_pos { - next = p; - } - if span.hi < (*cmnt).pos && (*cmnt).pos < next && - span_line.line == comment_line.line { + let comment_line = cm.lookup_char_pos(cmnt.pos); + let next = next_pos.unwrap_or(cmnt.pos + BytePos(1)); + if span.hi < cmnt.pos && cmnt.pos < next && span_line.line == comment_line.line { self.print_comment(cmnt)?; self.cur_cmnt_and_lit.cur_cmnt += 1; } diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index d1454ab06cbc8..1b63a2b70763a 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -34,23 +34,25 @@ fn ignored_span(sess: &ParseSess, sp: Span) -> Span { return sp; } -pub fn no_core(krate: &ast::Crate) -> bool { - attr::contains_name(&krate.attrs, "no_core") -} - -pub fn no_std(krate: &ast::Crate) -> bool { - attr::contains_name(&krate.attrs, "no_std") || no_core(krate) +pub fn injected_crate_name(krate: &ast::Crate) -> Option<&'static str> { + if attr::contains_name(&krate.attrs, "no_core") { + None + } else if attr::contains_name(&krate.attrs, "no_std") { + Some("core") + } else { + Some("std") + } } pub fn maybe_inject_crates_ref(sess: &ParseSess, mut krate: ast::Crate, alt_std_name: Option) -> ast::Crate { - if no_core(&krate) { - return krate; - } + let name = match injected_crate_name(&krate) { + Some(name) => name, + None => return krate, + }; - let name = if no_std(&krate) { "core" } else { "std" }; let crate_name = token::intern(&alt_std_name.unwrap_or(name.to_string())); krate.module.items.insert(0, P(ast::Item { diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 6327e8f71bcd5..618878c1f7980 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -19,7 +19,7 @@ use std::iter; use std::slice; use std::mem; use std::vec; -use attr; +use attr::{self, HasAttrs}; use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, FileMap, BytePos}; use std::rc::Rc; @@ -119,7 +119,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { } debug!("current path: {}", path_name_i(&self.cx.path)); - let i = if is_test_fn(&self.cx, &i) || is_bench_fn(&self.cx, &i) { + if is_test_fn(&self.cx, &i) || is_bench_fn(&self.cx, &i) { match i.node { ast::ItemKind::Fn(_, ast::Unsafety::Unsafe, _, _, _, _) => { let diag = self.cx.span_diagnostic; @@ -136,54 +136,37 @@ impl<'a> fold::Folder for 
TestHarnessGenerator<'a> { }; self.cx.testfns.push(test); self.tests.push(i.ident); - // debug!("have {} test/bench functions", - // cx.testfns.len()); - - // Make all tests public so we can call them from outside - // the module (note that the tests are re-exported and must - // be made public themselves to avoid privacy errors). - i.map(|mut i| { - i.vis = ast::Visibility::Public; - i - }) } } - } else { - i - }; + } + let mut item = i.unwrap(); // We don't want to recurse into anything other than mods, since // mods or tests inside of functions will break things - let res = match i.node { - ast::ItemKind::Mod(..) => fold::noop_fold_item(i, self), - _ => SmallVector::one(i), - }; + if let ast::ItemKind::Mod(module) = item.node { + let tests = mem::replace(&mut self.tests, Vec::new()); + let tested_submods = mem::replace(&mut self.tested_submods, Vec::new()); + let mut mod_folded = fold::noop_fold_mod(module, self); + let tests = mem::replace(&mut self.tests, tests); + let tested_submods = mem::replace(&mut self.tested_submods, tested_submods); + + if !tests.is_empty() || !tested_submods.is_empty() { + let (it, sym) = mk_reexport_mod(&mut self.cx, item.id, tests, tested_submods); + mod_folded.items.push(it); + + if !self.cx.path.is_empty() { + self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym)); + } else { + debug!("pushing nothing, sym: {:?}", sym); + self.cx.toplevel_reexport = Some(sym); + } + } + item.node = ast::ItemKind::Mod(mod_folded); + } if ident.name != keywords::Invalid.name() { self.cx.path.pop(); } - res - } - - fn fold_mod(&mut self, m: ast::Mod) -> ast::Mod { - let tests = mem::replace(&mut self.tests, Vec::new()); - let tested_submods = mem::replace(&mut self.tested_submods, Vec::new()); - let mut mod_folded = fold::noop_fold_mod(m, self); - let tests = mem::replace(&mut self.tests, tests); - let tested_submods = mem::replace(&mut self.tested_submods, tested_submods); - - if !tests.is_empty() || !tested_submods.is_empty() { - let (it, sym) = mk_reexport_mod(&mut self.cx, tests, tested_submods); - mod_folded.items.push(it); - - if !self.cx.path.is_empty() { - self.tested_submods.push((self.cx.path[self.cx.path.len()-1], sym)); - } else { - debug!("pushing nothing, sym: {:?}", sym); - self.cx.toplevel_reexport = Some(sym); - } - } - - mod_folded + SmallVector::one(P(item)) } fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac } @@ -239,16 +222,24 @@ impl fold::Folder for EntryPointCleaner { fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { mac } } -fn mk_reexport_mod(cx: &mut TestCtxt, tests: Vec, +fn mk_reexport_mod(cx: &mut TestCtxt, parent: ast::NodeId, tests: Vec, tested_submods: Vec<(ast::Ident, ast::Ident)>) -> (P, ast::Ident) { let super_ = token::str_to_ident("super"); + // Generate imports with `#[allow(private_in_public)]` to work around issue #36768. 
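A hand-written approximation of the re-export module the harness appends, using public items for simplicity; per the comment above, the generated version additionally attaches `#[allow(private_in_public)]` and leans on expansion hygiene so private test functions can be reached as well (module and test names here are placeholders):

mod math {
    pub fn add(a: u32, b: u32) -> u32 { a + b }

    pub mod tests {
        pub fn addition_works() { assert_eq!(super::add(2, 2), 4); }
    }

    // Stand-in for the synthesized `__test_reexports` module: it lifts the
    // tests one level up so an outer collector can reach them via `super`.
    pub mod __test_reexports {
        pub use super::tests::addition_works;
    }
}

fn main() {
    // The real harness chains these re-exports from nested modules all the
    // way to the crate root when building its test list.
    math::__test_reexports::addition_works();
}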
+ let allow_private_in_public = cx.ext_cx.attribute(DUMMY_SP, cx.ext_cx.meta_list( + DUMMY_SP, + InternedString::new("allow"), + vec![cx.ext_cx.meta_list_item_word(DUMMY_SP, InternedString::new("private_in_public"))], + )); let items = tests.into_iter().map(|r| { cx.ext_cx.item_use_simple(DUMMY_SP, ast::Visibility::Public, cx.ext_cx.path(DUMMY_SP, vec![super_, r])) + .map_attrs(|_| vec![allow_private_in_public.clone()]) }).chain(tested_submods.into_iter().map(|(r, sym)| { let path = cx.ext_cx.path(DUMMY_SP, vec![super_, r, sym]); cx.ext_cx.item_use_simple_(DUMMY_SP, ast::Visibility::Public, r, path) + .map_attrs(|_| vec![allow_private_in_public.clone()]) })).collect(); let reexport_mod = ast::Mod { @@ -257,6 +248,8 @@ fn mk_reexport_mod(cx: &mut TestCtxt, tests: Vec, }; let sym = token::gensym_ident("__test_reexports"); + let parent = if parent == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { parent }; + cx.ext_cx.current_expansion.mark = cx.ext_cx.resolver.get_module_scope(parent); let it = cx.ext_cx.monotonic_expander().fold_item(P(ast::Item { ident: sym.clone(), attrs: Vec::new(), @@ -281,7 +274,7 @@ fn generate_test_harness(sess: &ParseSess, let mut cx: TestCtxt = TestCtxt { sess: sess, span_diagnostic: sd, - ext_cx: ExtCtxt::new(sess, vec![], ExpansionConfig::default("test".to_string()), resolver), + ext_cx: ExtCtxt::new(sess, ExpansionConfig::default("test".to_string()), resolver), path: Vec::new(), testfns: Vec::new(), reexport_test_harness_main: reexport_test_harness_main, @@ -437,7 +430,7 @@ fn mk_std(cx: &TestCtxt) -> P { let (vi, vis, ident) = if cx.is_test_crate { (ast::ItemKind::Use( P(nospan(ast::ViewPathSimple(id_test, - path_node(vec!(id_test)))))), + path_node(vec![id_test]))))), ast::Visibility::Public, keywords::Invalid.ident()) } else { (ast::ItemKind::ExternCrate(None), ast::Visibility::Inherited, id_test) @@ -579,7 +572,7 @@ fn mk_tests(cx: &TestCtxt) -> P { let static_lt = ecx.lifetime(sp, keywords::StaticLifetime.name()); // &'static [self::test::TestDescAndFn] let static_type = ecx.ty_rptr(sp, - ecx.ty(sp, ast::TyKind::Vec(struct_type)), + ecx.ty(sp, ast::TyKind::Slice(struct_type)), Some(static_lt), ast::Mutability::Immutable); // static TESTS: $static_type = &[...]; diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs index 7b1df6f0e97be..9e644e59e86a7 100644 --- a/src/libsyntax/tokenstream.rs +++ b/src/libsyntax/tokenstream.rs @@ -33,6 +33,7 @@ use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use parse::lexer; use parse; use parse::token::{self, Token, Lit, Nonterminal}; +use print::pprust; use std::fmt; use std::iter::*; @@ -133,7 +134,6 @@ impl TokenTree { AttrStyle::Inner => 3, } } - TokenTree::Token(_, token::SpecialVarNt(..)) => 2, TokenTree::Token(_, token::MatchNt(..)) => 3, TokenTree::Token(_, token::Interpolated(Nonterminal::NtTT(..))) => 1, TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2, @@ -187,11 +187,6 @@ impl TokenTree { } delimed.tts[index - 1].clone() } - (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => { - let v = [TokenTree::Token(sp, token::Dollar), - TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))]; - v[index].clone() - } (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => { let v = [TokenTree::Token(sp, token::SubstNt(name)), TokenTree::Token(sp, token::Colon), @@ -222,11 +217,10 @@ impl TokenTree { -> macro_parser::NamedParseResult { // `None` is because we're not interpolating let arg_rdr = 
lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic, - None, None, tts.iter().cloned().collect(), true); - macro_parser::parse(cx.parse_sess(), cx.cfg(), arg_rdr, mtch) + macro_parser::parse(cx.parse_sess(), arg_rdr, mtch) } /// Check if this TokenTree is equal to the other, regardless of span information. @@ -781,6 +775,12 @@ impl TokenStream { } } +impl fmt::Display for TokenStream { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(&pprust::tts_to_string(&self.to_tts())) + } +} + // FIXME Reimplement this iterator to hold onto a slice iterator for a leaf, getting the // next leaf's iterator when the current one is exhausted. pub struct Iter<'a> { diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 6bb409715aa39..f56c6cedcd186 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -14,23 +14,13 @@ use ast::Name; -use std::borrow::Borrow; use std::collections::HashMap; use std::rc::Rc; -#[derive(PartialEq, Eq, Hash)] -struct RcStr(Rc); - -impl Borrow for RcStr { - fn borrow(&self) -> &str { - &self.0 - } -} - #[derive(Default)] pub struct Interner { - names: HashMap, - strings: Vec>, + names: HashMap, Name>, + strings: Vec>, } /// When traits can extend traits, we should extend index to get [] @@ -47,22 +37,22 @@ impl Interner { this } - pub fn intern + Into>(&mut self, string: T) -> Name { - if let Some(&name) = self.names.get(string.borrow()) { + pub fn intern(&mut self, string: &str) -> Name { + if let Some(&name) = self.names.get(string) { return name; } let name = Name(self.strings.len() as u32); - let string = Rc::new(string.into()); + let string = Rc::__from_str(string); self.strings.push(string.clone()); - self.names.insert(RcStr(string), name); + self.names.insert(string, name); name } pub fn gensym(&mut self, string: &str) -> Name { let gensym = Name(self.strings.len() as u32); // leave out of `names` to avoid colliding - self.strings.push(Rc::new(string.to_owned())); + self.strings.push(Rc::__from_str(string)); gensym } @@ -75,7 +65,7 @@ impl Interner { gensym } - pub fn get(&self, name: Name) -> Rc { + pub fn get(&self, name: Name) -> Rc { self.strings[name.0 as usize].clone() } @@ -109,13 +99,13 @@ mod tests { assert_eq!(i.gensym("dog"), Name(4)); // gensym tests again with gensym_copy: assert_eq!(i.gensym_copy(Name(2)), Name(5)); - assert_eq!(*i.get(Name(5)), "zebra"); + assert_eq!(&*i.get(Name(5)), "zebra"); assert_eq!(i.gensym_copy(Name(2)), Name(6)); - assert_eq!(*i.get(Name(6)), "zebra"); - assert_eq!(*i.get(Name(0)), "dog"); - assert_eq!(*i.get(Name(1)), "cat"); - assert_eq!(*i.get(Name(2)), "zebra"); - assert_eq!(*i.get(Name(3)), "zebra"); - assert_eq!(*i.get(Name(4)), "dog"); + assert_eq!(&*i.get(Name(6)), "zebra"); + assert_eq!(&*i.get(Name(0)), "dog"); + assert_eq!(&*i.get(Name(1)), "cat"); + assert_eq!(&*i.get(Name(2)), "zebra"); + assert_eq!(&*i.get(Name(3)), "zebra"); + assert_eq!(&*i.get(Name(4)), "dog"); } } diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs index f59428bf536cf..76d3f2a063c18 100644 --- a/src/libsyntax/util/parser_testing.rs +++ b/src/libsyntax/util/parser_testing.rs @@ -25,10 +25,7 @@ pub fn string_to_tts(source_str: String) -> Vec { /// Map string to parser (via tts) pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a> { - new_parser_from_source_str(ps, - Vec::new(), - "bogofile".to_string(), - source_str) + new_parser_from_source_str(ps, "bogofile".to_string(), source_str) } fn 
with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F) -> T where diff --git a/src/libsyntax/util/small_vector.rs b/src/libsyntax/util/small_vector.rs index 373dfc4ddfac5..9be7dbd68174e 100644 --- a/src/libsyntax/util/small_vector.rs +++ b/src/libsyntax/util/small_vector.rs @@ -11,6 +11,7 @@ use self::SmallVectorRepr::*; use self::IntoIterRepr::*; +use core::ops; use std::iter::{IntoIterator, FromIterator}; use std::mem; use std::slice; @@ -19,10 +20,12 @@ use std::vec; use util::move_map::MoveMap; /// A vector type optimized for cases where the size is almost always 0 or 1 +#[derive(Clone)] pub struct SmallVector { repr: SmallVectorRepr, } +#[derive(Clone)] enum SmallVectorRepr { Zero, One(T), @@ -75,16 +78,11 @@ impl SmallVector { } pub fn as_slice(&self) -> &[T] { - match self.repr { - Zero => { - let result: &[T] = &[]; - result - } - One(ref v) => { - unsafe { slice::from_raw_parts(v, 1) } - } - Many(ref vs) => vs - } + self + } + + pub fn as_mut_slice(&mut self) -> &mut [T] { + self } pub fn pop(&mut self) -> Option { @@ -107,7 +105,7 @@ impl SmallVector { One(..) => { let one = mem::replace(&mut self.repr, Zero); match one { - One(v1) => mem::replace(&mut self.repr, Many(vec!(v1, v))), + One(v1) => mem::replace(&mut self.repr, Many(vec![v1, v])), _ => unreachable!() }; } @@ -163,6 +161,38 @@ impl SmallVector { } } +impl ops::Deref for SmallVector { + type Target = [T]; + + fn deref(&self) -> &[T] { + match self.repr { + Zero => { + let result: &[T] = &[]; + result + } + One(ref v) => { + unsafe { slice::from_raw_parts(v, 1) } + } + Many(ref vs) => vs + } + } +} + +impl ops::DerefMut for SmallVector { + fn deref_mut(&mut self) -> &mut [T] { + match self.repr { + Zero => { + let result: &mut [T] = &mut []; + result + } + One(ref mut v) => { + unsafe { slice::from_raw_parts_mut(v, 1) } + } + Many(ref mut vs) => vs + } + } +} + impl IntoIterator for SmallVector { type Item = T; type IntoIter = IntoIter; @@ -284,12 +314,12 @@ mod tests { #[test] #[should_panic] fn test_expect_one_many() { - SmallVector::many(vec!(1, 2)).expect_one(""); + SmallVector::many(vec![1, 2]).expect_one(""); } #[test] fn test_expect_one_one() { assert_eq!(1, SmallVector::one(1).expect_one("")); - assert_eq!(1, SmallVector::many(vec!(1)).expect_one("")); + assert_eq!(1, SmallVector::many(vec![1]).expect_one("")); } } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index efd9b027504f0..7fb3e5c6bee1d 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -201,6 +201,7 @@ pub fn walk_lifetime(visitor: &mut V, lifetime: &Lifetime) { pub fn walk_lifetime_def(visitor: &mut V, lifetime_def: &LifetimeDef) { visitor.visit_lifetime(&lifetime_def.lifetime); walk_list!(visitor, visit_lifetime, &lifetime_def.bounds); + walk_list!(visitor, visit_attribute, &*lifetime_def.attrs); } pub fn walk_poly_trait_ref(visitor: &mut V, trait_ref: &PolyTraitRef, _: &TraitBoundModifier) @@ -313,7 +314,7 @@ pub fn walk_variant(visitor: &mut V, variant: &Variant, generics: &Generics, pub fn walk_ty(visitor: &mut V, typ: &Ty) { match typ.node { - TyKind::Vec(ref ty) | TyKind::Paren(ref ty) => { + TyKind::Slice(ref ty) | TyKind::Paren(ref ty) => { visitor.visit_ty(ty) } TyKind::Ptr(ref mutable_type) => { @@ -341,7 +342,7 @@ pub fn walk_ty(visitor: &mut V, typ: &Ty) { visitor.visit_ty(ty); walk_list!(visitor, visit_ty_param_bound, bounds); } - TyKind::FixedLengthVec(ref ty, ref expression) => { + TyKind::Array(ref ty, ref expression) => { visitor.visit_ty(ty); visitor.visit_expr(expression) } @@ -434,7 
+435,7 @@ pub fn walk_pat(visitor: &mut V, pattern: &Pat) { visitor.visit_expr(upper_bound) } PatKind::Wild => (), - PatKind::Vec(ref prepatterns, ref slice_pattern, ref postpatterns) => { + PatKind::Slice(ref prepatterns, ref slice_pattern, ref postpatterns) => { walk_list!(visitor, visit_pat, prepatterns); walk_list!(visitor, visit_pat, slice_pattern); walk_list!(visitor, visit_pat, postpatterns); @@ -474,6 +475,7 @@ pub fn walk_generics(visitor: &mut V, generics: &Generics) { visitor.visit_ident(param.span, param.ident); walk_list!(visitor, visit_ty_param_bound, ¶m.bounds); walk_list!(visitor, visit_ty, ¶m.default); + walk_list!(visitor, visit_attribute, &*param.attrs); } walk_list!(visitor, visit_lifetime_def, &generics.lifetimes); for predicate in &generics.where_clause.predicates { @@ -532,8 +534,8 @@ pub fn walk_fn_kind(visitor: &mut V, function_kind: FnKind) { pub fn walk_fn(visitor: &mut V, kind: FnKind, declaration: &FnDecl, body: &Block, _span: Span) where V: Visitor, { - walk_fn_decl(visitor, declaration); walk_fn_kind(visitor, kind); + walk_fn_decl(visitor, declaration); visitor.visit_block(body) } @@ -652,13 +654,13 @@ pub fn walk_expr(visitor: &mut V, expression: &Expr) { walk_list!(visitor, visit_expr, subexpressions); } ExprKind::Call(ref callee_expression, ref arguments) => { + visitor.visit_expr(callee_expression); walk_list!(visitor, visit_expr, arguments); - visitor.visit_expr(callee_expression) } ExprKind::MethodCall(ref ident, ref types, ref arguments) => { visitor.visit_ident(ident.span, ident.node); - walk_list!(visitor, visit_expr, arguments); walk_list!(visitor, visit_ty, types); + walk_list!(visitor, visit_expr, arguments); } ExprKind::Binary(_, ref left_expression, ref right_expression) => { visitor.visit_expr(left_expression); @@ -717,12 +719,12 @@ pub fn walk_expr(visitor: &mut V, expression: &Expr) { } ExprKind::Block(ref block) => visitor.visit_block(block), ExprKind::Assign(ref left_hand_expression, ref right_hand_expression) => { + visitor.visit_expr(left_hand_expression); visitor.visit_expr(right_hand_expression); - visitor.visit_expr(left_hand_expression) } ExprKind::AssignOp(_, ref left_expression, ref right_expression) => { + visitor.visit_expr(left_expression); visitor.visit_expr(right_expression); - visitor.visit_expr(left_expression) } ExprKind::Field(ref subexpression, ref ident) => { visitor.visit_expr(subexpression); diff --git a/src/libsyntax_ext/Cargo.toml b/src/libsyntax_ext/Cargo.toml index 6910e6400d4f8..960db792a623e 100644 --- a/src/libsyntax_ext/Cargo.toml +++ b/src/libsyntax_ext/Cargo.toml @@ -11,7 +11,7 @@ crate-type = ["dylib"] [dependencies] fmt_macros = { path = "../libfmt_macros" } log = { path = "../liblog" } +proc_macro = { path = "../libproc_macro" } rustc_errors = { path = "../librustc_errors" } -rustc_macro = { path = "../librustc_macro" } syntax = { path = "../libsyntax" } syntax_pos = { path = "../libsyntax_pos" } diff --git a/src/libsyntax_ext/asm.rs b/src/libsyntax_ext/asm.rs index 77425b809de1d..24c515e502808 100644 --- a/src/libsyntax_ext/asm.rs +++ b/src/libsyntax_ext/asm.rs @@ -53,7 +53,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, tts: &[tokenstream::TokenTree]) -> Box { if !cx.ecfg.enable_asm() { - feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic, + feature_gate::emit_feature_err(&cx.parse_sess, "asm", sp, feature_gate::GateIssue::Language, @@ -107,7 +107,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, if p2.token != token::Eof { let mut extra_tts = panictry!(p2.parse_all_token_trees()); 
extra_tts.extend(tts[first_colon..].iter().cloned()); - p = parse::tts_to_parser(cx.parse_sess, extra_tts, cx.cfg()); + p = parse::tts_to_parser(cx.parse_sess, extra_tts); } asm = s; @@ -122,7 +122,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, let (constraint, _str_style) = panictry!(p.parse_str()); - let span = p.last_span; + let span = p.prev_span; panictry!(p.expect(&token::OpenDelim(token::Paren))); let out = panictry!(p.parse_expr()); @@ -167,9 +167,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, let (constraint, _str_style) = panictry!(p.parse_str()); if constraint.starts_with("=") { - cx.span_err(p.last_span, "input operand constraint contains '='"); + cx.span_err(p.prev_span, "input operand constraint contains '='"); } else if constraint.starts_with("+") { - cx.span_err(p.last_span, "input operand constraint contains '+'"); + cx.span_err(p.prev_span, "input operand constraint contains '+'"); } panictry!(p.expect(&token::OpenDelim(token::Paren))); @@ -189,9 +189,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, let (s, _str_style) = panictry!(p.parse_str()); if OPTIONS.iter().any(|&opt| s == opt) { - cx.span_warn(p.last_span, "expected a clobber, found an option"); + cx.span_warn(p.prev_span, "expected a clobber, found an option"); } else if s.starts_with("{") || s.ends_with("}") { - cx.span_err(p.last_span, "clobber should not be surrounded by braces"); + cx.span_err(p.prev_span, "clobber should not be surrounded by braces"); } clobs.push(s); @@ -209,7 +209,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, } else if option == "intel" { dialect = AsmDialect::Intel; } else { - cx.span_warn(p.last_span, "unrecognized option"); + cx.span_warn(p.prev_span, "unrecognized option"); } if p.token == token::Comma { diff --git a/src/libsyntax_ext/cfg.rs b/src/libsyntax_ext/cfg.rs index 169ef9ab7d635..98da49545f927 100644 --- a/src/libsyntax_ext/cfg.rs +++ b/src/libsyntax_ext/cfg.rs @@ -32,6 +32,6 @@ pub fn expand_cfg<'cx>(cx: &mut ExtCtxt, return DummyResult::expr(sp); } - let matches_cfg = attr::cfg_matches(&cx.cfg, &cfg, cx.parse_sess, cx.ecfg.features); + let matches_cfg = attr::cfg_matches(&cfg, cx.parse_sess, cx.ecfg.features); MacEager::expr(cx.expr_bool(sp, matches_cfg)) } diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index 15aaf3c78237f..e56c6e2229a75 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -23,7 +23,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, tts: &[TokenTree]) -> Box { if !cx.ecfg.enable_concat_idents() { - feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic, + feature_gate::emit_feature_err(&cx.parse_sess, "concat_idents", sp, feature_gate::GateIssue::Language, diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs index 64b8829dad7b1..c46d4b34173f6 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs @@ -65,12 +65,12 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, macro_rules! 
md { ($name:expr, $f:ident) => { { let inline = cx.meta_word(span, InternedString::new("inline")); - let attrs = vec!(cx.attribute(span, inline)); + let attrs = vec![cx.attribute(span, inline)]; MethodDef { name: $name, generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: vec!(borrowed_self()), + args: vec![borrowed_self()], ret_ty: Literal(path_local!(bool)), attributes: attrs, is_unsafe: false, diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs index 99d60c43c5457..597ff306b3dd8 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_ord.rs @@ -28,12 +28,12 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, macro_rules! md { ($name:expr, $op:expr, $equal:expr) => { { let inline = cx.meta_word(span, InternedString::new("inline")); - let attrs = vec!(cx.attribute(span, inline)); + let attrs = vec![cx.attribute(span, inline)]; MethodDef { name: $name, generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: vec!(borrowed_self()), + args: vec![borrowed_self()], ret_ty: Literal(path_local!(bool)), attributes: attrs, is_unsafe: false, diff --git a/src/libsyntax_ext/deriving/custom.rs b/src/libsyntax_ext/deriving/custom.rs index 465fc0016e5e8..f8cb1294a6663 100644 --- a/src/libsyntax_ext/deriving/custom.rs +++ b/src/libsyntax_ext/deriving/custom.rs @@ -11,11 +11,11 @@ use std::panic; use errors::FatalError; -use rustc_macro::{TokenStream, __internal}; +use proc_macro::{TokenStream, __internal}; use syntax::ast::{self, ItemKind}; use syntax::codemap::{ExpnInfo, MacroAttribute, NameAndSpan, Span}; use syntax::ext::base::*; -use syntax::fold::{self, Folder}; +use syntax::fold::Folder; use syntax::parse::token::intern; use syntax::print::pprust; @@ -97,14 +97,3 @@ impl MultiItemModifier for CustomDerive { } } -struct ChangeSpan { span: Span } - -impl Folder for ChangeSpan { - fn new_span(&mut self, _sp: Span) -> Span { - self.span - } - - fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { - fold::noop_fold_mac(mac, self) - } -} diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs index 22b9eb8e75445..10db56d46f6df 100644 --- a/src/libsyntax_ext/deriving/decodable.rs +++ b/src/libsyntax_ext/deriving/decodable.rs @@ -79,9 +79,9 @@ fn expand_deriving_decodable_imp(cx: &mut ExtCtxt, ret_ty: Literal(Path::new_(pathvec_std!(cx, core::result::Result), None, - vec!(Box::new(Self_), Box::new(Literal(Path::new_( + vec![Box::new(Self_), Box::new(Literal(Path::new_( vec![typaram, "Error"], None, vec![], false - )))), + )))], true)), attributes: Vec::new(), is_unsafe: false, diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs index a4074184b6e81..640296d7f06fc 100644 --- a/src/libsyntax_ext/deriving/encodable.rs +++ b/src/libsyntax_ext/deriving/encodable.rs @@ -139,23 +139,23 @@ fn expand_deriving_encodable_imp(cx: &mut ExtCtxt, generics: LifetimeBounds::empty(), is_unsafe: false, supports_unions: false, - methods: vec!( + methods: vec![ MethodDef { name: "encode", generics: LifetimeBounds { lifetimes: Vec::new(), bounds: vec![(typaram, - vec![Path::new_(vec![krate, "Encoder"], None, vec!(), true)])] + vec![Path::new_(vec![krate, "Encoder"], None, vec![], true)])] }, explicit_self: borrowed_explicit_self(), - args: vec!(Ptr(Box::new(Literal(Path::new_local(typaram))), - Borrowed(None, Mutability::Mutable))), + args: 
vec![Ptr(Box::new(Literal(Path::new_local(typaram))), + Borrowed(None, Mutability::Mutable))], ret_ty: Literal(Path::new_( pathvec_std!(cx, core::result::Result), None, - vec!(Box::new(Tuple(Vec::new())), Box::new(Literal(Path::new_( + vec![Box::new(Tuple(Vec::new())), Box::new(Literal(Path::new_( vec![typaram, "Error"], None, vec![], false - )))), + )))], true )), attributes: Vec::new(), @@ -165,7 +165,7 @@ fn expand_deriving_encodable_imp(cx: &mut ExtCtxt, encodable_substructure(a, b, c, krate) })), } - ), + ], associated_types: Vec::new(), }; diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index 339a6c477ccd5..e6b63be3efc0d 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -536,7 +536,7 @@ impl<'a> TraitDef<'a> { bounds.push((*declared_bound).clone()); } - cx.typaram(self.span, ty_param.ident, P::from_vec(bounds), None) + cx.typaram(self.span, ty_param.ident, vec![], P::from_vec(bounds), None) })); // and similarly for where clauses @@ -780,17 +780,17 @@ fn find_repr_type_name(diagnostic: &Handler, type_attrs: &[ast::Attribute]) -> & attr::ReprAny | attr::ReprPacked | attr::ReprSimd => continue, attr::ReprExtern => "i32", - attr::ReprInt(_, attr::SignedInt(ast::IntTy::Is)) => "isize", - attr::ReprInt(_, attr::SignedInt(ast::IntTy::I8)) => "i8", - attr::ReprInt(_, attr::SignedInt(ast::IntTy::I16)) => "i16", - attr::ReprInt(_, attr::SignedInt(ast::IntTy::I32)) => "i32", - attr::ReprInt(_, attr::SignedInt(ast::IntTy::I64)) => "i64", - - attr::ReprInt(_, attr::UnsignedInt(ast::UintTy::Us)) => "usize", - attr::ReprInt(_, attr::UnsignedInt(ast::UintTy::U8)) => "u8", - attr::ReprInt(_, attr::UnsignedInt(ast::UintTy::U16)) => "u16", - attr::ReprInt(_, attr::UnsignedInt(ast::UintTy::U32)) => "u32", - attr::ReprInt(_, attr::UnsignedInt(ast::UintTy::U64)) => "u64", + attr::ReprInt(attr::SignedInt(ast::IntTy::Is)) => "isize", + attr::ReprInt(attr::SignedInt(ast::IntTy::I8)) => "i8", + attr::ReprInt(attr::SignedInt(ast::IntTy::I16)) => "i16", + attr::ReprInt(attr::SignedInt(ast::IntTy::I32)) => "i32", + attr::ReprInt(attr::SignedInt(ast::IntTy::I64)) => "i64", + + attr::ReprInt(attr::UnsignedInt(ast::UintTy::Us)) => "usize", + attr::ReprInt(attr::UnsignedInt(ast::UintTy::U8)) => "u8", + attr::ReprInt(attr::UnsignedInt(ast::UintTy::U16)) => "u16", + attr::ReprInt(attr::UnsignedInt(ast::UintTy::U32)) => "u32", + attr::ReprInt(attr::UnsignedInt(ast::UintTy::U64)) => "u64", } } } @@ -1460,8 +1460,9 @@ impl<'a> MethodDef<'a> { .iter() .map(|v| { let ident = v.node.name; + let sp = Span { expn_id: trait_.span.expn_id, ..v.span }; let summary = trait_.summarise_struct(cx, &v.node.data); - (ident, v.span, summary) + (ident, sp, summary) }) .collect(); self.call_substructure_method(cx, @@ -1545,7 +1546,7 @@ impl<'a> TraitDef<'a> { cx.span_bug(sp, "a braced struct with unnamed fields in `derive`"); } codemap::Spanned { - span: pat.span, + span: Span { expn_id: self.span.expn_id, ..pat.span }, node: ast::FieldPat { ident: ident.unwrap(), pat: pat, @@ -1576,7 +1577,8 @@ impl<'a> TraitDef<'a> { mutbl: ast::Mutability) -> (P, Vec<(Span, Option, P, &'a [ast::Attribute])>) { let variant_ident = variant.node.name; - let variant_path = cx.path(variant.span, vec![enum_ident, variant_ident]); + let sp = Span { expn_id: self.span.expn_id, ..variant.span }; + let variant_path = cx.path(sp, vec![enum_ident, variant_ident]); self.create_struct_pattern(cx, variant_path, &variant.node.data, prefix, mutbl) } } diff --git 
a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs index 210878b7c9f0e..4749d082bc0ec 100644 --- a/src/libsyntax_ext/deriving/generic/ty.rs +++ b/src/libsyntax_ext/deriving/generic/ty.rs @@ -194,6 +194,7 @@ impl<'a> Ty<'a> { fn mk_ty_param(cx: &ExtCtxt, span: Span, name: &str, + attrs: &[ast::Attribute], bounds: &[Path], self_ident: Ident, self_generics: &Generics) @@ -204,7 +205,7 @@ fn mk_ty_param(cx: &ExtCtxt, cx.typarambound(path) }) .collect(); - cx.typaram(span, cx.ident_of(name), bounds, None) + cx.typaram(span, cx.ident_of(name), attrs.to_owned(), bounds, None) } fn mk_generics(lifetimes: Vec, ty_params: Vec, span: Span) @@ -246,7 +247,7 @@ impl<'a> LifetimeBounds<'a> { let bounds = bounds.iter() .map(|b| cx.lifetime(span, cx.ident_of(*b).name)) .collect(); - cx.lifetime_def(span, cx.ident_of(*lt).name, bounds) + cx.lifetime_def(span, cx.ident_of(*lt).name, vec![], bounds) }) .collect(); let ty_params = self.bounds @@ -254,7 +255,7 @@ impl<'a> LifetimeBounds<'a> { .map(|t| { match *t { (ref name, ref bounds) => { - mk_ty_param(cx, span, *name, bounds, self_ty, self_generics) + mk_ty_param(cx, span, *name, &[], bounds, self_ty, self_generics) } } }) diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index 6162beb80eccc..c2bfead568612 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -11,7 +11,8 @@ //! The compiler code necessary to implement the `#[derive]` extensions. use syntax::ast::{self, MetaItem}; -use syntax::ext::base::{Annotatable, ExtCtxt}; +use syntax::attr::HasAttrs; +use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxExtension}; use syntax::ext::build::AstBuilder; use syntax::feature_gate; use syntax::codemap; @@ -27,7 +28,7 @@ macro_rules! pathvec { macro_rules! path { ($($x:tt)*) => ( - ::ext::deriving::generic::ty::Path::new( pathvec!( $($x)* ) ) + ::ext::deriving::generic::ty::Path::new( pathvec![ $($x)* ] ) ) } @@ -39,7 +40,7 @@ macro_rules! path_local { macro_rules! pathvec_std { ($cx:expr, $first:ident :: $($rest:ident)::+) => ({ - let mut v = pathvec!($($rest)::+); + let mut v = pathvec![$($rest)::+]; if let Some(s) = $cx.crate_root { v.insert(0, s); } @@ -104,15 +105,150 @@ pub fn expand_derive(cx: &mut ExtCtxt, } }; - if mitem.value_str().is_some() { - cx.span_err(mitem.span, "unexpected value in `derive`"); + let mut derive_attrs = Vec::new(); + item = item.map_attrs(|attrs| { + let partition = attrs.into_iter().partition(|attr| &attr.name() == "derive"); + derive_attrs = partition.0; + partition.1 + }); + + // Expand `#[derive]`s after other attribute macro invocations. 
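(Editorial aside, not part of the patch.) The attribute split just above uses `Iterator::partition` to peel the `derive` attributes off the item before deciding how to expand them. A self-contained sketch of that std API with plain strings, where the attribute names are placeholders:

```rust
// Partition one iterator into two collections by a predicate; the same
// pattern the map_attrs closure above uses to separate `derive` attributes.
fn main() {
    let attrs = vec!["derive", "inline", "derive", "doc"];
    let (derive_attrs, rest): (Vec<_>, Vec<_>) =
        attrs.into_iter().partition(|name| *name == "derive");
    assert_eq!(derive_attrs, ["derive", "derive"]);
    assert_eq!(rest, ["inline", "doc"]);
}
```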
+ if cx.resolver.find_attr_invoc(&mut item.attrs.clone()).is_some() { + return vec![Annotatable::Item(item.map_attrs(|mut attrs| { + attrs.push(cx.attribute(span, P(mitem.clone()))); + attrs.extend(derive_attrs); + attrs + }))]; + } + + let get_traits = |mitem: &MetaItem, cx: &ExtCtxt| { + if mitem.value_str().is_some() { + cx.span_err(mitem.span, "unexpected value in `derive`"); + } + + let traits = mitem.meta_item_list().unwrap_or(&[]).to_owned(); + if traits.is_empty() { + cx.span_warn(mitem.span, "empty trait list in `derive`"); + } + traits + }; + + let mut traits = get_traits(mitem, cx); + for derive_attr in derive_attrs { + traits.extend(get_traits(&derive_attr.node.value, cx)); + } + + // First, weed out malformed #[derive] + traits.retain(|titem| { + if titem.word().is_none() { + cx.span_err(titem.span, "malformed `derive` entry"); + false + } else { + true + } + }); + + // Next, check for old-style #[derive(Foo)] + // + // These all get expanded to `#[derive_Foo]` and will get expanded first. If + // we actually add any attributes here then we return to get those expanded + // and then eventually we'll come back to finish off the other derive modes. + let mut new_attributes = Vec::new(); + traits.retain(|titem| { + let tword = titem.word().unwrap(); + let tname = tword.name(); + + if is_builtin_trait(&tname) || { + let derive_mode = + ast::Path::from_ident(titem.span, ast::Ident::with_empty_ctxt(intern(&tname))); + cx.resolver.resolve_macro(cx.current_expansion.mark, &derive_mode, false).map(|ext| { + if let SyntaxExtension::CustomDerive(_) = *ext { true } else { false } + }).unwrap_or(false) + } { + return true; + } + + if !cx.ecfg.enable_custom_derive() { + feature_gate::emit_feature_err(&cx.parse_sess, + "custom_derive", + titem.span, + feature_gate::GateIssue::Language, + feature_gate::EXPLAIN_CUSTOM_DERIVE); + } else { + cx.span_warn(titem.span, feature_gate::EXPLAIN_DEPR_CUSTOM_DERIVE); + let name = intern_and_get_ident(&format!("derive_{}", tname)); + let mitem = cx.meta_word(titem.span, name); + new_attributes.push(cx.attribute(mitem.span, mitem)); + } + false + }); + if new_attributes.len() > 0 { + item = item.map(|mut i| { + i.attrs.extend(new_attributes); + if traits.len() > 0 { + let list = cx.meta_list(mitem.span, + intern_and_get_ident("derive"), + traits); + i.attrs.push(cx.attribute(mitem.span, list)); + } + i + }); + return vec![Annotatable::Item(item)] } - let traits = mitem.meta_item_list().unwrap_or(&[]); - if traits.is_empty() { - cx.span_warn(mitem.span, "empty trait list in `derive`"); + // Now check for macros-1.1 style custom #[derive]. + // + // Expand each of them in order given, but *before* we expand any built-in + // derive modes. The logic here is to: + // + // 1. Collect the remaining `#[derive]` annotations into a list. If + // there are any left, attach a `#[derive]` attribute to the item + // that we're currently expanding with the remaining derive modes. + // 2. Manufacture a `#[derive(Foo)]` attribute to pass to the expander. + // 3. Expand the current item we're expanding, getting back a list of + // items that replace it. + // 4. Extend the returned list with the current list of items we've + // collected so far. + // 5. Return everything! + // + // If custom derive extensions end up threading through the `#[derive]` + // attribute, we'll get called again later on to continue expanding + // those modes. 
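(Editorial aside, not part of the patch.) The comment block above describes how a macros-1.1 style custom derive is located and expanded. For orientation, this is roughly the user-facing entry point that branch ultimately dispatches to; `MyTrait`, the function name, and the body are placeholders, and at the time of this change both the attribute and the `proc-macro` crate type were still feature-gated:

```rust
// Minimal shape of a macros-1.1 custom derive crate (built with the
// `proc-macro` crate type). The body is deliberately a no-op placeholder;
// the exact output conventions were still in flux during this period.
extern crate proc_macro;

use proc_macro::TokenStream;

#[proc_macro_derive(MyTrait)]
pub fn derive_my_trait(input: TokenStream) -> TokenStream {
    // A real derive would inspect `input` and emit an impl of `MyTrait`.
    input
}
```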
+ let macros_11_derive = traits.iter() + .cloned() + .enumerate() + .filter(|&(_, ref name)| !is_builtin_trait(&name.name().unwrap())) + .next(); + if let Some((i, titem)) = macros_11_derive { + let tname = ast::Ident::with_empty_ctxt(intern(&titem.name().unwrap())); + let path = ast::Path::from_ident(titem.span, tname); + let ext = cx.resolver.resolve_macro(cx.current_expansion.mark, &path, false).unwrap(); + + traits.remove(i); + if traits.len() > 0 { + item = item.map(|mut i| { + let list = cx.meta_list(mitem.span, + intern_and_get_ident("derive"), + traits); + i.attrs.push(cx.attribute(mitem.span, list)); + i + }); + } + let titem = cx.meta_list_item_word(titem.span, titem.name().unwrap()); + let mitem = cx.meta_list(titem.span, + intern_and_get_ident("derive"), + vec![titem]); + let item = Annotatable::Item(item); + if let SyntaxExtension::CustomDerive(ref ext) = *ext { + return ext.expand(cx, mitem.span, &mitem, item); + } else { + unreachable!() + } } + // Ok, at this point we know that there are no old-style `#[derive_Foo]` nor + // any macros-1.1 style `#[derive(Foo)]`. Expand all built-in traits here. + // RFC #1445. `#[derive(PartialEq, Eq)]` adds a (trusted) // `#[structural_match]` attribute. if traits.iter().filter_map(|t| t.name()).any(|t| t == "PartialEq") && @@ -141,103 +277,33 @@ pub fn expand_derive(cx: &mut ExtCtxt, }); } - let mut other_items = Vec::new(); - - let mut iter = traits.iter(); - while let Some(titem) = iter.next() { - - let tword = match titem.word() { - Some(name) => name, - None => { - cx.span_err(titem.span, "malformed `derive` entry"); - continue - } + let mut items = Vec::new(); + for titem in traits.iter() { + let tname = titem.word().unwrap().name(); + let name = intern_and_get_ident(&format!("derive({})", tname)); + let mitem = cx.meta_word(titem.span, name); + + let span = Span { + expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { + call_site: titem.span, + callee: codemap::NameAndSpan { + format: codemap::MacroAttribute(intern(&format!("derive({})", tname))), + span: Some(titem.span), + allow_internal_unstable: true, + }, + }), + ..titem.span }; - let tname = tword.name(); - // If this is a built-in derive mode, then we expand it immediately - // here. - if is_builtin_trait(&tname) { - let name = intern_and_get_ident(&format!("derive({})", tname)); - let mitem = cx.meta_word(titem.span, name); - - let span = Span { - expn_id: cx.codemap().record_expansion(codemap::ExpnInfo { - call_site: titem.span, - callee: codemap::NameAndSpan { - format: codemap::MacroAttribute(intern(&format!("derive({})", tname))), - span: Some(titem.span), - allow_internal_unstable: true, - }, - }), - ..titem.span - }; - - let my_item = Annotatable::Item(item); - expand_builtin(&tname, cx, span, &mitem, &my_item, &mut |a| { - other_items.push(a); - }); - item = my_item.expect_item(); - - // Otherwise if this is a `rustc_macro`-style derive mode, we process it - // here. The logic here is to: - // - // 1. Collect the remaining `#[derive]` annotations into a list. If - // there are any left, attach a `#[derive]` attribute to the item - // that we're currently expanding with the remaining derive modes. - // 2. Manufacture a `#[derive(Foo)]` attribute to pass to the expander. - // 3. Expand the current item we're expanding, getting back a list of - // items that replace it. - // 4. Extend the returned list with the current list of items we've - // collected so far. - // 5. Return everything! 
- // - // If custom derive extensions end up threading through the `#[derive]` - // attribute, we'll get called again later on to continue expanding - // those modes. - } else if let Some(ext) = cx.derive_modes.remove(&tname) { - let remaining_derives = iter.cloned().collect::>(); - if remaining_derives.len() > 0 { - let list = cx.meta_list(titem.span, - intern_and_get_ident("derive"), - remaining_derives); - let attr = cx.attribute(titem.span, list); - item = item.map(|mut i| { - i.attrs.push(attr); - i - }); - } - let titem = cx.meta_list_item_word(titem.span, tname.clone()); - let mitem = cx.meta_list(titem.span, - intern_and_get_ident("derive"), - vec![titem]); - let item = Annotatable::Item(item); - let mut items = ext.expand(cx, mitem.span, &mitem, item); - items.extend(other_items); - cx.derive_modes.insert(tname.clone(), ext); - return items - - // If we've gotten this far then it means that we're in the territory of - // the old custom derive mechanism. If the feature isn't enabled, we - // issue an error, otherwise manufacture the `derive_Foo` attribute. - } else if !cx.ecfg.enable_custom_derive() { - feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic, - "custom_derive", - titem.span, - feature_gate::GateIssue::Language, - feature_gate::EXPLAIN_CUSTOM_DERIVE); - } else { - let name = intern_and_get_ident(&format!("derive_{}", tname)); - let mitem = cx.meta_word(titem.span, name); - item = item.map(|mut i| { - i.attrs.push(cx.attribute(mitem.span, mitem)); - i - }); - } + let my_item = Annotatable::Item(item); + expand_builtin(&tname, cx, span, &mitem, &my_item, &mut |a| { + items.push(a); + }); + item = my_item.expect_item(); } - other_items.insert(0, Annotatable::Item(item)); - return other_items + items.insert(0, Annotatable::Item(item)); + return items } macro_rules! 
derive_traits { diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index 892ebcfa76129..de78f859f0f61 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -506,7 +506,7 @@ impl<'a, 'b> Context<'a, 'b> { -> P { let sp = piece_ty.span; let ty = ecx.ty_rptr(sp, - ecx.ty(sp, ast::TyKind::Vec(piece_ty)), + ecx.ty(sp, ast::TyKind::Slice(piece_ty)), Some(ecx.lifetime(sp, keywords::StaticLifetime.name())), ast::Mutability::Immutable); let slice = ecx.expr_vec_slice(sp, pieces); diff --git a/src/libsyntax_ext/lib.rs b/src/libsyntax_ext/lib.rs index 3a6212e5445ce..f336b26ae41f2 100644 --- a/src/libsyntax_ext/lib.rs +++ b/src/libsyntax_ext/lib.rs @@ -20,8 +20,8 @@ #![cfg_attr(not(stage0), deny(warnings))] #![feature(dotdot_in_tuple_patterns)] -#![feature(rustc_macro_lib)] -#![feature(rustc_macro_internals)] +#![feature(proc_macro_lib)] +#![feature(proc_macro_internals)] #![feature(rustc_private)] #![feature(staged_api)] @@ -31,7 +31,7 @@ extern crate log; #[macro_use] extern crate syntax; extern crate syntax_pos; -extern crate rustc_macro; +extern crate proc_macro; extern crate rustc_errors as errors; mod asm; @@ -43,27 +43,30 @@ mod format; mod log_syntax; mod trace_macros; -pub mod rustc_macro_registrar; +pub mod proc_macro_registrar; // for custom_derive pub mod deriving; use std::rc::Rc; use syntax::ast; -use syntax::ext::base::{MacroExpanderFn, MacroRulesTT, NormalTT, MultiModifier}; -use syntax::ext::hygiene::Mark; +use syntax::ext::base::{MacroExpanderFn, NormalTT, IdentTT, MultiModifier, NamedSyntaxExtension}; +use syntax::ext::tt::macro_rules::MacroRulesExpander; use syntax::parse::token::intern; -pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, enable_quotes: bool) { +pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, + user_exts: Vec, + enable_quotes: bool) { let mut register = |name, ext| { - resolver.add_macro(Mark::root(), ast::Ident::with_empty_ctxt(intern(name)), Rc::new(ext)); + resolver.add_ext(ast::Ident::with_empty_ctxt(name), Rc::new(ext)); }; - register("macro_rules", MacroRulesTT); + register(intern("macro_rules"), IdentTT(Box::new(MacroRulesExpander), None, false)); macro_rules! register { ($( $name:ident: $f:expr, )*) => { $( - register(stringify!($name), NormalTT(Box::new($f as MacroExpanderFn), None, false)); + register(intern(stringify!($name)), + NormalTT(Box::new($f as MacroExpanderFn), None, false)); )* } } @@ -108,7 +111,11 @@ pub fn register_builtins(resolver: &mut syntax::ext::base::Resolver, enable_quot } // format_args uses `unstable` things internally. 
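(Editorial aside, not part of the patch.) The `register_builtins` hunk above boils down to a name-to-extension table plus a trailing loop over `user_exts`. A reduced sketch in plain std types, where `Name`, `Expander`, and the sample expander are stand-ins rather than the compiler's real types:

```rust
use std::collections::HashMap;
use std::rc::Rc;

type Name = u32;                             // stand-in for an interned symbol
type Expander = Rc<dyn Fn(&str) -> String>;  // stand-in for a SyntaxExtension

fn register_builtins(table: &mut HashMap<Name, Expander>,
                     user_exts: Vec<(Name, Expander)>) {
    let mut register = |name: Name, ext: Expander| {
        table.insert(name, ext);
    };
    // One builtin expander, then whatever the driver loaded externally.
    register(0, Rc::new(|input: &str| format!("expanded: {}", input)));
    for (name, ext) in user_exts {
        register(name, ext);
    }
}

fn main() {
    let mut table = HashMap::new();
    let user_ext: Expander = Rc::new(|s: &str| s.to_uppercase());
    register_builtins(&mut table, vec![(1, user_ext)]);
    assert_eq!(table.len(), 2); // one builtin plus one user extension
}
```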
- register("format_args", NormalTT(Box::new(format::expand_format_args), None, true)); + register(intern("format_args"), NormalTT(Box::new(format::expand_format_args), None, true)); - register("derive", MultiModifier(Box::new(deriving::expand_derive))); + register(intern("derive"), MultiModifier(Box::new(deriving::expand_derive))); + + for (name, ext) in user_exts { + register(name, ext); + } } diff --git a/src/libsyntax_ext/log_syntax.rs b/src/libsyntax_ext/log_syntax.rs index 7242b9865a92c..71f1951d5d455 100644 --- a/src/libsyntax_ext/log_syntax.rs +++ b/src/libsyntax_ext/log_syntax.rs @@ -19,7 +19,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt, tts: &[tokenstream::TokenTree]) -> Box { if !cx.ecfg.enable_log_syntax() { - feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic, + feature_gate::emit_feature_err(&cx.parse_sess, "log_syntax", sp, feature_gate::GateIssue::Language, diff --git a/src/libsyntax_ext/rustc_macro_registrar.rs b/src/libsyntax_ext/proc_macro_registrar.rs similarity index 81% rename from src/libsyntax_ext/rustc_macro_registrar.rs rename to src/libsyntax_ext/proc_macro_registrar.rs index ce3e53cdf97f4..f49a5f0e0706e 100644 --- a/src/libsyntax_ext/rustc_macro_registrar.rs +++ b/src/libsyntax_ext/proc_macro_registrar.rs @@ -36,45 +36,45 @@ struct CollectCustomDerives<'a> { derives: Vec, in_root: bool, handler: &'a errors::Handler, - is_rustc_macro_crate: bool, + is_proc_macro_crate: bool, } pub fn modify(sess: &ParseSess, resolver: &mut ::syntax::ext::base::Resolver, mut krate: ast::Crate, - is_rustc_macro_crate: bool, + is_proc_macro_crate: bool, num_crate_types: usize, handler: &errors::Handler, features: &Features) -> ast::Crate { - let ecfg = ExpansionConfig::default("rustc_macro".to_string()); - let mut cx = ExtCtxt::new(sess, Vec::new(), ecfg, resolver); + let ecfg = ExpansionConfig::default("proc_macro".to_string()); + let mut cx = ExtCtxt::new(sess, ecfg, resolver); let mut collect = CollectCustomDerives { derives: Vec::new(), in_root: true, handler: handler, - is_rustc_macro_crate: is_rustc_macro_crate, + is_proc_macro_crate: is_proc_macro_crate, }; visit::walk_crate(&mut collect, &krate); - if !is_rustc_macro_crate { + if !is_proc_macro_crate { return krate - } else if !features.rustc_macro { - let mut err = handler.struct_err("the `rustc-macro` crate type is \ + } else if !features.proc_macro { + let mut err = handler.struct_err("the `proc-macro` crate type is \ experimental"); - err.help("add #![feature(rustc_macro)] to the crate attributes to \ + err.help("add #![feature(proc_macro)] to the crate attributes to \ enable"); err.emit(); } if num_crate_types > 1 { - handler.err("cannot mix `rustc-macro` crate type with others"); + handler.err("cannot mix `proc-macro` crate type with others"); } krate.module.items.push(mk_registrar(&mut cx, &collect.derives)); if krate.exported_macros.len() > 0 { - handler.err("cannot export macro_rules! macros from a `rustc-macro` \ + handler.err("cannot export macro_rules! 
macros from a `proc-macro` \ crate type currently"); } @@ -83,13 +83,13 @@ pub fn modify(sess: &ParseSess, impl<'a> CollectCustomDerives<'a> { fn check_not_pub_in_root(&self, vis: &ast::Visibility, sp: Span) { - if self.is_rustc_macro_crate && + if self.is_proc_macro_crate && self.in_root && *vis == ast::Visibility::Public { self.handler.span_err(sp, - "`rustc-macro` crate types cannot \ + "`proc-macro` crate types cannot \ export any items other than functions \ - tagged with `#[rustc_macro_derive]` \ + tagged with `#[proc_macro_derive]` \ currently"); } } @@ -100,7 +100,7 @@ impl<'a> Visitor for CollectCustomDerives<'a> { // First up, make sure we're checking a bare function. If we're not then // we're just not interested in this item. // - // If we find one, try to locate a `#[rustc_macro_derive]` attribute on + // If we find one, try to locate a `#[proc_macro_derive]` attribute on // it. match item.node { ast::ItemKind::Fn(..) => {} @@ -111,7 +111,7 @@ impl<'a> Visitor for CollectCustomDerives<'a> { } let mut attrs = item.attrs.iter() - .filter(|a| a.check_name("rustc_macro_derive")); + .filter(|a| a.check_name("proc_macro_derive")); let attr = match attrs.next() { Some(attr) => attr, None => { @@ -121,25 +121,25 @@ impl<'a> Visitor for CollectCustomDerives<'a> { }; if let Some(a) = attrs.next() { - self.handler.span_err(a.span(), "multiple `#[rustc_macro_derive]` \ + self.handler.span_err(a.span(), "multiple `#[proc_macro_derive]` \ attributes found"); } - if !self.is_rustc_macro_crate { + if !self.is_proc_macro_crate { self.handler.span_err(attr.span(), - "the `#[rustc_macro_derive]` attribute is \ - only usable with crates of the `rustc-macro` \ + "the `#[proc_macro_derive]` attribute is \ + only usable with crates of the `proc-macro` \ crate type"); } - // Once we've located the `#[rustc_macro_derive]` attribute, verify - // that it's of the form `#[rustc_macro_derive(Foo)]` + // Once we've located the `#[proc_macro_derive]` attribute, verify + // that it's of the form `#[proc_macro_derive(Foo)]` let list = match attr.meta_item_list() { Some(list) => list, None => { self.handler.span_err(attr.span(), "attribute must be of form: \ - #[rustc_macro_derive(TraitName)]"); + #[proc_macro_derive(TraitName)]"); return } }; @@ -177,7 +177,7 @@ impl<'a> Visitor for CollectCustomDerives<'a> { function_name: item.ident, }); } else { - let msg = "functions tagged with `#[rustc_macro_derive]` must \ + let msg = "functions tagged with `#[proc_macro_derive]` must \ currently reside in the root of the crate"; self.handler.span_err(item.span, msg); } @@ -202,9 +202,9 @@ impl<'a> Visitor for CollectCustomDerives<'a> { // Creates a new module which looks like: // // mod $gensym { -// extern crate rustc_macro; +// extern crate proc_macro; // -// use rustc_macro::__internal::Registry; +// use proc_macro::__internal::Registry; // // #[plugin_registrar] // fn registrar(registrar: &mut Registry) { @@ -218,16 +218,16 @@ fn mk_registrar(cx: &mut ExtCtxt, let eid = cx.codemap().record_expansion(ExpnInfo { call_site: DUMMY_SP, callee: NameAndSpan { - format: MacroAttribute(token::intern("rustc_macro")), + format: MacroAttribute(token::intern("proc_macro")), span: None, allow_internal_unstable: true, } }); let span = Span { expn_id: eid, ..DUMMY_SP }; - let rustc_macro = token::str_to_ident("rustc_macro"); + let proc_macro = token::str_to_ident("proc_macro"); let krate = cx.item(span, - rustc_macro, + proc_macro, Vec::new(), ast::ItemKind::ExternCrate(None)); @@ -241,7 +241,7 @@ fn mk_registrar(cx: &mut 
ExtCtxt, (path, trait_name) }).map(|(path, trait_name)| { let registrar = cx.expr_ident(span, registrar); - let ufcs_path = cx.path(span, vec![rustc_macro, __internal, registry, + let ufcs_path = cx.path(span, vec![proc_macro, __internal, registry, register_custom_derive]); cx.expr_call(span, cx.expr_path(ufcs_path), @@ -250,7 +250,7 @@ fn mk_registrar(cx: &mut ExtCtxt, cx.stmt_expr(expr) }).collect::>(); - let path = cx.path(span, vec![rustc_macro, __internal, registry]); + let path = cx.path(span, vec![proc_macro, __internal, registry]); let registrar_path = cx.ty_path(path); let arg_ty = cx.ty_rptr(span, registrar_path, None, ast::Mutability::Mutable); let func = cx.item_fn(span, diff --git a/src/libsyntax_ext/trace_macros.rs b/src/libsyntax_ext/trace_macros.rs index 794169ae3429c..9578af6810078 100644 --- a/src/libsyntax_ext/trace_macros.rs +++ b/src/libsyntax_ext/trace_macros.rs @@ -20,7 +20,7 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt, tt: &[TokenTree]) -> Box { if !cx.ecfg.enable_trace_macros() { - feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic, + feature_gate::emit_feature_err(&cx.parse_sess, "trace_macros", sp, feature_gate::GateIssue::Language, diff --git a/src/libsyntax_pos/lib.rs b/src/libsyntax_pos/lib.rs index d835f8058fa0e..d83d3a6c5cfac 100644 --- a/src/libsyntax_pos/lib.rs +++ b/src/libsyntax_pos/lib.rs @@ -27,7 +27,8 @@ #![allow(unused_attributes)] #![feature(rustc_private)] #![feature(staged_api)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] +#![feature(specialization)] use std::cell::{Cell, RefCell}; use std::ops::{Add, Sub}; @@ -96,24 +97,6 @@ impl Span { self.lo == other.lo && self.hi == other.hi } - /// Returns `Some(span)`, a union of `self` and `other`, on overlap. - pub fn merge(self, other: Span) -> Option { - if self.expn_id != other.expn_id { - return None; - } - - if (self.lo <= other.lo && self.hi > other.lo) || - (self.lo >= other.lo && self.lo < other.hi) { - Some(Span { - lo: cmp::min(self.lo, other.lo), - hi: cmp::max(self.hi, other.hi), - expn_id: self.expn_id, - }) - } else { - None - } - } - /// Returns `Some(span)`, where the start is trimmed by the end of `other` pub fn trim_start(self, other: Span) -> Option { if self.hi > other.hi { @@ -137,8 +120,8 @@ pub struct SpanLabel { pub label: Option, } -impl Encodable for Span { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { +impl serialize::UseSpecializedEncodable for Span { + fn default_encode(&self, s: &mut S) -> Result<(), S::Error> { s.emit_struct("Span", 2, |s| { s.emit_struct_field("lo", 0, |s| { self.lo.encode(s) @@ -151,17 +134,11 @@ impl Encodable for Span { } } -impl Decodable for Span { - fn decode(d: &mut D) -> Result { +impl serialize::UseSpecializedDecodable for Span { + fn default_decode(d: &mut D) -> Result { d.read_struct("Span", 2, |d| { - let lo = d.read_struct_field("lo", 0, |d| { - BytePos::decode(d) - })?; - - let hi = d.read_struct_field("hi", 1, |d| { - BytePos::decode(d) - })?; - + let lo = d.read_struct_field("lo", 0, Decodable::decode)?; + let hi = d.read_struct_field("hi", 1, Decodable::decode)?; Ok(mk_sp(lo, hi)) }) } @@ -504,6 +481,9 @@ impl FileMap { self.src.is_none() } + pub fn byte_length(&self) -> u32 { + self.end_pos.0 - self.start_pos.0 + } pub fn count_lines(&self) -> usize { self.lines.borrow().len() } diff --git a/src/libterm/lib.rs b/src/libterm/lib.rs index 0244e26579693..caef808f474ec 100644 --- a/src/libterm/lib.rs +++ b/src/libterm/lib.rs @@ -59,7 +59,7 @@ #![cfg_attr(windows, feature(libc))] // Handle 
rustfmt skips #![feature(custom_attribute)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #![allow(unused_attributes)] use std::io::prelude::*; diff --git a/src/libterm/terminfo/searcher.rs b/src/libterm/terminfo/searcher.rs index e869c5083373c..4b1df7d170dbc 100644 --- a/src/libterm/terminfo/searcher.rs +++ b/src/libterm/terminfo/searcher.rs @@ -48,10 +48,12 @@ pub fn get_dbpath_for_term(term: &str) -> Option { // According to /etc/terminfo/README, after looking at // ~/.terminfo, ncurses will search /etc/terminfo, then // /lib/terminfo, and eventually /usr/share/terminfo. + // On Haiku the database can be found at /boot/system/data/terminfo Err(..) => { dirs_to_search.push(PathBuf::from("/etc/terminfo")); dirs_to_search.push(PathBuf::from("/lib/terminfo")); dirs_to_search.push(PathBuf::from("/usr/share/terminfo")); + dirs_to_search.push(PathBuf::from("/boot/system/data/terminfo")); } } } diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index 2b4193306ddf5..95ae6eb2efe8e 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -34,13 +34,11 @@ #![cfg_attr(not(stage0), deny(warnings))] #![feature(asm)] -#![feature(box_syntax)] -#![feature(fnbox)] #![feature(libc)] #![feature(rustc_private)] #![feature(set_stdio)] #![feature(staged_api)] -#![feature(question_mark)] +#![cfg_attr(stage0, feature(question_mark))] #![feature(panic_unwind)] extern crate getopts; @@ -56,8 +54,7 @@ use self::TestEvent::*; use self::NamePadding::*; use self::OutputLocation::*; -use std::boxed::FnBox; - +use std::panic::{catch_unwind, AssertUnwindSafe}; use std::any::Any; use std::cmp; use std::collections::BTreeMap; @@ -135,6 +132,16 @@ pub trait TDynBenchFn: Send { fn run(&self, harness: &mut Bencher); } +pub trait FnBox: Send + 'static { + fn call_box(self: Box, t: T); +} + +impl FnBox for F { + fn call_box(self: Box, t: T) { + (*self)(t) + } +} + // A function that runs a test. If the function returns successfully, // the test succeeds; if the function panics then the test fails. 
We // may need to come up with a more clever definition of test in order @@ -143,8 +150,8 @@ pub enum TestFn { StaticTestFn(fn()), StaticBenchFn(fn(&mut Bencher)), StaticMetricFn(fn(&mut MetricMap)), - DynTestFn(Box), - DynMetricFn(Box), + DynTestFn(Box>), + DynMetricFn(Box FnBox<&'a mut MetricMap>>), DynBenchFn(Box), } @@ -303,6 +310,7 @@ pub struct TestOpts { pub color: ColorConfig, pub quiet: bool, pub test_threads: Option, + pub skip: Vec, } impl TestOpts { @@ -318,6 +326,7 @@ impl TestOpts { color: AutoColor, quiet: false, test_threads: None, + skip: vec![], } } } @@ -327,7 +336,7 @@ pub type OptRes = Result; #[cfg_attr(rustfmt, rustfmt_skip)] fn optgroups() -> Vec { - vec!(getopts::optflag("", "ignored", "Run ignored tests"), + vec![getopts::optflag("", "ignored", "Run ignored tests"), getopts::optflag("", "test", "Run tests and not benchmarks"), getopts::optflag("", "bench", "Run benchmarks instead of tests"), getopts::optflag("h", "help", "Display this message (longer with --help)"), @@ -337,11 +346,13 @@ fn optgroups() -> Vec { task, allow printing directly"), getopts::optopt("", "test-threads", "Number of threads used for running tests \ in parallel", "n_threads"), + getopts::optmulti("", "skip", "Skip tests whose names contain FILTER (this flag can \ + be used multiple times)","FILTER"), getopts::optflag("q", "quiet", "Display one character per test instead of one line"), getopts::optopt("", "color", "Configure coloring of output: auto = colorize if stdout is a tty and tests are run on serially (default); always = always colorize output; - never = never colorize output;", "auto|always|never")) + never = never colorize output;", "auto|always|never")] } fn usage(binary: &str) { @@ -446,6 +457,7 @@ pub fn parse_opts(args: &[String]) -> Option { color: color, quiet: quiet, test_threads: test_threads, + skip: matches.opt_strs("skip"), }; Some(Ok(test_opts)) @@ -1024,7 +1036,8 @@ fn get_concurrency() -> usize { target_os = "ios", target_os = "android", target_os = "solaris", - target_os = "emscripten"))] + target_os = "emscripten", + target_os = "fuchsia"))] fn num_cpus() -> usize { unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) as usize } } @@ -1080,6 +1093,12 @@ fn get_concurrency() -> usize { } cpus as usize } + + #[cfg(target_os = "haiku")] + fn num_cpus() -> usize { + // FIXME: implement + 1 + } } pub fn filter_tests(opts: &TestOpts, tests: Vec) -> Vec { @@ -1095,6 +1114,11 @@ pub fn filter_tests(opts: &TestOpts, tests: Vec) -> Vec) -> Vec) -> Vec { // convert benchmarks to tests, if we're not benchmarking them - tests.into_iter() - .map(|x| { - let testfn = match x.testfn { - DynBenchFn(bench) => { - DynTestFn(Box::new(move || bench::run_once(|b| bench.run(b)))) - } - StaticBenchFn(benchfn) => { - DynTestFn(Box::new(move || bench::run_once(|b| benchfn(b)))) - } - f => f, - }; - TestDescAndFn { - desc: x.desc, - testfn: testfn, - } - }) - .collect() + tests.into_iter().map(|x| { + let testfn = match x.testfn { + DynBenchFn(bench) => { + DynTestFn(Box::new(move |()| { + bench::run_once(|b| bench.run(b)) + })) + } + StaticBenchFn(benchfn) => { + DynTestFn(Box::new(move |()| { + bench::run_once(|b| benchfn(b)) + })) + } + f => f, + }; + TestDescAndFn { + desc: x.desc, + testfn: testfn, + } + }).collect() } pub fn run_test(opts: &TestOpts, @@ -1155,7 +1181,7 @@ pub fn run_test(opts: &TestOpts, fn run_test_inner(desc: TestDesc, monitor_ch: Sender, nocapture: bool, - testfn: Box) { + testfn: Box>) { struct Sink(Arc>>); impl Write for Sink { fn write(&mut self, data: &[u8]) -> 
io::Result { @@ -1166,26 +1192,49 @@ pub fn run_test(opts: &TestOpts, } } - thread::spawn(move || { - let data = Arc::new(Mutex::new(Vec::new())); - let data2 = data.clone(); - let cfg = thread::Builder::new().name(match desc.name { - DynTestName(ref name) => name.clone(), - StaticTestName(name) => name.to_owned(), - }); + // Buffer for capturing standard I/O + let data = Arc::new(Mutex::new(Vec::new())); + let data2 = data.clone(); + + let name = desc.name.clone(); + let runtest = move || { + let oldio = if !nocapture { + Some(( + io::set_print(Some(Box::new(Sink(data2.clone())))), + io::set_panic(Some(Box::new(Sink(data2)))) + )) + } else { + None + }; - let result_guard = cfg.spawn(move || { - if !nocapture { - io::set_print(box Sink(data2.clone())); - io::set_panic(box Sink(data2)); - } - testfn() - }) - .unwrap(); - let test_result = calc_result(&desc, result_guard.join()); + let result = catch_unwind(AssertUnwindSafe(|| { + testfn.call_box(()) + })); + + if let Some((printio, panicio)) = oldio { + io::set_print(printio); + io::set_panic(panicio); + }; + + let test_result = calc_result(&desc, result); let stdout = data.lock().unwrap().to_vec(); monitor_ch.send((desc.clone(), test_result, stdout)).unwrap(); - }); + }; + + + // If the platform is single-threaded we're just going to run + // the test synchronously, regardless of the concurrency + // level. + let supports_threads = !cfg!(target_os = "emscripten"); + if supports_threads { + let cfg = thread::Builder::new().name(match name { + DynTestName(ref name) => name.clone(), + StaticTestName(name) => name.to_owned(), + }); + cfg.spawn(runtest).unwrap(); + } else { + runtest(); + } } match testfn { @@ -1201,7 +1250,7 @@ pub fn run_test(opts: &TestOpts, } DynMetricFn(f) => { let mut mm = MetricMap::new(); - f.call_box((&mut mm,)); + f.call_box(&mut mm); monitor_ch.send((desc, TrMetrics(mm), Vec::new())).unwrap(); return; } @@ -1212,7 +1261,8 @@ pub fn run_test(opts: &TestOpts, return; } DynTestFn(f) => run_test_inner(desc, monitor_ch, opts.nocapture, f), - StaticTestFn(f) => run_test_inner(desc, monitor_ch, opts.nocapture, Box::new(f)), + StaticTestFn(f) => run_test_inner(desc, monitor_ch, opts.nocapture, + Box::new(move |()| f())), } } @@ -1275,7 +1325,7 @@ impl MetricMap { /// /// This function is a no-op, and does not even read from `dummy`. #[cfg(not(any(all(target_os = "nacl", target_arch = "le32"), - target_arch = "asmjs")))] + target_arch = "asmjs", target_arch = "wasm32")))] pub fn black_box(dummy: T) -> T { // we need to "use" the argument in some way LLVM can't // introspect. 
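(Editorial aside, not part of the patch.) The two `black_box` definitions here exist so that benchmark inputs and results can be laundered through a function LLVM cannot see through; otherwise the optimizer may fold the measured work away entirely. A usage sketch, assuming a nightly toolchain with the unstable `test` crate:

```rust
#![feature(test)]

#[cfg(test)]
extern crate test;

#[cfg(test)]
mod benches {
    use test::{black_box, Bencher};

    #[bench]
    fn sum_1000(b: &mut Bencher) {
        let xs: Vec<u64> = (0..1_000).collect();
        b.iter(|| {
            // black_box on the input defeats pre-computation of the sum;
            // black_box on the result keeps the loop body from being
            // optimized out as dead code.
            let total: u64 = black_box(&xs).iter().sum();
            black_box(total)
        });
    }
}
```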
@@ -1283,7 +1333,7 @@ pub fn black_box(dummy: T) -> T { dummy } #[cfg(any(all(target_os = "nacl", target_arch = "le32"), - target_arch = "asmjs"))] + target_arch = "asmjs", target_arch = "wasm32"))] #[inline(never)] pub fn black_box(dummy: T) -> T { dummy @@ -1447,7 +1497,7 @@ mod tests { ignore: true, should_panic: ShouldPanic::No, }, - testfn: DynTestFn(Box::new(move || f())), + testfn: DynTestFn(Box::new(move |()| f())), }; let (tx, rx) = channel(); run_test(&TestOpts::new(), false, desc, tx); @@ -1464,7 +1514,7 @@ mod tests { ignore: true, should_panic: ShouldPanic::No, }, - testfn: DynTestFn(Box::new(move || f())), + testfn: DynTestFn(Box::new(move |()| f())), }; let (tx, rx) = channel(); run_test(&TestOpts::new(), false, desc, tx); @@ -1483,7 +1533,7 @@ mod tests { ignore: false, should_panic: ShouldPanic::Yes, }, - testfn: DynTestFn(Box::new(move || f())), + testfn: DynTestFn(Box::new(move |()| f())), }; let (tx, rx) = channel(); run_test(&TestOpts::new(), false, desc, tx); @@ -1502,7 +1552,7 @@ mod tests { ignore: false, should_panic: ShouldPanic::YesWithMessage("error message"), }, - testfn: DynTestFn(Box::new(move || f())), + testfn: DynTestFn(Box::new(move |()| f())), }; let (tx, rx) = channel(); run_test(&TestOpts::new(), false, desc, tx); @@ -1521,7 +1571,7 @@ mod tests { ignore: false, should_panic: ShouldPanic::YesWithMessage("foobar"), }, - testfn: DynTestFn(Box::new(move || f())), + testfn: DynTestFn(Box::new(move |()| f())), }; let (tx, rx) = channel(); run_test(&TestOpts::new(), false, desc, tx); @@ -1538,7 +1588,7 @@ mod tests { ignore: false, should_panic: ShouldPanic::Yes, }, - testfn: DynTestFn(Box::new(move || f())), + testfn: DynTestFn(Box::new(move |()| f())), }; let (tx, rx) = channel(); run_test(&TestOpts::new(), false, desc, tx); @@ -1571,7 +1621,7 @@ mod tests { ignore: true, should_panic: ShouldPanic::No, }, - testfn: DynTestFn(Box::new(move || {})), + testfn: DynTestFn(Box::new(move |()| {})), }, TestDescAndFn { desc: TestDesc { @@ -1579,7 +1629,7 @@ mod tests { ignore: false, should_panic: ShouldPanic::No, }, - testfn: DynTestFn(Box::new(move || {})), + testfn: DynTestFn(Box::new(move |()| {})), }]; let filtered = filter_tests(&opts, tests); @@ -1612,7 +1662,7 @@ mod tests { ignore: false, should_panic: ShouldPanic::No, }, - testfn: DynTestFn(Box::new(testfn)), + testfn: DynTestFn(Box::new(move |()| testfn())), }; tests.push(test); } diff --git a/src/libunwind/build.rs b/src/libunwind/build.rs index fd446f5a4f942..db41a368a1680 100644 --- a/src/libunwind/build.rs +++ b/src/libunwind/build.rs @@ -13,7 +13,7 @@ use std::env; fn main() { println!("cargo:rustc-cfg=cargobuild"); - let target = env::var("TARGET").unwrap(); + let target = env::var("TARGET").expect("TARGET was not set"); if target.contains("linux") { if target.contains("musl") && !target.contains("mips") { @@ -35,5 +35,7 @@ fn main() { println!("cargo:rustc-link-lib=gcc_pic"); } else if target.contains("windows-gnu") { println!("cargo:rustc-link-lib=gcc_eh"); + } else if target.contains("fuchsia") { + println!("cargo:rustc-link-lib=unwind"); } } diff --git a/src/libunwind/libunwind.rs b/src/libunwind/libunwind.rs index 3900ba65293c0..bbac6c07c570a 100644 --- a/src/libunwind/libunwind.rs +++ b/src/libunwind/libunwind.rs @@ -65,7 +65,7 @@ pub const unwinder_private_data_size: usize = 2; #[cfg(target_arch = "s390x")] pub const unwinder_private_data_size: usize = 2; -#[cfg(target_arch = "asmjs")] +#[cfg(target_os = "emscripten")] pub const unwinder_private_data_size: usize = 20; #[repr(C)] @@ -241,6 
+241,7 @@ if #[cfg(not(all(target_os = "ios", target_arch = "arm")))] { #[cfg_attr(any(all(target_os = "linux", not(target_env = "musl")), target_os = "freebsd", target_os = "solaris", + target_os = "haiku", all(target_os = "linux", target_env = "musl", not(target_arch = "x86"), @@ -251,6 +252,8 @@ if #[cfg(not(all(target_os = "ios", target_arch = "arm")))] { any(target_arch = "x86", target_arch = "x86_64"), not(test)), link(name = "unwind", kind = "static"))] +#[cfg_attr(target_os = "fuchsia", + link(name = "unwind"))] #[cfg_attr(any(target_os = "android", target_os = "openbsd"), link(name = "gcc"))] #[cfg_attr(all(target_os = "netbsd", not(target_vendor = "rumprun")), diff --git a/src/llvm b/src/llvm index 16b79d01fd6d9..c1d962263bf76 160000 --- a/src/llvm +++ b/src/llvm @@ -1 +1 @@ -Subproject commit 16b79d01fd6d942cf3c9120b92df56b13ec92665 +Subproject commit c1d962263bf76a10bea0c761621fcd98d6214b2e diff --git a/src/rtstartup/rsbegin.rs b/src/rtstartup/rsbegin.rs index b57b7e8432167..65c85697ce720 100644 --- a/src/rtstartup/rsbegin.rs +++ b/src/rtstartup/rsbegin.rs @@ -44,7 +44,7 @@ pub mod eh_frames { // Scratch space for unwinder's internal book-keeping. // This is defined as `struct object` in $GCC/libgcc/unwind-dw2-fde.h. - static mut obj: [isize; 6] = [0; 6]; + static mut OBJ: [isize; 6] = [0; 6]; // Unwind info registration/deregistration routines. // See the docs of `unwind` module in libstd. @@ -56,13 +56,13 @@ pub mod eh_frames { unsafe fn init() { // register unwind info on module startup rust_eh_register_frames(&__EH_FRAME_BEGIN__ as *const u8, - &mut obj as *mut _ as *mut u8); + &mut OBJ as *mut _ as *mut u8); } unsafe fn uninit() { // unregister on shutdown rust_eh_unregister_frames(&__EH_FRAME_BEGIN__ as *const u8, - &mut obj as *mut _ as *mut u8); + &mut OBJ as *mut _ as *mut u8); } // MSVC-specific init/uninit routine registration diff --git a/src/rust-installer b/src/rust-installer index 755bc3db4ff79..4f994850808a5 160000 --- a/src/rust-installer +++ b/src/rust-installer @@ -1 +1 @@ -Subproject commit 755bc3db4ff795865ea31b5b4f38ac920d8acacb +Subproject commit 4f994850808a572e2cc8d43f968893c8e942e9bf diff --git a/src/rustc/Cargo.lock b/src/rustc/Cargo.lock deleted file mode 100644 index 3377fc43d8a60..0000000000000 --- a/src/rustc/Cargo.lock +++ /dev/null @@ -1,425 +0,0 @@ -[root] -name = "rustc-main" -version = "0.0.0" -dependencies = [ - "rustc_back 0.0.0", - "rustc_driver 0.0.0", - "rustdoc 0.0.0", -] - -[[package]] -name = "arena" -version = "0.0.0" - -[[package]] -name = "build_helper" -version = "0.1.0" - -[[package]] -name = "flate" -version = "0.0.0" -dependencies = [ - "build_helper 0.1.0", - "gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "fmt_macros" -version = "0.0.0" - -[[package]] -name = "gcc" -version = "0.3.28" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "graphviz" -version = "0.0.0" - -[[package]] -name = "log" -version = "0.0.0" - -[[package]] -name = "proc_macro" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "rustc_plugin 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rbml" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "serialize 0.0.0", -] - -[[package]] -name = "rustc" -version = "0.0.0" -dependencies = [ - "arena 0.0.0", - "flate 0.0.0", - "fmt_macros 0.0.0", - "graphviz 0.0.0", - "log 0.0.0", - "rbml 0.0.0", - "rustc_back 0.0.0", - "rustc_bitflags 0.0.0", - "rustc_const_math 0.0.0", - "rustc_data_structures 
0.0.0", - "rustc_errors 0.0.0", - "rustc_llvm 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_back" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", -] - -[[package]] -name = "rustc_bitflags" -version = "0.0.0" - -[[package]] -name = "rustc_borrowck" -version = "0.0.0" -dependencies = [ - "graphviz 0.0.0", - "log 0.0.0", - "rustc 0.0.0", - "rustc_data_structures 0.0.0", - "rustc_errors 0.0.0", - "rustc_mir 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_const_eval" -version = "0.0.0" -dependencies = [ - "graphviz 0.0.0", - "log 0.0.0", - "rustc 0.0.0", - "rustc_back 0.0.0", - "rustc_const_math 0.0.0", - "rustc_errors 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_const_math" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", -] - -[[package]] -name = "rustc_data_structures" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "serialize 0.0.0", -] - -[[package]] -name = "rustc_driver" -version = "0.0.0" -dependencies = [ - "arena 0.0.0", - "flate 0.0.0", - "graphviz 0.0.0", - "log 0.0.0", - "proc_macro 0.0.0", - "rustc 0.0.0", - "rustc_back 0.0.0", - "rustc_borrowck 0.0.0", - "rustc_const_eval 0.0.0", - "rustc_errors 0.0.0", - "rustc_incremental 0.0.0", - "rustc_lint 0.0.0", - "rustc_llvm 0.0.0", - "rustc_metadata 0.0.0", - "rustc_mir 0.0.0", - "rustc_passes 0.0.0", - "rustc_plugin 0.0.0", - "rustc_privacy 0.0.0", - "rustc_resolve 0.0.0", - "rustc_save_analysis 0.0.0", - "rustc_trans 0.0.0", - "rustc_typeck 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", - "syntax_ext 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_errors" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "serialize 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_incremental" -version = "0.0.0" -dependencies = [ - "graphviz 0.0.0", - "log 0.0.0", - "rbml 0.0.0", - "rustc 0.0.0", - "rustc_data_structures 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_lint" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "rustc 0.0.0", - "rustc_back 0.0.0", - "rustc_const_eval 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_llvm" -version = "0.0.0" -dependencies = [ - "build_helper 0.1.0", - "gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc_bitflags 0.0.0", -] - -[[package]] -name = "rustc_macro" -version = "0.0.0" -dependencies = [ - "syntax 0.0.0", -] - -[[package]] -name = "rustc_metadata" -version = "0.0.0" -dependencies = [ - "flate 0.0.0", - "log 0.0.0", - "rbml 0.0.0", - "rustc 0.0.0", - "rustc_back 0.0.0", - "rustc_bitflags 0.0.0", - "rustc_const_math 0.0.0", - "rustc_data_structures 0.0.0", - "rustc_errors 0.0.0", - "rustc_llvm 0.0.0", - "rustc_macro 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", - "syntax_ext 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_mir" -version = "0.0.0" -dependencies = [ - "graphviz 0.0.0", - "log 0.0.0", - "rustc 0.0.0", - "rustc_back 0.0.0", - "rustc_bitflags 0.0.0", - "rustc_const_eval 0.0.0", - "rustc_const_math 0.0.0", - "rustc_data_structures 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_passes" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "rustc 0.0.0", - "rustc_const_eval 0.0.0", - "rustc_const_math 0.0.0", - "rustc_errors 0.0.0", - "syntax 0.0.0", - "syntax_pos 
0.0.0", -] - -[[package]] -name = "rustc_platform_intrinsics" -version = "0.0.0" - -[[package]] -name = "rustc_plugin" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "rustc 0.0.0", - "rustc_back 0.0.0", - "rustc_bitflags 0.0.0", - "rustc_errors 0.0.0", - "rustc_metadata 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_privacy" -version = "0.0.0" -dependencies = [ - "rustc 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_resolve" -version = "0.0.0" -dependencies = [ - "arena 0.0.0", - "log 0.0.0", - "rustc 0.0.0", - "rustc_errors 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_save_analysis" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "rustc 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_trans" -version = "0.0.0" -dependencies = [ - "arena 0.0.0", - "flate 0.0.0", - "graphviz 0.0.0", - "log 0.0.0", - "rustc 0.0.0", - "rustc_back 0.0.0", - "rustc_const_eval 0.0.0", - "rustc_const_math 0.0.0", - "rustc_data_structures 0.0.0", - "rustc_errors 0.0.0", - "rustc_incremental 0.0.0", - "rustc_llvm 0.0.0", - "rustc_platform_intrinsics 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustc_typeck" -version = "0.0.0" -dependencies = [ - "arena 0.0.0", - "fmt_macros 0.0.0", - "log 0.0.0", - "rustc 0.0.0", - "rustc_back 0.0.0", - "rustc_const_eval 0.0.0", - "rustc_const_math 0.0.0", - "rustc_errors 0.0.0", - "rustc_platform_intrinsics 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "rustdoc" -version = "0.0.0" -dependencies = [ - "arena 0.0.0", - "build_helper 0.1.0", - "gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.0.0", - "rustc 0.0.0", - "rustc_back 0.0.0", - "rustc_const_eval 0.0.0", - "rustc_const_math 0.0.0", - "rustc_data_structures 0.0.0", - "rustc_driver 0.0.0", - "rustc_errors 0.0.0", - "rustc_lint 0.0.0", - "rustc_metadata 0.0.0", - "rustc_resolve 0.0.0", - "rustc_trans 0.0.0", - "serialize 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "serialize" -version = "0.0.0" -dependencies = [ - "log 0.0.0", -] - -[[package]] -name = "syntax" -version = "0.0.0" -dependencies = [ - "log 0.0.0", - "rustc_bitflags 0.0.0", - "rustc_errors 0.0.0", - "serialize 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "syntax_ext" -version = "0.0.0" -dependencies = [ - "fmt_macros 0.0.0", - "log 0.0.0", - "rustc_errors 0.0.0", - "rustc_macro 0.0.0", - "syntax 0.0.0", - "syntax_pos 0.0.0", -] - -[[package]] -name = "syntax_pos" -version = "0.0.0" -dependencies = [ - "serialize 0.0.0", -] - -[metadata] -"checksum gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)" = "3da3a2cbaeb01363c8e3704fd9fd0eb2ceb17c6f27abd4c1ef040fb57d20dc79" diff --git a/src/rustc/std_shim/Cargo.lock b/src/rustc/std_shim/Cargo.lock deleted file mode 100644 index b460235545759..0000000000000 --- a/src/rustc/std_shim/Cargo.lock +++ /dev/null @@ -1,132 +0,0 @@ -[root] -name = "std_shim" -version = "0.1.0" -dependencies = [ - "core 0.0.0", - "std 0.0.0", -] - -[[package]] -name = "alloc" -version = "0.0.0" -dependencies = [ - "core 0.0.0", -] - -[[package]] -name = "alloc_jemalloc" -version = "0.0.0" -dependencies = [ - "build_helper 0.1.0", - "core 0.0.0", - "gcc 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.0.0", -] - -[[package]] -name = "alloc_system" -version = "0.0.0" -dependencies = [ - "core 
0.0.0", - "libc 0.0.0", -] - -[[package]] -name = "build_helper" -version = "0.1.0" - -[[package]] -name = "collections" -version = "0.0.0" -dependencies = [ - "alloc 0.0.0", - "core 0.0.0", - "rustc_unicode 0.0.0", -] - -[[package]] -name = "compiler_builtins" -version = "0.0.0" -dependencies = [ - "core 0.0.0", - "gcc 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "core" -version = "0.0.0" - -[[package]] -name = "gcc" -version = "0.3.27" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "libc" -version = "0.0.0" -dependencies = [ - "core 0.0.0", -] - -[[package]] -name = "panic_abort" -version = "0.0.0" -dependencies = [ - "core 0.0.0", - "libc 0.0.0", -] - -[[package]] -name = "panic_unwind" -version = "0.0.0" -dependencies = [ - "alloc 0.0.0", - "core 0.0.0", - "libc 0.0.0", - "unwind 0.0.0", -] - -[[package]] -name = "rand" -version = "0.0.0" -dependencies = [ - "core 0.0.0", -] - -[[package]] -name = "rustc_unicode" -version = "0.0.0" -dependencies = [ - "core 0.0.0", -] - -[[package]] -name = "std" -version = "0.0.0" -dependencies = [ - "alloc 0.0.0", - "alloc_jemalloc 0.0.0", - "alloc_system 0.0.0", - "build_helper 0.1.0", - "collections 0.0.0", - "compiler_builtins 0.0.0", - "core 0.0.0", - "gcc 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)", - "libc 0.0.0", - "panic_abort 0.0.0", - "panic_unwind 0.0.0", - "rand 0.0.0", - "rustc_unicode 0.0.0", - "unwind 0.0.0", -] - -[[package]] -name = "unwind" -version = "0.0.0" -dependencies = [ - "core 0.0.0", - "libc 0.0.0", -] - -[metadata] -"checksum gcc 0.3.27 (registry+https://github.com/rust-lang/crates.io-index)" = "806e63121fbf30760b060a5fc2d1e9f47e1bd356d183e8870367c6c12cc9d5ed" diff --git a/src/rustc/test_shim/Cargo.lock b/src/rustc/test_shim/Cargo.lock deleted file mode 100644 index 73df56d3594f1..0000000000000 --- a/src/rustc/test_shim/Cargo.lock +++ /dev/null @@ -1,23 +0,0 @@ -[root] -name = "test_shim" -version = "0.1.0" -dependencies = [ - "test 0.0.0", -] - -[[package]] -name = "getopts" -version = "0.0.0" - -[[package]] -name = "term" -version = "0.0.0" - -[[package]] -name = "test" -version = "0.0.0" -dependencies = [ - "getopts 0.0.0", - "term 0.0.0", -] - diff --git a/src/rustllvm/ArchiveWrapper.cpp b/src/rustllvm/ArchiveWrapper.cpp index 1e873b5345c43..12cd81ec70044 100644 --- a/src/rustllvm/ArchiveWrapper.cpp +++ b/src/rustllvm/ArchiveWrapper.cpp @@ -22,7 +22,7 @@ struct RustArchiveMember { Archive::Child child; RustArchiveMember(): filename(NULL), name(NULL), -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) child(NULL, NULL, NULL) #else child(NULL, NULL) @@ -35,7 +35,7 @@ struct RustArchiveMember { struct RustArchiveIterator { Archive::child_iterator cur; Archive::child_iterator end; -#if LLVM_VERSION_MINOR >= 9 +#if LLVM_VERSION_GE(3, 9) Error err; #endif }; @@ -81,7 +81,7 @@ LLVMRustOpenArchive(char *path) { return nullptr; } -#if LLVM_VERSION_MINOR <= 8 +#if LLVM_VERSION_LE(3, 8) ErrorOr> archive_or = #else Expected> archive_or = @@ -89,7 +89,7 @@ LLVMRustOpenArchive(char *path) { Archive::create(buf_or.get()->getMemBufferRef()); if (!archive_or) { -#if LLVM_VERSION_MINOR <= 8 +#if LLVM_VERSION_LE(3, 8) LLVMRustSetLastError(archive_or.getError().message().c_str()); #else LLVMRustSetLastError(toString(archive_or.takeError()).c_str()); @@ -112,7 +112,7 @@ extern "C" LLVMRustArchiveIteratorRef LLVMRustArchiveIteratorNew(LLVMRustArchiveRef ra) { Archive *ar = ra->getBinary(); RustArchiveIterator *rai = new 
RustArchiveIterator(); -#if LLVM_VERSION_MINOR <= 8 +#if LLVM_VERSION_LE(3, 8) rai->cur = ar->child_begin(); #else rai->cur = ar->child_begin(rai->err); @@ -127,7 +127,7 @@ LLVMRustArchiveIteratorNew(LLVMRustArchiveRef ra) { extern "C" LLVMRustArchiveChildConstRef LLVMRustArchiveIteratorNext(LLVMRustArchiveIteratorRef rai) { -#if LLVM_VERSION_MINOR >= 9 +#if LLVM_VERSION_GE(3, 9) if (rai->err) { LLVMRustSetLastError(toString(std::move(rai->err)).c_str()); return NULL; @@ -135,7 +135,7 @@ LLVMRustArchiveIteratorNext(LLVMRustArchiveIteratorRef rai) { #endif if (rai->cur == rai->end) return NULL; -#if LLVM_VERSION_MINOR == 8 +#if LLVM_VERSION_EQ(3, 8) const ErrorOr* cur = rai->cur.operator->(); if (!*cur) { LLVMRustSetLastError(cur->getError().message().c_str()); @@ -207,7 +207,7 @@ LLVMRustWriteArchive(char *Dst, bool WriteSymbtab, LLVMRustArchiveKind rust_kind) { -#if LLVM_VERSION_MINOR <= 8 +#if LLVM_VERSION_LE(3, 8) std::vector Members; #else std::vector Members; @@ -218,20 +218,20 @@ LLVMRustWriteArchive(char *Dst, auto Member = NewMembers[i]; assert(Member->name); if (Member->filename) { -#if LLVM_VERSION_MINOR >= 9 +#if LLVM_VERSION_GE(3, 9) Expected MOrErr = NewArchiveMember::getFile(Member->filename, true); if (!MOrErr) { LLVMRustSetLastError(toString(MOrErr.takeError()).c_str()); return LLVMRustResult::Failure; } Members.push_back(std::move(*MOrErr)); -#elif LLVM_VERSION_MINOR == 8 +#elif LLVM_VERSION_EQ(3, 8) Members.push_back(NewArchiveIterator(Member->filename)); #else Members.push_back(NewArchiveIterator(Member->filename, Member->name)); #endif } else { -#if LLVM_VERSION_MINOR <= 8 +#if LLVM_VERSION_LE(3, 8) Members.push_back(NewArchiveIterator(Member->child, Member->name)); #else Expected MOrErr = NewArchiveMember::getOldMember(Member->child, true); @@ -243,7 +243,7 @@ LLVMRustWriteArchive(char *Dst, #endif } } -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) auto pair = writeArchive(Dst, Members, WriteSymbtab, Kind, true, false); #else auto pair = writeArchive(Dst, Members, WriteSymbtab, Kind, true); diff --git a/src/rustllvm/PassWrapper.cpp b/src/rustllvm/PassWrapper.cpp index a271987210b67..60093e9bd37a8 100644 --- a/src/rustllvm/PassWrapper.cpp +++ b/src/rustllvm/PassWrapper.cpp @@ -46,7 +46,7 @@ LLVMInitializePasses() { initializeVectorization(Registry); initializeIPO(Registry); initializeAnalysis(Registry); -#if LLVM_VERSION_MINOR == 7 +#if LLVM_VERSION_EQ(3, 7) initializeIPA(Registry); #endif initializeTransformUtils(Registry); @@ -297,7 +297,7 @@ LLVMRustCreateTargetMachine(const char *triple, bool FunctionSections, bool DataSections) { -#if LLVM_VERSION_MINOR <= 8 +#if LLVM_VERSION_LE(3, 8) Reloc::Model RM; #else Optional RM; @@ -316,7 +316,7 @@ LLVMRustCreateTargetMachine(const char *triple, RM = Reloc::DynamicNoPIC; break; default: -#if LLVM_VERSION_MINOR <= 8 +#if LLVM_VERSION_LE(3, 8) RM = Reloc::Default; #endif break; @@ -337,7 +337,7 @@ LLVMRustCreateTargetMachine(const char *triple, } TargetOptions Options; -#if LLVM_VERSION_MINOR <= 8 +#if LLVM_VERSION_LE(3, 8) Options.PositionIndependentExecutable = PositionIndependentExecutable; #endif @@ -539,7 +539,7 @@ extern "C" void LLVMRustRunRestrictionPass(LLVMModuleRef M, char **symbols, size_t len) { llvm::legacy::PassManager passes; -#if LLVM_VERSION_MINOR <= 8 +#if LLVM_VERSION_LE(3, 8) ArrayRef ref(symbols, len); passes.add(llvm::createInternalizePass(ref)); #else @@ -593,7 +593,7 @@ LLVMRustGetModuleDataLayout(LLVMModuleRef M) { extern "C" void LLVMRustSetModulePIELevel(LLVMModuleRef M) { -#if 
LLVM_VERSION_MINOR >= 9 +#if LLVM_VERSION_GE(3, 9) unwrap(M)->setPIELevel(PIELevel::Level::Large); #endif } diff --git a/src/rustllvm/RustWrapper.cpp b/src/rustllvm/RustWrapper.cpp index 124eb1eba4f7b..369388caa0496 100644 --- a/src/rustllvm/RustWrapper.cpp +++ b/src/rustllvm/RustWrapper.cpp @@ -394,7 +394,7 @@ extern "C" LLVMRustMetadataRef LLVMRustDIBuilderCreateSubroutineType( LLVMRustMetadataRef File, LLVMRustMetadataRef ParameterTypes) { return wrap(Builder->createSubroutineType( -#if LLVM_VERSION_MINOR == 7 +#if LLVM_VERSION_EQ(3, 7) unwrapDI(File), #endif DITypeRefArray(unwrap(ParameterTypes)))); @@ -416,7 +416,7 @@ extern "C" LLVMRustMetadataRef LLVMRustDIBuilderCreateFunction( LLVMValueRef Fn, LLVMRustMetadataRef TParam, LLVMRustMetadataRef Decl) { -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) DITemplateParameterArray TParams = DITemplateParameterArray(unwrap(TParam)); DISubprogram *Sub = Builder->createFunction( @@ -562,10 +562,8 @@ extern "C" LLVMRustMetadataRef LLVMRustDIBuilderCreateVariable( LLVMRustMetadataRef Ty, bool AlwaysPreserve, unsigned Flags, - int64_t* AddrOps, - unsigned AddrOpsCount, unsigned ArgNo) { -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) if (Tag == 0x100) { // DW_TAG_auto_variable return wrap(Builder->createAutoVariable( unwrapDI(Scope), Name, @@ -645,23 +643,6 @@ extern "C" LLVMValueRef LLVMRustDIBuilderInsertDeclareAtEnd( unwrap(InsertAtEnd))); } -extern "C" LLVMValueRef LLVMRustDIBuilderInsertDeclareBefore( - LLVMRustDIBuilderRef Builder, - LLVMValueRef Val, - LLVMRustMetadataRef VarInfo, - int64_t* AddrOps, - unsigned AddrOpsCount, - LLVMValueRef DL, - LLVMValueRef InsertBefore) { - return wrap(Builder->insertDeclare( - unwrap(Val), - unwrap(VarInfo), - Builder->createExpression( - llvm::ArrayRef(AddrOps, AddrOpsCount)), - DebugLoc(cast(unwrap(DL)->getMetadata())), - unwrap(InsertBefore))); -} - extern "C" LLVMRustMetadataRef LLVMRustDIBuilderCreateEnumerator( LLVMRustDIBuilderRef Builder, const char* Name, @@ -814,7 +795,7 @@ LLVMRustLinkInExternalBitcode(LLVMModuleRef dst, char *bc, size_t len) { raw_string_ostream Stream(Err); DiagnosticPrinterRawOStream DP(Stream); -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) if (Linker::linkModules(*Dst, std::move(Src.get()))) { #else if (Linker::LinkModules(Dst, Src->get(), [&](const DiagnosticInfo &DI) { DI.print(DP); })) { @@ -937,14 +918,14 @@ to_rust(DiagnosticKind kind) return LLVMRustDiagnosticKind::OptimizationRemarkMissed; case DK_OptimizationRemarkAnalysis: return LLVMRustDiagnosticKind::OptimizationRemarkAnalysis; -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) case DK_OptimizationRemarkAnalysisFPCommute: return LLVMRustDiagnosticKind::OptimizationRemarkAnalysisFPCommute; case DK_OptimizationRemarkAnalysisAliasing: return LLVMRustDiagnosticKind::OptimizationRemarkAnalysisAliasing; #endif default: -#if LLVM_VERSION_MINOR >= 9 +#if LLVM_VERSION_GE(3, 9) return (kind >= DK_FirstRemark && kind <= DK_LastRemark) ? 
LLVMRustDiagnosticKind::OptimizationRemarkOther : LLVMRustDiagnosticKind::Other; @@ -994,7 +975,7 @@ extern "C" LLVMTypeKind LLVMRustGetTypeKind(LLVMTypeRef Ty) { return LLVMVectorTypeKind; case Type::X86_MMXTyID: return LLVMX86_MMXTypeKind; -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) case Type::TokenTyID: return LLVMTokenTypeKind; #endif @@ -1043,7 +1024,7 @@ LLVMRustBuildCleanupPad(LLVMBuilderRef Builder, unsigned ArgCnt, LLVMValueRef *LLArgs, const char *Name) { -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) Value **Args = unwrap(LLArgs); if (ParentPad == NULL) { Type *Ty = Type::getTokenTy(unwrap(Builder)->getContext()); @@ -1061,7 +1042,7 @@ extern "C" LLVMValueRef LLVMRustBuildCleanupRet(LLVMBuilderRef Builder, LLVMValueRef CleanupPad, LLVMBasicBlockRef UnwindBB) { -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) CleanupPadInst *Inst = cast(unwrap(CleanupPad)); return wrap(unwrap(Builder)->CreateCleanupRet(Inst, unwrap(UnwindBB))); #else @@ -1075,7 +1056,7 @@ LLVMRustBuildCatchPad(LLVMBuilderRef Builder, unsigned ArgCnt, LLVMValueRef *LLArgs, const char *Name) { -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) Value **Args = unwrap(LLArgs); return wrap(unwrap(Builder)->CreateCatchPad(unwrap(ParentPad), ArrayRef(Args, ArgCnt), @@ -1089,7 +1070,7 @@ extern "C" LLVMValueRef LLVMRustBuildCatchRet(LLVMBuilderRef Builder, LLVMValueRef Pad, LLVMBasicBlockRef BB) { -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) return wrap(unwrap(Builder)->CreateCatchRet(cast(unwrap(Pad)), unwrap(BB))); #else @@ -1103,7 +1084,7 @@ LLVMRustBuildCatchSwitch(LLVMBuilderRef Builder, LLVMBasicBlockRef BB, unsigned NumHandlers, const char *Name) { -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) if (ParentPad == NULL) { Type *Ty = Type::getTokenTy(unwrap(Builder)->getContext()); ParentPad = wrap(Constant::getNullValue(Ty)); @@ -1120,7 +1101,7 @@ LLVMRustBuildCatchSwitch(LLVMBuilderRef Builder, extern "C" void LLVMRustAddHandler(LLVMValueRef CatchSwitchRef, LLVMBasicBlockRef Handler) { -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) Value *CatchSwitch = unwrap(CatchSwitchRef); cast(CatchSwitch)->addHandler(unwrap(Handler)); #endif @@ -1129,14 +1110,14 @@ LLVMRustAddHandler(LLVMValueRef CatchSwitchRef, extern "C" void LLVMRustSetPersonalityFn(LLVMBuilderRef B, LLVMValueRef Personality) { -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) unwrap(B)->GetInsertBlock() ->getParent() ->setPersonalityFn(cast(unwrap(Personality))); #endif } -#if LLVM_VERSION_MINOR >= 8 +#if LLVM_VERSION_GE(3, 8) extern "C" OperandBundleDef* LLVMRustBuildOperandBundleDef(const char *Name, LLVMValueRef *Inputs, @@ -1302,7 +1283,7 @@ static LLVMLinkage from_rust(LLVMRustLinkage linkage) { return LLVMCommonLinkage; default: llvm_unreachable("Invalid LLVMRustLinkage value!"); - } + } } extern "C" LLVMRustLinkage LLVMRustGetLinkage(LLVMValueRef V) { diff --git a/src/rustllvm/llvm-auto-clean-trigger b/src/rustllvm/llvm-auto-clean-trigger index 1080070d21a3b..37fded948e1de 100644 --- a/src/rustllvm/llvm-auto-clean-trigger +++ b/src/rustllvm/llvm-auto-clean-trigger @@ -1,4 +1,4 @@ # If this file is modified, then llvm will be forcibly cleaned and then rebuilt. # The actual contents of this file do not matter, but to trigger a change on the # build bots then the contents should be changed so git updates the mtime. 
-2016-08-30 +2016-10-29 diff --git a/src/rustllvm/rustllvm.h b/src/rustllvm/rustllvm.h index 5aae11fb456b6..ffe94d1e22f20 100644 --- a/src/rustllvm/rustllvm.h +++ b/src/rustllvm/rustllvm.h @@ -45,7 +45,16 @@ #include "llvm-c/ExecutionEngine.h" #include "llvm-c/Object.h" -#if LLVM_VERSION_MINOR >= 7 +#define LLVM_VERSION_GE(major, minor) \ + (LLVM_VERSION_MAJOR > (major) || LLVM_VERSION_MAJOR == (major) && LLVM_VERSION_MINOR >= (minor)) + +#define LLVM_VERSION_EQ(major, minor) \ + (LLVM_VERSION_MAJOR == (major) && LLVM_VERSION_MINOR == (minor)) + +#define LLVM_VERSION_LE(major, minor) \ + (LLVM_VERSION_MAJOR < (major) || LLVM_VERSION_MAJOR == (major) && LLVM_VERSION_MINOR <= (minor)) + +#if LLVM_VERSION_GE(3, 7) #include "llvm/IR/LegacyPassManager.h" #else #include "llvm/PassManager.h" diff --git a/src/stage0.txt b/src/stage0.txt index f9cf819dc9487..ac2050a6fc8f1 100644 --- a/src/stage0.txt +++ b/src/stage0.txt @@ -12,6 +12,5 @@ # tarball for a stable release you'll likely see `1.x.0-$date` where `1.x.0` was # released on `$date` -rustc: beta-2016-08-17 -rustc_key: 195e6261 -cargo: nightly-2016-08-21 +rustc: beta-2016-09-28 +cargo: nightly-2016-09-26 diff --git a/src/test/codegen-units/item-collection/instantiation-through-vtable.rs b/src/test/codegen-units/item-collection/instantiation-through-vtable.rs index 06e547f0dd037..ad466671cf79b 100644 --- a/src/test/codegen-units/item-collection/instantiation-through-vtable.rs +++ b/src/test/codegen-units/item-collection/instantiation-through-vtable.rs @@ -31,6 +31,7 @@ impl Trait for Struct { fn main() { let s1 = Struct { _a: 0u32 }; + //~ TRANS_ITEM drop-glue i8 //~ TRANS_ITEM fn instantiation_through_vtable::{{impl}}[0]::foo[0] //~ TRANS_ITEM fn instantiation_through_vtable::{{impl}}[0]::bar[0] let _ = &s1 as &Trait; @@ -40,5 +41,3 @@ fn main() { //~ TRANS_ITEM fn instantiation_through_vtable::{{impl}}[0]::bar[0] let _ = &s1 as &Trait; } - -//~ TRANS_ITEM drop-glue i8 diff --git a/src/test/codegen-units/item-collection/overloaded-operators.rs b/src/test/codegen-units/item-collection/overloaded-operators.rs index c275eb954b094..0295311334b6b 100644 --- a/src/test/codegen-units/item-collection/overloaded-operators.rs +++ b/src/test/codegen-units/item-collection/overloaded-operators.rs @@ -45,8 +45,8 @@ impl IndexMut for Indexable { } -//~ TRANS_ITEM fn overloaded_operators::{{impl}}[2]::eq[0] -//~ TRANS_ITEM fn overloaded_operators::{{impl}}[2]::ne[0] +//~ TRANS_ITEM fn overloaded_operators::{{impl}}[4]::eq[0] +//~ TRANS_ITEM fn overloaded_operators::{{impl}}[4]::ne[0] #[derive(PartialEq)] pub struct Equatable(u32); @@ -54,7 +54,7 @@ pub struct Equatable(u32); impl Add for Equatable { type Output = u32; - //~ TRANS_ITEM fn overloaded_operators::{{impl}}[3]::add[0] + //~ TRANS_ITEM fn overloaded_operators::{{impl}}[2]::add[0] fn add(self, rhs: u32) -> u32 { self.0 + rhs } @@ -63,7 +63,7 @@ impl Add for Equatable { impl Deref for Equatable { type Target = u32; - //~ TRANS_ITEM fn overloaded_operators::{{impl}}[4]::deref[0] + //~ TRANS_ITEM fn overloaded_operators::{{impl}}[3]::deref[0] fn deref(&self) -> &Self::Target { &self.0 } diff --git a/src/test/codegen-units/item-collection/unsizing.rs b/src/test/codegen-units/item-collection/unsizing.rs index 5c67ab7a82646..cd4cc258f7a68 100644 --- a/src/test/codegen-units/item-collection/unsizing.rs +++ b/src/test/codegen-units/item-collection/unsizing.rs @@ -57,6 +57,7 @@ fn main() { // simple case let bool_sized = &true; + //~ TRANS_ITEM drop-glue i8 //~ TRANS_ITEM fn unsizing::{{impl}}[0]::foo[0] let 
_bool_unsized = bool_sized as &Trait; @@ -78,5 +79,3 @@ fn main() //~ TRANS_ITEM fn unsizing::{{impl}}[3]::foo[0] let _wrapper_sized = wrapper_sized as Wrapper; } - -//~ TRANS_ITEM drop-glue i8 diff --git a/src/test/codegen-units/partitioning/local-inlining.rs b/src/test/codegen-units/partitioning/local-inlining.rs index 5eb1cbc2199f7..84ca8b1b0f641 100644 --- a/src/test/codegen-units/partitioning/local-inlining.rs +++ b/src/test/codegen-units/partitioning/local-inlining.rs @@ -19,7 +19,7 @@ mod inline { // Important: This function should show up in all codegen units where it is inlined - //~ TRANS_ITEM fn local_inlining::inline[0]::inlined_function[0] @@ local_inlining-inline[External] local_inlining-user1[Available] local_inlining-user2[Available] + //~ TRANS_ITEM fn local_inlining::inline[0]::inlined_function[0] @@ local_inlining-user1[Internal] local_inlining-user2[Internal] #[inline(always)] pub fn inlined_function() { diff --git a/src/test/codegen-units/partitioning/local-transitive-inlining.rs b/src/test/codegen-units/partitioning/local-transitive-inlining.rs index 28c4698eabd1f..7e37f9ccaa5a9 100644 --- a/src/test/codegen-units/partitioning/local-transitive-inlining.rs +++ b/src/test/codegen-units/partitioning/local-transitive-inlining.rs @@ -18,7 +18,7 @@ mod inline { - //~ TRANS_ITEM fn local_transitive_inlining::inline[0]::inlined_function[0] @@ local_transitive_inlining-inline[External] local_transitive_inlining-direct_user[Available] local_transitive_inlining-indirect_user[Available] + //~ TRANS_ITEM fn local_transitive_inlining::inline[0]::inlined_function[0] @@ local_transitive_inlining-indirect_user[Internal] #[inline(always)] pub fn inlined_function() { @@ -29,7 +29,7 @@ mod inline { mod direct_user { use super::inline; - //~ TRANS_ITEM fn local_transitive_inlining::direct_user[0]::foo[0] @@ local_transitive_inlining-direct_user[External] local_transitive_inlining-indirect_user[Available] + //~ TRANS_ITEM fn local_transitive_inlining::direct_user[0]::foo[0] @@ local_transitive_inlining-indirect_user[Internal] #[inline(always)] pub fn foo() { inline::inlined_function(); diff --git a/src/test/codegen-units/partitioning/vtable-through-const.rs b/src/test/codegen-units/partitioning/vtable-through-const.rs index b40bb7f60973a..0007eaae28971 100644 --- a/src/test/codegen-units/partitioning/vtable-through-const.rs +++ b/src/test/codegen-units/partitioning/vtable-through-const.rs @@ -69,6 +69,7 @@ mod mod1 { //~ TRANS_ITEM fn vtable_through_const::main[0] @@ vtable_through_const[External] fn main() { + //~ TRANS_ITEM drop-glue i8 @@ vtable_through_const[Internal] // Since Trait1::do_something() is instantiated via its default implementation, // it is considered a generic and is instantiated here only because it is @@ -89,5 +90,3 @@ fn main() { //~ TRANS_ITEM fn vtable_through_const::mod1[0]::id[0] @@ vtable_through_const[Internal] mod1::ID_CHAR('x'); } - -//~ TRANS_ITEM drop-glue i8 diff --git a/src/test/codegen/abi-sysv64.rs b/src/test/codegen/abi-sysv64.rs index 2b8e8a1b6b2c7..4f6a50eab4915 100644 --- a/src/test/codegen/abi-sysv64.rs +++ b/src/test/codegen/abi-sysv64.rs @@ -12,6 +12,9 @@ // llvm. Also checks that the abi-sysv64 feature gate allows usage // of the sysv64 abi. 
+// ignore-arm +// ignore-aarch64 + // compile-flags: -C no-prepopulate-passes #![crate_type = "lib"] diff --git a/src/test/codegen/consts.rs b/src/test/codegen/consts.rs index 36a582ca73709..33b4221b73338 100644 --- a/src/test/codegen/consts.rs +++ b/src/test/codegen/consts.rs @@ -19,12 +19,12 @@ // CHECK: @STATIC = {{.*}}, align 4 // This checks the constants from inline_enum_const -// CHECK: @ref{{[0-9]+}} = {{.*}}, align 2 +// CHECK: @ref.{{[0-9]+}} = {{.*}}, align 2 // This checks the constants from {low,high}_align_const, they share the same // constant, but the alignment differs, so the higher one should be used -// CHECK: [[LOW_HIGH:@ref[0-9]+]] = {{.*}}, align 4 -// CHECK: [[LOW_HIGH_REF:@const[0-9]+]] = {{.*}} [[LOW_HIGH]] +// CHECK: [[LOW_HIGH:@ref.[0-9]+]] = {{.*}}, align 4 +// CHECK: [[LOW_HIGH_REF:@const.[0-9]+]] = {{.*}} [[LOW_HIGH]] #[derive(Copy, Clone)] diff --git a/src/test/codegen/enum-bounds-check.rs b/src/test/codegen/enum-bounds-check.rs new file mode 100644 index 0000000000000..4cfb5a752dfdc --- /dev/null +++ b/src/test/codegen/enum-bounds-check.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -O + +#![crate_type = "lib"] + +pub enum Foo { + A, B +} + +// CHECK-LABEL: @lookup +#[no_mangle] +pub fn lookup(buf: &[u8; 2], f: Foo) -> u8 { + // CHECK-NOT: panic_bounds_check + buf[f as usize] +} diff --git a/src/test/codegen/issue-32364.rs b/src/test/codegen/issue-32364.rs index 926987be0e049..401253a315fbf 100644 --- a/src/test/codegen/issue-32364.rs +++ b/src/test/codegen/issue-32364.rs @@ -8,6 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
+// ignore-arm +// ignore-aarch64 + // compile-flags: -C no-prepopulate-passes struct Foo; diff --git a/src/test/codegen/lifetime_start_end.rs b/src/test/codegen/lifetime_start_end.rs index cf91e7a8bcb66..81f6cf309da50 100644 --- a/src/test/codegen/lifetime_start_end.rs +++ b/src/test/codegen/lifetime_start_end.rs @@ -30,11 +30,11 @@ pub fn test() { // CHECK: [[S_b:%[0-9]+]] = bitcast %"2.std::option::Option"** %b to i8* // CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_b]]) -// CHECK: [[S_tmp2:%[0-9]+]] = bitcast %"2.std::option::Option"* %tmp2 to i8* -// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S_tmp2]]) +// CHECK: [[S__5:%[0-9]+]] = bitcast %"2.std::option::Option"* %_5 to i8* +// CHECK: call void @llvm.lifetime.start(i{{[0-9 ]+}}, i8* [[S__5]]) -// CHECK: [[E_tmp2:%[0-9]+]] = bitcast %"2.std::option::Option"* %tmp2 to i8* -// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_tmp2]]) +// CHECK: [[E__5:%[0-9]+]] = bitcast %"2.std::option::Option"* %_5 to i8* +// CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E__5]]) // CHECK: [[E_b:%[0-9]+]] = bitcast %"2.std::option::Option"** %b to i8* // CHECK: call void @llvm.lifetime.end(i{{[0-9 ]+}}, i8* [[E_b]]) diff --git a/src/test/codegen/link_section.rs b/src/test/codegen/link_section.rs index 5ad3854c05ccc..98214dc5c6f3d 100644 --- a/src/test/codegen/link_section.rs +++ b/src/test/codegen/link_section.rs @@ -22,12 +22,12 @@ pub enum E { B(f32) } -// CHECK: @VAR2 = constant {{.*}} { i32 0, i32 666, {{.*}} }, section ".test_two" +// CHECK: @VAR2 = constant {{.*}} { i32 0, i32 666 }, section ".test_two" #[no_mangle] #[link_section = ".test_two"] pub static VAR2: E = E::A(666); -// CHECK: @VAR3 = constant {{.*}} { i32 1, float 1.000000e+00, {{.*}} }, section ".test_three" +// CHECK: @VAR3 = constant {{.*}} { i32 1, float 1.000000e+00 }, section ".test_three" #[no_mangle] #[link_section = ".test_three"] pub static VAR3: E = E::B(1.); diff --git a/src/test/codegen/zip.rs b/src/test/codegen/zip.rs index 6c956364bf80f..d0051c5165fe1 100644 --- a/src/test/codegen/zip.rs +++ b/src/test/codegen/zip.rs @@ -20,3 +20,12 @@ pub fn zip_copy(xs: &[u8], ys: &mut [u8]) { *y = *x; } } + +// CHECK-LABEL: @zip_copy_mapped +#[no_mangle] +pub fn zip_copy_mapped(xs: &[u8], ys: &mut [u8]) { +// CHECK: memcpy + for (x, y) in xs.iter().map(|&x| x).zip(ys) { + *y = x; + } +} diff --git a/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs b/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs index 5b1ecfed24278..60697cc8b6ace 100644 --- a/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs +++ b/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs @@ -56,8 +56,7 @@ fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) fn expand_identity(cx: &mut ExtCtxt, _span: Span, tts: &[TokenTree]) -> Box { // Parse an expression and emit it unchanged. 
- let mut parser = parse::new_parser_from_tts(cx.parse_sess(), - cx.cfg(), tts.to_vec()); + let mut parser = parse::new_parser_from_tts(cx.parse_sess(), tts.to_vec()); let expr = parser.parse_expr().unwrap(); MacEager::expr(quote_expr!(&mut *cx, $expr)) } diff --git a/src/test/compile-fail-fulldeps/derive-no-std-not-supported.rs b/src/test/compile-fail-fulldeps/derive-no-std-not-supported.rs index 01c81a8bbcee4..6ae5544d68699 100644 --- a/src/test/compile-fail-fulldeps/derive-no-std-not-supported.rs +++ b/src/test/compile-fail-fulldeps/derive-no-std-not-supported.rs @@ -10,7 +10,6 @@ #![no_std] -extern crate core; extern crate rand; extern crate serialize as rustc_serialize; diff --git a/src/test/compile-fail-fulldeps/dropck_tarena_cycle_checked.rs b/src/test/compile-fail-fulldeps/dropck_tarena_cycle_checked.rs index d36293a484d4e..bbdc59c843172 100644 --- a/src/test/compile-fail-fulldeps/dropck_tarena_cycle_checked.rs +++ b/src/test/compile-fail-fulldeps/dropck_tarena_cycle_checked.rs @@ -123,5 +123,5 @@ fn f<'a>(arena: &'a TypedArena>) { fn main() { let arena = TypedArena::new(); - f(&arena); //~ ERROR `arena` does not live long enough -} + f(&arena); +} //~ ERROR `arena` does not live long enough diff --git a/src/test/compile-fail-fulldeps/dropck_tarena_unsound_drop.rs b/src/test/compile-fail-fulldeps/dropck_tarena_unsound_drop.rs index 6cbed34c7ad94..46cb76055745a 100644 --- a/src/test/compile-fail-fulldeps/dropck_tarena_unsound_drop.rs +++ b/src/test/compile-fail-fulldeps/dropck_tarena_unsound_drop.rs @@ -46,5 +46,6 @@ fn f<'a>(_arena: &'a TypedArena>) {} fn main() { let arena: TypedArena = TypedArena::new(); - f(&arena); //~ ERROR `arena` does not live long enough -} + f(&arena); +} //~ ERROR `arena` does not live long enough + diff --git a/src/test/compile-fail-fulldeps/issue-18986.rs b/src/test/compile-fail-fulldeps/issue-18986.rs index 3c32cb947b382..95af37605441d 100644 --- a/src/test/compile-fail-fulldeps/issue-18986.rs +++ b/src/test/compile-fail-fulldeps/issue-18986.rs @@ -15,6 +15,6 @@ pub use use_from_trait_xc::Trait; fn main() { match () { - Trait { x: 42 } => () //~ ERROR expected variant, struct or type alias, found trait `Trait` + Trait { x: 42 } => () //~ ERROR expected struct, variant or union type, found trait `Trait` } } diff --git a/src/test/compile-fail-fulldeps/macro-crate-doesnt-resolve.rs b/src/test/compile-fail-fulldeps/macro-crate-doesnt-resolve.rs index 1fbde00a3dfde..f563a1f88d000 100644 --- a/src/test/compile-fail-fulldeps/macro-crate-doesnt-resolve.rs +++ b/src/test/compile-fail-fulldeps/macro-crate-doesnt-resolve.rs @@ -14,6 +14,5 @@ extern crate macro_crate_test; fn main() { - macro_crate_test::foo(); - //~^ ERROR failed to resolve. Use of undeclared type or module `macro_crate_test` + macro_crate_test::foo(); //~ ERROR unresolved name } diff --git a/src/test/compile-fail-fulldeps/macro-crate-unknown-crate.rs b/src/test/compile-fail-fulldeps/no-link-unknown-crate.rs similarity index 95% rename from src/test/compile-fail-fulldeps/macro-crate-unknown-crate.rs rename to src/test/compile-fail-fulldeps/no-link-unknown-crate.rs index 65657eea1efb0..8e4692bdee197 100644 --- a/src/test/compile-fail-fulldeps/macro-crate-unknown-crate.rs +++ b/src/test/compile-fail-fulldeps/no-link-unknown-crate.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-#[macro_use] #[no_link] +#[no_link] extern crate doesnt_exist; //~ ERROR can't find crate fn main() {} diff --git a/src/test/compile-fail-fulldeps/rustc-macro/at-the-root.rs b/src/test/compile-fail-fulldeps/proc-macro/at-the-root.rs similarity index 73% rename from src/test/compile-fail-fulldeps/rustc-macro/at-the-root.rs rename to src/test/compile-fail-fulldeps/proc-macro/at-the-root.rs index 46724523d1c70..b03e1e4f91928 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/at-the-root.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/at-the-root.rs @@ -8,15 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] +#![crate_type = "proc-macro"] +#![feature(proc_macro)] -extern crate rustc_macro; +extern crate proc_macro; -pub mod a { //~ `rustc-macro` crate types cannot export any items - use rustc_macro::TokenStream; +pub mod a { //~ `proc-macro` crate types cannot export any items + use proc_macro::TokenStream; - #[rustc_macro_derive(B)] + #[proc_macro_derive(B)] pub fn bar(a: TokenStream) -> TokenStream { //~^ ERROR: must currently reside in the root of the crate a diff --git a/src/test/compile-fail-fulldeps/proc-macro/attribute.rs b/src/test/compile-fail-fulldeps/proc-macro/attribute.rs new file mode 100644 index 0000000000000..d1b2aa330ed5a --- /dev/null +++ b/src/test/compile-fail-fulldeps/proc-macro/attribute.rs @@ -0,0 +1,46 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![crate_type = "proc-macro"] +#![feature(proc_macro)] + +extern crate proc_macro; + +#[proc_macro_derive] +//~^ ERROR: attribute must be of form: #[proc_macro_derive(TraitName)] +pub fn foo1(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + input +} + +#[proc_macro_derive = "foo"] +//~^ ERROR: attribute must be of form: #[proc_macro_derive(TraitName)] +pub fn foo2(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + input +} + +#[proc_macro_derive( + a = "b" +)] +//~^^ ERROR: must only be one word +pub fn foo3(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + input +} + +#[proc_macro_derive(b, c)] +//~^ ERROR: attribute must only have one argument +pub fn foo4(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + input +} + +#[proc_macro_derive(d(e))] +//~^ ERROR: must only be one word +pub fn foo5(input: proc_macro::TokenStream) -> proc_macro::TokenStream { + input +} diff --git a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-a.rs b/src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-a.rs similarity index 77% rename from src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-a.rs rename to src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-a.rs index ff00a9d96a30a..4aa4238611d89 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-a.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-a.rs @@ -11,15 +11,15 @@ // force-host // no-prefer-dynamic -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] -#![crate_type = "rustc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] +#![crate_type = "proc-macro"] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(A)] +#[proc_macro_derive(A)] pub fn derive_a(input: TokenStream) -> TokenStream { input } diff --git a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-bad.rs b/src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-bad.rs similarity index 78% rename from src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-bad.rs rename to src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-bad.rs index 5dd42d28b7be1..aae8b63e25354 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-bad.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-bad.rs @@ -11,15 +11,15 @@ // no-prefer-dynamic // force-host -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] -#![crate_type = "rustc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] +#![crate_type = "proc-macro"] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(A)] +#[proc_macro_derive(A)] pub fn derive_a(_input: TokenStream) -> TokenStream { "struct A { inner }".parse().unwrap() } diff --git a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-panic.rs b/src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-panic.rs similarity index 77% rename from src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-panic.rs rename to src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-panic.rs index d867082ed5e52..f426fe5437671 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-panic.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-panic.rs @@ -11,15 +11,15 @@ // no-prefer-dynamic // force-host -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] -#![crate_type = "rustc-macro"] 
+#![feature(proc_macro)] +#![feature(proc_macro_lib)] +#![crate_type = "proc-macro"] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(A)] +#[proc_macro_derive(A)] pub fn derive_a(_input: TokenStream) -> TokenStream { panic!("nope!"); } diff --git a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-unstable-2.rs b/src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-unstable-2.rs similarity index 78% rename from src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-unstable-2.rs rename to src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-unstable-2.rs index 9eebad897564a..d8952e3478b6c 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-unstable-2.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-unstable-2.rs @@ -11,15 +11,15 @@ // force-host // no-prefer-dynamic -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] -#![crate_type = "rustc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] +#![crate_type = "proc-macro"] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(Unstable)] +#[proc_macro_derive(Unstable)] pub fn derive(_input: TokenStream) -> TokenStream { " diff --git a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-unstable.rs b/src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-unstable.rs similarity index 78% rename from src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-unstable.rs rename to src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-unstable.rs index f4a1ec9970067..1187b5102e23f 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-unstable.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/auxiliary/derive-unstable.rs @@ -11,15 +11,15 @@ // force-host // no-prefer-dynamic -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] -#![crate_type = "rustc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] +#![crate_type = "proc-macro"] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(Unstable)] +#[proc_macro_derive(Unstable)] pub fn derive(_input: TokenStream) -> TokenStream { "unsafe fn foo() -> u32 { ::std::intrinsics::init() }".parse().unwrap() diff --git a/src/test/compile-fail-fulldeps/rustc-macro/cannot-link.rs b/src/test/compile-fail-fulldeps/proc-macro/cannot-link.rs similarity index 87% rename from src/test/compile-fail-fulldeps/rustc-macro/cannot-link.rs rename to src/test/compile-fail-fulldeps/proc-macro/cannot-link.rs index 1f135330a9995..f6f1be37fc3c9 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/cannot-link.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/cannot-link.rs @@ -11,6 +11,6 @@ // aux-build:derive-a.rs extern crate derive_a; -//~^ ERROR: crates of the `rustc-macro` crate type cannot be linked at runtime +//~^ ERROR: crates of the `proc-macro` crate type cannot be linked at runtime fn main() {} diff --git a/src/test/compile-fail-fulldeps/rustc-macro/define-two.rs b/src/test/compile-fail-fulldeps/proc-macro/define-two.rs similarity index 74% rename from src/test/compile-fail-fulldeps/rustc-macro/define-two.rs rename to src/test/compile-fail-fulldeps/proc-macro/define-two.rs index e4f21dc23840b..420249b258135 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/define-two.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/define-two.rs 
@@ -10,19 +10,19 @@ // no-prefer-dynamic -#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] +#![crate_type = "proc-macro"] +#![feature(proc_macro)] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(A)] +#[proc_macro_derive(A)] pub fn foo(input: TokenStream) -> TokenStream { input } -#[rustc_macro_derive(A)] //~ ERROR: derive mode defined twice in this crate +#[proc_macro_derive(A)] //~ ERROR: derive mode defined twice in this crate pub fn bar(input: TokenStream) -> TokenStream { input } diff --git a/src/test/compile-fail-fulldeps/rustc-macro/derive-bad.rs b/src/test/compile-fail-fulldeps/proc-macro/derive-bad.rs similarity index 96% rename from src/test/compile-fail-fulldeps/rustc-macro/derive-bad.rs rename to src/test/compile-fail-fulldeps/proc-macro/derive-bad.rs index f3a73af299357..4cc6b9f984c94 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/derive-bad.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/derive-bad.rs @@ -10,7 +10,7 @@ // aux-build:derive-bad.rs -#![feature(rustc_macro)] +#![feature(proc_macro)] #[macro_use] extern crate derive_bad; diff --git a/src/test/compile-fail-fulldeps/rustc-macro/derive-still-gated.rs b/src/test/compile-fail-fulldeps/proc-macro/derive-still-gated.rs similarity index 96% rename from src/test/compile-fail-fulldeps/rustc-macro/derive-still-gated.rs rename to src/test/compile-fail-fulldeps/proc-macro/derive-still-gated.rs index a46d79f517f7d..eb0bb00be9175 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/derive-still-gated.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/derive-still-gated.rs @@ -10,7 +10,7 @@ // aux-build:derive-a.rs -#![feature(rustc_macro)] +#![feature(proc_macro)] #![allow(warnings)] #[macro_use] diff --git a/src/test/compile-fail-fulldeps/rustc-macro/expand-to-unstable-2.rs b/src/test/compile-fail-fulldeps/proc-macro/expand-to-unstable-2.rs similarity index 96% rename from src/test/compile-fail-fulldeps/rustc-macro/expand-to-unstable-2.rs rename to src/test/compile-fail-fulldeps/proc-macro/expand-to-unstable-2.rs index 14c3d84e75be3..23dcbe03b5fee 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/expand-to-unstable-2.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/expand-to-unstable-2.rs @@ -10,7 +10,7 @@ // aux-build:derive-unstable-2.rs -#![feature(rustc_macro)] +#![feature(proc_macro)] #![allow(warnings)] #[macro_use] diff --git a/src/test/compile-fail-fulldeps/rustc-macro/expand-to-unstable.rs b/src/test/compile-fail-fulldeps/proc-macro/expand-to-unstable.rs similarity index 96% rename from src/test/compile-fail-fulldeps/rustc-macro/expand-to-unstable.rs rename to src/test/compile-fail-fulldeps/proc-macro/expand-to-unstable.rs index aa9aaa8115628..fb86f6f1b6574 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/expand-to-unstable.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/expand-to-unstable.rs @@ -10,7 +10,7 @@ // aux-build:derive-unstable.rs -#![feature(rustc_macro)] +#![feature(proc_macro)] #![allow(warnings)] #[macro_use] diff --git a/src/test/compile-fail-fulldeps/rustc-macro/export-macro.rs b/src/test/compile-fail-fulldeps/proc-macro/export-macro.rs similarity index 79% rename from src/test/compile-fail-fulldeps/rustc-macro/export-macro.rs rename to src/test/compile-fail-fulldeps/proc-macro/export-macro.rs index 759f3d32e16e6..48b73e7333185 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/export-macro.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/export-macro.rs @@ -8,10 +8,10 
@@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// error-pattern: cannot export macro_rules! macros from a `rustc-macro` crate +// error-pattern: cannot export macro_rules! macros from a `proc-macro` crate -#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] +#![crate_type = "proc-macro"] +#![feature(proc_macro)] #[macro_export] macro_rules! foo { diff --git a/src/test/compile-fail-fulldeps/rustc-macro/exports.rs b/src/test/compile-fail-fulldeps/proc-macro/exports.rs similarity index 95% rename from src/test/compile-fail-fulldeps/rustc-macro/exports.rs rename to src/test/compile-fail-fulldeps/proc-macro/exports.rs index e985356dc5844..41b94d04e8c73 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/exports.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/exports.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![crate_type = "rustc-macro"] +#![crate_type = "proc-macro"] #![allow(warnings)] pub fn a() {} //~ ERROR: cannot export any items diff --git a/src/test/compile-fail-fulldeps/rustc-macro/feature-gate-1.rs b/src/test/compile-fail-fulldeps/proc-macro/feature-gate-1.rs similarity index 83% rename from src/test/compile-fail-fulldeps/rustc-macro/feature-gate-1.rs rename to src/test/compile-fail-fulldeps/proc-macro/feature-gate-1.rs index 86afc08cae861..f5618fc6425e2 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/feature-gate-1.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/feature-gate-1.rs @@ -8,6 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// error-pattern: the `rustc-macro` crate type is experimental +// error-pattern: the `proc-macro` crate type is experimental -#![crate_type = "rustc-macro"] +#![crate_type = "proc-macro"] diff --git a/src/test/compile-fail-fulldeps/rustc-macro/feature-gate-2.rs b/src/test/compile-fail-fulldeps/proc-macro/feature-gate-2.rs similarity index 87% rename from src/test/compile-fail-fulldeps/rustc-macro/feature-gate-2.rs rename to src/test/compile-fail-fulldeps/proc-macro/feature-gate-2.rs index 1a19f6046d9e1..9c4053266f471 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/feature-gate-2.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/feature-gate-2.rs @@ -8,6 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -extern crate rustc_macro; //~ ERROR: use of unstable library feature +extern crate proc_macro; //~ ERROR: use of unstable library feature fn main() {} diff --git a/src/test/compile-fail-fulldeps/rustc-macro/feature-gate-3.rs b/src/test/compile-fail-fulldeps/proc-macro/feature-gate-3.rs similarity index 83% rename from src/test/compile-fail-fulldeps/rustc-macro/feature-gate-3.rs rename to src/test/compile-fail-fulldeps/proc-macro/feature-gate-3.rs index 9f47f07bd023d..bb6b575962250 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/feature-gate-3.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/feature-gate-3.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-#![crate_type = "rustc-macro"] +#![crate_type = "proc-macro"] -#[rustc_macro_derive(Foo)] //~ ERROR: is an experimental feature +#[proc_macro_derive(Foo)] //~ ERROR: is an experimental feature pub fn foo() { } diff --git a/src/test/compile-fail-fulldeps/rustc-macro/feature-gate-4.rs b/src/test/compile-fail-fulldeps/proc-macro/feature-gate-4.rs similarity index 100% rename from src/test/compile-fail-fulldeps/rustc-macro/feature-gate-4.rs rename to src/test/compile-fail-fulldeps/proc-macro/feature-gate-4.rs diff --git a/src/test/compile-fail-fulldeps/rustc-macro/feature-gate-5.rs b/src/test/compile-fail-fulldeps/proc-macro/feature-gate-5.rs similarity index 87% rename from src/test/compile-fail-fulldeps/rustc-macro/feature-gate-5.rs rename to src/test/compile-fail-fulldeps/proc-macro/feature-gate-5.rs index e44b29a170517..672579ce8f14b 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/feature-gate-5.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/feature-gate-5.rs @@ -8,5 +8,5 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#[cfg(rustc_macro)] //~ ERROR: experimental and subject to change +#[cfg(proc_macro)] //~ ERROR: experimental and subject to change fn foo() {} diff --git a/src/test/compile-fail-fulldeps/rustc-macro/import.rs b/src/test/compile-fail-fulldeps/proc-macro/import.rs similarity index 96% rename from src/test/compile-fail-fulldeps/rustc-macro/import.rs rename to src/test/compile-fail-fulldeps/proc-macro/import.rs index c1d0823cb6b84..8f907183cc39b 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/import.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/import.rs @@ -10,7 +10,7 @@ // aux-build:derive-a.rs -#![feature(rustc_macro)] +#![feature(proc_macro)] #![allow(warnings)] #[macro_use] diff --git a/src/test/compile-fail-fulldeps/rustc-macro/load-panic.rs b/src/test/compile-fail-fulldeps/proc-macro/load-panic.rs similarity index 96% rename from src/test/compile-fail-fulldeps/rustc-macro/load-panic.rs rename to src/test/compile-fail-fulldeps/proc-macro/load-panic.rs index 0d08d27c38e46..39c27e82fa5b3 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/load-panic.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/load-panic.rs @@ -10,7 +10,7 @@ // aux-build:derive-panic.rs -#![feature(rustc_macro)] +#![feature(proc_macro)] #[macro_use] extern crate derive_panic; diff --git a/src/test/compile-fail-fulldeps/rustc-macro/require-rustc-macro-crate-type.rs b/src/test/compile-fail-fulldeps/proc-macro/require-rustc-macro-crate-type.rs similarity index 69% rename from src/test/compile-fail-fulldeps/rustc-macro/require-rustc-macro-crate-type.rs rename to src/test/compile-fail-fulldeps/proc-macro/require-rustc-macro-crate-type.rs index cdc50acea9262..44397cdde0c0a 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/require-rustc-macro-crate-type.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/require-rustc-macro-crate-type.rs @@ -8,13 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-#![feature(rustc_macro)] +#![feature(proc_macro)] -extern crate rustc_macro; +extern crate proc_macro; -#[rustc_macro_derive(Foo)] -//~^ ERROR: only usable with crates of the `rustc-macro` crate type -pub fn foo(a: rustc_macro::TokenStream) -> rustc_macro::TokenStream { +#[proc_macro_derive(Foo)] +//~^ ERROR: only usable with crates of the `proc-macro` crate type +pub fn foo(a: proc_macro::TokenStream) -> proc_macro::TokenStream { a } diff --git a/src/test/compile-fail-fulldeps/rustc-macro/shadow-builtin.rs b/src/test/compile-fail-fulldeps/proc-macro/shadow-builtin.rs similarity index 80% rename from src/test/compile-fail-fulldeps/rustc-macro/shadow-builtin.rs rename to src/test/compile-fail-fulldeps/proc-macro/shadow-builtin.rs index 1353a234b4836..5cb2cc59aab9b 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/shadow-builtin.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/shadow-builtin.rs @@ -8,14 +8,14 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] +#![crate_type = "proc-macro"] +#![feature(proc_macro)] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(PartialEq)] +#[proc_macro_derive(PartialEq)] //~^ ERROR: cannot override a built-in #[derive] mode pub fn foo(input: TokenStream) -> TokenStream { input diff --git a/src/test/compile-fail-fulldeps/rustc-macro/shadow.rs b/src/test/compile-fail-fulldeps/proc-macro/shadow.rs similarity index 81% rename from src/test/compile-fail-fulldeps/rustc-macro/shadow.rs rename to src/test/compile-fail-fulldeps/proc-macro/shadow.rs index 33330ed8f6a05..a04756ca19ba7 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/shadow.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/shadow.rs @@ -9,13 +9,12 @@ // except according to those terms. // aux-build:derive-a.rs -// aux-build:derive-a-2.rs -#![feature(rustc_macro)] +#![feature(proc_macro)] #[macro_use] extern crate derive_a; #[macro_use] -extern crate derive_a_2; //~ ERROR: cannot shadow existing derive mode `A` +extern crate derive_a; //~ ERROR `derive_a` has already been defined fn main() {} diff --git a/src/test/compile-fail-fulldeps/rustc-macro/signature.rs b/src/test/compile-fail-fulldeps/proc-macro/signature.rs similarity index 78% rename from src/test/compile-fail-fulldeps/rustc-macro/signature.rs rename to src/test/compile-fail-fulldeps/proc-macro/signature.rs index 9662cc69e1e14..468c970599e84 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/signature.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/signature.rs @@ -8,17 +8,17 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] +#![crate_type = "proc-macro"] +#![feature(proc_macro)] #![allow(warnings)] -extern crate rustc_macro; +extern crate proc_macro; -#[rustc_macro_derive(A)] +#[proc_macro_derive(A)] unsafe extern fn foo(a: i32, b: u32) -> u32 { //~^ ERROR: mismatched types //~| NOTE: expected normal fn, found unsafe fn - //~| NOTE: expected type `fn(rustc_macro::TokenStream) -> rustc_macro::TokenStream` + //~| NOTE: expected type `fn(proc_macro::TokenStream) -> proc_macro::TokenStream` //~| NOTE: found type `unsafe extern "C" fn(i32, u32) -> u32 {foo}` loop {} } diff --git a/src/test/compile-fail-fulldeps/rustc-macro/two-crate-types-1.rs b/src/test/compile-fail-fulldeps/proc-macro/two-crate-types-1.rs similarity index 83% rename from src/test/compile-fail-fulldeps/rustc-macro/two-crate-types-1.rs rename to src/test/compile-fail-fulldeps/proc-macro/two-crate-types-1.rs index 35f6149ad4946..db646fb781544 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/two-crate-types-1.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/two-crate-types-1.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// error-pattern: cannot mix `rustc-macro` crate type with others +// error-pattern: cannot mix `proc-macro` crate type with others -#![crate_type = "rustc-macro"] +#![crate_type = "proc-macro"] #![crate_type = "rlib"] diff --git a/src/test/compile-fail-fulldeps/rustc-macro/two-crate-types-2.rs b/src/test/compile-fail-fulldeps/proc-macro/two-crate-types-2.rs similarity index 78% rename from src/test/compile-fail-fulldeps/rustc-macro/two-crate-types-2.rs rename to src/test/compile-fail-fulldeps/proc-macro/two-crate-types-2.rs index ec95e3e4685be..97b0f8446043d 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/two-crate-types-2.rs +++ b/src/test/compile-fail-fulldeps/proc-macro/two-crate-types-2.rs @@ -8,5 +8,5 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// error-pattern: cannot mix `rustc-macro` crate type with others -// compile-flags: --crate-type rlib --crate-type rustc-macro +// error-pattern: cannot mix `proc-macro` crate type with others +// compile-flags: --crate-type rlib --crate-type proc-macro diff --git a/src/test/compile-fail-fulldeps/qquote.rs b/src/test/compile-fail-fulldeps/qquote.rs index 3e5d17e2ffb17..4a7033d44b878 100644 --- a/src/test/compile-fail-fulldeps/qquote.rs +++ b/src/test/compile-fail-fulldeps/qquote.rs @@ -24,7 +24,7 @@ fn main() { let ps = syntax::parse::ParseSess::new(); let mut resolver = syntax::ext::base::DummyResolver; let mut cx = syntax::ext::base::ExtCtxt::new( - &ps, vec![], + &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); cx.bt_push(syntax::codemap::ExpnInfo { diff --git a/src/test/compile-fail-fulldeps/rustc-macro/attribute.rs b/src/test/compile-fail-fulldeps/rustc-macro/attribute.rs deleted file mode 100644 index 7740238aeacc9..0000000000000 --- a/src/test/compile-fail-fulldeps/rustc-macro/attribute.rs +++ /dev/null @@ -1,46 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
- -#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] - -extern crate rustc_macro; - -#[rustc_macro_derive] -//~^ ERROR: attribute must be of form: #[rustc_macro_derive(TraitName)] -pub fn foo1(input: rustc_macro::TokenStream) -> rustc_macro::TokenStream { - input -} - -#[rustc_macro_derive = "foo"] -//~^ ERROR: attribute must be of form: #[rustc_macro_derive(TraitName)] -pub fn foo2(input: rustc_macro::TokenStream) -> rustc_macro::TokenStream { - input -} - -#[rustc_macro_derive( - a = "b" -)] -//~^^ ERROR: must only be one word -pub fn foo3(input: rustc_macro::TokenStream) -> rustc_macro::TokenStream { - input -} - -#[rustc_macro_derive(b, c)] -//~^ ERROR: attribute must only have one argument -pub fn foo4(input: rustc_macro::TokenStream) -> rustc_macro::TokenStream { - input -} - -#[rustc_macro_derive(d(e))] -//~^ ERROR: must only be one word -pub fn foo5(input: rustc_macro::TokenStream) -> rustc_macro::TokenStream { - input -} diff --git a/src/test/compile-fail/E0002.rs b/src/test/compile-fail/E0004-2.rs similarity index 94% rename from src/test/compile-fail/E0002.rs rename to src/test/compile-fail/E0004-2.rs index 0e94c9595d82c..824b86cfa8312 100644 --- a/src/test/compile-fail/E0002.rs +++ b/src/test/compile-fail/E0004-2.rs @@ -11,5 +11,5 @@ fn main() { let x = Some(1); - match x { } //~ ERROR E0002 + match x { } //~ ERROR E0004 } diff --git a/src/test/compile-fail/E0007.rs b/src/test/compile-fail/E0007.rs index 4be115b8afdac..b72b5e3b2808b 100644 --- a/src/test/compile-fail/E0007.rs +++ b/src/test/compile-fail/E0007.rs @@ -15,6 +15,7 @@ fn main() { //~^ ERROR E0007 //~| NOTE binds an already bound by-move value by moving it //~| ERROR E0303 + //~| NOTE not allowed after `@` None => {}, } } diff --git a/src/test/compile-fail/E0025.rs b/src/test/compile-fail/E0025.rs index 3f5922cdc0231..1d15cef8bc930 100644 --- a/src/test/compile-fail/E0025.rs +++ b/src/test/compile-fail/E0025.rs @@ -15,5 +15,8 @@ struct Foo { fn main() { let x = Foo { a:1, b:2 }; - let Foo { a: x, a: y, b: 0 } = x; //~ ERROR E0025 + let Foo { a: x, a: y, b: 0 } = x; + //~^ ERROR field `a` bound multiple times in the pattern + //~| NOTE multiple uses of `a` in pattern + //~| NOTE first use of `a` } diff --git a/src/test/compile-fail/E0033.rs b/src/test/compile-fail/E0033.rs index d320bcd4d0f55..44f73e10e25d3 100644 --- a/src/test/compile-fail/E0033.rs +++ b/src/test/compile-fail/E0033.rs @@ -15,6 +15,7 @@ trait SomeTrait { fn main() { let trait_obj: &SomeTrait = SomeTrait; //~^ ERROR E0425 + //~| NOTE unresolved name //~| ERROR E0038 //~| method `foo` has no receiver //~| NOTE the trait `SomeTrait` cannot be made into an object diff --git a/src/test/compile-fail/E0035.rs b/src/test/compile-fail/E0035.rs index 43f46e3578c20..9322d21d2a88d 100644 --- a/src/test/compile-fail/E0035.rs +++ b/src/test/compile-fail/E0035.rs @@ -17,4 +17,5 @@ impl Test { fn main() { let x = Test; x.method::(); //~ ERROR E0035 + //~| NOTE called with unneeded type parameters } diff --git a/src/test/compile-fail/E0036.rs b/src/test/compile-fail/E0036.rs index 35fd6e8942fe7..ecb6dac66f218 100644 --- a/src/test/compile-fail/E0036.rs +++ b/src/test/compile-fail/E0036.rs @@ -20,4 +20,5 @@ fn main() { let x = Test; let v = &[0]; x.method::(v); //~ ERROR E0036 + //~| NOTE Passed 2 type arguments, expected 1 } diff --git a/src/test/compile-fail/E0049.rs b/src/test/compile-fail/E0049.rs index 5867e11e9acc6..33ebd3f7aca5e 100644 --- a/src/test/compile-fail/E0049.rs +++ b/src/test/compile-fail/E0049.rs @@ -9,13 +9,14 @@ // except according to those 
terms. trait Foo { - fn foo(x: T) -> Self; + fn foo(x: T) -> Self; //~ NOTE expected 1 type parameter } struct Bar; impl Foo for Bar { fn foo(x: bool) -> Self { Bar } //~ ERROR E0049 + //~| NOTE found 0 type parameters } fn main() { diff --git a/src/test/compile-fail/E0050.rs b/src/test/compile-fail/E0050.rs index 2f7dc96361f9c..5c53d62709aef 100644 --- a/src/test/compile-fail/E0050.rs +++ b/src/test/compile-fail/E0050.rs @@ -9,13 +9,20 @@ // except according to those terms. trait Foo { - fn foo(&self, x: u8) -> bool; + fn foo(&self, x: u8) -> bool; //~ NOTE trait requires 2 parameters + fn bar(&self, x: u8, y: u8, z: u8); //~ NOTE trait requires 4 parameters + fn less(&self); //~ NOTE trait requires 1 parameter } struct Bar; impl Foo for Bar { fn foo(&self) -> bool { true } //~ ERROR E0050 + //~| NOTE expected 2 parameters, found 1 + fn bar(&self) { } //~ ERROR E0050 + //~| NOTE expected 4 parameters, found 1 + fn less(&self, x: u8, y: u8, z: u8) { } //~ ERROR E0050 + //~| NOTE expected 1 parameter, found 4 } fn main() { diff --git a/src/test/compile-fail/E0071.rs b/src/test/compile-fail/E0071.rs index c13ba7bf13671..95653ae83e7cd 100644 --- a/src/test/compile-fail/E0071.rs +++ b/src/test/compile-fail/E0071.rs @@ -9,13 +9,10 @@ // except according to those terms. enum Foo {} +type FooAlias = Foo; fn main() { - let u = Foo { value: 0 }; - //~^ ERROR `Foo` does not name a struct or a struct variant [E0071] - //~| NOTE not a struct - - let t = u32 { value: 4 }; - //~^ ERROR `u32` does not name a struct or a struct variant [E0071] + let u = FooAlias { value: 0 }; + //~^ ERROR expected struct, variant or union type, found enum `Foo` [E0071] //~| NOTE not a struct } diff --git a/src/test/compile-fail/E0164.rs b/src/test/compile-fail/E0164.rs index 1665a80bead77..8d21cde84da1c 100644 --- a/src/test/compile-fail/E0164.rs +++ b/src/test/compile-fail/E0164.rs @@ -8,7 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -enum Foo { B { i: u32 } } +#![feature(associated_consts)] + +enum Foo {} + +impl Foo { + const B: u8 = 0; +} fn bar(foo: Foo) -> u32 { match foo { diff --git a/src/test/compile-fail/E0198.rs b/src/test/compile-fail/E0198.rs new file mode 100644 index 0000000000000..892989cc6a080 --- /dev/null +++ b/src/test/compile-fail/E0198.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(optin_builtin_traits)] + +struct Foo; + +unsafe impl !Clone for Foo { } //~ ERROR negative implementations are not unsafe [E0198] + +fn main() { +} diff --git a/src/test/compile-fail/E0199.rs b/src/test/compile-fail/E0199.rs index 8bd3ffdf6f6ee..1a5cd1941a9d1 100644 --- a/src/test/compile-fail/E0199.rs +++ b/src/test/compile-fail/E0199.rs @@ -12,7 +12,8 @@ struct Foo; -unsafe impl !Clone for Foo { } //~ ERROR E0199 +trait Bar { } +unsafe impl Bar for Foo { } //~ ERROR implementing the trait `Bar` is not unsafe [E0199] fn main() { } diff --git a/src/test/compile-fail/E0220.rs b/src/test/compile-fail/E0220.rs index 17e2b18b3745e..c5a1824514d74 100644 --- a/src/test/compile-fail/E0220.rs +++ b/src/test/compile-fail/E0220.rs @@ -13,7 +13,8 @@ trait Trait { } type Foo = Trait; //~ ERROR E0220 - //~^ ERROR E0191 - + //~| NOTE associated type `F` not found + //~| ERROR E0191 + //~| NOTE missing associated type `Bar` value fn main() { } diff --git a/src/test/compile-fail/E0221.rs b/src/test/compile-fail/E0221.rs index 651054580408d..aed2b4084e810 100644 --- a/src/test/compile-fail/E0221.rs +++ b/src/test/compile-fail/E0221.rs @@ -12,17 +12,27 @@ trait T1 {} trait T2 {} trait Foo { - type A: T1; + type A: T1; //~ NOTE: ambiguous `A` from `Foo` } trait Bar : Foo { - type A: T2; + type A: T2; //~ NOTE: ambiguous `A` from `Bar` fn do_something() { let _: Self::A; //~^ ERROR E0221 //~| NOTE ambiguous associated type `A` - //~| NOTE associated type `Self` could derive from `Foo` - //~| NOTE associated type `Self` could derive from `Bar` + } +} + +trait T3 {} + +trait My : std::str::FromStr { + type Err: T3; //~ NOTE: ambiguous `Err` from `My` + fn test() { + let _: Self::Err; + //~^ ERROR E0221 + //~| NOTE ambiguous associated type `Err` + //~| NOTE associated type `Self` could derive from `std::str::FromStr` } } diff --git a/src/test/compile-fail/E0243.rs b/src/test/compile-fail/E0243.rs index 77c9856c261ff..4434723e12f82 100644 --- a/src/test/compile-fail/E0243.rs +++ b/src/test/compile-fail/E0243.rs @@ -11,7 +11,7 @@ struct Foo { x: T } struct Bar { x: Foo } //~^ ERROR E0243 - //~| NOTE expected 1 type arguments, found 0 + //~| NOTE expected 1 type argument, found 0 fn main() { } diff --git a/src/test/compile-fail/E0277.rs b/src/test/compile-fail/E0277.rs index 12f9417f944cd..e4cb50cd3f253 100644 --- a/src/test/compile-fail/E0277.rs +++ b/src/test/compile-fail/E0277.rs @@ -19,6 +19,6 @@ fn some_func(foo: T) { fn main() { some_func(5i32); //~^ ERROR the trait bound `i32: Foo` is not satisfied - //~| NOTE trait `i32: Foo` not satisfied + //~| NOTE the trait `Foo` is not implemented for `i32` //~| NOTE required by `some_func` } diff --git a/src/test/compile-fail/E0297.rs b/src/test/compile-fail/E0297.rs index 32c129b22a166..5792ba06eb0ce 100644 --- a/src/test/compile-fail/E0297.rs +++ b/src/test/compile-fail/E0297.rs @@ -9,7 +9,7 @@ // except according to those terms. 
fn main() { - let xs : Vec> = vec!(Some(1), None); + let xs : Vec> = vec![Some(1), None]; for Some(x) in xs {} //~^ ERROR E0297 diff --git a/src/test/compile-fail/E0303.rs b/src/test/compile-fail/E0303.rs index 67947fd087c05..e631fe2a8a7cc 100644 --- a/src/test/compile-fail/E0303.rs +++ b/src/test/compile-fail/E0303.rs @@ -10,8 +10,12 @@ fn main() { match Some("hi".to_string()) { - ref op_string_ref @ Some(s) => {}, //~ ERROR E0303 - //~^ ERROR E0009 + ref op_string_ref @ Some(s) => {}, + //~^ ERROR pattern bindings are not allowed after an `@` [E0303] + //~| NOTE not allowed after `@` + //~| ERROR E0009 + //~| NOTE by-move pattern here + //~| NOTE both by-ref and by-move used None => {}, } } diff --git a/src/test/compile-fail/E0408.rs b/src/test/compile-fail/E0408.rs index 43f6d9d757d7e..d75f612482772 100644 --- a/src/test/compile-fail/E0408.rs +++ b/src/test/compile-fail/E0408.rs @@ -12,7 +12,7 @@ fn main() { let x = Some(0); match x { - Some(y) | None => {} //~ ERROR E0408 - _ => () + Some(y) | None => {} //~ ERROR variable `y` from pattern #1 is not bound in pattern #2 + _ => () //~| NOTE pattern doesn't bind `y` } } diff --git a/src/test/compile-fail/E0446.rs b/src/test/compile-fail/E0446.rs index c576661828471..493a272261776 100644 --- a/src/test/compile-fail/E0446.rs +++ b/src/test/compile-fail/E0446.rs @@ -12,6 +12,7 @@ mod Foo { struct Bar(u32); pub fn bar() -> Bar { //~ ERROR E0446 + //~| NOTE can't leak private type Bar(0) } } diff --git a/src/test/compile-fail/E0449.rs b/src/test/compile-fail/E0449.rs index ac365db33e5cd..0b3fdb9e6abe6 100644 --- a/src/test/compile-fail/E0449.rs +++ b/src/test/compile-fail/E0449.rs @@ -15,9 +15,13 @@ trait Foo { } pub impl Bar {} //~ ERROR E0449 + //~| NOTE `pub` not needed here + //~| NOTE place qualifiers on individual impl items instead pub impl Foo for Bar { //~ ERROR E0449 + //~| NOTE `pub` not needed here pub fn foo() {} //~ ERROR E0449 + //~| NOTE `pub` not needed here } fn main() { diff --git a/src/test/compile-fail/E0512.rs b/src/test/compile-fail/E0512.rs index 25f9627164131..2b89873ee45ff 100644 --- a/src/test/compile-fail/E0512.rs +++ b/src/test/compile-fail/E0512.rs @@ -12,4 +12,5 @@ fn takes_u8(_: u8) {} fn main() { unsafe { takes_u8(::std::mem::transmute(0u16)); } //~ ERROR E0512 + //~| transmuting between 16 bits and 8 bits } diff --git a/src/test/compile-fail/E0513.rs b/src/test/compile-fail/E0513.rs new file mode 100644 index 0000000000000..726e23265241d --- /dev/null +++ b/src/test/compile-fail/E0513.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use std::mem; + +fn main() { + unsafe { + let size = mem::size_of::(); + mem::transmute_copy::(&8_8); //~ ERROR E0513 + //~| NOTE no type for variable + } +} diff --git a/src/test/compile-fail/E0520.rs b/src/test/compile-fail/E0520.rs index 0bb8faea62e1e..ff6152d377f67 100644 --- a/src/test/compile-fail/E0520.rs +++ b/src/test/compile-fail/E0520.rs @@ -27,7 +27,7 @@ impl SpaceLlama for i32 { default fn fly(&self) {} //~^ ERROR E0520 //~| NOTE cannot specialize default item `fly` - //~| NOTE either the parent `impl` or `fly` in the parent `impl` must be marked `default` + //~| NOTE `fly` in the parent `impl` must be marked `default` } fn main() { diff --git a/src/test/compile-fail/E0559.rs b/src/test/compile-fail/E0559.rs index aeeeae4222813..fa6c885843e4c 100644 --- a/src/test/compile-fail/E0559.rs +++ b/src/test/compile-fail/E0559.rs @@ -15,5 +15,5 @@ enum Field { fn main() { let s = Field::Fool { joke: 0 }; //~^ ERROR E0559 - //~| NOTE did you mean `x`? + //~| NOTE field does not exist - did you mean `x`? } diff --git a/src/test/compile-fail/E0560.rs b/src/test/compile-fail/E0560.rs new file mode 100644 index 0000000000000..c6326a0f97740 --- /dev/null +++ b/src/test/compile-fail/E0560.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Simba { + mother: u32, +} + +fn main() { + let s = Simba { mother: 1, father: 0 }; + //~^ ERROR E0560 + //~| NOTE `Simba` does not have this field +} diff --git a/src/test/compile-fail/allocator-dylib-is-system.rs b/src/test/compile-fail/allocator-dylib-is-system.rs index db7f304227f01..4c576de22021c 100644 --- a/src/test/compile-fail/allocator-dylib-is-system.rs +++ b/src/test/compile-fail/allocator-dylib-is-system.rs @@ -18,6 +18,8 @@ // system allocator. Do this by linking in jemalloc and making sure that we get // an error. +// ignore-emscripten FIXME: What "other allocator" should we use for emcc? + #![feature(alloc_jemalloc)] extern crate allocator_dylib; diff --git a/src/test/compile-fail/allocator-rust-dylib-is-jemalloc.rs b/src/test/compile-fail/allocator-rust-dylib-is-jemalloc.rs index 46ad226d25564..02c271ab24da3 100644 --- a/src/test/compile-fail/allocator-rust-dylib-is-jemalloc.rs +++ b/src/test/compile-fail/allocator-rust-dylib-is-jemalloc.rs @@ -16,6 +16,8 @@ // Ensure that rust dynamic libraries use jemalloc as their allocator, verifying // by linking in the system allocator here and ensuring that we get a complaint. +// ignore-emscripten FIXME: What "other allocator" is correct for emscripten? + #![feature(alloc_system)] extern crate allocator_dylib2; diff --git a/src/test/compile-fail/associated-path-shl.rs b/src/test/compile-fail/associated-path-shl.rs new file mode 100644 index 0000000000000..6bc110239cdbf --- /dev/null +++ b/src/test/compile-fail/associated-path-shl.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// Check that associated paths starting with `<<` are successfully parsed. + +fn main() { + let _: <::B>::C; //~ ERROR type name `A` is undefined or not in scope + let _ = <::B>::C; //~ ERROR type name `A` is undefined or not in scope + let <::B>::C; //~ ERROR type name `A` is undefined or not in scope + let 0 ... <::B>::C; //~ ERROR type name `A` is undefined or not in scope + //~^ ERROR only char and numeric types are allowed in range patterns + <::B>::C; //~ ERROR type name `A` is undefined or not in scope +} diff --git a/src/test/compile-fail/associated-type-projection-from-multiple-supertraits.rs b/src/test/compile-fail/associated-type-projection-from-multiple-supertraits.rs index a2a11c62bb832..b33bbfd84258f 100644 --- a/src/test/compile-fail/associated-type-projection-from-multiple-supertraits.rs +++ b/src/test/compile-fail/associated-type-projection-from-multiple-supertraits.rs @@ -13,13 +13,19 @@ pub trait Vehicle { type Color; + //~^ NOTE ambiguous `Color` from `Vehicle` + //~| NOTE ambiguous `Color` from `Vehicle` + //~| NOTE ambiguous `Color` from `Vehicle` fn go(&self) { } } pub trait Box { type Color; - + //~^ NOTE ambiguous `Color` from `Box` + //~| NOTE ambiguous `Color` from `Box` + //~| NOTE ambiguous `Color` from `Box` + // fn mail(&self) { } } @@ -29,24 +35,18 @@ pub trait BoxCar : Box + Vehicle { fn dent(c: C, color: C::Color) { //~^ ERROR ambiguous associated type `Color` in bounds of `C` //~| NOTE ambiguous associated type `Color` - //~| NOTE could derive from `Vehicle` - //~| NOTE could derive from `Box` } fn dent_object(c: BoxCar) { //~^ ERROR ambiguous associated type //~| ERROR the value of the associated type `Color` (from the trait `Vehicle`) must be specified //~| NOTE ambiguous associated type `Color` - //~| NOTE could derive from `Vehicle` - //~| NOTE could derive from `Box` //~| NOTE missing associated type `Color` value } fn paint(c: C, d: C::Color) { //~^ ERROR ambiguous associated type `Color` in bounds of `C` //~| NOTE ambiguous associated type `Color` - //~| NOTE could derive from `Vehicle` - //~| NOTE could derive from `Box` } pub fn main() { } diff --git a/src/test/compile-fail/associated-types-ICE-when-projecting-out-of-err.rs b/src/test/compile-fail/associated-types-ICE-when-projecting-out-of-err.rs index 084616964674f..5a19aecf667f0 100644 --- a/src/test/compile-fail/associated-types-ICE-when-projecting-out-of-err.rs +++ b/src/test/compile-fail/associated-types-ICE-when-projecting-out-of-err.rs @@ -32,5 +32,5 @@ fn ice(a: A) { let r = loop {}; r = r + a; //~^ ERROR the trait bound `(): Add` is not satisfied - //~| NOTE trait `(): Add` not satisfied + //~| NOTE the trait `Add` is not implemented for `()` } diff --git a/src/test/compile-fail/E0422.rs b/src/test/compile-fail/associated-types/issue-36499.rs similarity index 83% rename from src/test/compile-fail/E0422.rs rename to src/test/compile-fail/associated-types/issue-36499.rs index 61e96b896a66a..b5b3ecbb580ae 100644 --- a/src/test/compile-fail/E0422.rs +++ b/src/test/compile-fail/associated-types/issue-36499.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-fn main () { - let x = Foo { x: 1, y: 2 }; - //~^ ERROR E0422 - //~| NOTE not a structure +// error-pattern: aborting due to previous error + +fn main() { + 2 + +2; } diff --git a/src/test/compile-fail/attempted-access-non-fatal.rs b/src/test/compile-fail/attempted-access-non-fatal.rs index 1d9249bc17b1f..fe8e793ed781a 100644 --- a/src/test/compile-fail/attempted-access-non-fatal.rs +++ b/src/test/compile-fail/attempted-access-non-fatal.rs @@ -11,6 +11,6 @@ // Check that bogus field access is non-fatal fn main() { let x = 0; - let _ = x.foo; //~ ERROR attempted access of field - let _ = x.bar; //~ ERROR attempted access of field + let _ = x.foo; //~ no field `foo` on type `{integer}` + let _ = x.bar; //~ no field `bar` on type `{integer}` } diff --git a/src/test/compile-fail/attr-on-generic-formals-are-visited.rs b/src/test/compile-fail/attr-on-generic-formals-are-visited.rs new file mode 100644 index 0000000000000..c902cfdd756df --- /dev/null +++ b/src/test/compile-fail/attr-on-generic-formals-are-visited.rs @@ -0,0 +1,75 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// This test ensures that attributes on formals in generic parameter +// lists are included when we are checking for unstable attributes. +// +// Note that feature(generic_param_attrs) *is* enabled here. We are +// checking feature-gating of the attributes themselves, not the +// capability to parse such attributes in that context. + +#![feature(generic_param_attrs)] +#![allow(dead_code)] + +struct StLt<#[lt_struct] 'a>(&'a u32); +//~^ ERROR The attribute `lt_struct` is currently unknown to the compiler +struct StTy<#[ty_struct] I>(I); +//~^ ERROR The attribute `ty_struct` is currently unknown to the compiler + +enum EnLt<#[lt_enum] 'b> { A(&'b u32), B } +//~^ ERROR The attribute `lt_enum` is currently unknown to the compiler +enum EnTy<#[ty_enum] J> { A(J), B } +//~^ ERROR The attribute `ty_enum` is currently unknown to the compiler + +trait TrLt<#[lt_trait] 'c> { fn foo(&self, _: &'c [u32]) -> &'c u32; } +//~^ ERROR The attribute `lt_trait` is currently unknown to the compiler +trait TrTy<#[ty_trait] K> { fn foo(&self, _: K); } +//~^ ERROR The attribute `ty_trait` is currently unknown to the compiler + +type TyLt<#[lt_type] 'd> = &'d u32; +//~^ ERROR The attribute `lt_type` is currently unknown to the compiler +type TyTy<#[ty_type] L> = (L, ); +//~^ ERROR The attribute `ty_type` is currently unknown to the compiler + +impl<#[lt_inherent] 'e> StLt<'e> { } +//~^ ERROR The attribute `lt_inherent` is currently unknown to the compiler +impl<#[ty_inherent] M> StTy { } +//~^ ERROR The attribute `ty_inherent` is currently unknown to the compiler + +impl<#[lt_impl_for] 'f> TrLt<'f> for StLt<'f> { + //~^ ERROR The attribute `lt_impl_for` is currently unknown to the compiler + fn foo(&self, _: &'f [u32]) -> &'f u32 { loop { } } +} +impl<#[ty_impl_for] N> TrTy for StTy { + //~^ ERROR The attribute `ty_impl_for` is currently unknown to the compiler + fn foo(&self, _: N) { } +} + +fn f_lt<#[lt_fn] 'g>(_: &'g [u32]) -> &'g u32 { loop { } } +//~^ ERROR The attribute `lt_fn` is currently unknown to the compiler +fn f_ty<#[ty_fn] O>(_: O) { } +//~^ ERROR The attribute `ty_fn` is currently unknown to the compiler + +impl StTy { + 
fn m_lt<#[lt_meth] 'h>(_: &'h [u32]) -> &'h u32 { loop { } } + //~^ ERROR The attribute `lt_meth` is currently unknown to the compiler + fn m_ty<#[ty_meth] P>(_: P) { } + //~^ ERROR The attribute `ty_meth` is currently unknown to the compiler +} + +fn hof_lt(_: Q) + where Q: for <#[lt_hof] 'i> Fn(&'i [u32]) -> &'i u32 + //~^ ERROR The attribute `lt_hof` is currently unknown to the compiler +{ +} + +fn main() { + +} diff --git a/src/test/compile-fail/attr-on-generic-formals-wo-feature-gate.rs b/src/test/compile-fail/attr-on-generic-formals-wo-feature-gate.rs new file mode 100644 index 0000000000000..944802f450a6d --- /dev/null +++ b/src/test/compile-fail/attr-on-generic-formals-wo-feature-gate.rs @@ -0,0 +1,76 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// This test ensures that attributes on formals in generic parameter +// lists are rejected if feature(generic_param_attrs) is not enabled. +// +// (We are prefixing all tested features with `rustc_`, to ensure that +// the attributes themselves won't be rejected by the compiler when +// using `rustc_attrs` feature. There is a separate compile-fail/ test +// ensuring that the attribute feature-gating works in this context.) + +#![feature(rustc_attrs)] +#![allow(dead_code)] + +struct StLt<#[rustc_lt_struct] 'a>(&'a u32); +//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761) +struct StTy<#[rustc_ty_struct] I>(I); +//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761) + +enum EnLt<#[rustc_lt_enum] 'b> { A(&'b u32), B } +//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761) +enum EnTy<#[rustc_ty_enum] J> { A(J), B } +//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761) + +trait TrLt<#[rustc_lt_trait] 'c> { fn foo(&self, _: &'c [u32]) -> &'c u32; } +//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761) +trait TrTy<#[rustc_ty_trait] K> { fn foo(&self, _: K); } +//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761) + +type TyLt<#[rustc_lt_type] 'd> = &'d u32; +//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761) +type TyTy<#[rustc_ty_type] L> = (L, ); +//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761) + +impl<#[rustc_lt_inherent] 'e> StLt<'e> { } +//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761) +impl<#[rustc_ty_inherent] M> StTy { } +//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761) + +impl<#[rustc_lt_impl_for] 'f> TrLt<'f> for StLt<'f> { + //~^ ERROR attributes on lifetime bindings are experimental (see issue #34761) + fn foo(&self, _: &'f [u32]) -> &'f u32 { loop { } } +} +impl<#[rustc_ty_impl_for] N> TrTy for StTy { + //~^ ERROR attributes on type parameter bindings are experimental (see issue #34761) + fn foo(&self, _: N) { } +} + +fn f_lt<#[rustc_lt_fn] 'g>(_: &'g [u32]) -> &'g u32 { loop { } } +//~^ ERROR attributes on lifetime bindings are experimental (see issue #34761) +fn f_ty<#[rustc_ty_fn] O>(_: O) { } +//~^ ERROR attributes on type parameter bindings are experimental (see issue #34761) + +impl StTy { + fn m_lt<#[rustc_lt_meth] 
'h>(_: &'h [u32]) -> &'h u32 { loop { } } + //~^ ERROR attributes on lifetime bindings are experimental (see issue #34761) + fn m_ty<#[rustc_ty_meth] P>(_: P) { } + //~^ ERROR attributes on type parameter bindings are experimental (see issue #34761) +} + +fn hof_lt(_: Q) + where Q: for <#[rustc_lt_hof] 'i> Fn(&'i [u32]) -> &'i u32 + //~^ ERROR attributes on lifetime bindings are experimental (see issue #34761) +{ +} + +fn main() { + +} diff --git a/src/test/compile-fail/struct-pat-associated-path.rs b/src/test/compile-fail/attrs-with-no-formal-in-generics-1.rs similarity index 55% rename from src/test/compile-fail/struct-pat-associated-path.rs rename to src/test/compile-fail/attrs-with-no-formal-in-generics-1.rs index d3f840f4fe976..53e287cda208a 100644 --- a/src/test/compile-fail/struct-pat-associated-path.rs +++ b/src/test/compile-fail/attrs-with-no-formal-in-generics-1.rs @@ -8,30 +8,19 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -struct S; +// This test checks variations on `<#[attr] 'a, #[oops]>`, where +// `#[oops]` is left dangling (that is, it is unattached, with no +// formal binding following it). -trait Tr { - type A; -} +#![feature(generic_param_attrs, rustc_attrs)] +#![allow(dead_code)] -impl Tr for S { - type A = S; -} +struct RefIntPair<'a, 'b>(&'a u32, &'b u32); -fn f() { - match S { - T::A {} => {} //~ ERROR `T::A` does not name a struct or a struct variant - } -} - -fn g>() { - match S { - T::A {} => {} //~ ERROR `T::A` does not name a struct or a struct variant - } +impl<#[rustc_1] 'a, 'b, #[oops]> RefIntPair<'a, 'b> { + //~^ ERROR trailing attribute after lifetime parameters } fn main() { - match S { - S::A {} => {} //~ ERROR ambiguous associated type - } + } diff --git a/src/test/compile-fail/attrs-with-no-formal-in-generics-2.rs b/src/test/compile-fail/attrs-with-no-formal-in-generics-2.rs new file mode 100644 index 0000000000000..a38a7bfb93785 --- /dev/null +++ b/src/test/compile-fail/attrs-with-no-formal-in-generics-2.rs @@ -0,0 +1,26 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// This test checks variations on `<#[attr] 'a, #[oops]>`, where +// `#[oops]` is left dangling (that is, it is unattached, with no +// formal binding following it). + +#![feature(generic_param_attrs, rustc_attrs)] +#![allow(dead_code)] + +struct RefAny<'a, T>(&'a T); + +impl<#[rustc_1] 'a, #[rustc_2] T, #[oops]> RefAny<'a, T> { + //~^ ERROR expected identifier, found `>` +} + +fn main() { + +} diff --git a/src/test/compile-fail/attrs-with-no-formal-in-generics-3.rs b/src/test/compile-fail/attrs-with-no-formal-in-generics-3.rs new file mode 100644 index 0000000000000..e7d5b94d24226 --- /dev/null +++ b/src/test/compile-fail/attrs-with-no-formal-in-generics-3.rs @@ -0,0 +1,26 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// This test checks variations on `<#[attr] 'a, #[oops]>`, where +// `#[oops]` is left dangling (that is, it is unattached, with no +// formal binding following it). + +struct RefIntPair<'a, 'b>(&'a u32, &'b u32); + +fn hof_lt(_: Q) + where Q: for <#[rustc_1] 'a, 'b, #[oops]> Fn(RefIntPair<'a,'b>) -> &'b u32 + //~^ ERROR trailing attribute after lifetime parameters +{ + +} + +fn main() { + +} diff --git a/src/test/compile-fail/auto-ref-slice-plus-ref.rs b/src/test/compile-fail/auto-ref-slice-plus-ref.rs index f0f0bdfb38ee3..324e925964779 100644 --- a/src/test/compile-fail/auto-ref-slice-plus-ref.rs +++ b/src/test/compile-fail/auto-ref-slice-plus-ref.rs @@ -14,7 +14,7 @@ fn main() { // Testing that method lookup does not automatically borrow // vectors to slices then automatically create a self reference. - let mut a = vec!(0); + let mut a = vec![0]; a.test_mut(); //~ ERROR no method named `test_mut` found a.test(); //~ ERROR no method named `test` found diff --git a/src/test/compile-fail/auxiliary/import_crate_var.rs b/src/test/compile-fail/auxiliary/import_crate_var.rs new file mode 100644 index 0000000000000..1dfc7a128aa1c --- /dev/null +++ b/src/test/compile-fail/auxiliary/import_crate_var.rs @@ -0,0 +1,12 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[macro_export] +macro_rules! m { () => { use $crate; } } diff --git a/src/test/compile-fail/auxiliary/issue-36708.rs b/src/test/compile-fail/auxiliary/issue-36708.rs new file mode 100644 index 0000000000000..e64e63a2139e9 --- /dev/null +++ b/src/test/compile-fail/auxiliary/issue-36708.rs @@ -0,0 +1,15 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "lib"] + +pub trait Foo { + fn foo(); +} diff --git a/src/test/compile-fail/auxiliary/issue_19452_aux.rs b/src/test/compile-fail/auxiliary/issue_19452_aux.rs new file mode 100644 index 0000000000000..205566e4b1f54 --- /dev/null +++ b/src/test/compile-fail/auxiliary/issue_19452_aux.rs @@ -0,0 +1,13 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub enum Homura { + Madoka { age: u32 } +} diff --git a/src/test/compile-fail/auxiliary/namespace-mix-new.rs b/src/test/compile-fail/auxiliary/namespace-mix-new.rs new file mode 100644 index 0000000000000..88e8b0d56fe3d --- /dev/null +++ b/src/test/compile-fail/auxiliary/namespace-mix-new.rs @@ -0,0 +1,78 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(item_like_imports, relaxed_adts)] + +pub mod c { + pub struct S {} + pub struct TS(); + pub struct US; + pub enum E { + V {}, + TV(), + UV, + } + + pub struct Item; +} + +pub mod xm1 { + pub use ::c::*; + pub type S = ::c::Item; +} +pub mod xm2 { + pub use ::c::*; + pub const S: ::c::Item = ::c::Item; +} + +pub mod xm3 { + pub use ::c::*; + pub type TS = ::c::Item; +} +pub mod xm4 { + pub use ::c::*; + pub const TS: ::c::Item = ::c::Item; +} + +pub mod xm5 { + pub use ::c::*; + pub type US = ::c::Item; +} +pub mod xm6 { + pub use ::c::*; + pub const US: ::c::Item = ::c::Item; +} + +pub mod xm7 { + pub use ::c::E::*; + pub type V = ::c::Item; +} +pub mod xm8 { + pub use ::c::E::*; + pub const V: ::c::Item = ::c::Item; +} + +pub mod xm9 { + pub use ::c::E::*; + pub type TV = ::c::Item; +} +pub mod xmA { + pub use ::c::E::*; + pub const TV: ::c::Item = ::c::Item; +} + +pub mod xmB { + pub use ::c::E::*; + pub type UV = ::c::Item; +} +pub mod xmC { + pub use ::c::E::*; + pub const UV: ::c::Item = ::c::Item; +} diff --git a/src/test/compile-fail/auxiliary/namespace-mix-old.rs b/src/test/compile-fail/auxiliary/namespace-mix-old.rs new file mode 100644 index 0000000000000..7bbba7163b557 --- /dev/null +++ b/src/test/compile-fail/auxiliary/namespace-mix-old.rs @@ -0,0 +1,85 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// FIXME: Remove when `item_like_imports` is stabilized. + +#![feature(relaxed_adts)] + +pub mod c { + pub struct S {} + pub struct TS(); + pub struct US; + pub enum E { + V {}, + TV(), + UV, + } + + pub struct Item; +} + +pub mod proxy { + pub use c::*; + pub use c::E::*; +} + +pub mod xm1 { + pub use ::proxy::*; + pub type S = ::c::Item; +} +pub mod xm2 { + pub use ::proxy::*; + pub const S: ::c::Item = ::c::Item; +} + +pub mod xm3 { + pub use ::proxy::*; + pub type TS = ::c::Item; +} +pub mod xm4 { + pub use ::proxy::*; + pub const TS: ::c::Item = ::c::Item; +} + +pub mod xm5 { + pub use ::proxy::*; + pub type US = ::c::Item; +} +pub mod xm6 { + pub use ::proxy::*; + pub const US: ::c::Item = ::c::Item; +} + +pub mod xm7 { + pub use ::proxy::*; + pub type V = ::c::Item; +} +pub mod xm8 { + pub use ::proxy::*; + pub const V: ::c::Item = ::c::Item; +} + +pub mod xm9 { + pub use ::proxy::*; + pub type TV = ::c::Item; +} +pub mod xmA { + pub use ::proxy::*; + pub const TV: ::c::Item = ::c::Item; +} + +pub mod xmB { + pub use ::proxy::*; + pub type UV = ::c::Item; +} +pub mod xmC { + pub use ::proxy::*; + pub const UV: ::c::Item = ::c::Item; +} diff --git a/src/test/compile-fail/bad-expr-path.rs b/src/test/compile-fail/bad-expr-path.rs index c35c9255ed28e..c18a318347745 100644 --- a/src/test/compile-fail/bad-expr-path.rs +++ b/src/test/compile-fail/bad-expr-path.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// error-pattern: unresolved name `m1::arguments`. Did you mean `arguments`? 
+// error-pattern: unresolved name `m1::arguments` mod m1 {} diff --git a/src/test/compile-fail/bad-expr-path2.rs b/src/test/compile-fail/bad-expr-path2.rs index af34887dec954..e1c1afb0049d7 100644 --- a/src/test/compile-fail/bad-expr-path2.rs +++ b/src/test/compile-fail/bad-expr-path2.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// error-pattern: unresolved name `m1::arguments`. Did you mean `arguments`? +// error-pattern: unresolved name `m1::arguments` mod m1 { pub mod arguments {} diff --git a/src/test/compile-fail/blind-item-block-middle.rs b/src/test/compile-fail/blind-item-block-middle.rs index f57727b773d63..0db7eaf0ca7c3 100644 --- a/src/test/compile-fail/blind-item-block-middle.rs +++ b/src/test/compile-fail/blind-item-block-middle.rs @@ -12,6 +12,6 @@ mod foo { pub struct bar; } fn main() { let bar = 5; - //~^ ERROR let bindings cannot shadow structs + //~^ ERROR let bindings cannot shadow unit structs use foo::bar; } diff --git a/src/test/compile-fail/borrowck/borrowck-assign-comp-idx.rs b/src/test/compile-fail/borrowck/borrowck-assign-comp-idx.rs index b18df7f3db608..1e665a12a195b 100644 --- a/src/test/compile-fail/borrowck/borrowck-assign-comp-idx.rs +++ b/src/test/compile-fail/borrowck/borrowck-assign-comp-idx.rs @@ -14,7 +14,7 @@ struct Point { } fn a() { - let mut p = vec!(1); + let mut p = vec![1]; // Create an immutable pointer into p's contents: let q: &isize = &p[0]; @@ -30,7 +30,7 @@ fn b() { // here we alias the mutable vector into an imm slice and try to // modify the original: - let mut p = vec!(1); + let mut p = vec![1]; borrow( &p, @@ -40,7 +40,7 @@ fn b() { fn c() { // Legal because the scope of the borrow does not include the // modification: - let mut p = vec!(1); + let mut p = vec![1]; borrow(&p, ||{}); p[0] = 5; } diff --git a/src/test/compile-fail/borrowck/borrowck-borrowed-uniq-rvalue-2.rs b/src/test/compile-fail/borrowck/borrowck-borrowed-uniq-rvalue-2.rs index 7b811f581c1a7..9178aadeeebe1 100644 --- a/src/test/compile-fail/borrowck/borrowck-borrowed-uniq-rvalue-2.rs +++ b/src/test/compile-fail/borrowck/borrowck-borrowed-uniq-rvalue-2.rs @@ -29,6 +29,6 @@ fn defer<'r>(x: &'r [&'r str]) -> defer<'r> { } fn main() { - let x = defer(&vec!("Goodbye", "world!")); + let x = defer(&vec!["Goodbye", "world!"]); x.x[0]; } diff --git a/src/test/compile-fail/borrowck/borrowck-loan-vec-content.rs b/src/test/compile-fail/borrowck/borrowck-loan-vec-content.rs index 21d9dea77b26a..c5de95f8fc042 100644 --- a/src/test/compile-fail/borrowck/borrowck-loan-vec-content.rs +++ b/src/test/compile-fail/borrowck/borrowck-loan-vec-content.rs @@ -17,12 +17,12 @@ fn takes_imm_elt(_v: &isize, f: F) where F: FnOnce() { } fn has_mut_vec_and_does_not_try_to_change_it() { - let mut v: Vec = vec!(1, 2, 3); + let mut v: Vec = vec![1, 2, 3]; takes_imm_elt(&v[0], || {}) } fn has_mut_vec_but_tries_to_change_it() { - let mut v: Vec = vec!(1, 2, 3); + let mut v: Vec = vec![1, 2, 3]; takes_imm_elt( &v[0], || { //~ ERROR cannot borrow `v` as mutable diff --git a/src/test/compile-fail/borrowck/borrowck-move-out-of-overloaded-auto-deref.rs b/src/test/compile-fail/borrowck/borrowck-move-out-of-overloaded-auto-deref.rs index 27cef1f3c6068..bf4c74741368c 100644 --- a/src/test/compile-fail/borrowck/borrowck-move-out-of-overloaded-auto-deref.rs +++ b/src/test/compile-fail/borrowck/borrowck-move-out-of-overloaded-auto-deref.rs @@ -11,6 +11,6 @@ use std::rc::Rc; pub fn main() { - let _x = Rc::new(vec!(1, 2)).into_iter(); + let _x = 
Rc::new(vec![1, 2]).into_iter(); //~^ ERROR cannot move out of borrowed content } diff --git a/src/test/compile-fail/borrowck/borrowck-move-out-of-vec-tail.rs b/src/test/compile-fail/borrowck/borrowck-move-out-of-vec-tail.rs index 51e00a0ad2c3a..311208f07b88d 100644 --- a/src/test/compile-fail/borrowck/borrowck-move-out-of-vec-tail.rs +++ b/src/test/compile-fail/borrowck/borrowck-move-out-of-vec-tail.rs @@ -18,11 +18,11 @@ struct Foo { } pub fn main() { - let x = vec!( + let x = vec![ Foo { string: "foo".to_string() }, Foo { string: "bar".to_string() }, Foo { string: "baz".to_string() } - ); + ]; let x: &[Foo] = &x; match *x { [_, ref tail..] => { diff --git a/src/test/compile-fail/borrowck/borrowck-mut-slice-of-imm-vec.rs b/src/test/compile-fail/borrowck/borrowck-mut-slice-of-imm-vec.rs index 9341758afd82e..4e0304e20c00d 100644 --- a/src/test/compile-fail/borrowck/borrowck-mut-slice-of-imm-vec.rs +++ b/src/test/compile-fail/borrowck/borrowck-mut-slice-of-imm-vec.rs @@ -13,6 +13,6 @@ fn write(v: &mut [isize]) { } fn main() { - let v = vec!(1, 2, 3); + let v = vec![1, 2, 3]; write(&mut v); //~ ERROR cannot borrow } diff --git a/src/test/compile-fail/borrowck/borrowck-overloaded-index-move-from-vec.rs b/src/test/compile-fail/borrowck/borrowck-overloaded-index-move-from-vec.rs index 1b62d9c326d77..df72c2b0af72a 100644 --- a/src/test/compile-fail/borrowck/borrowck-overloaded-index-move-from-vec.rs +++ b/src/test/compile-fail/borrowck/borrowck-overloaded-index-move-from-vec.rs @@ -25,7 +25,7 @@ impl Index for MyVec { } fn main() { - let v = MyVec::> { data: vec!(box 1, box 2, box 3) }; + let v = MyVec::> { data: vec![box 1, box 2, box 3] }; let good = &v[0]; // Shouldn't fail here let bad = v[0]; //~^ ERROR cannot move out of indexed content diff --git a/src/test/compile-fail/borrowck/borrowck-use-uninitialized-in-cast.rs b/src/test/compile-fail/borrowck/borrowck-use-uninitialized-in-cast.rs index a3d5af80b533b..3f429bbd4b634 100644 --- a/src/test/compile-fail/borrowck/borrowck-use-uninitialized-in-cast.rs +++ b/src/test/compile-fail/borrowck/borrowck-use-uninitialized-in-cast.rs @@ -12,8 +12,6 @@ // The problem was specified to casting to `*`, as creating unsafe // pointers was not being fully checked. Issue #20791. -// pretty-expanded FIXME #23616 - fn main() { let x: &i32; let y = x as *const i32; //~ ERROR use of possibly uninitialized variable: `*x` diff --git a/src/test/compile-fail/borrowck/borrowck-vec-pattern-element-loan.rs b/src/test/compile-fail/borrowck/borrowck-vec-pattern-element-loan.rs index 63e80b90ac81e..eb5d69d49bd6a 100644 --- a/src/test/compile-fail/borrowck/borrowck-vec-pattern-element-loan.rs +++ b/src/test/compile-fail/borrowck/borrowck-vec-pattern-element-loan.rs @@ -12,7 +12,7 @@ #![feature(slice_patterns)] fn a<'a>() -> &'a [isize] { - let vec = vec!(1, 2, 3, 4); + let vec = vec![1, 2, 3, 4]; let vec: &[isize] = &vec; //~ ERROR does not live long enough let tail = match vec { &[_, ref tail..] 
=> tail, @@ -22,7 +22,7 @@ fn a<'a>() -> &'a [isize] { } fn b<'a>() -> &'a [isize] { - let vec = vec!(1, 2, 3, 4); + let vec = vec![1, 2, 3, 4]; let vec: &[isize] = &vec; //~ ERROR does not live long enough let init = match vec { &[ref init.., _] => init, @@ -32,7 +32,7 @@ fn b<'a>() -> &'a [isize] { } fn c<'a>() -> &'a [isize] { - let vec = vec!(1, 2, 3, 4); + let vec = vec![1, 2, 3, 4]; let vec: &[isize] = &vec; //~ ERROR does not live long enough let slice = match vec { &[_, ref slice.., _] => slice, diff --git a/src/test/compile-fail/borrowck/borrowck-vec-pattern-loan-from-mut.rs b/src/test/compile-fail/borrowck/borrowck-vec-pattern-loan-from-mut.rs index 9dfd4d7792843..505c8c6d53581 100644 --- a/src/test/compile-fail/borrowck/borrowck-vec-pattern-loan-from-mut.rs +++ b/src/test/compile-fail/borrowck/borrowck-vec-pattern-loan-from-mut.rs @@ -11,7 +11,7 @@ #![feature(slice_patterns)] fn a() { - let mut v = vec!(1, 2, 3); + let mut v = vec![1, 2, 3]; let vb: &mut [isize] = &mut v; match vb { &mut [_a, ref tail..] => { diff --git a/src/test/compile-fail/borrowck/borrowck-vec-pattern-nesting.rs b/src/test/compile-fail/borrowck/borrowck-vec-pattern-nesting.rs index ae001e4e34d16..d26364efdbc5d 100644 --- a/src/test/compile-fail/borrowck/borrowck-vec-pattern-nesting.rs +++ b/src/test/compile-fail/borrowck/borrowck-vec-pattern-nesting.rs @@ -25,7 +25,7 @@ fn a() { } fn b() { - let mut vec = vec!(box 1, box 2, box 3); + let mut vec = vec![box 1, box 2, box 3]; let vec: &mut [Box] = &mut vec; match vec { &mut [ref _b..] => { @@ -37,7 +37,7 @@ fn b() { } fn c() { - let mut vec = vec!(box 1, box 2, box 3); + let mut vec = vec![box 1, box 2, box 3]; let vec: &mut [Box] = &mut vec; match vec { &mut [_a, //~ ERROR cannot move out @@ -59,7 +59,7 @@ fn c() { } fn d() { - let mut vec = vec!(box 1, box 2, box 3); + let mut vec = vec![box 1, box 2, box 3]; let vec: &mut [Box] = &mut vec; match vec { &mut [ //~ ERROR cannot move out @@ -73,7 +73,7 @@ fn d() { } fn e() { - let mut vec = vec!(box 1, box 2, box 3); + let mut vec = vec![box 1, box 2, box 3]; let vec: &mut [Box] = &mut vec; match vec { &mut [_a, _b, _c] => {} //~ ERROR cannot move out diff --git a/src/test/compile-fail/borrowck/borrowck-vec-pattern-tail-element-loan.rs b/src/test/compile-fail/borrowck/borrowck-vec-pattern-tail-element-loan.rs index a849e4e2faf3b..cd8f3ebefe628 100644 --- a/src/test/compile-fail/borrowck/borrowck-vec-pattern-tail-element-loan.rs +++ b/src/test/compile-fail/borrowck/borrowck-vec-pattern-tail-element-loan.rs @@ -11,7 +11,7 @@ #![feature(slice_patterns)] fn a<'a>() -> &'a isize { - let vec = vec!(1, 2, 3, 4); + let vec = vec![1, 2, 3, 4]; let vec: &[isize] = &vec; //~ ERROR `vec` does not live long enough let tail = match vec { &[_a, ref tail..] 
=> &tail[0], diff --git a/src/test/compile-fail/cast-rfc0401.rs b/src/test/compile-fail/cast-rfc0401.rs index b6e81504a9d24..0c373057c76e0 100644 --- a/src/test/compile-fail/cast-rfc0401.rs +++ b/src/test/compile-fail/cast-rfc0401.rs @@ -92,7 +92,7 @@ fn main() let _ = v as *const [u8]; //~ ERROR cannot cast let _ = fat_v as *const Foo; //~^ ERROR the trait bound `[u8]: std::marker::Sized` is not satisfied - //~| NOTE trait `[u8]: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `[u8]` //~| NOTE `[u8]` does not have a constant size known at compile-time //~| NOTE required for the cast to the object type `Foo` let _ = foo as *const str; //~ ERROR casting @@ -107,12 +107,12 @@ fn main() let a : *const str = "hello"; let _ = a as *const Foo; //~^ ERROR the trait bound `str: std::marker::Sized` is not satisfied - //~| NOTE trait `str: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `str` //~| NOTE `str` does not have a constant size known at compile-time //~| NOTE required for the cast to the object type `Foo` // check no error cascade - let _ = main.f as *const u32; //~ ERROR attempted access of field + let _ = main.f as *const u32; //~ no field `f` on type `fn() {main}` let cf: *const Foo = &0; let _ = cf as *const [u16]; diff --git a/src/test/compile-fail/coherence-cow.rs b/src/test/compile-fail/coherence-cow.rs index 6a2d1bac49381..86ae5b44d9db0 100644 --- a/src/test/compile-fail/coherence-cow.rs +++ b/src/test/compile-fail/coherence-cow.rs @@ -12,8 +12,6 @@ // aux-build:coherence_lib.rs -// pretty-expanded FIXME #23616 - // Test that the `Pair` type reports an error if it contains type // parameters, even when they are covered by local types. This test // was originally intended to test the opposite, but the rules changed diff --git a/src/test/compile-fail/coherence-vec-local-2.rs b/src/test/compile-fail/coherence-vec-local-2.rs index 5f0b56af2c226..196c2f4ee3cd3 100644 --- a/src/test/compile-fail/coherence-vec-local-2.rs +++ b/src/test/compile-fail/coherence-vec-local-2.rs @@ -13,8 +13,6 @@ // aux-build:coherence_lib.rs -// pretty-expanded FIXME #23616 - extern crate coherence_lib as lib; use lib::Remote; diff --git a/src/test/compile-fail/coherence-vec-local.rs b/src/test/compile-fail/coherence-vec-local.rs index c354caac2b5c2..49822dcfcb3f0 100644 --- a/src/test/compile-fail/coherence-vec-local.rs +++ b/src/test/compile-fail/coherence-vec-local.rs @@ -13,8 +13,6 @@ // aux-build:coherence_lib.rs -// pretty-expanded FIXME #23616 - extern crate coherence_lib as lib; use lib::Remote; diff --git a/src/test/compile-fail/const-eval-overflow-2.rs b/src/test/compile-fail/const-eval-overflow-2.rs index 264f02588ae5d..9b045ed1d02cc 100644 --- a/src/test/compile-fail/const-eval-overflow-2.rs +++ b/src/test/compile-fail/const-eval-overflow-2.rs @@ -21,10 +21,6 @@ const NEG_128: i8 = -128; const NEG_NEG_128: i8 = -NEG_128; //~^ ERROR constant evaluation error //~| attempt to negate with overflow -//~| ERROR constant evaluation error -//~| attempt to negate with overflow -//~| ERROR constant evaluation error -//~| attempt to negate with overflow fn main() { match -128i8 { diff --git a/src/test/compile-fail/const-pattern-not-const-evaluable.rs b/src/test/compile-fail/const-pattern-not-const-evaluable.rs index d68d63683a79c..b40aa2a8e27dc 100644 --- a/src/test/compile-fail/const-pattern-not-const-evaluable.rs +++ b/src/test/compile-fail/const-pattern-not-const-evaluable.rs @@ -25,13 +25,11 @@ const fn 
foo() -> Cake { Marmor //~^ ERROR: constant evaluation error [E0080] //~| unimplemented constant expression: enum variants - //~^^^ ERROR: constant evaluation error [E0080] - //~| unimplemented constant expression: enum variants } const WORKS: Cake = Marmor; -const GOO: Cake = foo(); //~ NOTE for expression here +const GOO: Cake = foo(); fn main() { match BlackForest { diff --git a/src/test/compile-fail/const-unsized.rs b/src/test/compile-fail/const-unsized.rs index a73164b957c83..226b567c546e5 100644 --- a/src/test/compile-fail/const-unsized.rs +++ b/src/test/compile-fail/const-unsized.rs @@ -12,25 +12,25 @@ use std::fmt::Debug; const CONST_0: Debug+Sync = *(&0 as &(Debug+Sync)); //~^ ERROR `std::fmt::Debug + Sync + 'static: std::marker::Sized` is not satisfied -//~| NOTE `std::fmt::Debug + Sync + 'static: std::marker::Sized` not satisfied +//~| NOTE the trait `std::marker::Sized` is not implemented for `std::fmt::Debug + Sync + 'static` //~| NOTE does not have a constant size known at compile-time //~| NOTE constant expressions must have a statically known size const CONST_FOO: str = *"foo"; //~^ ERROR `str: std::marker::Sized` is not satisfied -//~| NOTE `str: std::marker::Sized` not satisfied +//~| NOTE the trait `std::marker::Sized` is not implemented for `str` //~| NOTE does not have a constant size known at compile-time //~| NOTE constant expressions must have a statically known size static STATIC_1: Debug+Sync = *(&1 as &(Debug+Sync)); //~^ ERROR `std::fmt::Debug + Sync + 'static: std::marker::Sized` is not satisfied -//~| NOTE `std::fmt::Debug + Sync + 'static: std::marker::Sized` not satisfied +//~| NOTE the trait `std::marker::Sized` is not implemented for `std::fmt::Debug + Sync + 'static` //~| NOTE does not have a constant size known at compile-time //~| NOTE constant expressions must have a statically known size static STATIC_BAR: str = *"bar"; //~^ ERROR `str: std::marker::Sized` is not satisfied -//~| NOTE `str: std::marker::Sized` not satisfied +//~| NOTE the trait `std::marker::Sized` is not implemented for `str` //~| NOTE does not have a constant size known at compile-time //~| NOTE constant expressions must have a statically known size diff --git a/src/test/compile-fail/dep-graph-type-alias.rs b/src/test/compile-fail/dep-graph-type-alias.rs new file mode 100644 index 0000000000000..80cc9e71c7ab4 --- /dev/null +++ b/src/test/compile-fail/dep-graph-type-alias.rs @@ -0,0 +1,56 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +// Test that changing what a `type` points to does not go unnoticed. 
+ +// compile-flags: -Z query-dep-graph + +#![feature(rustc_attrs)] +#![allow(dead_code)] +#![allow(unused_variables)] + +fn main() { } + + +#[rustc_if_this_changed] +type TypeAlias = u32; + +#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK +struct Struct { + x: TypeAlias, + y: u32 +} + +#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK +enum Enum { + Variant1(TypeAlias), + Variant2(i32) +} + +#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK +trait Trait { + fn method(&self, _: TypeAlias); +} + +struct SomeType; + +#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK +impl SomeType { + fn method(&self, _: TypeAlias) {} +} + +#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK +type TypeAlias2 = TypeAlias; + +#[rustc_then_this_would_need(ItemSignature)] //~ ERROR OK +fn function(_: TypeAlias) { + +} diff --git a/src/test/compile-fail/derived-errors/issue-30580.rs b/src/test/compile-fail/derived-errors/issue-30580.rs index 88d4aef6d9ddc..553ad0a33410b 100644 --- a/src/test/compile-fail/derived-errors/issue-30580.rs +++ b/src/test/compile-fail/derived-errors/issue-30580.rs @@ -19,7 +19,7 @@ impl<'a, 'tcx> Pass<'a, 'tcx> pub fn tcx(&self) -> &'a &'tcx () { self.1 } fn lol(&mut self, b: &Foo) { - b.c; //~ ERROR no field with that name was found + b.c; //~ ERROR no field `c` on type `&Foo` self.tcx(); } } diff --git a/src/test/compile-fail/deriving-meta-unknown-trait.rs b/src/test/compile-fail/deriving-meta-unknown-trait.rs index e223499469355..596cc1e7d5816 100644 --- a/src/test/compile-fail/deriving-meta-unknown-trait.rs +++ b/src/test/compile-fail/deriving-meta-unknown-trait.rs @@ -8,8 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// ignore-tidy-linelength + #[derive(Eqr)] -//~^ ERROR `#[derive]` for custom traits is not stable enough for use and is subject to change +//~^ ERROR `#[derive]` for custom traits is not stable enough for use. 
It is deprecated and will be removed in v1.15 (see issue #29644) struct Foo; pub fn main() {} diff --git a/src/test/compile-fail/discrim-overflow-2.rs b/src/test/compile-fail/discrim-overflow-2.rs index 0ff740212e8fa..213683b580883 100644 --- a/src/test/compile-fail/discrim-overflow-2.rs +++ b/src/test/compile-fail/discrim-overflow-2.rs @@ -24,7 +24,9 @@ fn f_i8() { enum A { Ok = i8::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed on value after 127i8; set explicitly via OhNo = -128i8 if that is desired outcome + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 127i8 + //~| NOTE explicitly set `OhNo = -128i8` if that is desired outcome } } @@ -33,7 +35,9 @@ fn f_u8() { enum A { Ok = u8::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed on value after 255u8; set explicitly via OhNo = 0u8 if that is desired outcome + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 255u8 + //~| NOTE explicitly set `OhNo = 0u8` if that is desired outcome } } @@ -42,7 +46,9 @@ fn f_i16() { enum A { Ok = i16::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 32767i16 + //~| NOTE explicitly set `OhNo = -32768i16` if that is desired outcome } } @@ -51,7 +57,9 @@ fn f_u16() { enum A { Ok = u16::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 65535u16 + //~| NOTE explicitly set `OhNo = 0u16` if that is desired outcome } } @@ -60,7 +68,9 @@ fn f_i32() { enum A { Ok = i32::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 2147483647i32 + //~| NOTE explicitly set `OhNo = -2147483648i32` if that is desired outcome } } @@ -69,7 +79,9 @@ fn f_u32() { enum A { Ok = u32::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 4294967295u32 + //~| NOTE explicitly set `OhNo = 0u32` if that is desired outcome } } @@ -78,7 +90,9 @@ fn f_i64() { enum A { Ok = i64::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 9223372036854775807i64 + //~| NOTE explicitly set `OhNo = -9223372036854775808i64` if that is desired outcome } } @@ -87,7 +101,9 @@ fn f_u64() { enum A { Ok = u64::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 18446744073709551615u64 + //~| NOTE explicitly set `OhNo = 0u64` if that is desired outcome } } diff --git a/src/test/compile-fail/discrim-overflow.rs b/src/test/compile-fail/discrim-overflow.rs index 7316e737b6da8..a3039b8d9573a 100644 --- a/src/test/compile-fail/discrim-overflow.rs +++ b/src/test/compile-fail/discrim-overflow.rs @@ -22,7 +22,9 @@ fn f_i8() { enum A { Ok = i8::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed on value after 127i8; set explicitly via OhNo = -128i8 if that is desired outcome + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 127i8 + //~| NOTE explicitly set `OhNo = -128i8` if that is desired outcome } let x = A::Ok; @@ -33,7 +35,9 @@ fn f_u8() { enum A { Ok = u8::MAX - 1, Ok2, - OhNo, //~ ERROR enum 
discriminant overflowed on value after 255u8; set explicitly via OhNo = 0u8 if that is desired outcome + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 255u8 + //~| NOTE explicitly set `OhNo = 0u8` if that is desired outcome } let x = A::Ok; @@ -44,7 +48,9 @@ fn f_i16() { enum A { Ok = i16::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| NOTE overflowed on value after 32767i16 + //~| NOTE explicitly set `OhNo = -32768i16` if that is desired outcome } let x = A::Ok; @@ -55,7 +61,9 @@ fn f_u16() { enum A { Ok = u16::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| overflowed on value after 65535u16 + //~| NOTE explicitly set `OhNo = 0u16` if that is desired outcome } let x = A::Ok; @@ -66,7 +74,9 @@ fn f_i32() { enum A { Ok = i32::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| overflowed on value after 2147483647i32 + //~| NOTE explicitly set `OhNo = -2147483648i32` if that is desired outcome } let x = A::Ok; @@ -77,7 +87,9 @@ fn f_u32() { enum A { Ok = u32::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| overflowed on value after 4294967295u32 + //~| NOTE explicitly set `OhNo = 0u32` if that is desired outcome } let x = A::Ok; @@ -88,7 +100,9 @@ fn f_i64() { enum A { Ok = i64::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| overflowed on value after 9223372036854775807i64 + //~| NOTE explicitly set `OhNo = -9223372036854775808i64` if that is desired outcome } let x = A::Ok; @@ -99,7 +113,9 @@ fn f_u64() { enum A { Ok = u64::MAX - 1, Ok2, - OhNo, //~ ERROR enum discriminant overflowed + OhNo, //~ ERROR enum discriminant overflowed [E0370] + //~| overflowed on value after 18446744073709551615u64 + //~| NOTE explicitly set `OhNo = 0u64` if that is desired outcome } let x = A::Ok; diff --git a/src/test/compile-fail/drop-with-active-borrows-2.rs b/src/test/compile-fail/drop-with-active-borrows-2.rs index e6e1364dd2ca7..33e4d3e62c418 100644 --- a/src/test/compile-fail/drop-with-active-borrows-2.rs +++ b/src/test/compile-fail/drop-with-active-borrows-2.rs @@ -9,7 +9,7 @@ // except according to those terms. fn read_lines_borrowed<'a>() -> Vec<&'a str> { - let raw_lines: Vec = vec!("foo ".to_string(), " bar".to_string()); + let raw_lines: Vec = vec!["foo ".to_string(), " bar".to_string()]; raw_lines.iter().map(|l| l.trim()).collect() //~^ ERROR `raw_lines` does not live long enough } diff --git a/src/test/compile-fail/empty-comment.rs b/src/test/compile-fail/empty-comment.rs index 5c521a5f304d4..a5568ff826be1 100644 --- a/src/test/compile-fail/empty-comment.rs +++ b/src/test/compile-fail/empty-comment.rs @@ -12,6 +12,10 @@ // This could break some internal logic that assumes the length of a doc comment is at least 5, // leading to an ICE. +macro_rules! 
one_arg_macro { + ($fmt:expr) => (print!(concat!($fmt, "\n"))); +} + fn main() { - println!(/**/); //~ ERROR unexpected end + one_arg_macro!(/**/); //~ ERROR unexpected end } diff --git a/src/test/compile-fail/empty-struct-braces-pat-1.rs b/src/test/compile-fail/empty-struct-braces-pat-1.rs index 74546152ca90f..e527170e9f957 100644 --- a/src/test/compile-fail/empty-struct-braces-pat-1.rs +++ b/src/test/compile-fail/empty-struct-braces-pat-1.rs @@ -32,13 +32,13 @@ fn main() { } match e3 { E::Empty3 => () - //~^ ERROR `E::Empty3` does not name a unit variant, unit struct or a constant + //~^ ERROR expected unit struct/variant or constant, found struct variant `E::Empty3` } match xe1 { XEmpty1 => () // Not an error, `XEmpty1` is interpreted as a new binding } match xe3 { XE::XEmpty3 => () - //~^ ERROR `XE::XEmpty3` does not name a unit variant, unit struct or a constant + //~^ ERROR expected unit struct/variant or constant, found struct variant `XE::XEmpty3` } } diff --git a/src/test/compile-fail/empty-struct-braces-pat-2.rs b/src/test/compile-fail/empty-struct-braces-pat-2.rs index 52481517ce751..58e3ca6b3ac5c 100644 --- a/src/test/compile-fail/empty-struct-braces-pat-2.rs +++ b/src/test/compile-fail/empty-struct-braces-pat-2.rs @@ -24,15 +24,15 @@ fn main() { let xe1 = XEmpty1 {}; match e1 { - Empty1() => () //~ ERROR unresolved variant or struct `Empty1` + Empty1() => () //~ ERROR unresolved tuple struct/variant `Empty1` } match xe1 { - XEmpty1() => () //~ ERROR unresolved variant or struct `XEmpty1` + XEmpty1() => () //~ ERROR unresolved tuple struct/variant `XEmpty1` } match e1 { - Empty1(..) => () //~ ERROR unresolved variant or struct `Empty1` + Empty1(..) => () //~ ERROR unresolved tuple struct/variant `Empty1` } match xe1 { - XEmpty1(..) => () //~ ERROR unresolved variant or struct `XEmpty1` + XEmpty1(..) => () //~ ERROR unresolved tuple struct/variant `XEmpty1` } } diff --git a/src/test/compile-fail/empty-struct-braces-pat-3.rs b/src/test/compile-fail/empty-struct-braces-pat-3.rs index cb859fe7501c6..1960eca9f8028 100644 --- a/src/test/compile-fail/empty-struct-braces-pat-3.rs +++ b/src/test/compile-fail/empty-struct-braces-pat-3.rs @@ -26,15 +26,19 @@ fn main() { let xe3 = XE::XEmpty3 {}; match e3 { - E::Empty3() => () //~ ERROR `E::Empty3` does not name a tuple variant or a tuple struct + E::Empty3() => () + //~^ ERROR expected tuple struct/variant, found struct variant `E::Empty3` } match xe3 { - XE::XEmpty3() => () //~ ERROR `XE::XEmpty3` does not name a tuple variant or a tuple struct + XE::XEmpty3() => () + //~^ ERROR expected tuple struct/variant, found struct variant `XE::XEmpty3` } match e3 { - E::Empty3(..) => () //~ ERROR `E::Empty3` does not name a tuple variant or a tuple struct + E::Empty3(..) => () + //~^ ERROR expected tuple struct/variant, found struct variant `E::Empty3` } match xe3 { - XE::XEmpty3(..) => () //~ ERROR `XE::XEmpty3` does not name a tuple variant or a tuple + XE::XEmpty3(..) 
=> () + //~^ ERROR expected tuple struct/variant, found struct variant `XE::XEmpty3 } } diff --git a/src/test/compile-fail/empty-struct-tuple-pat.rs b/src/test/compile-fail/empty-struct-tuple-pat.rs index be90e3b26c7ef..f15c126a12608 100644 --- a/src/test/compile-fail/empty-struct-tuple-pat.rs +++ b/src/test/compile-fail/empty-struct-tuple-pat.rs @@ -31,17 +31,19 @@ fn main() { let xe5 = XE::XEmpty5(); match e2 { - Empty2 => () //~ ERROR `Empty2` does not name a unit variant, unit struct or a constant + Empty2 => () //~ ERROR match bindings cannot shadow tuple structs } match xe6 { - XEmpty6 => () //~ ERROR `XEmpty6` does not name a unit variant, unit struct or a constant + XEmpty6 => () //~ ERROR match bindings cannot shadow tuple structs } match e4 { - E::Empty4 => () //~ ERROR `E::Empty4` does not name a unit variant, unit struct or a + E::Empty4 => () + //~^ ERROR expected unit struct/variant or constant, found tuple variant `E::Empty4` } match xe5 { - XE::XEmpty5 => (), //~ ERROR `XE::XEmpty5` does not name a unit variant, unit struct or a + XE::XEmpty5 => (), + //~^ ERROR expected unit struct/variant or constant, found tuple variant `XE::XEmpty5` _ => {}, } } diff --git a/src/test/compile-fail/empty-struct-unit-pat-1.rs b/src/test/compile-fail/empty-struct-unit-pat-1.rs deleted file mode 100644 index aec4ad4cad401..0000000000000 --- a/src/test/compile-fail/empty-struct-unit-pat-1.rs +++ /dev/null @@ -1,51 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// Can't use unit struct as enum pattern - -// aux-build:empty-struct.rs - -#![feature(relaxed_adts)] - -extern crate empty_struct; -use empty_struct::*; - -struct Empty2; - -enum E { - Empty4 -} - -// remove attribute after warning cycle and promoting warnings to errors -fn main() { - let e2 = Empty2; - let e4 = E::Empty4; - let xe2 = XEmpty2; - let xe4 = XE::XEmpty4; - - match e2 { - Empty2(..) => () //~ ERROR `Empty2` does not name a tuple variant or a tuple struct - //~^ WARNING hard error - } - match xe2 { - XEmpty2(..) => () //~ ERROR `XEmpty2` does not name a tuple variant or a tuple struct - //~^ WARNING hard error - } - - match e4 { - E::Empty4(..) => () //~ ERROR `E::Empty4` does not name a tuple variant or a tuple struct - //~^ WARNING hard error - } - match xe4 { - XE::XEmpty4(..) => (), //~ ERROR `XE::XEmpty4` does not name a tuple variant or a tuple - //~^ WARNING hard error - _ => {}, - } -} diff --git a/src/test/compile-fail/empty-struct-unit-pat-2.rs b/src/test/compile-fail/empty-struct-unit-pat-2.rs deleted file mode 100644 index 6375a7f23381e..0000000000000 --- a/src/test/compile-fail/empty-struct-unit-pat-2.rs +++ /dev/null @@ -1,47 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. 
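As a brief aside on the pattern-syntax rules these empty-struct tests pin down, here is a minimal sketch that stays on the accepted side; the names `Unit` and `E::Braced` are invented for illustration and are not part of the test suite:

```rust
struct Unit;

enum E {
    Braced { x: u8 },
}

fn main() {
    // A unit struct is matched by its bare name; writing `Unit(..)` is the
    // "expected tuple struct/variant, found unit struct" case, and matching a
    // braced variant as `E::Braced` or `E::Braced(..)` is the
    // "found struct variant" case these tests now spell out.
    let u = Unit;
    match u { Unit => {} }
    match (E::Braced { x: 1 }) {
        E::Braced { x } => assert_eq!(x, 1),
    }
}
```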
- -// Can't use unit struct as enum pattern - -// aux-build:empty-struct.rs - -#![feature(relaxed_adts)] - -extern crate empty_struct; -use empty_struct::*; - -struct Empty2; - -enum E { - Empty4 -} - -// remove attribute after warning cycle and promoting warnings to errors -fn main() { - let e2 = Empty2; - let e4 = E::Empty4; - let xe2 = XEmpty2; - let xe4 = XE::XEmpty4; - - match e2 { - Empty2() => () //~ ERROR `Empty2` does not name a tuple variant or a tuple struct - } - match xe2 { - XEmpty2() => () //~ ERROR `XEmpty2` does not name a tuple variant or a tuple struct - } - - match e4 { - E::Empty4() => () //~ ERROR `E::Empty4` does not name a tuple variant or a tuple struct - } - match xe4 { - XE::XEmpty4() => (), //~ ERROR `XE::XEmpty4` does not name a tuple variant or a tuple - _ => {}, - } -} diff --git a/src/test/compile-fail/empty-struct-unit-pat.rs b/src/test/compile-fail/empty-struct-unit-pat.rs new file mode 100644 index 0000000000000..90f6ae5755f81 --- /dev/null +++ b/src/test/compile-fail/empty-struct-unit-pat.rs @@ -0,0 +1,61 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Can't use unit struct as tuple struct pattern + +// aux-build:empty-struct.rs + +#![feature(relaxed_adts)] + +extern crate empty_struct; +use empty_struct::*; + +struct Empty2; + +enum E { + Empty4 +} + +fn main() { + let e2 = Empty2; + let e4 = E::Empty4; + let xe2 = XEmpty2; + let xe4 = XE::XEmpty4; + + match e2 { + Empty2() => () //~ ERROR expected tuple struct/variant, found unit struct `Empty2` + } + match xe2 { + XEmpty2() => () //~ ERROR expected tuple struct/variant, found unit struct `XEmpty2` + } + match e2 { + Empty2(..) => () //~ ERROR expected tuple struct/variant, found unit struct `Empty2` + } + match xe2 { + XEmpty2(..) => () //~ ERROR expected tuple struct/variant, found unit struct `XEmpty2` + } + + match e4 { + E::Empty4() => () //~ ERROR expected tuple struct/variant, found unit variant `E::Empty4` + } + match xe4 { + XE::XEmpty4() => (), + //~^ ERROR expected tuple struct/variant, found unit variant `XE::XEmpty4` + _ => {}, + } + match e4 { + E::Empty4(..) => () //~ ERROR expected tuple struct/variant, found unit variant `E::Empty4` + } + match xe4 { + XE::XEmpty4(..) 
=> (), + //~^ ERROR expected tuple struct/variant, found unit variant `XE::XEmpty4` + _ => {}, + } +} diff --git a/src/test/compile-fail/enum-in-scope.rs b/src/test/compile-fail/enum-in-scope.rs index e89b08a8a0651..bc1bd03f2d652 100644 --- a/src/test/compile-fail/enum-in-scope.rs +++ b/src/test/compile-fail/enum-in-scope.rs @@ -11,5 +11,5 @@ struct hello(isize); fn main() { - let hello = 0; //~ERROR let bindings cannot shadow structs + let hello = 0; //~ERROR let bindings cannot shadow tuple structs } diff --git a/src/test/compile-fail/enums-are-namespaced-xc.rs b/src/test/compile-fail/enums-are-namespaced-xc.rs index 5315e6c834ab3..02939565f69cd 100644 --- a/src/test/compile-fail/enums-are-namespaced-xc.rs +++ b/src/test/compile-fail/enums-are-namespaced-xc.rs @@ -14,5 +14,6 @@ extern crate namespaced_enums; fn main() { let _ = namespaced_enums::A; //~ ERROR unresolved name let _ = namespaced_enums::B(10); //~ ERROR unresolved name - let _ = namespaced_enums::C { a: 10 }; //~ ERROR does not name a structure + let _ = namespaced_enums::C { a: 10 }; + //~^ ERROR unresolved struct, variant or union type `namespaced_enums::C` } diff --git a/src/test/compile-fail/enums-pats-not-idents.rs b/src/test/compile-fail/enums-pats-not-idents.rs index c847366a707a7..03bdbe4e54b68 100644 --- a/src/test/compile-fail/enums-pats-not-idents.rs +++ b/src/test/compile-fail/enums-pats-not-idents.rs @@ -9,5 +9,5 @@ // except according to those terms. fn main() { - let a(1) = 13; //~ ERROR unresolved variant or struct `a` + let a(1) = 13; //~ ERROR unresolved tuple struct/variant `a` } diff --git a/src/test/compile-fail/extern-crate-visibility.rs b/src/test/compile-fail/extern-crate-visibility.rs index 86aae47214804..81e0cb249f382 100644 --- a/src/test/compile-fail/extern-crate-visibility.rs +++ b/src/test/compile-fail/extern-crate-visibility.rs @@ -8,9 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(rustc_attrs)] -#![allow(dead_code)] -#![allow(unused_imports)] +#![allow(unused)] mod foo { extern crate core; @@ -19,11 +17,11 @@ mod foo { // Check that private crates can be used from outside their modules, albeit with warnings use foo::core; //~ WARN extern crate `core` is private //~^ WARN this was previously accepted by the compiler but is being phased out -use foo::core::cell; //~ WARN extern crate `core` is private +use foo::core::cell; //~ ERROR extern crate `core` is private //~^ WARN this was previously accepted by the compiler but is being phased out fn f() { - foo::core::cell::Cell::new(0); //~ WARN extern crate `core` is private + foo::core::cell::Cell::new(0); //~ ERROR extern crate `core` is private //~^ WARN this was previously accepted by the compiler but is being phased out use foo::*; @@ -39,5 +37,4 @@ mod baz { use self::core::cell; // Check that public extern crates are glob imported } -#[rustc_error] -fn main() {} //~ ERROR compilation successful +fn main() {} diff --git a/src/test/compile-fail/feature-gate-field-init-shorthand.rs b/src/test/compile-fail/feature-gate-field-init-shorthand.rs new file mode 100644 index 0000000000000..cd2dae7f461ab --- /dev/null +++ b/src/test/compile-fail/feature-gate-field-init-shorthand.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +struct Foo { + x: i32, + y: bool, + z: i32 +} + +fn main() { + let (x, y, z) = (1, true, 2); + let _ = Foo { + x, //~ ERROR struct field shorthands are unstable + y: y, + z //~ ERROR struct field shorthands are unstable + }; +} diff --git a/src/test/compile-fail/feature-gate-may-dangle.rs b/src/test/compile-fail/feature-gate-may-dangle.rs new file mode 100644 index 0000000000000..23f8ead0ca9dc --- /dev/null +++ b/src/test/compile-fail/feature-gate-may-dangle.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Check that `may_dangle` is rejected if `dropck_eyepatch` feature gate is absent. + +#![feature(generic_param_attrs)] + +struct Pt(A); +impl<#[may_dangle] A> Drop for Pt { + //~^ ERROR may_dangle has unstable semantics and may be removed in the future + //~| HELP add #![feature(dropck_eyepatch)] to the crate attributes to enable + fn drop(&mut self) { } +} diff --git a/src/test/compile-fail/feature-gate-no-debug-2.rs b/src/test/compile-fail/feature-gate-no-debug-2.rs new file mode 100644 index 0000000000000..b663c136ee525 --- /dev/null +++ b/src/test/compile-fail/feature-gate-no-debug-2.rs @@ -0,0 +1,15 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![deny(deprecated)] +#![feature(no_debug)] + +#[no_debug] //~ ERROR use of deprecated attribute `no_debug` +fn main() {} diff --git a/src/test/compile-fail/gated-non-ascii-idents.rs b/src/test/compile-fail/gated-non-ascii-idents.rs index f4b9830d579a4..9e042c3a7d50e 100644 --- a/src/test/compile-fail/gated-non-ascii-idents.rs +++ b/src/test/compile-fail/gated-non-ascii-idents.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
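For context on the gate exercised by feature-gate-field-init-shorthand.rs, a minimal sketch of the stable spelling next to the gated shorthand follows; `Point` and its fields are invented for illustration:

```rust
struct Point { x: i32, y: i32 }

fn main() {
    let (x, y) = (1, 2);
    // Stable spelling: name each field explicitly. Under the gate tested
    // above, `Point { x, y }` becomes an equivalent shorthand.
    let p = Point { x: x, y: y };
    assert_eq!((p.x, p.y), (1, 2));
}
```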
-extern crate bäz; //~ ERROR non-ascii idents +extern crate core as bäz; //~ ERROR non-ascii idents use föö::bar; //~ ERROR non-ascii idents diff --git a/src/test/compile-fail/generic-type-less-params-with-defaults.rs b/src/test/compile-fail/generic-type-less-params-with-defaults.rs index d9ac715fa9548..9b1f3e51647cb 100644 --- a/src/test/compile-fail/generic-type-less-params-with-defaults.rs +++ b/src/test/compile-fail/generic-type-less-params-with-defaults.rs @@ -18,5 +18,5 @@ struct Vec( fn main() { let _: Vec; //~^ ERROR E0243 - //~| NOTE expected at least 1 type arguments, found 0 + //~| NOTE expected at least 1 type argument, found 0 } diff --git a/src/test/compile-fail/if-without-else-result.rs b/src/test/compile-fail/if-without-else-result.rs index e8aa1f70ea1dc..95bcce5a8474a 100644 --- a/src/test/compile-fail/if-without-else-result.rs +++ b/src/test/compile-fail/if-without-else-result.rs @@ -10,7 +10,7 @@ fn main() { let a = if true { true }; - //~^ ERROR if may be missing an else clause + //~^ ERROR if may be missing an else clause [E0317] //~| expected type `()` //~| found type `bool` //~| expected (), found bool diff --git a/src/test/compile-fail/impl-trait/auto-trait-leak.rs b/src/test/compile-fail/impl-trait/auto-trait-leak.rs index 60ad266e7f7da..f055d20e1343b 100644 --- a/src/test/compile-fail/impl-trait/auto-trait-leak.rs +++ b/src/test/compile-fail/impl-trait/auto-trait-leak.rs @@ -26,7 +26,7 @@ fn send(_: T) {} fn main() { send(before()); //~^ ERROR the trait bound `std::rc::Rc>: std::marker::Send` is not satisfied - //~| NOTE trait `std::rc::Rc>: std::marker::Send` not satisfied + //~| NOTE the trait `std::marker::Send` is not implemented for `std::rc::Rc>` //~| NOTE `std::rc::Rc>` cannot be sent between threads safely //~| NOTE required because it appears within the type `[closure //~| NOTE required because it appears within the type `impl std::ops::Fn<(i32,)>` @@ -34,7 +34,7 @@ fn main() { send(after()); //~^ ERROR the trait bound `std::rc::Rc>: std::marker::Send` is not satisfied - //~| NOTE trait `std::rc::Rc>: std::marker::Send` not satisfied + //~| NOTE the trait `std::marker::Send` is not implemented for `std::rc::Rc>` //~| NOTE `std::rc::Rc>` cannot be sent between threads safely //~| NOTE required because it appears within the type `[closure //~| NOTE required because it appears within the type `impl std::ops::Fn<(i32,)>` @@ -54,7 +54,7 @@ fn after() -> impl Fn(i32) { fn cycle1() -> impl Clone { send(cycle2().clone()); //~^ ERROR the trait bound `std::rc::Rc: std::marker::Send` is not satisfied - //~| NOTE trait `std::rc::Rc: std::marker::Send` not satisfied + //~| NOTE the trait `std::marker::Send` is not implemented for `std::rc::Rc` //~| NOTE `std::rc::Rc` cannot be sent between threads safely //~| NOTE required because it appears within the type `impl std::clone::Clone` //~| NOTE required by `send` @@ -65,7 +65,7 @@ fn cycle1() -> impl Clone { fn cycle2() -> impl Clone { send(cycle1().clone()); //~^ ERROR the trait bound `std::rc::Rc>: std::marker::Send` is not satisfied - //~| NOTE trait `std::rc::Rc>: std::marker::Send` not satisfied + //~| NOTE the trait `std::marker::Send` is not implemented for `std::rc::Rc>` //~| NOTE `std::rc::Rc>` cannot be sent between threads safely //~| NOTE required because it appears within the type `impl std::clone::Clone` //~| NOTE required by `send` diff --git a/src/test/compile-fail/import-crate-var.rs b/src/test/compile-fail/import-crate-var.rs new file mode 100644 index 0000000000000..9f573945483de --- /dev/null +++ 
b/src/test/compile-fail/import-crate-var.rs @@ -0,0 +1,21 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:import_crate_var.rs +// error-pattern: `$crate` may not be imported +// error-pattern: `use $crate;` was erroneously allowed and will become a hard error + +#![feature(rustc_attrs)] + +#[macro_use] extern crate import_crate_var; +m!(); + +#[rustc_error] +fn main() {} diff --git a/src/test/compile-fail/inherent-overlap.rs b/src/test/compile-fail/inherent-overlap.rs index 333a4ee04b27b..00d41244639f5 100644 --- a/src/test/compile-fail/inherent-overlap.rs +++ b/src/test/compile-fail/inherent-overlap.rs @@ -11,13 +11,12 @@ // Test that you cannot define items with the same name in overlapping inherent // impl blocks. -#![feature(rustc_attrs)] -#![allow(dead_code)] +#![allow(unused)] struct Foo; impl Foo { - fn id() {} //~ WARN duplicate definitions + fn id() {} //~ ERROR duplicate definitions //~^ WARN previously accepted } @@ -28,7 +27,7 @@ impl Foo { struct Bar(T); impl Bar { - fn bar(&self) {} //~ WARN duplicate definitions + fn bar(&self) {} //~ ERROR duplicate definitions //~^ WARN previously accepted } @@ -39,7 +38,7 @@ impl Bar { struct Baz(T); impl Baz { - fn baz(&self) {} //~ WARN duplicate definitions + fn baz(&self) {} //~ ERROR duplicate definitions //~^ WARN previously accepted } @@ -47,5 +46,4 @@ impl Baz> { fn baz(&self) {} } -#[rustc_error] -fn main() {} //~ ERROR compilation successful +fn main() {} diff --git a/src/test/compile-fail/integral-indexing.rs b/src/test/compile-fail/integral-indexing.rs index 897aca66cbfd4..c8f33c3caf8d3 100644 --- a/src/test/compile-fail/integral-indexing.rs +++ b/src/test/compile-fail/integral-indexing.rs @@ -9,7 +9,7 @@ // except according to those terms. 
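As context for the inherent-overlap.rs change from warning to hard error, here is a minimal sketch that stays on the allowed side; `Wrapper`, `generic_get`, and `value` are invented names:

```rust
struct Wrapper<T>(T);

impl<T> Wrapper<T> {
    fn generic_get(&self) -> &T { &self.0 }
}

impl Wrapper<u32> {
    // Naming this method `generic_get` as well would be the "duplicate
    // definitions" situation that inherent-overlap.rs now expects to be
    // rejected, because this impl overlaps the blanket impl above.
    fn value(&self) -> u32 { self.0 }
}

fn main() {
    let w = Wrapper(7u32);
    assert_eq!(*w.generic_get(), 7);
    assert_eq!(w.value(), 7);
}
```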
pub fn main() { - let v: Vec = vec!(0, 1, 2, 3, 4, 5); + let v: Vec = vec![0, 1, 2, 3, 4, 5]; let s: String = "abcdef".to_string(); v[3_usize]; v[3]; diff --git a/src/test/compile-fail/issue-10200.rs b/src/test/compile-fail/issue-10200.rs index 9eec8487a5087..8c58ef6261e10 100644 --- a/src/test/compile-fail/issue-10200.rs +++ b/src/test/compile-fail/issue-10200.rs @@ -13,7 +13,7 @@ fn foo(_: usize) -> Foo { Foo(false) } fn main() { match Foo(true) { - foo(x) //~ ERROR expected variant or struct, found function `foo` + foo(x) //~ ERROR expected tuple struct/variant, found function `foo` => () } } diff --git a/src/test/compile-fail/issue-11004.rs b/src/test/compile-fail/issue-11004.rs index 308be46271531..069883424222e 100644 --- a/src/test/compile-fail/issue-11004.rs +++ b/src/test/compile-fail/issue-11004.rs @@ -14,9 +14,9 @@ struct A { x: i32, y: f64 } #[cfg(not(works))] unsafe fn access(n:*mut A) -> (i32, f64) { - let x : i32 = n.x; //~ ERROR attempted access of field `x` + let x : i32 = n.x; //~ no field `x` on type `*mut A` //~| NOTE `n` is a native pointer; perhaps you need to deref with `(*n).x` - let y : f64 = n.y; //~ ERROR attempted access of field `y` + let y : f64 = n.y; //~ no field `y` on type `*mut A` //~| NOTE `n` is a native pointer; perhaps you need to deref with `(*n).y` (x, y) } diff --git a/src/test/compile-fail/issue-11873.rs b/src/test/compile-fail/issue-11873.rs index 38956944f63e6..4618851529a15 100644 --- a/src/test/compile-fail/issue-11873.rs +++ b/src/test/compile-fail/issue-11873.rs @@ -9,7 +9,7 @@ // except according to those terms. fn main() { - let mut v = vec!(1); + let mut v = vec![1]; let mut f = || v.push(2); let _w = v; //~ ERROR: cannot move out of `v` diff --git a/src/test/compile-fail/issue-12612.rs b/src/test/compile-fail/issue-12612.rs index 20943bd0ea077..c6f76ca78874b 100644 --- a/src/test/compile-fail/issue-12612.rs +++ b/src/test/compile-fail/issue-12612.rs @@ -16,7 +16,7 @@ use foo::bar; mod test { use bar::foo; //~ ERROR unresolved import `bar::foo` [E0432] - //~^ Maybe a missing `extern crate bar`? + //~^ Maybe a missing `extern crate bar;`? } fn main() {} diff --git a/src/test/compile-fail/issue-12863.rs b/src/test/compile-fail/issue-12863.rs index 7912410f69ea1..d3432410c5427 100644 --- a/src/test/compile-fail/issue-12863.rs +++ b/src/test/compile-fail/issue-12863.rs @@ -12,6 +12,6 @@ mod foo { pub fn bar() {} } fn main() { match () { - foo::bar => {} //~ ERROR expected variant, struct or constant, found function `bar` + foo::bar => {} //~ ERROR expected unit struct/variant or constant, found function `foo::bar` } } diff --git a/src/test/compile-fail/issue-13352.rs b/src/test/compile-fail/issue-13352.rs index 13e677d72bc1e..0c446f5fe4703 100644 --- a/src/test/compile-fail/issue-13352.rs +++ b/src/test/compile-fail/issue-13352.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
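On the raw-pointer note checked in issue-11004.rs, here is a minimal sketch of the suggested `(*n).x` spelling; the struct `A` mirrors the test, the surrounding code is invented:

```rust
struct A { x: i32, y: f64 }

unsafe fn access(n: *mut A) -> (i32, f64) {
    // Field access does not auto-deref through a raw pointer, so the
    // explicit `(*n).field` form from the diagnostic note is required.
    ((*n).x, (*n).y)
}

fn main() {
    let mut a = A { x: 3, y: 4.0 };
    assert_eq!(unsafe { access(&mut a) }, (3, 4.0));
}
```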
-// pretty-expanded FIXME #23616 - fn foo(_: Box) {} fn main() { diff --git a/src/test/compile-fail/issue-13446.rs b/src/test/compile-fail/issue-13446.rs index 53d1486288984..6ad3ec67b2964 100644 --- a/src/test/compile-fail/issue-13446.rs +++ b/src/test/compile-fail/issue-13446.rs @@ -13,6 +13,6 @@ // error-pattern: mismatched types -static VEC: [u32; 256] = vec!(); +static VEC: [u32; 256] = vec![]; fn main() {} diff --git a/src/test/compile-fail/issue-14092.rs b/src/test/compile-fail/issue-14092.rs index dd02fa7ac151c..df8707ab823e5 100644 --- a/src/test/compile-fail/issue-14092.rs +++ b/src/test/compile-fail/issue-14092.rs @@ -10,6 +10,6 @@ fn fn1(0: Box) {} //~^ ERROR E0243 - //~| NOTE expected 1 type arguments, found 0 + //~| NOTE expected 1 type argument, found 0 fn main() {} diff --git a/src/test/compile-fail/issue-14254.rs b/src/test/compile-fail/issue-14254.rs index 5f8ccd0b0634e..c7bd343bc9a33 100644 --- a/src/test/compile-fail/issue-14254.rs +++ b/src/test/compile-fail/issue-14254.rs @@ -27,87 +27,111 @@ impl BarTy { impl Foo for *const BarTy { fn bar(&self) { baz(); - //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`? + //~^ ERROR: unresolved name `baz` + //~| NOTE did you mean to call `self.baz`? a; //~^ ERROR: unresolved name `a` + //~| NOTE unresolved name } } impl<'a> Foo for &'a BarTy { fn bar(&self) { baz(); - //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`? + //~^ ERROR: unresolved name `baz` + //~| NOTE did you mean to call `self.baz`? x; - //~^ ERROR: unresolved name `x`. Did you mean `self.x`? + //~^ ERROR: unresolved name `x` + //~| NOTE did you mean `self.x`? y; - //~^ ERROR: unresolved name `y`. Did you mean `self.y`? + //~^ ERROR: unresolved name `y` + //~| NOTE did you mean `self.y`? a; //~^ ERROR: unresolved name `a` + //~| NOTE unresolved name bah; - //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`? + //~^ ERROR: unresolved name `bah` + //~| NOTE did you mean to call `Foo::bah`? b; //~^ ERROR: unresolved name `b` + //~| NOTE unresolved name } } impl<'a> Foo for &'a mut BarTy { fn bar(&self) { baz(); - //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`? + //~^ ERROR: unresolved name `baz` + //~| NOTE did you mean to call `self.baz`? x; - //~^ ERROR: unresolved name `x`. Did you mean `self.x`? + //~^ ERROR: unresolved name `x` + //~| NOTE did you mean `self.x`? y; - //~^ ERROR: unresolved name `y`. Did you mean `self.y`? + //~^ ERROR: unresolved name `y` + //~| NOTE did you mean `self.y`? a; //~^ ERROR: unresolved name `a` + //~| NOTE unresolved name bah; - //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`? + //~^ ERROR: unresolved name `bah` + //~| NOTE did you mean to call `Foo::bah`? b; //~^ ERROR: unresolved name `b` + //~| NOTE unresolved name } } impl Foo for Box { fn bar(&self) { baz(); - //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`? + //~^ ERROR: unresolved name `baz` + //~| NOTE did you mean to call `self.baz`? bah; - //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`? + //~^ ERROR: unresolved name `bah` + //~| NOTE did you mean to call `Foo::bah`? } } impl Foo for *const isize { fn bar(&self) { baz(); - //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`? + //~^ ERROR: unresolved name `baz` + //~| NOTE did you mean to call `self.baz`? bah; - //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`? + //~^ ERROR: unresolved name `bah` + //~| NOTE did you mean to call `Foo::bah`? 
} } impl<'a> Foo for &'a isize { fn bar(&self) { baz(); - //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`? + //~^ ERROR: unresolved name `baz` + //~| NOTE did you mean to call `self.baz`? bah; - //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`? + //~^ ERROR: unresolved name `bah` + //~| NOTE did you mean to call `Foo::bah`? } } impl<'a> Foo for &'a mut isize { fn bar(&self) { baz(); - //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`? + //~^ ERROR: unresolved name `baz` + //~| NOTE did you mean to call `self.baz`? bah; - //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`? + //~^ ERROR: unresolved name `bah` + //~| NOTE did you mean to call `Foo::bah`? } } impl Foo for Box { fn bar(&self) { baz(); - //~^ ERROR: unresolved name `baz`. Did you mean to call `self.baz`? + //~^ ERROR: unresolved name `baz` + //~| NOTE did you mean to call `self.baz`? bah; - //~^ ERROR: unresolved name `bah`. Did you mean to call `Foo::bah`? + //~^ ERROR: unresolved name `bah` + //~| NOTE did you mean to call `Foo::bah`? } } diff --git a/src/test/compile-fail/issue-14721.rs b/src/test/compile-fail/issue-14721.rs index 92add18f9413c..58e8e10ee37ac 100644 --- a/src/test/compile-fail/issue-14721.rs +++ b/src/test/compile-fail/issue-14721.rs @@ -10,6 +10,5 @@ fn main() { let foo = "str"; - println!("{}", foo.desc); //~ ERROR attempted access of field `desc` on type `&str`, - // but no field with that name was found + println!("{}", foo.desc); //~ no field `desc` on type `&str` } diff --git a/src/test/compile-fail/issue-15260.rs b/src/test/compile-fail/issue-15260.rs index 5ec82326d6c1a..5f816d34c8445 100644 --- a/src/test/compile-fail/issue-15260.rs +++ b/src/test/compile-fail/issue-15260.rs @@ -14,19 +14,28 @@ struct Foo { fn main() { let Foo { - a: _, //~ NOTE field `a` previously bound here - a: _ //~ ERROR field `a` bound multiple times in the pattern + a: _, //~ NOTE first use of `a` + a: _ + //~^ ERROR field `a` bound multiple times in the pattern + //~| NOTE multiple uses of `a` in pattern } = Foo { a: 29 }; let Foo { - a, //~ NOTE field `a` previously bound here - a: _ //~ ERROR field `a` bound multiple times in the pattern + a, //~ NOTE first use of `a` + a: _ + //~^ ERROR field `a` bound multiple times in the pattern + //~| NOTE multiple uses of `a` in pattern } = Foo { a: 29 }; let Foo { - a, //~ NOTE field `a` previously bound here - //~^ NOTE field `a` previously bound here - a: _, //~ ERROR field `a` bound multiple times in the pattern - a: x //~ ERROR field `a` bound multiple times in the pattern + a, + //~^ NOTE first use of `a` + //~| NOTE first use of `a` + a: _, + //~^ ERROR field `a` bound multiple times in the pattern + //~| NOTE multiple uses of `a` in pattern + a: x + //~^ ERROR field `a` bound multiple times in the pattern + //~| NOTE multiple uses of `a` in pattern } = Foo { a: 29 }; } diff --git a/src/test/compile-fail/issue-16058.rs b/src/test/compile-fail/issue-16058.rs index 671232e701f87..92c1e4b5f5060 100644 --- a/src/test/compile-fail/issue-16058.rs +++ b/src/test/compile-fail/issue-16058.rs @@ -16,7 +16,7 @@ pub struct GslResult { impl GslResult { pub fn new() -> GslResult { - Result { //~ ERROR: `Result` does not name a struct or a struct variant + Result { //~ ERROR: expected struct, variant or union type, found enum `Result` val: 0f64, err: 0f64 } diff --git a/src/test/compile-fail/issue-1697.rs b/src/test/compile-fail/issue-1697.rs index dc09af0ada66f..1375200271c55 100644 --- 
a/src/test/compile-fail/issue-1697.rs +++ b/src/test/compile-fail/issue-1697.rs @@ -11,6 +11,6 @@ // Testing that we don't fail abnormally after hitting the errors use unresolved::*; //~ ERROR unresolved import `unresolved::*` [E0432] - //~^ Maybe a missing `extern crate unresolved`? + //~^ Maybe a missing `extern crate unresolved;`? fn main() {} diff --git a/src/test/compile-fail/issue-17001.rs b/src/test/compile-fail/issue-17001.rs index 218f68714ff92..413e8b464fff7 100644 --- a/src/test/compile-fail/issue-17001.rs +++ b/src/test/compile-fail/issue-17001.rs @@ -11,5 +11,5 @@ mod foo {} fn main() { - let p = foo { x: () }; //~ ERROR `foo` does not name a struct or a struct variant + let p = foo { x: () }; //~ ERROR expected struct, variant or union type, found module `foo` } diff --git a/src/test/compile-fail/issue-17025.rs b/src/test/compile-fail/issue-17025.rs index 2a1a3397d5447..f250103b14407 100644 --- a/src/test/compile-fail/issue-17025.rs +++ b/src/test/compile-fail/issue-17025.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// ignore-test the unsized enum no longer compiles + enum A { B(char), C([Box]), diff --git a/src/test/compile-fail/issue-17405.rs b/src/test/compile-fail/issue-17405.rs index 2f2c252b947c9..5a6bd5ed588a6 100644 --- a/src/test/compile-fail/issue-17405.rs +++ b/src/test/compile-fail/issue-17405.rs @@ -14,6 +14,6 @@ enum Foo { fn main() { match Foo::Bar(1) { - Foo { i } => () //~ ERROR expected variant, struct or type alias, found enum `Foo` + Foo { i } => () //~ ERROR expected struct, variant or union type, found enum `Foo` } } diff --git a/src/test/compile-fail/issue-17518.rs b/src/test/compile-fail/issue-17518.rs index 0410fadeb7892..2113e38c45cc3 100644 --- a/src/test/compile-fail/issue-17518.rs +++ b/src/test/compile-fail/issue-17518.rs @@ -13,5 +13,5 @@ enum SomeEnum { } fn main() { - E { name: "foobar" }; //~ ERROR `E` does not name a structure + E { name: "foobar" }; //~ ERROR unresolved struct, variant or union type `E` } diff --git a/src/test/compile-fail/issue-17933.rs b/src/test/compile-fail/issue-17933.rs index 2313a3fe9c6d5..049a0665c5458 100644 --- a/src/test/compile-fail/issue-17933.rs +++ b/src/test/compile-fail/issue-17933.rs @@ -13,7 +13,7 @@ pub static X: usize = 1; fn main() { match 1 { self::X => { }, - //~^ ERROR expected variant, struct or constant, found static `X` + //~^ ERROR expected unit struct/variant or constant, found static `self::X` _ => { }, } } diff --git a/src/test/compile-fail/issue-19086.rs b/src/test/compile-fail/issue-19086.rs index 56452449d4ee1..ba571ce17fd12 100644 --- a/src/test/compile-fail/issue-19086.rs +++ b/src/test/compile-fail/issue-19086.rs @@ -18,6 +18,6 @@ fn main() { let f = FooB { x: 3, y: 4 }; match f { FooB(a, b) => println!("{} {}", a, b), -//~^ ERROR `FooB` does not name a tuple variant or a tuple struct + //~^ ERROR expected tuple struct/variant, found struct variant `FooB` } } diff --git a/src/test/compile-fail/issue-19244-2.rs b/src/test/compile-fail/issue-19244-2.rs index 7d7d7d7c8ce4d..864f8f6b54e7c 100644 --- a/src/test/compile-fail/issue-19244-2.rs +++ b/src/test/compile-fail/issue-19244-2.rs @@ -13,5 +13,5 @@ const STRUCT: MyStruct = MyStruct { field: 42 }; fn main() { let a: [isize; STRUCT.nonexistent_field]; - //~^ ERROR attempted access of field `nonexistent_field` + //~^ no field `nonexistent_field` on type `MyStruct` } diff --git a/src/test/compile-fail/issue-19452.rs b/src/test/compile-fail/issue-19452.rs index 
15d5d2b80c31d..34872b7c8c503 100644 --- a/src/test/compile-fail/issue-19452.rs +++ b/src/test/compile-fail/issue-19452.rs @@ -8,6 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// aux-build:issue_19452_aux.rs +extern crate issue_19452_aux; + enum Homura { Madoka { age: u32 } } @@ -16,4 +19,8 @@ fn main() { let homura = Homura::Madoka; //~^ ERROR uses it like a function //~| struct called like a function + + let homura = issue_19452_aux::Homura::Madoka; + //~^ ERROR uses it like a function + //~| struct called like a function } diff --git a/src/test/compile-fail/issue-19482.rs b/src/test/compile-fail/issue-19482.rs index 21a50f24d5e5c..b54f008f8ce3a 100644 --- a/src/test/compile-fail/issue-19482.rs +++ b/src/test/compile-fail/issue-19482.rs @@ -11,8 +11,6 @@ // Test that a partially specified trait object with unspecified associated // type does not type-check. -// pretty-expanded FIXME #23616 - trait Foo { type A; diff --git a/src/test/compile-fail/issue-19922.rs b/src/test/compile-fail/issue-19922.rs index a8350fe0986c0..d7b2f2b3f991e 100644 --- a/src/test/compile-fail/issue-19922.rs +++ b/src/test/compile-fail/issue-19922.rs @@ -15,4 +15,5 @@ enum Homura { fn main() { let homura = Homura::Akemi { kaname: () }; //~^ ERROR variant `Homura::Akemi` has no field named `kaname` + //~| NOTE field does not exist - did you mean `madoka`? } diff --git a/src/test/compile-fail/issue-21146.rs b/src/test/compile-fail/issue-21146.rs index 02f128e1f5644..457d40e62b037 100644 --- a/src/test/compile-fail/issue-21146.rs +++ b/src/test/compile-fail/issue-21146.rs @@ -8,6 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// error-pattern: expected item, found `parse_error` +// error-pattern: expected one of `!` or `::`, found `` include!("auxiliary/issue-21146-inc.rs"); fn main() {} diff --git a/src/test/compile-fail/issue-21449.rs b/src/test/compile-fail/issue-21449.rs index 090b8a0d16e64..cc44cf88f09bd 100644 --- a/src/test/compile-fail/issue-21449.rs +++ b/src/test/compile-fail/issue-21449.rs @@ -11,5 +11,6 @@ mod MyMod {} fn main() { - let myVar = MyMod { T: 0 }; //~ ERROR `MyMod` does not name a struct or a struct variant + let myVar = MyMod { T: 0 }; + //~^ ERROR expected struct, variant or union type, found module `MyMod` } diff --git a/src/test/compile-fail/issue-21837.rs b/src/test/compile-fail/issue-21837.rs new file mode 100644 index 0000000000000..86506de1ea9c5 --- /dev/null +++ b/src/test/compile-fail/issue-21837.rs @@ -0,0 +1,20 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
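As an aside on the call-like-constructor and wrong-field diagnostics checked by issue-19452.rs and issue-19922.rs, a minimal compiling sketch follows; the enum and field names mirror the tests, the rest is invented:

```rust
enum Homura {
    Madoka { age: u32 },
}

fn main() {
    // `Homura::Madoka(21)` is the call-like form the test rejects, and a
    // misspelled field triggers the "did you mean" note; the braced form
    // with the real field name is the accepted spelling.
    let h = Homura::Madoka { age: 21 };
    match h {
        Homura::Madoka { age } => assert_eq!(age, 21),
    }
}
```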
+ +pub trait Bound {} +pub struct Foo(T); + +pub trait Trait1 {} +impl Trait1 for Foo {} + +pub trait Trait2 {} +impl Trait2 for Foo {} //~ ERROR the trait bound `T: Bound` is not satisfied + +fn main() {} diff --git a/src/test/compile-fail/issue-23253.rs b/src/test/compile-fail/issue-23253.rs index e977cd135ab67..626604a87a72c 100644 --- a/src/test/compile-fail/issue-23253.rs +++ b/src/test/compile-fail/issue-23253.rs @@ -12,5 +12,5 @@ enum Foo { Bar } fn main() { Foo::Bar.a; - //~^ ERROR: attempted access of field `a` on type `Foo`, but no field with that name was found + //~^ no field `a` on type `Foo` } diff --git a/src/test/compile-fail/issue-2356.rs b/src/test/compile-fail/issue-2356.rs index da92161967dbd..d7635d7bc9473 100644 --- a/src/test/compile-fail/issue-2356.rs +++ b/src/test/compile-fail/issue-2356.rs @@ -26,6 +26,7 @@ impl MaybeDog { // If this provides a suggestion, it's a bug as MaybeDog doesn't impl Groom shave(); //~^ ERROR: unresolved name `shave` + //~| NOTE unresolved name } } @@ -33,11 +34,14 @@ impl Groom for cat { fn shave(other: usize) { whiskers -= other; //~^ ERROR: unresolved name `whiskers` + //~| NOTE unresolved name //~| HELP this is an associated function shave(4); - //~^ ERROR: unresolved name `shave`. Did you mean to call `Groom::shave`? + //~^ ERROR: unresolved name `shave` + //~| NOTE did you mean to call `Groom::shave`? purr(); //~^ ERROR: unresolved name `purr` + //~| NOTE unresolved name } } @@ -47,12 +51,16 @@ impl cat { fn purr_louder() { static_method(); //~^ ERROR: unresolved name `static_method` + //~| NOTE unresolved name purr(); //~^ ERROR: unresolved name `purr` + //~| NOTE unresolved name purr(); //~^ ERROR: unresolved name `purr` + //~| NOTE unresolved name purr(); //~^ ERROR: unresolved name `purr` + //~| NOTE unresolved name } } @@ -69,27 +77,33 @@ impl cat { fn purr(&self) { grow_older(); //~^ ERROR: unresolved name `grow_older` + //~| NOTE unresolved name shave(); //~^ ERROR: unresolved name `shave` + //~| NOTE unresolved name } fn burn_whiskers(&mut self) { whiskers = 0; - //~^ ERROR: unresolved name `whiskers`. Did you mean `self.whiskers`? + //~^ ERROR: unresolved name `whiskers` + //~| NOTE did you mean `self.whiskers`? } pub fn grow_older(other:usize) { whiskers = 4; //~^ ERROR: unresolved name `whiskers` + //~| NOTE unresolved name //~| HELP this is an associated function purr_louder(); //~^ ERROR: unresolved name `purr_louder` + //~| NOTE unresolved name } } fn main() { self += 1; //~^ ERROR: unresolved name `self` + //~| NOTE unresolved name //~| HELP: module `self` // it's a bug if this suggests a missing `self` as we're not in a method } diff --git a/src/test/compile-fail/issue-24363.rs b/src/test/compile-fail/issue-24363.rs index 590c464371c06..03cae6e64ef5e 100644 --- a/src/test/compile-fail/issue-24363.rs +++ b/src/test/compile-fail/issue-24363.rs @@ -9,7 +9,7 @@ // except according to those terms. 
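As a side note on the resolution suggestions checked in issue-2356.rs, here is a minimal sketch of the spellings those notes point toward; `Groom`, `Cat`, and `trim` are invented names, not part of the test:

```rust
trait Groom {
    fn shave(&self);
}

struct Cat { whiskers: usize }

impl Cat {
    fn trim(&self) {}
}

impl Groom for Cat {
    fn shave(&self) {
        // Bare `whiskers` or `trim()` would hit the "unresolved name"
        // diagnostics above; the suggested forms go through `self`.
        let _ = self.whiskers;
        self.trim();
    }
}

fn main() {
    Cat { whiskers: 4 }.shave();
}
```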
fn main() { - 1.create_a_type_error[ //~ ERROR attempted access of field + 1.create_a_type_error[ //~ no field `create_a_type_error` on type `{integer}` ()+() //~ ERROR binary operation `+` cannot be applied // ^ ensure that we typeck the inner expression ^ ]; diff --git a/src/test/compile-fail/issue-24365.rs b/src/test/compile-fail/issue-24365.rs index a4df42a8c70f2..72df6fb59990f 100644 --- a/src/test/compile-fail/issue-24365.rs +++ b/src/test/compile-fail/issue-24365.rs @@ -17,13 +17,13 @@ pub enum Foo { } fn test(a: Foo) { - println!("{}", a.b); //~ ERROR attempted access of field + println!("{}", a.b); //~ no field `b` on type `Foo` } fn main() { let x = Attribute::Code { attr_name_idx: 42, }; - let z = (&x).attr_name_idx; //~ ERROR attempted access of field - let y = x.attr_name_idx; //~ ERROR attempted access of field + let z = (&x).attr_name_idx; //~ no field `attr_name_idx` on type `&Attribute` + let y = x.attr_name_idx; //~ no field `attr_name_idx` on type `Attribute` } diff --git a/src/test/compile-fail/issue-26158.rs b/src/test/compile-fail/issue-26158.rs new file mode 100644 index 0000000000000..54f5313aed16e --- /dev/null +++ b/src/test/compile-fail/issue-26158.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(slice_patterns)] + +fn main() { + let x: &[u32] = &[]; + let &[[ref _a, ref _b..]..] = x; //~ ERROR refutable pattern +} diff --git a/src/test/compile-fail/issue-26459.rs b/src/test/compile-fail/issue-26459.rs index 24b39eeff0f79..8be3d88bd5c8f 100644 --- a/src/test/compile-fail/issue-26459.rs +++ b/src/test/compile-fail/issue-26459.rs @@ -11,6 +11,6 @@ fn main() { match 'a' { char{ch} => true - //~^ ERROR expected variant, struct or type alias, found builtin type `char` + //~^ ERROR expected struct, variant or union type, found builtin type `char` }; } diff --git a/src/test/compile-fail/issue-27033.rs b/src/test/compile-fail/issue-27033.rs index 2a015adb498e1..b8552aaee9005 100644 --- a/src/test/compile-fail/issue-27033.rs +++ b/src/test/compile-fail/issue-27033.rs @@ -10,7 +10,7 @@ fn main() { match Some(1) { - None @ _ => {} //~ ERROR match bindings cannot shadow variants + None @ _ => {} //~ ERROR match bindings cannot shadow unit variants }; const C: u8 = 1; match 1 { diff --git a/src/test/compile-fail/issue-27815.rs b/src/test/compile-fail/issue-27815.rs index 7a329bac61b22..d9840abf0ca77 100644 --- a/src/test/compile-fail/issue-27815.rs +++ b/src/test/compile-fail/issue-27815.rs @@ -11,10 +11,12 @@ mod A {} fn main() { - let u = A { x: 1 }; //~ ERROR `A` does not name a struct or a struct variant - let v = u32 { x: 1 }; //~ ERROR `u32` does not name a struct or a struct variant + let u = A { x: 1 }; //~ ERROR expected struct, variant or union type, found module `A` + let v = u32 { x: 1 }; //~ ERROR expected struct, variant or union type, found builtin type `u32` match () { - A { x: 1 } => {} //~ ERROR expected variant, struct or type alias, found module `A` - u32 { x: 1 } => {} //~ ERROR expected variant, struct or type alias, found builtin type `u32 + A { x: 1 } => {} + //~^ ERROR expected struct, variant or union type, found module `A` + u32 { x: 1 } => {} + //~^ ERROR expected struct, variant or union type, found builtin type 
`u32` } } diff --git a/src/test/compile-fail/issue-2848.rs b/src/test/compile-fail/issue-2848.rs index e5503edfab5e1..f5e0c545bb524 100644 --- a/src/test/compile-fail/issue-2848.rs +++ b/src/test/compile-fail/issue-2848.rs @@ -19,7 +19,7 @@ mod bar { fn main() { use bar::foo::{alpha, charlie}; match alpha { - alpha | beta => {} //~ ERROR variable `beta` from pattern #2 is not bound in pattern #1 - charlie => {} + alpha | beta => {} //~ ERROR variable `beta` from pattern #2 is not bound in pattern #1 + charlie => {} //~| NOTE pattern doesn't bind `beta` } } diff --git a/src/test/compile-fail/issue-28992-empty.rs b/src/test/compile-fail/issue-28992-empty.rs index e492d48fdaf0f..d47fdda0203e8 100644 --- a/src/test/compile-fail/issue-28992-empty.rs +++ b/src/test/compile-fail/issue-28992-empty.rs @@ -21,6 +21,7 @@ impl S { } fn main() { - if let C1(..) = 0 {} //~ ERROR expected variant or struct, found constant `C1` - if let S::C2(..) = 0 {} //~ ERROR `S::C2` does not name a tuple variant or a tuple struct + if let C1(..) = 0 {} //~ ERROR expected tuple struct/variant, found constant `C1` + if let S::C2(..) = 0 {} + //~^ ERROR expected tuple struct/variant, found associated constant `S::C2` } diff --git a/src/test/compile-fail/issue-3044.rs b/src/test/compile-fail/issue-3044.rs index b934cbe4b5d87..c7b276da57376 100644 --- a/src/test/compile-fail/issue-3044.rs +++ b/src/test/compile-fail/issue-3044.rs @@ -10,7 +10,7 @@ fn main() { - let needlesArr: Vec = vec!('a', 'f'); + let needlesArr: Vec = vec!['a', 'f']; needlesArr.iter().fold(|x, y| { }); //~^^ ERROR this function takes 2 parameters but 1 parameter was supplied diff --git a/src/test/compile-fail/issue-31011.rs b/src/test/compile-fail/issue-31011.rs index b828b11030d71..716b0bbe77294 100644 --- a/src/test/compile-fail/issue-31011.rs +++ b/src/test/compile-fail/issue-31011.rs @@ -11,7 +11,7 @@ macro_rules! log { ( $ctx:expr, $( $args:expr),* ) => { if $ctx.trace { - //~^ ERROR attempted access of field `trace` on type `&T`, but no field with that name + //~^ no field `trace` on type `&T` println!( $( $args, )* ); } } diff --git a/src/test/compile-fail/issue-31109.rs b/src/test/compile-fail/issue-31109.rs index 63b3d58b8231b..e3548d740717f 100644 --- a/src/test/compile-fail/issue-31109.rs +++ b/src/test/compile-fail/issue-31109.rs @@ -11,5 +11,7 @@ fn main() { // FIXME(#31407) this error should go away, but in the meantime we test that it // is accompanied by a somewhat useful error message. 
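On the or-pattern rule behind the issue-2848.rs annotations, a minimal compiling sketch follows; the enum `E` and binding `n` are invented for illustration:

```rust
enum E { A(i32), B(i32) }

fn main() {
    // Every alternative of an or-pattern must bind the same names; leaving
    // `n` out of one alternative is the "pattern doesn't bind" case noted above.
    let n = match E::B(7) {
        E::A(n) | E::B(n) => n,
    };
    assert_eq!(n, 7);
}
```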
- let _: f64 = 1234567890123456789012345678901234567890e-340; //~ ERROR could not evaluate float + let _: f64 = 1234567890123456789012345678901234567890e-340; + //~^ ERROR constant evaluation error + //~| unimplemented constant expression: could not evaluate float literal } diff --git a/src/test/compile-fail/issue-32004.rs b/src/test/compile-fail/issue-32004.rs index 576451f7292a8..7e1f4c28d21e9 100644 --- a/src/test/compile-fail/issue-32004.rs +++ b/src/test/compile-fail/issue-32004.rs @@ -18,12 +18,12 @@ struct S; fn main() { match Foo::Baz { Foo::Bar => {} - //~^ ERROR `Foo::Bar` does not name a unit variant, unit struct or a constant + //~^ ERROR expected unit struct/variant or constant, found tuple variant `Foo::Bar` _ => {} } match S { S(()) => {} - //~^ ERROR `S` does not name a tuple variant or a tuple struct + //~^ ERROR expected tuple struct/variant, found unit struct `S` } } diff --git a/src/test/compile-fail/issue-32086.rs b/src/test/compile-fail/issue-32086.rs index 926f58198dfae..dd236b76a6739 100644 --- a/src/test/compile-fail/issue-32086.rs +++ b/src/test/compile-fail/issue-32086.rs @@ -12,6 +12,6 @@ struct S(u8); const C: S = S(10); fn main() { - let C(a) = S(11); //~ ERROR expected variant or struct, found constant `C` - let C(..) = S(11); //~ ERROR expected variant or struct, found constant `C` + let C(a) = S(11); //~ ERROR expected tuple struct/variant, found constant `C` + let C(..) = S(11); //~ ERROR expected tuple struct/variant, found constant `C` } diff --git a/src/test/compile-fail/issue-32709.rs b/src/test/compile-fail/issue-32709.rs index f9d11f3a171c2..09538818dcdf9 100644 --- a/src/test/compile-fail/issue-32709.rs +++ b/src/test/compile-fail/issue-32709.rs @@ -8,12 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(question_mark)] - // Make sure that the span of try shorthand does not include the trailing // semicolon; fn a() -> Result { - Err(5)?; //~ ERROR 16:5: 16:12 + Err(5)?; //~ ERROR 14:5: 14:12 Ok(1) } diff --git a/src/test/compile-fail/issue-32922.rs b/src/test/compile-fail/issue-32922.rs index 491c087c101de..317a47156c1ac 100644 --- a/src/test/compile-fail/issue-32922.rs +++ b/src/test/compile-fail/issue-32922.rs @@ -17,7 +17,7 @@ macro_rules! foo { () => { let _ = bar!(); }} -macro_rules! bar { // test issue #31856 +macro_rules! m { // test issue #31856 ($n:ident) => ( let a = 1; let $n = a; diff --git a/src/test/compile-fail/issue-33876.rs b/src/test/compile-fail/issue-33876.rs index d95890730a0f2..87747d2851f5e 100644 --- a/src/test/compile-fail/issue-33876.rs +++ b/src/test/compile-fail/issue-33876.rs @@ -8,9 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(reflect_marker)] - -use std::marker::Reflect; use std::any::Any; struct Foo; diff --git a/src/test/compile-fail/issue-36116.rs b/src/test/compile-fail/issue-36116.rs new file mode 100644 index 0000000000000..9abf2b5ec3ad3 --- /dev/null +++ b/src/test/compile-fail/issue-36116.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
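Since issue-32709.rs drops the `question_mark` feature gate, a minimal sketch of the now-stable `?` shorthand may help; `double` is an invented helper:

```rust
use std::num::ParseIntError;

fn double(s: &str) -> Result<i32, ParseIntError> {
    // `?` returns early with the parse error, the same early-return span
    // that issue-32709.rs checks.
    let n: i32 = s.parse()?;
    Ok(n * 2)
}

fn main() {
    assert_eq!(double("21").unwrap(), 42);
    assert!(double("x").is_err());
}
```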
+ +struct Foo { + _a: T, +} + +fn main() { + let f = Some(Foo { _a: 42 }).map(|a| a as Foo::); + //~^ ERROR unexpected token: `::` + //~| HELP use `<...>` instead of `::<...>` if you meant to specify type arguments + + let g: Foo:: = Foo { _a: 42 }; + //~^ ERROR unexpected token: `::` + //~| HELP use `<...>` instead of `::<...>` if you meant to specify type arguments +} diff --git a/src/test/compile-fail/issue-36617.rs b/src/test/compile-fail/issue-36617.rs new file mode 100644 index 0000000000000..9f5eeb1a45dc8 --- /dev/null +++ b/src/test/compile-fail/issue-36617.rs @@ -0,0 +1,11 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![derive(Copy)] //~ ERROR `derive` may only be applied to structs, enums and unions diff --git a/src/test/compile-fail/issue-36638.rs b/src/test/compile-fail/issue-36638.rs new file mode 100644 index 0000000000000..5e43536ef3f90 --- /dev/null +++ b/src/test/compile-fail/issue-36638.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z continue-parse-after-error + +struct Foo(Self); +//~^ ERROR expected identifier, found keyword `Self` + +trait Bar {} +//~^ ERROR expected identifier, found keyword `Self` + +fn main() {} diff --git a/src/test/compile-fail/issue-36708.rs b/src/test/compile-fail/issue-36708.rs new file mode 100644 index 0000000000000..6146258fa8d44 --- /dev/null +++ b/src/test/compile-fail/issue-36708.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:issue-36708.rs + +extern crate issue_36708 as lib; + +struct Bar; + +impl lib::Foo for Bar { + fn foo() {} + //~^ ERROR E0049 + //~| NOTE found 1 type parameter, expected 0 +} + +fn main() {} diff --git a/src/test/compile-fail/issue-36881.rs b/src/test/compile-fail/issue-36881.rs new file mode 100644 index 0000000000000..cca20e968e0c4 --- /dev/null +++ b/src/test/compile-fail/issue-36881.rs @@ -0,0 +1,14 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + extern crate rand; + use rand::Rng; //~ ERROR unresolved import +} diff --git a/src/test/compile-fail/issue-37026.rs b/src/test/compile-fail/issue-37026.rs new file mode 100644 index 0000000000000..95fd5d1222e69 --- /dev/null +++ b/src/test/compile-fail/issue-37026.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:empty-struct.rs + +extern crate empty_struct; + +fn main() { + let empty_struct::XEmpty2 = (); //~ ERROR mismatched types + let empty_struct::XEmpty6(..) = (); //~ ERROR mismatched types +} diff --git a/src/test/compile-fail/issue-5067.rs b/src/test/compile-fail/issue-5067.rs new file mode 100644 index 0000000000000..1c543a5fdacbb --- /dev/null +++ b/src/test/compile-fail/issue-5067.rs @@ -0,0 +1,62 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +macro_rules! foo { + ( $()* ) => {}; + //~^ ERROR repetition matches empty token tree + ( $()+ ) => {}; + //~^ ERROR repetition matches empty token tree + + ( $(),* ) => {}; // PASS + ( $(),+ ) => {}; // PASS + + ( [$()*] ) => {}; + //~^ ERROR repetition matches empty token tree + ( [$()+] ) => {}; + //~^ ERROR repetition matches empty token tree + + ( [$(),*] ) => {}; // PASS + ( [$(),+] ) => {}; // PASS + + ( $($()* $(),* $(a)* $(a),* )* ) => {}; + //~^ ERROR repetition matches empty token tree + ( $($()* $(),* $(a)* $(a),* )+ ) => {}; + //~^ ERROR repetition matches empty token tree + + ( $(a $(),* $(a)* $(a),* )* ) => {}; // PASS + ( $($(a)+ $(),* $(a)* $(a),* )+ ) => {}; // PASS + + ( $(a $()+)* ) => {}; + //~^ ERROR repetition matches empty token tree + ( $(a $()*)+ ) => {}; + //~^ ERROR repetition matches empty token tree +} + + +// --- Original Issue --- // + +macro_rules! make_vec { + (a $e1:expr $($(, a $e2:expr)*)*) => ([$e1 $($(, $e2)*)*]); + //~^ ERROR repetition matches empty token tree +} + +fn main() { + let _ = make_vec![a 1, a 2, a 3]; +} + + +// --- Minified Issue --- // + +macro_rules! m { + ( $()* ) => {} + //~^ ERROR repetition matches empty token tree +} + +m!(); diff --git a/src/test/compile-fail/issue-5883.rs b/src/test/compile-fail/issue-5883.rs index 019a7bdc734d4..e14d9f3a35c84 100644 --- a/src/test/compile-fail/issue-5883.rs +++ b/src/test/compile-fail/issue-5883.rs @@ -20,6 +20,4 @@ fn new_struct(r: A+'static) Struct { r: r } } -trait Curve {} -enum E {X(Curve+'static)} fn main() {} diff --git a/src/test/compile-fail/issue-5927.rs b/src/test/compile-fail/issue-5927.rs index 3a8ff12429ab3..7668a2117a265 100644 --- a/src/test/compile-fail/issue-5927.rs +++ b/src/test/compile-fail/issue-5927.rs @@ -11,7 +11,7 @@ fn main() { let z = match 3 { - x(1) => x(1) //~ ERROR unresolved variant or struct `x` + x(1) => x(1) //~ ERROR unresolved tuple struct/variant `x` //~^ ERROR unresolved name `x` }; assert!(z == 3); diff --git a/src/test/compile-fail/issue-6804.rs b/src/test/compile-fail/issue-6804.rs index f6b7e13c4f5e9..f7d3ce60c66cc 100644 --- a/src/test/compile-fail/issue-6804.rs +++ b/src/test/compile-fail/issue-6804.rs @@ -8,29 +8,24 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
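For the rule pinned down by the new issue-5067.rs test, here is a minimal sketch of a repetition whose fragment is non-empty and therefore accepted; `count_args` is an invented macro name:

```rust
// `$()*`-style repetitions match the empty token tree and are rejected;
// repeating a real fragment such as `$x:expr` is fine.
macro_rules! count_args {
    ( $( $x:expr ),* ) => {
        [ $( $x ),* ].len()
    };
}

fn main() {
    assert_eq!(count_args!(1, 2, 3), 3);
}
```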
-#![feature(rustc_attrs)] -#![feature(slice_patterns)] -#![allow(dead_code)] - // Matching against NaN should result in a warning +#![feature(slice_patterns)] +#![allow(unused)] + use std::f64::NAN; -#[rustc_error] -fn main() { //~ ERROR compilation successful +fn main() { let x = NAN; match x { - NAN => {}, + NAN => {}, //~ ERROR floating point constants cannot be used + //~| WARNING hard error _ => {}, }; - //~^^^ WARNING unmatchable NaN in pattern, use the is_nan method in a guard instead - //~| WARNING floating point constants cannot be used - //~| WARNING this was previously accepted + match [x, 1.0] { - [NAN, _] => {}, + [NAN, _] => {}, //~ ERROR floating point constants cannot be used + //~| WARNING hard error _ => {}, }; - //~^^^ WARNING unmatchable NaN in pattern, use the is_nan method in a guard instead - //~| WARNING floating point constants cannot be used - //~| WARNING this was previously accepted } diff --git a/src/test/compile-fail/issue-7970a.rs b/src/test/compile-fail/issue-7970a.rs index 114db74f42048..b97c3037770a6 100644 --- a/src/test/compile-fail/issue-7970a.rs +++ b/src/test/compile-fail/issue-7970a.rs @@ -8,7 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +macro_rules! one_arg_macro { + ($fmt:expr) => (print!(concat!($fmt, "\n"))); +} + fn main() { - println!(); + one_arg_macro!(); //~^ ERROR unexpected end of macro invocation } diff --git a/src/test/run-pass/issue-pr29383.rs b/src/test/compile-fail/issue-pr29383.rs similarity index 74% rename from src/test/run-pass/issue-pr29383.rs rename to src/test/compile-fail/issue-pr29383.rs index defb2c164da9b..b60c537e1e6f7 100644 --- a/src/test/run-pass/issue-pr29383.rs +++ b/src/test/compile-fail/issue-pr29383.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![allow(match_of_unit_variant_via_paren_dotdot)] - enum E { A, B, @@ -18,7 +16,7 @@ enum E { fn main() { match None { None => {} - Some(E::A(..)) => {} - Some(E::B(..)) => {} + Some(E::A(..)) => {} //~ ERROR expected tuple struct/variant, found unit variant `E::A` + Some(E::B(..)) => {} //~ ERROR expected tuple struct/variant, found unit variant `E::B` } } diff --git a/src/test/parse-fail/keyword-self-as-identifier.rs b/src/test/compile-fail/keyword-self-as-identifier.rs similarity index 81% rename from src/test/parse-fail/keyword-self-as-identifier.rs rename to src/test/compile-fail/keyword-self-as-identifier.rs index f8b93a1796bfe..650874711a669 100644 --- a/src/test/parse-fail/keyword-self-as-identifier.rs +++ b/src/test/compile-fail/keyword-self-as-identifier.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z parse-only - fn main() { - let Self = "foo"; //~ error: expected identifier, found keyword `Self` + let Self = "foo"; //~ ERROR unresolved unit struct/variant or constant `Self` } diff --git a/src/test/parse-fail/keyword-super-as-identifier.rs b/src/test/compile-fail/keyword-super-as-identifier.rs similarity index 81% rename from src/test/parse-fail/keyword-super-as-identifier.rs rename to src/test/compile-fail/keyword-super-as-identifier.rs index a48683a4f54dc..531705563e2e0 100644 --- a/src/test/parse-fail/keyword-super-as-identifier.rs +++ b/src/test/compile-fail/keyword-super-as-identifier.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// compile-flags: -Z parse-only - fn main() { - let super = "foo"; //~ error: expected identifier, found keyword `super` + let super = "foo"; //~ ERROR unresolved unit struct/variant or constant `super` } diff --git a/src/test/parse-fail/keyword-super.rs b/src/test/compile-fail/keyword-super.rs similarity index 81% rename from src/test/parse-fail/keyword-super.rs rename to src/test/compile-fail/keyword-super.rs index 671be8c44b9c6..9ac9e800c843b 100644 --- a/src/test/parse-fail/keyword-super.rs +++ b/src/test/compile-fail/keyword-super.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z parse-only - fn main() { - let super: isize; //~ ERROR expected identifier, found keyword `super` + let super: isize; //~ ERROR unresolved unit struct/variant or constant `super` } diff --git a/src/test/compile-fail/lexical-scopes.rs b/src/test/compile-fail/lexical-scopes.rs index 505a91f223cc6..1ab59e790d7ee 100644 --- a/src/test/compile-fail/lexical-scopes.rs +++ b/src/test/compile-fail/lexical-scopes.rs @@ -10,7 +10,7 @@ struct T { i: i32 } fn f() { - let t = T { i: 0 }; //~ ERROR `T` does not name a struct or a struct variant + let t = T { i: 0 }; //~ ERROR expected struct, variant or union type, found type parameter `T` } mod Foo { diff --git a/src/test/compile-fail/lifetime-underscore.rs b/src/test/compile-fail/lifetime-underscore.rs index 102d3576e5467..b768009132ca9 100644 --- a/src/test/compile-fail/lifetime-underscore.rs +++ b/src/test/compile-fail/lifetime-underscore.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![deny(lifetime_underscore)] - fn _f<'_>() //~ ERROR invalid lifetime name `'_` //~^ WARN this was previously accepted -> &'_ u8 //~ ERROR invalid lifetime name `'_` diff --git a/src/test/compile-fail/lint-group-style.rs b/src/test/compile-fail/lint-group-style.rs index 393e46ab5394c..b2e6072c9855c 100644 --- a/src/test/compile-fail/lint-group-style.rs +++ b/src/test/compile-fail/lint-group-style.rs @@ -24,7 +24,7 @@ mod test { mod bad { fn CamelCase() {} //~ ERROR function `CamelCase` should have a snake case name - static bad: isize = 1; //~ ERROR static constant `bad` should have an upper case name + static bad: isize = 1; //~ ERROR static variable `bad` should have an upper case name } mod warn { diff --git a/src/test/compile-fail/lint-non-uppercase-statics.rs b/src/test/compile-fail/lint-non-uppercase-statics.rs index e1fbc73bbed15..463a93612ca00 100644 --- a/src/test/compile-fail/lint-non-uppercase-statics.rs +++ b/src/test/compile-fail/lint-non-uppercase-statics.rs @@ -11,6 +11,9 @@ #![forbid(non_upper_case_globals)] #![allow(dead_code)] -static foo: isize = 1; //~ ERROR static constant `foo` should have an upper case name such as `FOO` +static foo: isize = 1; //~ ERROR static variable `foo` should have an upper case name such as `FOO` + +static mut bar: isize = 1; + //~^ ERROR static variable `bar` should have an upper case name such as `BAR` fn main() { } diff --git a/src/test/compile-fail/lint-qualification.rs b/src/test/compile-fail/lint-qualification.rs index 0ad3d2c5e7317..af9b21dadd1d0 100644 --- a/src/test/compile-fail/lint-qualification.rs +++ b/src/test/compile-fail/lint-qualification.rs @@ -18,4 +18,11 @@ fn main() { use foo::bar; foo::bar(); //~ ERROR: unnecessary qualification bar(); + + let _ = || -> Result<(), ()> { try!(Ok(())); Ok(()) }; // issue #37345 + + macro_rules! 
m { + () => { $crate::foo::bar(); } + } + m!(); // issue #37357 } diff --git a/src/test/compile-fail/lint-unused-extern-crate.rs b/src/test/compile-fail/lint-unused-extern-crate.rs index 854c51d0769e7..52cb84f662dd0 100644 --- a/src/test/compile-fail/lint-unused-extern-crate.rs +++ b/src/test/compile-fail/lint-unused-extern-crate.rs @@ -26,6 +26,8 @@ extern crate rand; // no error, the use marks it as used extern crate lint_unused_extern_crate as other; // no error, the use * marks it as used +#[macro_use] extern crate core; // no error, the `#[macro_use]` marks it as used + #[allow(unused_imports)] use rand::isaac::IsaacRng; diff --git a/src/test/compile-fail/lint-unused-imports.rs b/src/test/compile-fail/lint-unused-imports.rs index 239f380e6c4ab..3f91c3e1e5c79 100644 --- a/src/test/compile-fail/lint-unused-imports.rs +++ b/src/test/compile-fail/lint-unused-imports.rs @@ -17,19 +17,19 @@ use std::mem::*; // shouldn't get errors for not using // everything imported // Should get errors for both 'Some' and 'None' -use std::option::Option::{Some, None}; //~ ERROR unused import - //~^ ERROR unused import +use std::option::Option::{Some, None}; //~ ERROR unused import: `Some` + //~^ ERROR unused import: `None` -use test::A; //~ ERROR unused import +use test::A; //~ ERROR unused import: `test::A` // Be sure that if we just bring some methods into scope that they're also // counted as being used. use test::B; // But only when actually used: do not get confused by the method with the same name. -use test::B2; //~ ERROR unused import +use test::B2; //~ ERROR unused import: `test::B2` // Make sure this import is warned about when at least one of its imported names // is unused -use test2::{foo, bar}; //~ ERROR unused import +use test2::{foo, bar}; //~ ERROR unused import: `bar` mod test2 { pub fn foo() {} @@ -57,7 +57,7 @@ mod bar { pub mod c { use foo::Point; - use foo::Square; //~ ERROR unused import + use foo::Square; //~ ERROR unused import: `foo::Square` pub fn cc(_p: Point) -> super::Square { fn f() -> super::Square { super::Square @@ -73,7 +73,7 @@ mod bar { } fn g() { - use self::g; //~ ERROR unused import + use self::g; //~ ERROR unused import: `self::g` fn f() { self::g(); } @@ -82,7 +82,7 @@ fn g() { // c.f. 
issue #35135 #[allow(unused_variables)] fn h() { - use test2::foo; //~ ERROR unused import + use test2::foo; //~ ERROR unused import: `test2::foo` let foo = 0; } diff --git a/src/test/compile-fail/lint-unused-mut-variables.rs b/src/test/compile-fail/lint-unused-mut-variables.rs index 8165dd0fa29c0..21cfadb9c7992 100644 --- a/src/test/compile-fail/lint-unused-mut-variables.rs +++ b/src/test/compile-fail/lint-unused-mut-variables.rs @@ -21,7 +21,7 @@ fn main() { let mut a = 3; //~ ERROR: variable does not need to be mutable let mut a = 2; //~ ERROR: variable does not need to be mutable let mut b = 3; //~ ERROR: variable does not need to be mutable - let mut a = vec!(3); //~ ERROR: variable does not need to be mutable + let mut a = vec![3]; //~ ERROR: variable does not need to be mutable let (mut a, b) = (1, 2); //~ ERROR: variable does not need to be mutable let mut a; //~ ERROR: variable does not need to be mutable a = 3; @@ -88,5 +88,5 @@ fn callback(f: F) where F: FnOnce() {} #[allow(unused_mut)] fn foo(mut a: isize) { let mut a = 3; - let mut b = vec!(2); + let mut b = vec![2]; } diff --git a/src/test/compile-fail/macro-context.rs b/src/test/compile-fail/macro-context.rs index 4aa0a3023bb10..80802e19f8401 100644 --- a/src/test/compile-fail/macro-context.rs +++ b/src/test/compile-fail/macro-context.rs @@ -14,11 +14,8 @@ macro_rules! m { //~| ERROR macro expansion ignores token `typeof` //~| ERROR macro expansion ignores token `;` //~| ERROR macro expansion ignores token `;` - //~| ERROR macro expansion ignores token `i` } -m!(); //~ NOTE the usage of `m!` is likely invalid in item context - fn main() { let a: m!(); //~ NOTE the usage of `m!` is likely invalid in type context let i = m!(); //~ NOTE the usage of `m!` is likely invalid in expression context diff --git a/src/test/compile-fail/macro-shadowing.rs b/src/test/compile-fail/macro-shadowing.rs new file mode 100644 index 0000000000000..8381dc34a6a15 --- /dev/null +++ b/src/test/compile-fail/macro-shadowing.rs @@ -0,0 +1,39 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:two_macros.rs + +macro_rules! foo { () => {} } +macro_rules! macro_one { () => {} } +#[macro_use(macro_two)] extern crate two_macros; + +macro_rules! m1 { () => { + macro_rules! foo { () => {} } //~ ERROR `foo` is already in scope + //~^ NOTE macro-expanded `macro_rules!`s may not shadow existing macros + + #[macro_use] //~ ERROR `macro_two` is already in scope + //~^ NOTE macro-expanded `#[macro_use]`s may not shadow existing macros + extern crate two_macros as __; +}} +m1!(); //~ NOTE in this expansion + //~| NOTE in this expansion + //~| NOTE in this expansion + //~| NOTE in this expansion + +foo!(); + +macro_rules! m2 { () => { + macro_rules! foo { () => {} } + foo!(); +}} +m2!(); +//^ Since `foo` is not used outside this expansion, it is not a shadowing error. 
+ +fn main() {} diff --git a/src/test/compile-fail/macro-use-scope.rs b/src/test/compile-fail/macro-use-scope.rs index 5256396a242c9..9d389413ba9ad 100644 --- a/src/test/compile-fail/macro-use-scope.rs +++ b/src/test/compile-fail/macro-use-scope.rs @@ -19,10 +19,10 @@ fn f() { #[macro_use(macro_one)] // Check that this macro is usable in the above function extern crate two_macros; +fn g() { + macro_two!(); +} macro_rules! m { () => { - fn g() { - macro_two!(); - } #[macro_use(macro_two)] // Check that this macro is usable in the above function extern crate two_macros as _two_macros; } } diff --git a/src/test/compile-fail/match-byte-array-patterns.rs b/src/test/compile-fail/match-byte-array-patterns.rs new file mode 100644 index 0000000000000..86323656b873e --- /dev/null +++ b/src/test/compile-fail/match-byte-array-patterns.rs @@ -0,0 +1,73 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(advanced_slice_patterns, slice_patterns)] + +fn main() { + let buf = &[0, 1, 2, 3]; + + match buf { + b"AAAA" => {}, + &[0x41, 0x41, 0x41, 0x41] => {} //~ ERROR unreachable pattern + _ => {} + } + + match buf { + &[0x41, 0x41, 0x41, 0x41] => {} + b"AAAA" => {}, //~ ERROR unreachable pattern + _ => {} + } + + match buf { + &[_, 0x41, 0x41, 0x41] => {}, + b"AAAA" => {}, //~ ERROR unreachable pattern + _ => {} + } + + match buf { + &[0x41, .., 0x41] => {} + b"AAAA" => {}, //~ ERROR unreachable pattern + _ => {} + } + + match buf { //~ ERROR non-exhaustive + b"AAAA" => {} + } + + let buf: &[u8] = buf; + + match buf { + b"AAAA" => {}, + &[0x41, 0x41, 0x41, 0x41] => {} //~ ERROR unreachable pattern + _ => {} + } + + match buf { + &[0x41, 0x41, 0x41, 0x41] => {} + b"AAAA" => {}, //~ ERROR unreachable pattern + _ => {} + } + + match buf { + &[_, 0x41, 0x41, 0x41] => {}, + b"AAAA" => {}, //~ ERROR unreachable pattern + _ => {} + } + + match buf { + &[0x41, .., 0x41] => {} + b"AAAA" => {}, //~ ERROR unreachable pattern + _ => {} + } + + match buf { //~ ERROR non-exhaustive + b"AAAA" => {} + } +} diff --git a/src/test/compile-fail/match-pattern-field-mismatch-2.rs b/src/test/compile-fail/match-pattern-field-mismatch-2.rs index a4ba93ea17333..aed9130d60e4b 100644 --- a/src/test/compile-fail/match-pattern-field-mismatch-2.rs +++ b/src/test/compile-fail/match-pattern-field-mismatch-2.rs @@ -20,7 +20,7 @@ fn main() { color::rgb(_, _, _) => { } color::cmyk(_, _, _, _) => { } color::no_color(_) => { } - //~^ ERROR `color::no_color` does not name a tuple variant or a tuple struct + //~^ ERROR expected tuple struct/variant, found unit variant `color::no_color` } } } diff --git a/src/test/compile-fail/match-pattern-field-mismatch.rs b/src/test/compile-fail/match-pattern-field-mismatch.rs index 8426ecdaf995e..ddd5d63317011 100644 --- a/src/test/compile-fail/match-pattern-field-mismatch.rs +++ b/src/test/compile-fail/match-pattern-field-mismatch.rs @@ -18,7 +18,7 @@ fn main() { fn foo(c: color) { match c { color::rgb(_, _) => { } - //~^ ERROR this pattern has 2 fields, but the corresponding variant has 3 fields + //~^ ERROR this pattern has 2 fields, but the corresponding tuple variant has 3 fields color::cmyk(_, _, _, _) => { } color::no_color => { } } diff --git 
a/src/test/compile-fail/match-vec-unreachable.rs b/src/test/compile-fail/match-vec-unreachable.rs index 57e3a58b5660e..4d9b3aea1124b 100644 --- a/src/test/compile-fail/match-vec-unreachable.rs +++ b/src/test/compile-fail/match-vec-unreachable.rs @@ -29,7 +29,7 @@ fn main() { _ => { } } - let x: Vec = vec!('a', 'b', 'c'); + let x: Vec = vec!['a', 'b', 'c']; let x: &[char] = &x; match *x { ['a', 'b', 'c', ref _tail..] => {} diff --git a/src/test/compile-fail/meta-expected-error-correct-rev.rs b/src/test/compile-fail/meta-expected-error-correct-rev.rs index 95b4e1a33cccd..bd70879d13e5f 100644 --- a/src/test/compile-fail/meta-expected-error-correct-rev.rs +++ b/src/test/compile-fail/meta-expected-error-correct-rev.rs @@ -9,7 +9,6 @@ // except according to those terms. // revisions: a -// pretty-expanded FIXME #23616 // Counterpart to `meta-expected-error-wrong-rev.rs` diff --git a/src/test/compile-fail/meta-expected-error-wrong-rev.rs b/src/test/compile-fail/meta-expected-error-wrong-rev.rs index 084c6ed4f4b41..3c13050812c93 100644 --- a/src/test/compile-fail/meta-expected-error-wrong-rev.rs +++ b/src/test/compile-fail/meta-expected-error-wrong-rev.rs @@ -10,7 +10,6 @@ // revisions: a // should-fail -// pretty-expanded FIXME #23616 // This is a "meta-test" of the compilertest framework itself. In // particular, it includes the right error message, but the message diff --git a/src/test/compile-fail/method-path-in-pattern.rs b/src/test/compile-fail/method-path-in-pattern.rs index ef011c89c622b..aaa89b2282967 100644 --- a/src/test/compile-fail/method-path-in-pattern.rs +++ b/src/test/compile-fail/method-path-in-pattern.rs @@ -22,13 +22,13 @@ impl MyTrait for Foo {} fn main() { match 0u32 { - Foo::bar => {} //~ ERROR `Foo::bar` does not name a unit variant, unit struct or a constant + Foo::bar => {} //~ ERROR expected unit struct/variant or constant, found method `Foo::bar` } match 0u32 { - ::bar => {} //~ ERROR `bar` does not name a unit variant, unit struct or a constant + ::bar => {} //~ ERROR expected unit struct/variant or constant, found method `bar` } match 0u32 { ::trait_bar => {} - //~^ ERROR `trait_bar` does not name a unit variant, unit struct or a constant + //~^ ERROR expected unit struct/variant or constant, found method `trait_bar` } } diff --git a/src/test/compile-fail/method-resolvable-path-in-pattern.rs b/src/test/compile-fail/method-resolvable-path-in-pattern.rs index 3ae792f9c0f37..4d8959466b948 100644 --- a/src/test/compile-fail/method-resolvable-path-in-pattern.rs +++ b/src/test/compile-fail/method-resolvable-path-in-pattern.rs @@ -19,6 +19,6 @@ impl MyTrait for Foo {} fn main() { match 0u32 { ::trait_bar => {} - //~^ ERROR expected variant, struct or constant, found method `trait_bar` + //~^ ERROR expected unit struct/variant or constant, found method `MyTrait::trait_bar` } } diff --git a/src/test/compile-fail/mir-dataflow/def-inits-1.rs b/src/test/compile-fail/mir-dataflow/def-inits-1.rs index a133ddc15f1ac..1ba1bb35bb5f3 100644 --- a/src/test/compile-fail/mir-dataflow/def-inits-1.rs +++ b/src/test/compile-fail/mir-dataflow/def-inits-1.rs @@ -11,7 +11,6 @@ // General test of maybe_uninits state computed by MIR dataflow. 
#![feature(rustc_attrs)] -#![feature(stmt_expr_attributes)] use std::intrinsics::rustc_peek; use std::mem::{drop, replace}; diff --git a/src/test/compile-fail/mir-dataflow/inits-1.rs b/src/test/compile-fail/mir-dataflow/inits-1.rs index 949688098f622..c8cf44adb9719 100644 --- a/src/test/compile-fail/mir-dataflow/inits-1.rs +++ b/src/test/compile-fail/mir-dataflow/inits-1.rs @@ -11,7 +11,6 @@ // General test of maybe_inits state computed by MIR dataflow. #![feature(rustc_attrs)] -#![feature(stmt_expr_attributes)] use std::intrinsics::rustc_peek; use std::mem::{drop, replace}; diff --git a/src/test/compile-fail/mir-dataflow/uninits-1.rs b/src/test/compile-fail/mir-dataflow/uninits-1.rs index c13daae24f35d..a82bfc8969880 100644 --- a/src/test/compile-fail/mir-dataflow/uninits-1.rs +++ b/src/test/compile-fail/mir-dataflow/uninits-1.rs @@ -11,7 +11,6 @@ // General test of maybe_uninits state computed by MIR dataflow. #![feature(rustc_attrs)] -#![feature(stmt_expr_attributes)] use std::intrinsics::rustc_peek; use std::mem::{drop, replace}; diff --git a/src/test/compile-fail/mir-dataflow/uninits-2.rs b/src/test/compile-fail/mir-dataflow/uninits-2.rs index 94f812a40a9b5..8cfdae506625e 100644 --- a/src/test/compile-fail/mir-dataflow/uninits-2.rs +++ b/src/test/compile-fail/mir-dataflow/uninits-2.rs @@ -11,7 +11,6 @@ // General test of maybe_uninits state computed by MIR dataflow. #![feature(rustc_attrs)] -#![feature(stmt_expr_attributes)] use std::intrinsics::rustc_peek; use std::mem::{drop, replace}; diff --git a/src/test/compile-fail/moves-based-on-type-access-to-field.rs b/src/test/compile-fail/moves-based-on-type-access-to-field.rs index b8572fbd2150d..63fb4ff02a4ea 100644 --- a/src/test/compile-fail/moves-based-on-type-access-to-field.rs +++ b/src/test/compile-fail/moves-based-on-type-access-to-field.rs @@ -16,7 +16,7 @@ fn consume(_s: String) {} fn touch(_a: &A) {} fn f20() { - let x = vec!("hi".to_string()); + let x = vec!["hi".to_string()]; consume(x.into_iter().next().unwrap()); touch(&x[0]); //~ ERROR use of moved value: `x` } diff --git a/src/test/compile-fail/moves-based-on-type-exprs.rs b/src/test/compile-fail/moves-based-on-type-exprs.rs index 9ad44567a41f2..194f278259b6b 100644 --- a/src/test/compile-fail/moves-based-on-type-exprs.rs +++ b/src/test/compile-fail/moves-based-on-type-exprs.rs @@ -29,7 +29,7 @@ fn f20() { } fn f21() { - let x = vec!(1, 2, 3); + let x = vec![1, 2, 3]; let _y = (x[0], 3); touch(&x); } @@ -77,24 +77,24 @@ fn f70() { fn f80() { let x = "hi".to_string(); - let _y = vec!(x); + let _y = vec![x]; touch(&x); //~ ERROR use of moved value: `x` } fn f100() { - let x = vec!("hi".to_string()); + let x = vec!["hi".to_string()]; let _y = x.into_iter().next().unwrap(); touch(&x); //~ ERROR use of moved value: `x` } fn f110() { - let x = vec!("hi".to_string()); + let x = vec!["hi".to_string()]; let _y = [x.into_iter().next().unwrap(); 1]; touch(&x); //~ ERROR use of moved value: `x` } fn f120() { - let mut x = vec!("hi".to_string(), "ho".to_string()); + let mut x = vec!["hi".to_string(), "ho".to_string()]; x.swap(0, 1); touch(&x[0]); touch(&x[1]); diff --git a/src/test/compile-fail/name-clash-nullary.rs b/src/test/compile-fail/name-clash-nullary.rs index 2e2d53c4d40b6..4c76c4b8b0265 100644 --- a/src/test/compile-fail/name-clash-nullary.rs +++ b/src/test/compile-fail/name-clash-nullary.rs @@ -11,7 +11,7 @@ use std::option::*; fn main() { - let None: isize = 42; //~ ERROR let bindings cannot shadow variants + let None: isize = 42; //~ ERROR let bindings cannot shadow unit 
variants log(debug, None); //~^ ERROR unresolved name `debug` //~| ERROR unresolved name `log` diff --git a/src/test/compile-fail/namespace-mix-new.rs b/src/test/compile-fail/namespace-mix-new.rs new file mode 100644 index 0000000000000..0abe8bd439093 --- /dev/null +++ b/src/test/compile-fail/namespace-mix-new.rs @@ -0,0 +1,167 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:namespace-mix-new.rs + +#![feature(item_like_imports, relaxed_adts)] + +extern crate namespace_mix_new; +use namespace_mix_new::*; + +mod c { + pub struct S {} + pub struct TS(); + pub struct US; + pub enum E { + V {}, + TV(), + UV, + } + + pub struct Item; +} + +// Use something emitting the type argument name, e.g. unsatisfied bound. +trait Impossible {} +fn check(_: T) {} + +mod m1 { + pub use ::c::*; + pub type S = ::c::Item; +} +mod m2 { + pub use ::c::*; + pub const S: ::c::Item = ::c::Item; +} + +fn f12() { + check(m1::S{}); //~ ERROR c::Item + check(m1::S); //~ ERROR unresolved name + check(m2::S{}); //~ ERROR c::S + check(m2::S); //~ ERROR c::Item +} +fn xf12() { + check(xm1::S{}); //~ ERROR c::Item + check(xm1::S); //~ ERROR unresolved name + check(xm2::S{}); //~ ERROR c::S + check(xm2::S); //~ ERROR c::Item +} + +mod m3 { + pub use ::c::*; + pub type TS = ::c::Item; +} +mod m4 { + pub use ::c::*; + pub const TS: ::c::Item = ::c::Item; +} + +fn f34() { + check(m3::TS{}); //~ ERROR c::Item + check(m3::TS); //~ ERROR c::TS + check(m4::TS{}); //~ ERROR c::TS + check(m4::TS); //~ ERROR c::Item +} +fn xf34() { + check(xm3::TS{}); //~ ERROR c::Item + check(xm3::TS); //~ ERROR c::TS + check(xm4::TS{}); //~ ERROR c::TS + check(xm4::TS); //~ ERROR c::Item +} + +mod m5 { + pub use ::c::*; + pub type US = ::c::Item; +} +mod m6 { + pub use ::c::*; + pub const US: ::c::Item = ::c::Item; +} + +fn f56() { + check(m5::US{}); //~ ERROR c::Item + check(m5::US); //~ ERROR c::US + check(m6::US{}); //~ ERROR c::US + check(m6::US); //~ ERROR c::Item +} +fn xf56() { + check(xm5::US{}); //~ ERROR c::Item + check(xm5::US); //~ ERROR c::US + check(xm6::US{}); //~ ERROR c::US + check(xm6::US); //~ ERROR c::Item +} + +mod m7 { + pub use ::c::E::*; + pub type V = ::c::Item; +} +mod m8 { + pub use ::c::E::*; + pub const V: ::c::Item = ::c::Item; +} + +fn f78() { + check(m7::V{}); //~ ERROR c::Item + check(m7::V); //~ ERROR name of a struct or struct variant + check(m8::V{}); //~ ERROR c::E + check(m8::V); //~ ERROR c::Item +} +fn xf78() { + check(xm7::V{}); //~ ERROR c::Item + check(xm7::V); //~ ERROR name of a struct or struct variant + check(xm8::V{}); //~ ERROR c::E + check(xm8::V); //~ ERROR c::Item +} + +mod m9 { + pub use ::c::E::*; + pub type TV = ::c::Item; +} +mod mA { + pub use ::c::E::*; + pub const TV: ::c::Item = ::c::Item; +} + +fn f9A() { + check(m9::TV{}); //~ ERROR c::Item + check(m9::TV); //~ ERROR c::E + check(mA::TV{}); //~ ERROR c::E + check(mA::TV); //~ ERROR c::Item +} +fn xf9A() { + check(xm9::TV{}); //~ ERROR c::Item + check(xm9::TV); //~ ERROR c::E + check(xmA::TV{}); //~ ERROR c::E + check(xmA::TV); //~ ERROR c::Item +} + +mod mB { + pub use ::c::E::*; + pub type UV = ::c::Item; +} +mod mC { + pub use ::c::E::*; + pub const UV: ::c::Item = ::c::Item; +} + +fn fBC() { + check(mB::UV{}); //~ 
ERROR c::Item + check(mB::UV); //~ ERROR c::E + check(mC::UV{}); //~ ERROR c::E + check(mC::UV); //~ ERROR c::Item +} +fn xfBC() { + check(xmB::UV{}); //~ ERROR c::Item + check(xmB::UV); //~ ERROR c::E + check(xmC::UV{}); //~ ERROR c::E + check(xmC::UV); //~ ERROR c::Item +} + +fn main() {} diff --git a/src/test/compile-fail/namespace-mix-old.rs b/src/test/compile-fail/namespace-mix-old.rs new file mode 100644 index 0000000000000..ad6766441961b --- /dev/null +++ b/src/test/compile-fail/namespace-mix-old.rs @@ -0,0 +1,174 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// FIXME: Remove when `item_like_imports` is stabilized. + +// aux-build:namespace-mix-old.rs + +#![feature(relaxed_adts)] + +extern crate namespace_mix_old; +use namespace_mix_old::{xm1, xm2, xm3, xm4, xm5, xm6, xm7, xm8, xm9, xmA, xmB, xmC}; + +mod c { + pub struct S {} + pub struct TS(); + pub struct US; + pub enum E { + V {}, + TV(), + UV, + } + + pub struct Item; +} + +mod proxy { + pub use c::*; + pub use c::E::*; +} + +// Use something emitting the type argument name, e.g. unsatisfied bound. +trait Impossible {} +fn check(_: T) {} + +mod m1 { + pub use ::proxy::*; + pub type S = ::c::Item; +} +mod m2 { + pub use ::proxy::*; + pub const S: ::c::Item = ::c::Item; +} + +fn f12() { + check(m1::S{}); //~ ERROR c::Item + check(m1::S); //~ ERROR unresolved name + check(m2::S{}); //~ ERROR c::S + check(m2::S); //~ ERROR c::Item +} +fn xf12() { + check(xm1::S{}); //~ ERROR c::Item + check(xm1::S); //~ ERROR unresolved name + check(xm2::S{}); //~ ERROR c::S + check(xm2::S); //~ ERROR c::Item +} + +mod m3 { + pub use ::proxy::*; + pub type TS = ::c::Item; +} +mod m4 { + pub use ::proxy::*; + pub const TS: ::c::Item = ::c::Item; +} + +fn f34() { + check(m3::TS{}); //~ ERROR c::Item + check(m3::TS); //~ ERROR c::TS + check(m4::TS{}); //~ ERROR c::TS + check(m4::TS); //~ ERROR c::Item +} +fn xf34() { + check(xm3::TS{}); //~ ERROR c::Item + check(xm3::TS); //~ ERROR c::TS + check(xm4::TS{}); //~ ERROR c::TS + check(xm4::TS); //~ ERROR c::Item +} + +mod m5 { + pub use ::proxy::*; + pub type US = ::c::Item; +} +mod m6 { + pub use ::proxy::*; + pub const US: ::c::Item = ::c::Item; +} + +fn f56() { + check(m5::US{}); //~ ERROR c::Item + check(m5::US); //~ ERROR c::US + check(m6::US{}); //~ ERROR c::US + check(m6::US); //~ ERROR c::Item +} +fn xf56() { + check(xm5::US{}); //~ ERROR c::Item + check(xm5::US); //~ ERROR c::US + check(xm6::US{}); //~ ERROR c::US + check(xm6::US); //~ ERROR c::Item +} + +mod m7 { + pub use ::proxy::*; + pub type V = ::c::Item; +} +mod m8 { + pub use ::proxy::*; + pub const V: ::c::Item = ::c::Item; +} + +fn f78() { + check(m7::V{}); //~ ERROR c::Item + check(m7::V); //~ ERROR name of a struct or struct variant + check(m8::V{}); //~ ERROR c::E + check(m8::V); //~ ERROR c::Item +} +fn xf78() { + check(xm7::V{}); //~ ERROR c::Item + check(xm7::V); //~ ERROR name of a struct or struct variant + check(xm8::V{}); //~ ERROR c::E + check(xm8::V); //~ ERROR c::Item +} + +mod m9 { + pub use ::proxy::*; + pub type TV = ::c::Item; +} +mod mA { + pub use ::proxy::*; + pub const TV: ::c::Item = ::c::Item; +} + +fn f9A() { + check(m9::TV{}); //~ ERROR c::Item + check(m9::TV); //~ ERROR c::E + check(mA::TV{}); //~ 
ERROR c::E + check(mA::TV); //~ ERROR c::Item +} +fn xf9A() { + check(xm9::TV{}); //~ ERROR c::Item + check(xm9::TV); //~ ERROR c::E + check(xmA::TV{}); //~ ERROR c::E + check(xmA::TV); //~ ERROR c::Item +} + +mod mB { + pub use ::proxy::*; + pub type UV = ::c::Item; +} +mod mC { + pub use ::proxy::*; + pub const UV: ::c::Item = ::c::Item; +} + +fn fBC() { + check(mB::UV{}); //~ ERROR c::Item + check(mB::UV); //~ ERROR c::E + check(mC::UV{}); //~ ERROR c::E + check(mC::UV); //~ ERROR c::Item +} +fn xfBC() { + check(xmB::UV{}); //~ ERROR c::Item + check(xmB::UV); //~ ERROR c::E + check(xmC::UV{}); //~ ERROR c::E + check(xmC::UV); //~ ERROR c::Item +} + +fn main() {} diff --git a/src/test/compile-fail/no-capture-arc.rs b/src/test/compile-fail/no-capture-arc.rs index 7b7b3c414dded..5e1d22bf63b89 100644 --- a/src/test/compile-fail/no-capture-arc.rs +++ b/src/test/compile-fail/no-capture-arc.rs @@ -14,7 +14,7 @@ use std::sync::Arc; use std::thread; fn main() { - let v = vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); + let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; let arc_v = Arc::new(v); thread::spawn(move|| { diff --git a/src/test/compile-fail/no-link.rs b/src/test/compile-fail/no-link.rs index 957b6cda55311..8f6da99806b3b 100644 --- a/src/test/compile-fail/no-link.rs +++ b/src/test/compile-fail/no-link.rs @@ -13,6 +13,6 @@ extern crate libc; fn main() { unsafe { - libc::abs(0); //~ ERROR Use of undeclared type or module `libc` + libc::abs(0); //~ ERROR unresolved name } } diff --git a/src/test/compile-fail/no-patterns-in-args-2.rs b/src/test/compile-fail/no-patterns-in-args-2.rs new file mode 100644 index 0000000000000..385d012cadee6 --- /dev/null +++ b/src/test/compile-fail/no-patterns-in-args-2.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![deny(patterns_in_fns_without_body)] + +trait Tr { + fn f1(mut arg: u8); //~ ERROR patterns aren't allowed in methods without bodies + //~^ WARN was previously accepted + fn f2(&arg: u8); //~ ERROR patterns aren't allowed in methods without bodies + //~^ WARN was previously accepted + fn g1(arg: u8); // OK + fn g2(_: u8); // OK + fn g3(u8); // OK +} + +fn main() {} diff --git a/src/test/compile-fail/no-reuse-move-arc.rs b/src/test/compile-fail/no-reuse-move-arc.rs index 1720b40c83bbd..76c8a444320d3 100644 --- a/src/test/compile-fail/no-reuse-move-arc.rs +++ b/src/test/compile-fail/no-reuse-move-arc.rs @@ -12,7 +12,7 @@ use std::sync::Arc; use std::thread; fn main() { - let v = vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10); + let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10]; let arc_v = Arc::new(v); thread::spawn(move|| { diff --git a/src/test/compile-fail/no-type-for-node-ice.rs b/src/test/compile-fail/no-type-for-node-ice.rs index aab4db6eadfd6..f049f69e4bba2 100644 --- a/src/test/compile-fail/no-type-for-node-ice.rs +++ b/src/test/compile-fail/no-type-for-node-ice.rs @@ -11,5 +11,5 @@ // Related issues: #20401, #20506, #20614, #20752, #20829, #20846, #20885, #20886 fn main() { - "".homura[""]; //~ ERROR no field with that name was found + "".homura[""]; //~ no field `homura` on type `&'static str` } diff --git a/src/test/compile-fail/non-copyable-void.rs b/src/test/compile-fail/non-copyable-void.rs index fd245f38a0c92..6067b71280cef 100644 --- a/src/test/compile-fail/non-copyable-void.rs +++ b/src/test/compile-fail/non-copyable-void.rs @@ -11,7 +11,7 @@ extern crate libc; fn main() { - let x : *const Vec = &vec!(1,2,3); + let x : *const Vec = &vec![1,2,3]; let y : *const libc::c_void = x as *const libc::c_void; unsafe { let _z = (*y).clone(); diff --git a/src/test/compile-fail/non-exhaustive-match.rs b/src/test/compile-fail/non-exhaustive-match.rs index 017baacc9d329..74e728d713b1d 100644 --- a/src/test/compile-fail/non-exhaustive-match.rs +++ b/src/test/compile-fail/non-exhaustive-match.rs @@ -37,20 +37,20 @@ fn main() { (_, t::a) => {} (t::b, t::b) => {} } - let vec = vec!(Some(42), None, Some(21)); + let vec = vec![Some(42), None, Some(21)]; let vec: &[Option] = &vec; match *vec { //~ ERROR non-exhaustive patterns: `[]` not covered [Some(..), None, ref tail..] => {} [Some(..), Some(..), ref tail..] => {} [None] => {} } - let vec = vec!(1); + let vec = vec![1]; let vec: &[isize] = &vec; match *vec { [_, ref tail..] => (), [] => () } - let vec = vec!(0.5f32); + let vec = vec![0.5f32]; let vec: &[f32] = &vec; match *vec { //~ ERROR non-exhaustive patterns: `[_, _, _, _]` not covered [0.1, 0.2, 0.3] => (), @@ -58,7 +58,7 @@ fn main() { [0.1] => (), [] => () } - let vec = vec!(Some(42), None, Some(21)); + let vec = vec![Some(42), None, Some(21)]; let vec: &[Option] = &vec; match *vec { [Some(..), None, ref tail..] => {} diff --git a/src/test/compile-fail/numeric-fields.rs b/src/test/compile-fail/numeric-fields.rs index c4aff9471b8a1..a67707257d2f2 100644 --- a/src/test/compile-fail/numeric-fields.rs +++ b/src/test/compile-fail/numeric-fields.rs @@ -13,8 +13,12 @@ struct S(u8, u16); fn main() { - let s = S{0b1: 10, 0: 11}; //~ ERROR struct `S` has no field named `0b1` + let s = S{0b1: 10, 0: 11}; + //~^ ERROR struct `S` has no field named `0b1` + //~| NOTE field does not exist - did you mean `1`? 
match s { - S{0: a, 0x1: b, ..} => {} //~ ERROR does not have a field named `0x1` + S{0: a, 0x1: b, ..} => {} + //~^ ERROR does not have a field named `0x1` + //~| NOTE struct `S::{{constructor}}` does not have field `0x1` } } diff --git a/src/test/compile-fail/object-lifetime-default-from-rptr-box-error.rs b/src/test/compile-fail/object-lifetime-default-from-rptr-box-error.rs index e351c84c8afc6..98301ef1a0a1d 100644 --- a/src/test/compile-fail/object-lifetime-default-from-rptr-box-error.rs +++ b/src/test/compile-fail/object-lifetime-default-from-rptr-box-error.rs @@ -11,8 +11,6 @@ // Test that the lifetime from the enclosing `&` is "inherited" // through the `Box` struct. -// pretty-expanded FIXME #23616 - #![allow(dead_code)] trait Test { diff --git a/src/test/compile-fail/object-lifetime-default-from-rptr-struct-error.rs b/src/test/compile-fail/object-lifetime-default-from-rptr-struct-error.rs index 93268559e8e51..836e4fa114263 100644 --- a/src/test/compile-fail/object-lifetime-default-from-rptr-struct-error.rs +++ b/src/test/compile-fail/object-lifetime-default-from-rptr-struct-error.rs @@ -11,8 +11,6 @@ // Test that the lifetime from the enclosing `&` is "inherited" // through the `MyBox` struct. -// pretty-expanded FIXME #23616 - #![allow(dead_code)] #![feature(rustc_error)] diff --git a/src/test/compile-fail/on-unimplemented/multiple-impls.rs b/src/test/compile-fail/on-unimplemented/multiple-impls.rs index cc7c2f4f796d9..0ad9f21e0983f 100644 --- a/src/test/compile-fail/on-unimplemented/multiple-impls.rs +++ b/src/test/compile-fail/on-unimplemented/multiple-impls.rs @@ -42,17 +42,17 @@ impl Index> for [i32] { fn main() { Index::index(&[] as &[i32], 2u32); //~^ ERROR E0277 - //~| NOTE not satisfied + //~| NOTE the trait `Index` is not implemented for `[i32]` //~| NOTE trait message //~| NOTE required by Index::index(&[] as &[i32], Foo(2u32)); //~^ ERROR E0277 - //~| NOTE not satisfied + //~| NOTE the trait `Index>` is not implemented for `[i32]` //~| NOTE on impl for Foo //~| NOTE required by Index::index(&[] as &[i32], Bar(2u32)); //~^ ERROR E0277 - //~| NOTE not satisfied + //~| NOTE the trait `Index>` is not implemented for `[i32]` //~| NOTE on impl for Bar //~| NOTE required by } diff --git a/src/test/compile-fail/on-unimplemented/on-impl.rs b/src/test/compile-fail/on-unimplemented/on-impl.rs index c22e48bede4ef..a7c599330a070 100644 --- a/src/test/compile-fail/on-unimplemented/on-impl.rs +++ b/src/test/compile-fail/on-unimplemented/on-impl.rs @@ -29,8 +29,9 @@ impl Index for [i32] { #[rustc_error] fn main() { - Index::::index(&[1, 2, 3] as &[i32], 2u32); //~ ERROR E0277 - //~| NOTE not satisfied - //~| NOTE a usize is required - //~| NOTE required by + Index::::index(&[1, 2, 3] as &[i32], 2u32); + //~^ ERROR E0277 + //~| NOTE the trait `Index` is not implemented for `[i32]` + //~| NOTE a usize is required + //~| NOTE required by } diff --git a/src/test/compile-fail/on-unimplemented/on-trait.rs b/src/test/compile-fail/on-unimplemented/on-trait.rs index 9ea2809374cd8..3a789f3faeb2a 100644 --- a/src/test/compile-fail/on-unimplemented/on-trait.rs +++ b/src/test/compile-fail/on-unimplemented/on-trait.rs @@ -30,14 +30,14 @@ fn collect, B: MyFromIterator>(it: I) -> B { } pub fn main() { - let x = vec!(1u8, 2, 3, 4); + let x = vec![1u8, 2, 3, 4]; let y: Option> = collect(x.iter()); // this should give approximately the same error for x.iter().collect() //~^ ERROR //~^^ NOTE a collection of type `std::option::Option>` cannot be built from an iterator over elements of type `&u8` //~^^^ 
NOTE required by `collect` - //~| NOTE trait `std::option::Option>: MyFromIterator<&u8>` not satisfied + //~| NOTE the trait `MyFromIterator<&u8>` is not implemented for `std::option::Option>` let x: String = foobar(); //~ ERROR //~^ NOTE test error `std::string::String` with `u8` `_` `u32` //~^^ NOTE required by `foobar` - //~| NOTE trait `std::string::String: Foo` not satisfied + //~| NOTE the trait `Foo` is not implemented for `std::string::String` } diff --git a/src/test/compile-fail/on-unimplemented/slice-index.rs b/src/test/compile-fail/on-unimplemented/slice-index.rs index 5c548b5d5bf20..d528d0e626a79 100644 --- a/src/test/compile-fail/on-unimplemented/slice-index.rs +++ b/src/test/compile-fail/on-unimplemented/slice-index.rs @@ -17,10 +17,12 @@ use std::ops::Index; #[rustc_error] fn main() { let x = &[1, 2, 3] as &[i32]; - x[1i32]; //~ ERROR E0277 - //~| NOTE trait `[i32]: std::ops::Index` not satisfied - //~| NOTE slice indices are of type `usize` - x[..1i32]; //~ ERROR E0277 - //~| NOTE trait `[i32]: std::ops::Index>` not satisfied - //~| NOTE slice indices are of type `usize` + x[1i32]; + //~^ ERROR E0277 + //~| NOTE the trait `std::ops::Index` is not implemented for `[i32]` + //~| NOTE slice indices are of type `usize` + x[..1i32]; + //~^ ERROR E0277 + //~| NOTE the trait `std::ops::Index>` is not implemented for `[i32]` + //~| NOTE slice indices are of type `usize` } diff --git a/src/test/compile-fail/pat-shadow-in-nested-binding.rs b/src/test/compile-fail/pat-shadow-in-nested-binding.rs index f1683e51c648d..3dbe08f1908d7 100644 --- a/src/test/compile-fail/pat-shadow-in-nested-binding.rs +++ b/src/test/compile-fail/pat-shadow-in-nested-binding.rs @@ -11,5 +11,5 @@ struct foo(usize); fn main() { - let (foo, _) = (2, 3); //~ ERROR let bindings cannot shadow structs + let (foo, _) = (2, 3); //~ ERROR let bindings cannot shadow tuple structs } diff --git a/src/test/compile-fail/pat-tuple-overfield.rs b/src/test/compile-fail/pat-tuple-overfield.rs index 034ef4a72e21c..069c1dc0aea1b 100644 --- a/src/test/compile-fail/pat-tuple-overfield.rs +++ b/src/test/compile-fail/pat-tuple-overfield.rs @@ -20,9 +20,9 @@ fn main() { } match S(1, 2, 3) { S(1, 2, 3, 4) => {} - //~^ ERROR this pattern has 4 fields, but the corresponding struct has 3 fields + //~^ ERROR this pattern has 4 fields, but the corresponding tuple struct has 3 fields S(1, 2, .., 3, 4) => {} - //~^ ERROR this pattern has 4 fields, but the corresponding struct has 3 fields + //~^ ERROR this pattern has 4 fields, but the corresponding tuple struct has 3 fields _ => {} } } diff --git a/src/test/compile-fail/paths-in-macro-invocations.rs b/src/test/compile-fail/paths-in-macro-invocations.rs new file mode 100644 index 0000000000000..c69b7e526cc3b --- /dev/null +++ b/src/test/compile-fail/paths-in-macro-invocations.rs @@ -0,0 +1,38 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +::foo::bar!(); //~ ERROR expected macro name without module separators +foo::bar!(); //~ ERROR expected macro name without module separators + +trait T { + foo::bar!(); //~ ERROR expected macro name without module separators + ::foo::bar!(); //~ ERROR expected macro name without module separators +} + +struct S { + x: foo::bar!(), //~ ERROR expected macro name without module separators + y: ::foo::bar!(), //~ ERROR expected macro name without module separators +} + +impl S { + foo::bar!(); //~ ERROR expected macro name without module separators + ::foo::bar!(); //~ ERROR expected macro name without module separators +} + +fn main() { + foo::bar!(); //~ ERROR expected macro name without module separators + ::foo::bar!(); //~ ERROR expected macro name without module separators + + let _ = foo::bar!(); //~ ERROR expected macro name without module separators + let _ = ::foo::bar!(); //~ ERROR expected macro name without module separators + + let foo::bar!() = 0; //~ ERROR expected macro name without module separators + let ::foo::bar!() = 0; //~ ERROR expected macro name without module separators +} diff --git a/src/test/compile-fail/pattern-error-continue.rs b/src/test/compile-fail/pattern-error-continue.rs index 507012e8c5c06..e63b84594aa94 100644 --- a/src/test/compile-fail/pattern-error-continue.rs +++ b/src/test/compile-fail/pattern-error-continue.rs @@ -25,7 +25,7 @@ fn f(_c: char) {} fn main() { match A::B(1, 2) { A::B(_, _, _) => (), //~ ERROR this pattern has 3 fields, but - A::D(_) => (), //~ ERROR `A::D` does not name a tuple variant or a tuple struct + A::D(_) => (), //~ ERROR expected tuple struct/variant, found unit variant `A::D` _ => () } match 'c' { diff --git a/src/test/compile-fail/platform-intrinsic-params.rs b/src/test/compile-fail/platform-intrinsic-params.rs new file mode 100644 index 0000000000000..e8a9031dba8db --- /dev/null +++ b/src/test/compile-fail/platform-intrinsic-params.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(platform_intrinsics)] +extern "platform-intrinsic" { + fn x86_mm_movemask_ps() -> i32; //~ERROR found 0, expected 1 +} + +fn main() { } diff --git a/src/test/compile-fail/qualified-path-params.rs b/src/test/compile-fail/qualified-path-params.rs index 9034e24a6fee0..82b0536a64ad4 100644 --- a/src/test/compile-fail/qualified-path-params.rs +++ b/src/test/compile-fail/qualified-path-params.rs @@ -28,7 +28,7 @@ impl S { fn main() { match 10 { ::A::f:: => {} - //~^ ERROR `Tr::A::f` does not name a unit variant, unit struct or a constant + //~^ ERROR expected unit struct/variant or constant, found method `Tr::A::f` 0 ... ::A::f:: => {} //~ ERROR only char and numeric types are allowed in range } } diff --git a/src/test/compile-fail/regions-escape-loop-via-vec.rs b/src/test/compile-fail/regions-escape-loop-via-vec.rs index f5ea7a2108e79..8982b5cd98de4 100644 --- a/src/test/compile-fail/regions-escape-loop-via-vec.rs +++ b/src/test/compile-fail/regions-escape-loop-via-vec.rs @@ -11,7 +11,7 @@ // The type of `y` ends up getting inferred to the type of the block. 
fn broken() { let mut x = 3; - let mut _y = vec!(&mut x); + let mut _y = vec![&mut x]; //~^ NOTE borrow of `x` occurs here //~| NOTE borrow of `x` occurs here //~| NOTE borrow of `x` occurs here diff --git a/src/test/compile-fail/resolve-hint-macro.rs b/src/test/compile-fail/resolve-hint-macro.rs index f05f1cd544a10..edaab01275712 100644 --- a/src/test/compile-fail/resolve-hint-macro.rs +++ b/src/test/compile-fail/resolve-hint-macro.rs @@ -9,5 +9,7 @@ // except according to those terms. fn main() { - assert(true); //~ERROR unresolved name `assert`. Did you mean the macro `assert!`? + assert(true); + //~^ ERROR unresolved name `assert` + //~| NOTE did you mean the macro `assert!`? } diff --git a/src/test/compile-fail/resolve-inconsistent-names.rs b/src/test/compile-fail/resolve-inconsistent-names.rs index f7f3acd37d62d..1e2541502ace8 100644 --- a/src/test/compile-fail/resolve-inconsistent-names.rs +++ b/src/test/compile-fail/resolve-inconsistent-names.rs @@ -11,7 +11,9 @@ fn main() { let y = 1; match y { - a | b => {} //~ ERROR variable `a` from pattern #1 is not bound in pattern #2 - //~^ ERROR variable `b` from pattern #2 is not bound in pattern #1 + a | b => {} //~ ERROR variable `a` from pattern #1 is not bound in pattern #2 + //~^ ERROR variable `b` from pattern #2 is not bound in pattern #1 + //~| NOTE pattern doesn't bind `a` + //~| NOTE pattern doesn't bind `b` } } diff --git a/src/test/compile-fail/resolve-label.rs b/src/test/compile-fail/resolve-label.rs index 398b4f5859e65..ed2c3e0e9b860 100644 --- a/src/test/compile-fail/resolve-label.rs +++ b/src/test/compile-fail/resolve-label.rs @@ -16,6 +16,9 @@ fn f() { } } } + + // issue #37353 + loop { 'w: while break 'w { } } //~ ERROR use of undeclared label } fn main() {} diff --git a/src/test/compile-fail/self-vs-path-ambiguity.rs b/src/test/compile-fail/self-vs-path-ambiguity.rs new file mode 100644 index 0000000000000..9753014e7810a --- /dev/null +++ b/src/test/compile-fail/self-vs-path-ambiguity.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Check that `self::foo` is parsed as a general pattern and not a self argument. 
+ +struct S; + +impl S { + fn f(self::S: S) {} + fn g(&self::S: &S) {} + fn h(&mut self::S: &mut S) {} + fn i(&'a self::S: &S) {} //~ ERROR unexpected lifetime `'a` in pattern + //~^ ERROR expected one of `)` or `mut`, found `'a` +} + +fn main() {} diff --git a/src/test/compile-fail/self_type_keyword-2.rs b/src/test/compile-fail/self_type_keyword-2.rs index 613f54eb33134..118d3d8a0bec2 100644 --- a/src/test/compile-fail/self_type_keyword-2.rs +++ b/src/test/compile-fail/self_type_keyword-2.rs @@ -10,4 +10,14 @@ use self::Self as Foo; //~ ERROR unresolved import `self::Self` -pub fn main() {} +pub fn main() { + let Self = 5; + //~^ ERROR unresolved unit struct/variant or constant `Self` + + match 15 { + Self => (), + //~^ ERROR unresolved unit struct/variant or constant `Self` + Foo { x: Self } => (), + //~^ ERROR unresolved unit struct/variant or constant `Self` + } +} diff --git a/src/test/compile-fail/self_type_keyword.rs b/src/test/compile-fail/self_type_keyword.rs index b9c9d7a389b95..db6bcc611b823 100644 --- a/src/test/compile-fail/self_type_keyword.rs +++ b/src/test/compile-fail/self_type_keyword.rs @@ -17,12 +17,7 @@ struct Bar<'Self>; //~^ ERROR lifetimes cannot use keyword names pub fn main() { - let Self = 5; - //~^ ERROR expected identifier, found keyword `Self` - match 15 { - Self => (), - //~^ ERROR expected identifier, found keyword `Self` ref Self => (), //~^ ERROR expected identifier, found keyword `Self` mut Self => (), @@ -30,20 +25,23 @@ pub fn main() { ref mut Self => (), //~^ ERROR expected identifier, found keyword `Self` Self!() => (), - //~^ ERROR expected identifier, found keyword `Self` - //~^^ ERROR macro undefined: 'Self!' - Foo { x: Self } => (), - //~^ ERROR expected identifier, found keyword `Self` + //~^ ERROR macro undefined: 'Self!' Foo { Self } => (), //~^ ERROR expected identifier, found keyword `Self` } } -use std::option::Option as Self; -//~^ ERROR expected identifier, found keyword `Self` +mod m1 { + extern crate core as Self; + //~^ ERROR expected identifier, found keyword `Self` +} -extern crate Self; -//~^ ERROR expected identifier, found keyword `Self` +mod m2 { + use std::option::Option as Self; + //~^ ERROR expected identifier, found keyword `Self` +} -trait Self {} -//~^ ERROR expected identifier, found keyword `Self` +mod m3 { + trait Self {} + //~^ ERROR expected identifier, found keyword `Self` +} diff --git a/src/test/compile-fail/stmt_expr_attrs_no_feature.rs b/src/test/compile-fail/stmt_expr_attrs_no_feature.rs index 68338b115950c..2fda2ee0900c7 100644 --- a/src/test/compile-fail/stmt_expr_attrs_no_feature.rs +++ b/src/test/compile-fail/stmt_expr_attrs_no_feature.rs @@ -21,15 +21,15 @@ fn main() { #[attr] fn a() {} - #[attr] //~ ERROR 15701 + #[attr] { } - #[attr] //~ ERROR 15701 + #[attr] 5; - #[attr] //~ ERROR 15701 + #[attr] stmt_mac!(); } @@ -43,7 +43,7 @@ fn c() { #[cfg(not(unset))] fn j() { - #[attr] //~ ERROR 15701 + #[attr] 5; } @@ -55,7 +55,7 @@ fn d() { #[cfg_attr(not(unset), cfg(not(unset)))] fn i() { - #[attr] //~ ERROR 15701 + #[attr] 8; } @@ -64,7 +64,7 @@ fn i() { macro_rules! item_mac { ($e:ident) => { fn $e() { - #[attr] //~ ERROR 15701 + #[attr] 42; #[cfg(unset)] @@ -75,7 +75,7 @@ macro_rules! item_mac { #[cfg(not(unset))] fn k() { - #[attr] //~ ERROR 15701 + #[attr] 5; } @@ -87,7 +87,7 @@ macro_rules! 
item_mac { #[cfg_attr(not(unset), cfg(not(unset)))] fn h() { - #[attr] //~ ERROR 15701 + #[attr] 8; } diff --git a/src/test/compile-fail/struct-fields-hints-no-dupe.rs b/src/test/compile-fail/struct-fields-hints-no-dupe.rs index f25f01af33fd1..de78503d9044f 100644 --- a/src/test/compile-fail/struct-fields-hints-no-dupe.rs +++ b/src/test/compile-fail/struct-fields-hints-no-dupe.rs @@ -19,7 +19,7 @@ fn main() { foo : 5, bar : 42, //~^ ERROR struct `A` has no field named `bar` - //~| NOTE did you mean `barr`? + //~| NOTE field does not exist - did you mean `barr`? car : 9, }; } diff --git a/src/test/compile-fail/struct-fields-hints.rs b/src/test/compile-fail/struct-fields-hints.rs index 62ec6e6b0d249..628f03f3272ca 100644 --- a/src/test/compile-fail/struct-fields-hints.rs +++ b/src/test/compile-fail/struct-fields-hints.rs @@ -19,6 +19,6 @@ fn main() { foo : 5, bar : 42, //~^ ERROR struct `A` has no field named `bar` - //~| NOTE did you mean `car`? + //~| NOTE field does not exist - did you mean `car`? }; } diff --git a/src/test/compile-fail-fulldeps/plugin-MacroRulesTT.rs b/src/test/compile-fail/struct-fields-shorthand-unresolved.rs similarity index 71% rename from src/test/compile-fail-fulldeps/plugin-MacroRulesTT.rs rename to src/test/compile-fail/struct-fields-shorthand-unresolved.rs index e13ddd13f5d99..50a43f4a276da 100644 --- a/src/test/compile-fail-fulldeps/plugin-MacroRulesTT.rs +++ b/src/test/compile-fail/struct-fields-shorthand-unresolved.rs @@ -8,11 +8,17 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// aux-build:macro_crate_MacroRulesTT.rs -// ignore-stage1 -// error-pattern: plugin tried to register a new MacroRulesTT +#![feature(field_init_shorthand)] -#![feature(plugin)] -#![plugin(macro_crate_MacroRulesTT)] +struct Foo { + x: i32, + y: i32 +} -fn main() { } +fn main() { + let x = 0; + let foo = Foo { + x, + y //~ ERROR unresolved name `y` + }; +} diff --git a/src/test/compile-fail/struct-fields-shorthand.rs b/src/test/compile-fail/struct-fields-shorthand.rs new file mode 100644 index 0000000000000..f764322cadbbb --- /dev/null +++ b/src/test/compile-fail/struct-fields-shorthand.rs @@ -0,0 +1,24 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(field_init_shorthand)] + +struct Foo { + x: i32, + y: i32 +} + +fn main() { + let (x, y, z) = (0, 1, 2); + let foo = Foo { + x, y, z //~ ERROR struct `Foo` has no field named `z` + }; +} + diff --git a/src/test/compile-fail/struct-fields-too-many.rs b/src/test/compile-fail/struct-fields-too-many.rs index 5d16573f2f1e3..0848ada731a65 100644 --- a/src/test/compile-fail/struct-fields-too-many.rs +++ b/src/test/compile-fail/struct-fields-too-many.rs @@ -15,6 +15,8 @@ struct BuildData { fn main() { let foo = BuildData { foo: 0, - bar: 0 //~ ERROR struct `BuildData` has no field named `bar` + bar: 0 + //~^ ERROR struct `BuildData` has no field named `bar` + //~| NOTE `BuildData` does not have this field }; } diff --git a/src/test/compile-fail/struct-fields-typo.rs b/src/test/compile-fail/struct-fields-typo.rs index c897dc55204b1..0e30c1e86e4f2 100644 --- a/src/test/compile-fail/struct-fields-typo.rs +++ b/src/test/compile-fail/struct-fields-typo.rs @@ -18,7 +18,7 @@ fn main() { foo: 0, bar: 0.5, }; - let x = foo.baa;//~ ERROR attempted access of field `baa` on type `BuildData` - //~^ HELP did you mean `bar`? + let x = foo.baa;//~ no field `baa` on type `BuildData` + //~^ did you mean `bar`? println!("{}", x); } diff --git a/src/test/compile-fail/struct-pat-derived-error.rs b/src/test/compile-fail/struct-pat-derived-error.rs index 4b65292340fa1..f525ec373753d 100644 --- a/src/test/compile-fail/struct-pat-derived-error.rs +++ b/src/test/compile-fail/struct-pat-derived-error.rs @@ -15,7 +15,7 @@ struct a { impl a { fn foo(&self) { - let a { x, y } = self.d; //~ ERROR attempted access of field `d` + let a { x, y } = self.d; //~ ERROR no field `d` on type `&a` //~^ ERROR struct `a` does not have a field named `x` //~^^ ERROR struct `a` does not have a field named `y` //~^^^ ERROR pattern does not mention field `b` diff --git a/src/test/compile-fail/feature-gate-try-operator.rs b/src/test/compile-fail/struct-path-alias-bounds.rs similarity index 71% rename from src/test/compile-fail/feature-gate-try-operator.rs rename to src/test/compile-fail/struct-path-alias-bounds.rs index b05c7323962c5..1b6e51e37034e 100644 --- a/src/test/compile-fail/feature-gate-try-operator.rs +++ b/src/test/compile-fail/struct-path-alias-bounds.rs @@ -8,11 +8,14 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -macro_rules! id { - ($e:expr) => { $e } -} +// issue #36286 + +struct S { a: T } + +struct NoClone; +type A = S; fn main() { - id!(x?); //~ error: the `?` operator is not stable (see issue #31436) - y?; //~ error: the `?` operator is not stable (see issue #31436) + let s = A { a: NoClone }; + //~^ ERROR the trait bound `NoClone: std::clone::Clone` is not satisfied } diff --git a/src/test/compile-fail/struct-path-associated-type.rs b/src/test/compile-fail/struct-path-associated-type.rs new file mode 100644 index 0000000000000..660ac44ce0b53 --- /dev/null +++ b/src/test/compile-fail/struct-path-associated-type.rs @@ -0,0 +1,48 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +struct S; + +trait Tr { + type A; +} + +impl Tr for S { + type A = S; +} + +fn f() { + let s = T::A {}; + //~^ ERROR expected struct, variant or union type, found associated type + let z = T::A:: {}; + //~^ ERROR expected struct, variant or union type, found associated type + //~| ERROR type parameters are not allowed on this type + match S { + T::A {} => {} + //~^ ERROR expected struct, variant or union type, found associated type + } +} + +fn g>() { + let s = T::A {}; // OK + let z = T::A:: {}; //~ ERROR type parameters are not allowed on this type + match S { + T::A {} => {} // OK + } +} + +fn main() { + let s = S::A {}; //~ ERROR ambiguous associated type + let z = S::A:: {}; //~ ERROR ambiguous associated type + //~^ ERROR type parameters are not allowed on this type + match S { + S::A {} => {} //~ ERROR ambiguous associated type + } +} diff --git a/src/test/compile-fail/struct-path-self-type-mismatch.rs b/src/test/compile-fail/struct-path-self-type-mismatch.rs new file mode 100644 index 0000000000000..f694e7d277c7f --- /dev/null +++ b/src/test/compile-fail/struct-path-self-type-mismatch.rs @@ -0,0 +1,38 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Foo { inner: A } + +trait Bar { fn bar(); } + +impl Bar for Foo { + fn bar() { + Self { inner: 1.5f32 }; //~ ERROR mismatched types + //~^ NOTE expected i32, found f32 + } +} + +impl Foo { + fn new(u: U) -> Foo { + Self { + //~^ ERROR mismatched types + //~| expected type parameter, found a different type parameter + //~| expected type `Foo` + //~| found type `Foo` + inner: u + //~^ ERROR mismatched types + //~| expected type parameter, found a different type parameter + //~| expected type `T` + //~| found type `U` + } + } +} + +fn main() {} diff --git a/src/test/compile-fail/struct-path-self.rs b/src/test/compile-fail/struct-path-self.rs new file mode 100644 index 0000000000000..067d6ac22dc6f --- /dev/null +++ b/src/test/compile-fail/struct-path-self.rs @@ -0,0 +1,47 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct S; + +trait Tr { + fn f() { + let s = Self {}; + //~^ ERROR expected struct, variant or union type, found Self + let z = Self:: {}; + //~^ ERROR expected struct, variant or union type, found Self + //~| ERROR type parameters are not allowed on this type + match s { + Self { .. } => {} + //~^ ERROR expected struct, variant or union type, found Self + } + } +} + +impl Tr for S { + fn f() { + let s = Self {}; // OK + let z = Self:: {}; //~ ERROR type parameters are not allowed on this type + match s { + Self { .. } => {} // OK + } + } +} + +impl S { + fn g() { + let s = Self {}; // OK + let z = Self:: {}; //~ ERROR type parameters are not allowed on this type + match s { + Self { .. 
} => {} // OK + } + } +} + +fn main() {} diff --git a/src/test/compile-fail/suggest-private-fields.rs b/src/test/compile-fail/suggest-private-fields.rs index 906bfc78498e4..3672e0e90c2a2 100644 --- a/src/test/compile-fail/suggest-private-fields.rs +++ b/src/test/compile-fail/suggest-private-fields.rs @@ -24,18 +24,18 @@ fn main () { let k = B { aa: 20, //~^ ERROR struct `xc::B` has no field named `aa` - //~| NOTE did you mean `a`? + //~| NOTE field does not exist - did you mean `a`? bb: 20, //~^ ERROR struct `xc::B` has no field named `bb` - //~| NOTE did you mean `a`? + //~| NOTE field does not exist - did you mean `a`? }; // local crate struct let l = A { aa: 20, //~^ ERROR struct `A` has no field named `aa` - //~| NOTE did you mean `a`? + //~| NOTE field does not exist - did you mean `a`? bb: 20, //~^ ERROR struct `A` has no field named `bb` - //~| NOTE did you mean `b`? + //~| NOTE field does not exist - did you mean `b`? }; } diff --git a/src/test/compile-fail/token-error-correct-2.rs b/src/test/compile-fail/token-error-correct-2.rs index ab429ab878073..151c1d432ed36 100644 --- a/src/test/compile-fail/token-error-correct-2.rs +++ b/src/test/compile-fail/token-error-correct-2.rs @@ -13,5 +13,6 @@ fn main() { if foo { //~ NOTE: unclosed delimiter //~^ ERROR: unresolved name `foo` + //~| NOTE unresolved name ) //~ ERROR: incorrect close delimiter: `)` } diff --git a/src/test/compile-fail/token-error-correct-3.rs b/src/test/compile-fail/token-error-correct-3.rs index 24627e9420874..5f21bf18d7b1b 100644 --- a/src/test/compile-fail/token-error-correct-3.rs +++ b/src/test/compile-fail/token-error-correct-3.rs @@ -19,6 +19,7 @@ pub mod raw { callback: F) -> io::Result { if !is_directory(path.as_ref()) { //~ ERROR: unresolved name `is_directory` + //~| NOTE unresolved name callback(path.as_ref(); //~ NOTE: unclosed delimiter //~^ ERROR: expected one of fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types diff --git a/src/test/compile-fail/token-error-correct.rs b/src/test/compile-fail/token-error-correct.rs index f5fecf3e1740a..3ba9edda07f26 100644 --- a/src/test/compile-fail/token-error-correct.rs +++ b/src/test/compile-fail/token-error-correct.rs @@ -17,6 +17,8 @@ fn main() { //~^^^ ERROR: unresolved name `bar` //~^^^^ ERROR: unresolved name `foo` //~^^^^^ ERROR: expected one of `)`, `,`, `.`, `<`, `?` + //~| NOTE unresolved name + //~| NOTE unresolved name } //~ ERROR: incorrect close delimiter: `}` //~^ ERROR: incorrect close delimiter: `}` //~^^ ERROR: expected expression, found `)` diff --git a/src/test/compile-fail/trait-as-struct-constructor.rs b/src/test/compile-fail/trait-as-struct-constructor.rs index c78eebddbfdb8..49d58580da52d 100644 --- a/src/test/compile-fail/trait-as-struct-constructor.rs +++ b/src/test/compile-fail/trait-as-struct-constructor.rs @@ -12,6 +12,5 @@ trait TraitNotAStruct {} fn main() { TraitNotAStruct{ value: 0 }; - //~^ ERROR: `TraitNotAStruct` does not name a struct or a struct variant [E0071] - //~| NOTE not a struct + //~^ ERROR expected struct, variant or union type, found trait `TraitNotAStruct` } diff --git a/src/test/compile-fail/trait-suggest-where-clause.rs b/src/test/compile-fail/trait-suggest-where-clause.rs index d15e3536d60ca..7530d8890b98c 100644 --- a/src/test/compile-fail/trait-suggest-where-clause.rs +++ b/src/test/compile-fail/trait-suggest-where-clause.rs @@ -16,13 +16,13 @@ fn check() { // suggest a where-clause, if needed mem::size_of::(); //~^ ERROR `U: std::marker::Sized` is not satisfied - //~| NOTE trait `U: 
std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `U` //~| HELP consider adding a `where U: std::marker::Sized` bound //~| NOTE required by `std::mem::size_of` mem::size_of::>(); //~^ ERROR `U: std::marker::Sized` is not satisfied - //~| NOTE trait `U: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `U` //~| HELP consider adding a `where U: std::marker::Sized` bound //~| NOTE required because it appears within the type `Misc` //~| NOTE required by `std::mem::size_of` @@ -31,13 +31,13 @@ fn check() { >::from; //~^ ERROR `u64: std::convert::From` is not satisfied - //~| NOTE trait `u64: std::convert::From` not satisfied + //~| NOTE the trait `std::convert::From` is not implemented for `u64` //~| HELP consider adding a `where u64: std::convert::From` bound //~| NOTE required by `std::convert::From::from` ::Item>>::from; //~^ ERROR `u64: std::convert::From<::Item>` is not satisfied - //~| NOTE trait `u64: std::convert::From<::Item>` not satisfied + //~| NOTE the trait `std::convert::From<::Item>` is not implemented //~| HELP consider adding a `where u64: //~| NOTE required by `std::convert::From::from` @@ -45,20 +45,20 @@ fn check() { as From>::from; //~^ ERROR `Misc<_>: std::convert::From` is not satisfied - //~| NOTE trait `Misc<_>: std::convert::From` not satisfied + //~| NOTE the trait `std::convert::From` is not implemented for `Misc<_>` //~| NOTE required by `std::convert::From::from` // ... and also not if the error is not related to the type mem::size_of::<[T]>(); //~^ ERROR `[T]: std::marker::Sized` is not satisfied - //~| NOTE `[T]: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `[T]` //~| NOTE `[T]` does not have a constant size //~| NOTE required by `std::mem::size_of` mem::size_of::<[&U]>(); //~^ ERROR `[&U]: std::marker::Sized` is not satisfied - //~| NOTE `[&U]: std::marker::Sized` not satisfied + //~| NOTE the trait `std::marker::Sized` is not implemented for `[&U]` //~| NOTE `[&U]` does not have a constant size //~| NOTE required by `std::mem::size_of` } diff --git a/src/test/compile-fail/typeck_type_placeholder_lifetime_1.rs b/src/test/compile-fail/typeck_type_placeholder_lifetime_1.rs index f60d925a74864..f40445a030e01 100644 --- a/src/test/compile-fail/typeck_type_placeholder_lifetime_1.rs +++ b/src/test/compile-fail/typeck_type_placeholder_lifetime_1.rs @@ -18,5 +18,5 @@ struct Foo<'a, T:'a> { pub fn main() { let c: Foo<_, _> = Foo { r: &5 }; //~^ ERROR E0244 - //~| NOTE expected 1 type arguments, found 2 + //~| NOTE expected 1 type argument, found 2 } diff --git a/src/test/compile-fail/typeck_type_placeholder_lifetime_2.rs b/src/test/compile-fail/typeck_type_placeholder_lifetime_2.rs index ec2675ece74b0..47898690fcce4 100644 --- a/src/test/compile-fail/typeck_type_placeholder_lifetime_2.rs +++ b/src/test/compile-fail/typeck_type_placeholder_lifetime_2.rs @@ -18,5 +18,5 @@ struct Foo<'a, T:'a> { pub fn main() { let c: Foo<_, usize> = Foo { r: &5 }; //~^ ERROR E0244 - //~| NOTE expected 1 type arguments, found 2 + //~| NOTE expected 1 type argument, found 2 } diff --git a/src/test/compile-fail/unboxed-closure-sugar-wrong-trait.rs b/src/test/compile-fail/unboxed-closure-sugar-wrong-trait.rs index 1209757610251..50f4f3b98b337 100644 --- a/src/test/compile-fail/unboxed-closure-sugar-wrong-trait.rs +++ b/src/test/compile-fail/unboxed-closure-sugar-wrong-trait.rs @@ -15,6 +15,7 @@ trait Trait {} fn f isize>(x: F) {} //~^ ERROR 
E0244 //~| NOTE expected no type arguments, found 1 -//~| ERROR associated type `Output` not found +//~| ERROR E0220 +//~| NOTE associated type `Output` not found fn main() {} diff --git a/src/test/compile-fail/unboxed-closures-failed-recursive-fn-2.rs b/src/test/compile-fail/unboxed-closures-failed-recursive-fn-2.rs index f40c8fc747494..12b48b2a6c8aa 100644 --- a/src/test/compile-fail/unboxed-closures-failed-recursive-fn-2.rs +++ b/src/test/compile-fail/unboxed-closures-failed-recursive-fn-2.rs @@ -16,7 +16,7 @@ fn a() { let mut closure0 = None; - let vec = vec!(1, 2, 3); + let vec = vec![1, 2, 3]; loop { { diff --git a/src/test/compile-fail/unboxed-closures-move-upvar-from-non-once-ref-closure.rs b/src/test/compile-fail/unboxed-closures-move-upvar-from-non-once-ref-closure.rs index e66610c149607..cd9f1636c3f4d 100644 --- a/src/test/compile-fail/unboxed-closures-move-upvar-from-non-once-ref-closure.rs +++ b/src/test/compile-fail/unboxed-closures-move-upvar-from-non-once-ref-closure.rs @@ -16,7 +16,7 @@ fn call(f: F) where F : Fn() { } fn main() { - let y = vec!(format!("World")); + let y = vec![format!("World")]; call(|| { y.into_iter(); //~^ ERROR cannot move out of captured outer variable in an `Fn` closure diff --git a/src/test/compile-fail/union/union-fields.rs b/src/test/compile-fail/union/union-fields.rs index a1721dda7decb..3ee95c2ef4258 100644 --- a/src/test/compile-fail/union/union-fields.rs +++ b/src/test/compile-fail/union/union-fields.rs @@ -21,6 +21,7 @@ fn main() { let u = U { a: 0, b: 1 }; //~ ERROR union expressions should have exactly one field let u = U { a: 0, b: 1, c: 2 }; //~ ERROR union expressions should have exactly one field //~^ ERROR union `U` has no field named `c` + //~| NOTE `U` does not have this field let u = U { ..u }; //~ ERROR union expressions should have exactly one field //~^ ERROR functional record update syntax requires a struct @@ -29,6 +30,7 @@ fn main() { let U { a, b } = u; //~ ERROR union patterns should have exactly one field let U { a, b, c } = u; //~ ERROR union patterns should have exactly one field //~^ ERROR union `U` does not have a field named `c` + //~| NOTE union `U` does not have field `c` let U { .. } = u; //~ ERROR union patterns should have exactly one field //~^ ERROR `..` cannot be used in union patterns let U { a, .. } = u; //~ ERROR `..` cannot be used in union patterns diff --git a/src/test/compile-fail/union/union-suggest-field.rs b/src/test/compile-fail/union/union-suggest-field.rs index 92811b6b5be11..3c355989b82f0 100644 --- a/src/test/compile-fail/union/union-suggest-field.rs +++ b/src/test/compile-fail/union/union-suggest-field.rs @@ -21,9 +21,9 @@ impl U { fn main() { let u = U { principle: 0 }; //~^ ERROR union `U` has no field named `principle` - //~| NOTE did you mean `principal`? - let w = u.principial; //~ ERROR attempted access of field `principial` on type `U` - //~^ HELP did you mean `principal`? + //~| NOTE field does not exist - did you mean `principal`? + let w = u.principial; //~ ERROR no field `principial` on type `U` + //~^ did you mean `principal`? let y = u.calculate; //~ ERROR attempted to take value of method `calculate` on type `U` //~^ HELP maybe a `()` to call it is missing? 
diff --git a/src/test/compile-fail/unresolved-import.rs b/src/test/compile-fail/unresolved-import.rs index 0a9a437569730..47490af0ff35e 100644 --- a/src/test/compile-fail/unresolved-import.rs +++ b/src/test/compile-fail/unresolved-import.rs @@ -11,7 +11,7 @@ // ignore-tidy-linelength use foo::bar; //~ ERROR unresolved import `foo::bar` [E0432] - //~^ Maybe a missing `extern crate foo`? + //~^ Maybe a missing `extern crate foo;`? use bar::Baz as x; //~ ERROR unresolved import `bar::Baz` [E0432] //~^ no `Baz` in `bar`. Did you mean to use `Bar`? diff --git a/src/test/compile-fail/unsafe-fn-autoderef.rs b/src/test/compile-fail/unsafe-fn-autoderef.rs index 97a7bf147105a..15b304c69baf5 100644 --- a/src/test/compile-fail/unsafe-fn-autoderef.rs +++ b/src/test/compile-fail/unsafe-fn-autoderef.rs @@ -26,7 +26,7 @@ fn f(p: *const Rec) -> isize { // are prohibited by various checks, such as that the enum is // instantiable and so forth). - return p.f; //~ ERROR attempted access of field `f` on type `*const Rec` + return p.f; //~ ERROR no field `f` on type `*const Rec` } fn main() { diff --git a/src/test/compile-fail/unsized-enum.rs b/src/test/compile-fail/unsized-enum.rs index 61b2b01b35584..5d791215f36c6 100644 --- a/src/test/compile-fail/unsized-enum.rs +++ b/src/test/compile-fail/unsized-enum.rs @@ -19,11 +19,4 @@ fn foo2() { not_sized::>() } // // Not OK: `T` is not sized. -enum Bar { BarSome(U), BarNone } -fn bar1() { not_sized::>() } -fn bar2() { is_sized::>() } -//~^ ERROR `T: std::marker::Sized` is not satisfied -// -// Not OK: `Bar` is not sized, but it should be. - fn main() { } diff --git a/src/test/compile-fail/unsized-enum2.rs b/src/test/compile-fail/unsized-enum2.rs new file mode 100644 index 0000000000000..95fc3243fbed3 --- /dev/null +++ b/src/test/compile-fail/unsized-enum2.rs @@ -0,0 +1,68 @@ +// Copyright 206 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::ops::Deref; + +// Due to aggressive error message deduplication, we require 20 *different* +// unsized types (even Path and [u8] are considered the "same"). 
+ +trait Foo {} +trait Bar {} +trait FooBar {} +trait BarFoo {} + +trait PathHelper1 {} +trait PathHelper2 {} +trait PathHelper3 {} +trait PathHelper4 {} + +struct Path1(PathHelper1); +struct Path2(PathHelper2); +struct Path3(PathHelper3); +struct Path4(PathHelper4); + +enum E { + // parameter + VA(W), //~ ERROR `W: std::marker::Sized` is not satisfied + VB{x: X}, //~ ERROR `X: std::marker::Sized` is not satisfied + VC(isize, Y), //~ ERROR `Y: std::marker::Sized` is not satisfied + VD{u: isize, x: Z}, //~ ERROR `Z: std::marker::Sized` is not satisfied + + // slice / str + VE([u8]), //~ ERROR `[u8]: std::marker::Sized` is not satisfied + VF{x: str}, //~ ERROR `str: std::marker::Sized` is not satisfied + VG(isize, [f32]), //~ ERROR `[f32]: std::marker::Sized` is not satisfied + VH{u: isize, x: [u32]}, //~ ERROR `[u32]: std::marker::Sized` is not satisfied + + // unsized struct + VI(Path1), //~ ERROR `PathHelper1 + 'static: std::marker::Sized` is not satisfied + VJ{x: Path2}, //~ ERROR `PathHelper2 + 'static: std::marker::Sized` is not satisfied + VK(isize, Path3), //~ ERROR `PathHelper3 + 'static: std::marker::Sized` is not satisfied + VL{u: isize, x: Path4}, //~ ERROR `PathHelper4 + 'static: std::marker::Sized` is not satisfied + + // plain trait + VM(Foo), //~ ERROR `Foo + 'static: std::marker::Sized` is not satisfied + VN{x: Bar}, //~ ERROR `Bar + 'static: std::marker::Sized` is not satisfied + VO(isize, FooBar), //~ ERROR `FooBar + 'static: std::marker::Sized` is not satisfied + VP{u: isize, x: BarFoo}, //~ ERROR `BarFoo + 'static: std::marker::Sized` is not satisfied + + // projected + VQ(<&'static [i8] as Deref>::Target), //~ ERROR `[i8]: std::marker::Sized` is not satisfied + VR{x: <&'static [char] as Deref>::Target}, + //~^ ERROR `[char]: std::marker::Sized` is not satisfied + VS(isize, <&'static [f64] as Deref>::Target), + //~^ ERROR `[f64]: std::marker::Sized` is not satisfied + VT{u: isize, x: <&'static [i32] as Deref>::Target}, + //~^ ERROR `[i32]: std::marker::Sized` is not satisfied +} + + +fn main() { } + diff --git a/src/test/compile-fail/unsized3.rs b/src/test/compile-fail/unsized3.rs index f88165c02e988..9b6ccf22c8da5 100644 --- a/src/test/compile-fail/unsized3.rs +++ b/src/test/compile-fail/unsized3.rs @@ -31,19 +31,8 @@ fn f3(x: &X) { fn f4(x: &X) { } -// Test with unsized enum. -enum E { - V(X), -} - fn f5(x: &Y) {} fn f6(x: &X) {} -fn f7(x1: &E, x2: &E) { - f5(x1); - //~^ ERROR `X: std::marker::Sized` is not satisfied - f6(x2); // ok -} - // Test with unsized struct. struct S { @@ -57,13 +46,13 @@ fn f8(x1: &S, x2: &S) { } // Test some tuples. -fn f9(x1: Box>, x2: Box>) { +fn f9(x1: Box>) { f5(&(*x1, 34)); //~^ ERROR `X: std::marker::Sized` is not satisfied } -fn f10(x1: Box>, x2: Box>) { - f5(&(32, *x2)); +fn f10(x1: Box>) { + f5(&(32, *x1)); //~^ ERROR `X: std::marker::Sized` is not satisfied } diff --git a/src/test/compile-fail/use-super-global-path.rs b/src/test/compile-fail/use-super-global-path.rs index 1d0d60a775fda..3fa0712fb4ca3 100644 --- a/src/test/compile-fail/use-super-global-path.rs +++ b/src/test/compile-fail/use-super-global-path.rs @@ -8,22 +8,20 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-#![feature(rustc_attrs)] -#![allow(unused_imports, dead_code)] +#![allow(unused)] struct S; struct Z; mod foo { - use ::super::{S, Z}; //~ WARN global paths cannot start with `super` + use ::super::{S, Z}; //~ ERROR global paths cannot start with `super` //~^ WARN this was previously accepted by the compiler but is being phased out pub fn g() { - use ::super::main; //~ WARN global paths cannot start with `super` + use ::super::main; //~ ERROR global paths cannot start with `super` //~^ WARN this was previously accepted by the compiler but is being phased out main(); } } -#[rustc_error] -fn main() { foo::g(); } //~ ERROR compilation successful +fn main() { foo::g(); } diff --git a/src/test/compile-fail/user-defined-macro-rules.rs b/src/test/compile-fail/user-defined-macro-rules.rs new file mode 100644 index 0000000000000..d55cef434f8d3 --- /dev/null +++ b/src/test/compile-fail/user-defined-macro-rules.rs @@ -0,0 +1,11 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +macro_rules! macro_rules { () => {} } //~ ERROR user-defined macros may not be named `macro_rules` diff --git a/src/test/compile-fail/variadic-ffi-2.rs b/src/test/compile-fail/variadic-ffi-2.rs index 1d519c978a35c..afcad9d8f9614 100644 --- a/src/test/compile-fail/variadic-ffi-2.rs +++ b/src/test/compile-fail/variadic-ffi-2.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -fn baz(f: extern "stdcall" fn(usize, ...)) { +fn baz(f: extern "cdecl" fn(usize, ...)) { //~^ ERROR: variadic function must have C calling convention f(22, 44); } diff --git a/src/test/compile-fail/variadic-ffi.rs b/src/test/compile-fail/variadic-ffi.rs index 129421784930b..af2b552e20f14 100644 --- a/src/test/compile-fail/variadic-ffi.rs +++ b/src/test/compile-fail/variadic-ffi.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -extern "stdcall" { +extern "cdecl" { fn printf(_: *const u8, ...); //~ ERROR: variadic function must have C calling convention } diff --git a/src/test/compile-fail/variance-trait-matching.rs b/src/test/compile-fail/variance-trait-matching.rs index 49dc1e68c221f..2d78940ce4b9d 100644 --- a/src/test/compile-fail/variance-trait-matching.rs +++ b/src/test/compile-fail/variance-trait-matching.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// pretty-expanded FIXME #23616 - #![allow(dead_code)] // Get is covariant in T diff --git a/src/test/compile-fail/vec-macro-with-comma-only.rs b/src/test/compile-fail/vec-macro-with-comma-only.rs index 346cf1ec555d3..96f58666fdff3 100644 --- a/src/test/compile-fail/vec-macro-with-comma-only.rs +++ b/src/test/compile-fail/vec-macro-with-comma-only.rs @@ -9,5 +9,5 @@ // except according to those terms. 
pub fn main() { - vec!(,); //~ ERROR expected expression, found `,` + vec![,]; //~ ERROR expected expression, found `,` } diff --git a/src/test/compile-fail/vec-mut-iter-borrow.rs b/src/test/compile-fail/vec-mut-iter-borrow.rs index 023ef72c453bb..571634e399261 100644 --- a/src/test/compile-fail/vec-mut-iter-borrow.rs +++ b/src/test/compile-fail/vec-mut-iter-borrow.rs @@ -9,7 +9,7 @@ // except according to those terms. fn main() { - let mut xs: Vec = vec!(); + let mut xs: Vec = vec![]; for x in &mut xs { xs.push(1) //~ ERROR cannot borrow `xs` diff --git a/src/test/compile-fail/vec-res-add.rs b/src/test/compile-fail/vec-res-add.rs index cf64486c9c7bf..27f6fc51164ad 100644 --- a/src/test/compile-fail/vec-res-add.rs +++ b/src/test/compile-fail/vec-res-add.rs @@ -21,8 +21,8 @@ impl Drop for r { fn main() { // This can't make sense as it would copy the classes - let i = vec!(r(0)); - let j = vec!(r(1)); + let i = vec![r(0)]; + let j = vec![r(1)]; let k = i + j; //~^ ERROR binary operation `+` cannot be applied to type println!("{:?}", j); diff --git a/src/test/compile-fail/writing-to-immutable-vec.rs b/src/test/compile-fail/writing-to-immutable-vec.rs index 09c9439b464ec..f289b85992ee8 100644 --- a/src/test/compile-fail/writing-to-immutable-vec.rs +++ b/src/test/compile-fail/writing-to-immutable-vec.rs @@ -10,6 +10,6 @@ fn main() { - let v: Vec = vec!(1, 2, 3); + let v: Vec = vec![1, 2, 3]; v[1] = 4; //~ ERROR cannot borrow immutable local variable `v` as mutable } diff --git a/src/test/compile-fail/xcrate-private-by-default.rs b/src/test/compile-fail/xcrate-private-by-default.rs index 3bd4c780625a4..7dd4d970945e0 100644 --- a/src/test/compile-fail/xcrate-private-by-default.rs +++ b/src/test/compile-fail/xcrate-private-by-default.rs @@ -39,7 +39,7 @@ fn main() { foo::(); //~^ ERROR: enum `m` is private foo::(); - //~^ ERROR: type `n` is private + //~^ ERROR: type alias `n` is private // public items in a private mod should be inaccessible static_priv_by_default::foo::a; diff --git a/src/test/debuginfo/drop-locations.rs b/src/test/debuginfo/drop-locations.rs new file mode 100644 index 0000000000000..3a7c534c13902 --- /dev/null +++ b/src/test/debuginfo/drop-locations.rs @@ -0,0 +1,91 @@ +// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-windows +// ignore-android +// min-lldb-version: 310 + +#![allow(unused)] + +// compile-flags:-g + +// This test checks that drop glue code gets attributed to scope's closing brace, +// and function epilogues - to function's closing brace. + +// === GDB TESTS =================================================================================== + +// gdb-command:run +// gdb-command:next +// gdb-command:frame +// gdb-check:[...]#loc1[...] +// gdb-command:next +// gdb-command:frame +// gdb-check:[...]#loc2[...] +// gdb-command:next +// gdb-command:frame +// gdb-check:[...]#loc3[...] +// gdb-command:next +// gdb-command:frame +// gdb-check:[...]#loc4[...] +// gdb-command:next +// gdb-command:frame +// gdb-check:[...]#loc5[...] +// gdb-command:next +// gdb-command:frame +// gdb-check:[...]#loc6[...] 
+ +// === LLDB TESTS ================================================================================== + +// lldb-command:set set stop-line-count-before 0 +// lldb-command:set set stop-line-count-after 1 +// Can't set both to zero or lldb will stop printing source at all. So it will output the current +// line and the next. We deal with this by having at least 2 lines between the #loc's + +// lldb-command:run +// lldb-command:next +// lldb-command:frame select +// lldb-check:[...]#loc1[...] +// lldb-command:next +// lldb-command:frame select +// lldb-check:[...]#loc2[...] +// lldb-command:next +// lldb-command:frame select +// lldb-check:[...]#loc3[...] +// lldb-command:next +// lldb-command:frame select +// lldb-check:[...]#loc4[...] +// lldb-command:next +// lldb-command:frame select +// lldb-check:[...]#loc5[...] +// lldb-command:next +// lldb-command:frame select +// lldb-check:[...]#loc6[...] + +fn main() { + + foo(); + + zzz(); // #loc5 + +} // #loc6 + +fn foo() { + { + let s = String::from("s"); // #break + + zzz(); // #loc1 + + } // #loc2 + + zzz(); // #loc3 + +} // #loc4 + +fn zzz() {()} diff --git a/src/test/debuginfo/issue14411.rs b/src/test/debuginfo/issue14411.rs index 3b2d372117d00..d334e33f887b8 100644 --- a/src/test/debuginfo/issue14411.rs +++ b/src/test/debuginfo/issue14411.rs @@ -20,6 +20,6 @@ fn test(a: &Vec) { } pub fn main() { - let data = vec!(); + let data = vec![]; test(&data); } diff --git a/src/test/incremental/cache_file_headers.rs b/src/test/incremental/cache_file_headers.rs new file mode 100644 index 0000000000000..274a3920be8d4 --- /dev/null +++ b/src/test/incremental/cache_file_headers.rs @@ -0,0 +1,29 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// This test case makes sure that the compiler does not try to re-use anything +// from the incremental compilation cache if the cache was produced by a +// different compiler version. This is tested by artificially forcing the +// emission of a different compiler version in the header of rpass1 artifacts, +// and then making sure that the only object file of the test program gets +// re-translated although the program stays unchanged. + +// The `l33t haxx0r` Rust compiler is known to produce incr. comp. artifacts +// that are outrageously incompatible with just about anything, even itself: +//[rpass1] rustc-env:RUSTC_FORCE_INCR_COMP_ARTIFACT_HEADER="l33t haxx0r rustc 2.1 LTS" + +// revisions:rpass1 rpass2 + +#![feature(rustc_attrs)] +#![rustc_partition_translated(module="cache_file_headers", cfg="rpass2")] + +fn main() { + // empty +} diff --git a/src/test/incremental/change_crate_order/auxiliary/a.rs b/src/test/incremental/change_crate_order/auxiliary/a.rs new file mode 100644 index 0000000000000..69b4acd3e3023 --- /dev/null +++ b/src/test/incremental/change_crate_order/auxiliary/a.rs @@ -0,0 +1,14 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![crate_type="rlib"] + +pub static A : u32 = 32; + diff --git a/src/test/incremental/change_crate_order/auxiliary/b.rs b/src/test/incremental/change_crate_order/auxiliary/b.rs new file mode 100644 index 0000000000000..1ab97a312c1f1 --- /dev/null +++ b/src/test/incremental/change_crate_order/auxiliary/b.rs @@ -0,0 +1,14 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type="rlib"] + +pub static B: u32 = 32; + diff --git a/src/test/compile-fail-fulldeps/auxiliary/macro_crate_MacroRulesTT.rs b/src/test/incremental/change_crate_order/main.rs similarity index 56% rename from src/test/compile-fail-fulldeps/auxiliary/macro_crate_MacroRulesTT.rs rename to src/test/incremental/change_crate_order/main.rs index 9e693fcc56440..bd8742ff38e01 100644 --- a/src/test/compile-fail-fulldeps/auxiliary/macro_crate_MacroRulesTT.rs +++ b/src/test/incremental/change_crate_order/main.rs @@ -8,19 +8,27 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// force-host +// aux-build:a.rs +// aux-build:b.rs +// revisions:rpass1 rpass2 -#![feature(plugin_registrar, rustc_private)] +#![feature(rustc_attrs)] -extern crate syntax; -extern crate rustc; -extern crate rustc_plugin; -use syntax::parse::token; -use syntax::ext::base::MacroRulesTT; -use rustc_plugin::Registry; +#[cfg(rpass1)] +extern crate a; +#[cfg(rpass1)] +extern crate b; -#[plugin_registrar] -pub fn plugin_registrar(reg: &mut Registry) { - reg.register_syntax_extension(token::intern("bogus"), MacroRulesTT); +#[cfg(rpass2)] +extern crate b; +#[cfg(rpass2)] +extern crate a; + +use a::A; +use b::B; + +//? #[rustc_clean(label="TypeckItemBody", cfg="rpass2")] +pub fn main() { + A + B; } diff --git a/src/test/incremental/change_private_impl_method/struct_point.rs b/src/test/incremental/change_private_impl_method/struct_point.rs new file mode 100644 index 0000000000000..8fa34bde17053 --- /dev/null +++ b/src/test/incremental/change_private_impl_method/struct_point.rs @@ -0,0 +1,114 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test where we change the body of a private method in an impl. +// We then test what sort of functions must be rebuilt as a result. 
+ +// revisions:rpass1 rpass2 +// compile-flags: -Z query-dep-graph + +#![feature(rustc_attrs)] +#![feature(stmt_expr_attributes)] +#![allow(dead_code)] + +#![rustc_partition_translated(module="struct_point-point", cfg="rpass2")] + +// FIXME(#37121) -- the following two modules *should* be reused but are not +#![rustc_partition_translated(module="struct_point-fn_calls_methods_in_same_impl", cfg="rpass2")] +#![rustc_partition_translated(module="struct_point-fn_calls_methods_in_another_impl", cfg="rpass2")] +#![rustc_partition_reused(module="struct_point-fn_make_struct", cfg="rpass2")] +#![rustc_partition_reused(module="struct_point-fn_read_field", cfg="rpass2")] +#![rustc_partition_reused(module="struct_point-fn_write_field", cfg="rpass2")] + +mod point { + pub struct Point { + pub x: f32, + pub y: f32, + } + + impl Point { + fn distance_squared(&self) -> f32 { + #[cfg(rpass1)] + return self.x + self.y; + + #[cfg(rpass2)] + return self.x * self.x + self.y * self.y; + } + + pub fn distance_from_origin(&self) -> f32 { + self.distance_squared().sqrt() + } + } + + impl Point { + pub fn translate(&mut self, x: f32, y: f32) { + self.x += x; + self.y += y; + } + } + +} + +/// A fn item that calls (public) methods on `Point` from the same impl which changed +mod fn_calls_methods_in_same_impl { + use point::Point; + + // FIXME(#37121) -- we should not need to typeck this again + #[rustc_dirty(label="TypeckItemBody", cfg="rpass2")] + pub fn check() { + let x = Point { x: 2.0, y: 2.0 }; + x.distance_from_origin(); + } +} + +/// A fn item that calls (public) methods on `Point` from another impl +mod fn_calls_methods_in_another_impl { + use point::Point; + + // FIXME(#37121) -- we should not need to typeck this again + #[rustc_dirty(label="TypeckItemBody", cfg="rpass2")] + pub fn check() { + let mut x = Point { x: 2.0, y: 2.0 }; + x.translate(3.0, 3.0); + } +} + +/// A fn item that makes an instance of `Point` but does not invoke methods +mod fn_make_struct { + use point::Point; + + #[rustc_clean(label="TypeckItemBody", cfg="rpass2")] + pub fn make_origin() -> Point { + Point { x: 2.0, y: 2.0 } + } +} + +/// A fn item that reads fields from `Point` but does not invoke methods +mod fn_read_field { + use point::Point; + + #[rustc_clean(label="TypeckItemBody", cfg="rpass2")] + pub fn get_x(p: Point) -> f32 { + p.x + } +} + +/// A fn item that writes to a field of `Point` but does not invoke methods +mod fn_write_field { + use point::Point; + + #[rustc_clean(label="TypeckItemBody", cfg="rpass2")] + pub fn inc_x(p: &mut Point) { + p.x += 1.0; + } +} + +fn main() { +} diff --git a/src/test/incremental/hashes/consts.rs b/src/test/incremental/hashes/consts.rs new file mode 100644 index 0000000000000..10c02d84b385e --- /dev/null +++ b/src/test/incremental/hashes/consts.rs @@ -0,0 +1,132 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +// This test case tests the incremental compilation hash (ICH) implementation +// for consts. + +// The general pattern followed here is: Change one thing between rev1 and rev2 +// and make sure that the hash has changed, then change nothing between rev2 and +// rev3 and make sure that the hash has not changed. 
+ +// must-compile-successfully +// revisions: cfail1 cfail2 cfail3 +// compile-flags: -Z query-dep-graph + +#![allow(warnings)] +#![feature(rustc_attrs)] +#![crate_type="rlib"] + + +// Change const visibility --------------------------------------------------- +#[cfg(cfail1)] +const CONST_VISIBILITY: u8 = 0; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +pub const CONST_VISIBILITY: u8 = 0; + + +// Change type from i32 to u32 ------------------------------------------------ +#[cfg(cfail1)] +const CONST_CHANGE_TYPE_1: i32 = 0; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +const CONST_CHANGE_TYPE_1: u32 = 0; + + +// Change type from Option to Option -------------------------------- +#[cfg(cfail1)] +const CONST_CHANGE_TYPE_2: Option = None; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +const CONST_CHANGE_TYPE_2: Option = None; + + +// Change value between simple literals --------------------------------------- +#[cfg(cfail1)] +const CONST_CHANGE_VALUE_1: i16 = 1; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +const CONST_CHANGE_VALUE_1: i16 = 2; + + +// Change value between expressions ------------------------------------------- +#[cfg(cfail1)] +const CONST_CHANGE_VALUE_2: i16 = 1 + 1; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +const CONST_CHANGE_VALUE_2: i16 = 1 + 2; + + +#[cfg(cfail1)] +const CONST_CHANGE_VALUE_3: i16 = 2 + 3; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +const CONST_CHANGE_VALUE_3: i16 = 2 * 3; + + +#[cfg(cfail1)] +const CONST_CHANGE_VALUE_4: i16 = 1 + 2 * 3; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +const CONST_CHANGE_VALUE_4: i16 = 1 + 2 * 4; + + +// Change type indirectly ----------------------------------------------------- +struct ReferencedType1; +struct ReferencedType2; + +mod const_change_type_indirectly { + #[cfg(cfail1)] + use super::ReferencedType1 as Type; + + #[cfg(not(cfail1))] + use super::ReferencedType2 as Type; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + const CONST_CHANGE_TYPE_INDIRECTLY_1: Type = Type; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + const CONST_CHANGE_TYPE_INDIRECTLY_2: Option = None; +} diff --git a/src/test/incremental/hashes/enum_defs.rs b/src/test/incremental/hashes/enum_defs.rs new file mode 100644 index 0000000000000..aa17a24be23b6 --- /dev/null +++ b/src/test/incremental/hashes/enum_defs.rs @@ -0,0 
+1,715 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +// This test case tests the incremental compilation hash (ICH) implementation +// for enum definitions. + +// The general pattern followed here is: Change one thing between rev1 and rev2 +// and make sure that the hash has changed, then change nothing between rev2 and +// rev3 and make sure that the hash has not changed. + +// We also test the ICH for enum definitions exported in metadata. Same as +// above, we want to make sure that the change between rev1 and rev2 also +// results in a change of the ICH for the enum's metadata, and that it stays +// the same between rev2 and rev3. + +// must-compile-successfully +// revisions: cfail1 cfail2 cfail3 +// compile-flags: -Z query-dep-graph + +#![allow(warnings)] +#![feature(rustc_attrs)] +#![crate_type="rlib"] + + + +// Change enum visibility ----------------------------------------------------- +#[cfg(cfail1)] +enum EnumVisibility { A } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_clean(cfg="cfail3")] +pub enum EnumVisibility { A } + + + +// Change name of a c-style variant ------------------------------------------- +#[cfg(cfail1)] +enum EnumChangeNameCStyleVariant { + Variant1, + Variant2, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumChangeNameCStyleVariant { + Variant1, + Variant2Changed, +} + + + +// Change name of a tuple-style variant --------------------------------------- +#[cfg(cfail1)] +enum EnumChangeNameTupleStyleVariant { + Variant1, + Variant2(u32, f32), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumChangeNameTupleStyleVariant { + Variant1, + Variant2Changed(u32, f32), +} + + + +// Change name of a struct-style variant -------------------------------------- +#[cfg(cfail1)] +enum EnumChangeNameStructStyleVariant { + Variant1, + Variant2 { a: u32, b: f32 }, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumChangeNameStructStyleVariant { + Variant1, + Variant2Changed { a: u32, b: f32 }, +} + + + +// Change the value of a c-style variant -------------------------------------- +#[cfg(cfail1)] +enum EnumChangeValueCStyleVariant0 { + Variant1, + Variant2 = 11, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumChangeValueCStyleVariant0 { + Variant1, + Variant2 = 22, +} + +#[cfg(cfail1)] +enum EnumChangeValueCStyleVariant1 { + Variant1, + Variant2, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumChangeValueCStyleVariant1 { + Variant1, + Variant2 
= 11, +} + + + +// Add a c-style variant ------------------------------------------------------ +#[cfg(cfail1)] +enum EnumAddCStyleVariant { + Variant1, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumAddCStyleVariant { + Variant1, + Variant2, +} + + + +// Remove a c-style variant --------------------------------------------------- +#[cfg(cfail1)] +enum EnumRemoveCStyleVariant { + Variant1, + Variant2, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumRemoveCStyleVariant { + Variant1, +} + + + +// Add a tuple-style variant -------------------------------------------------- +#[cfg(cfail1)] +enum EnumAddTupleStyleVariant { + Variant1, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumAddTupleStyleVariant { + Variant1, + Variant2(u32, f32), +} + + + +// Remove a tuple-style variant ----------------------------------------------- +#[cfg(cfail1)] +enum EnumRemoveTupleStyleVariant { + Variant1, + Variant2(u32, f32), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumRemoveTupleStyleVariant { + Variant1, +} + + + +// Add a struct-style variant ------------------------------------------------- +#[cfg(cfail1)] +enum EnumAddStructStyleVariant { + Variant1, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumAddStructStyleVariant { + Variant1, + Variant2 { a: u32, b: f32 }, +} + + + +// Remove a struct-style variant ---------------------------------------------- +#[cfg(cfail1)] +enum EnumRemoveStructStyleVariant { + Variant1, + Variant2 { a: u32, b: f32 }, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumRemoveStructStyleVariant { + Variant1, +} + + + +// Change the type of a field in a tuple-style variant ------------------------ +#[cfg(cfail1)] +enum EnumChangeFieldTypeTupleStyleVariant { + Variant1(u32, u32), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumChangeFieldTypeTupleStyleVariant { + Variant1(u32, u64), +} + + + +// Change the type of a field in a struct-style variant ----------------------- +#[cfg(cfail1)] +enum EnumChangeFieldTypeStructStyleVariant { + Variant1, + Variant2 { a: u32, b: u32 }, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumChangeFieldTypeStructStyleVariant { + Variant1, + Variant2 { a: u32, b: u64 }, +} + + + +// Change the name of a field in a struct-style variant ----------------------- +#[cfg(cfail1)] +enum EnumChangeFieldNameStructStyleVariant { + Variant1 { a: u32, b: u32 }, +} + 
+#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumChangeFieldNameStructStyleVariant { + Variant1 { a: u32, c: u32 }, +} + + + +// Change order of fields in a tuple-style variant ---------------------------- +#[cfg(cfail1)] +enum EnumChangeOrderTupleStyleVariant { + Variant1(u32, u64), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumChangeOrderTupleStyleVariant { + Variant1(u64, u32), +} + + + +// Change order of fields in a struct-style variant --------------------------- +#[cfg(cfail1)] +enum EnumChangeFieldOrderStructStyleVariant { + Variant1 { a: u32, b: f32 }, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumChangeFieldOrderStructStyleVariant { + Variant1 { b: f32, a: u32 }, +} + + + +// Add a field to a tuple-style variant --------------------------------------- +#[cfg(cfail1)] +enum EnumAddFieldTupleStyleVariant { + Variant1(u32, u32), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumAddFieldTupleStyleVariant { + Variant1(u32, u32, u32), +} + + + +// Add a field to a struct-style variant -------------------------------------- +#[cfg(cfail1)] +enum EnumAddFieldStructStyleVariant { + Variant1 { a: u32, b: u32 }, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumAddFieldStructStyleVariant { + Variant1 { a: u32, b: u32, c: u32 }, +} + + + +// Add #[must_use] to the enum ------------------------------------------------ +#[cfg(cfail1)] +enum EnumAddMustUse { + Variant1, + Variant2, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[must_use] +enum EnumAddMustUse { + Variant1, + Variant2, +} + + + +// Add #[repr(C)] to the enum ------------------------------------------------- +#[cfg(cfail1)] +enum EnumAddReprC { + Variant1, + Variant2, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumAddReprC { + Variant1, + Variant2, +} + + + +// Change the name of a type parameter ---------------------------------------- +#[cfg(cfail1)] +enum EnumChangeNameOfTypeParameter { + Variant1(S), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumChangeNameOfTypeParameter { + Variant1(T), +} + + + +// Add a type parameter ------------------------------------------------------ +#[cfg(cfail1)] +enum EnumAddTypeParameter { + Variant1(S), + Variant2(S), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] 
+#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumAddTypeParameter { + Variant1(S), + Variant2(T), +} + + + +// Change the name of a lifetime parameter ------------------------------------ +#[cfg(cfail1)] +enum EnumChangeNameOfLifetimeParameter<'a> { + Variant1(&'a u32), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumChangeNameOfLifetimeParameter<'b> { + Variant1(&'b u32), +} + + + +// Add a lifetime parameter --------------------------------------------------- +#[cfg(cfail1)] +enum EnumAddLifetimeParameter<'a> { + Variant1(&'a u32), + Variant2(&'a u32), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumAddLifetimeParameter<'a, 'b> { + Variant1(&'a u32), + Variant2(&'b u32), +} + + + +// Add a lifetime bound to a lifetime parameter ------------------------------- +#[cfg(cfail1)] +enum EnumAddLifetimeParameterBound<'a, 'b> { + Variant1(&'a u32), + Variant2(&'b u32), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumAddLifetimeParameterBound<'a, 'b: 'a> { + Variant1(&'a u32), + Variant2(&'b u32), +} + +// Add a lifetime bound to a type parameter ----------------------------------- +#[cfg(cfail1)] +enum EnumAddLifetimeBoundToParameter<'a, T> { + Variant1(T), + Variant2(&'a u32), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumAddLifetimeBoundToParameter<'a, T: 'a> { + Variant1(T), + Variant2(&'a u32), +} + + + +// Add a trait bound to a type parameter -------------------------------------- +#[cfg(cfail1)] +enum EnumAddTraitBound { + Variant1(S), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumAddTraitBound { + Variant1(T), +} + + + +// Add a lifetime bound to a lifetime parameter in where clause --------------- +#[cfg(cfail1)] +enum EnumAddLifetimeParameterBoundWhere<'a, 'b> { + Variant1(&'a u32), + Variant2(&'b u32), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumAddLifetimeParameterBoundWhere<'a, 'b> where 'b: 'a { + Variant1(&'a u32), + Variant2(&'b u32), +} + + + +// Add a lifetime bound to a type parameter in where clause ------------------- +#[cfg(cfail1)] +enum EnumAddLifetimeBoundToParameterWhere<'a, T> { + Variant1(T), + Variant2(&'a u32), +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumAddLifetimeBoundToParameterWhere<'a, T> where T: 'a { + Variant1(T), + Variant2(&'a u32), +} + + + +// Add a trait bound to a type parameter in where clause ---------------------- +#[cfg(cfail1)] +enum EnumAddTraitBoundWhere { + Variant1(S), +} + +#[cfg(not(cfail1))] 
+#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +enum EnumAddTraitBoundWhere where T: Sync { + Variant1(T), +} + + + +// In an enum with two variants, swap usage of type parameters ---------------- +#[cfg(cfail1)] +enum EnumSwapUsageTypeParameters { + Variant1 { a: A }, + Variant2 { a: B }, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumSwapUsageTypeParameters { + Variant1 { a: B }, + Variant2 { a: A }, +} + + + +// In an enum with two variants, swap usage of lifetime parameters ------------ +#[cfg(cfail1)] +enum EnumSwapUsageLifetimeParameters<'a, 'b> { + Variant1 { a: &'a u32 }, + Variant2 { b: &'b u32 }, +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +enum EnumSwapUsageLifetimeParameters<'a, 'b> { + Variant1 { a: &'b u32 }, + Variant2 { b: &'a u32 }, +} + + + +struct ReferencedType1; +struct ReferencedType2; + + + +// Change field type in tuple-style variant indirectly by modifying a use statement +mod change_field_type_indirectly_tuple_style { + #[cfg(cfail1)] + use super::ReferencedType1 as FieldType; + #[cfg(not(cfail1))] + use super::ReferencedType2 as FieldType; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + enum TupleStyle { + Variant1(FieldType) + } +} + + + +// Change field type in record-style variant indirectly by modifying a use statement +mod change_field_type_indirectly_struct_style { + #[cfg(cfail1)] + use super::ReferencedType1 as FieldType; + #[cfg(not(cfail1))] + use super::ReferencedType2 as FieldType; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + enum StructStyle { + Variant1 { a: FieldType } + } +} + + + +trait ReferencedTrait1 {} +trait ReferencedTrait2 {} + + + +// Change trait bound of type parameter indirectly by modifying a use statement +mod change_trait_bound_indirectly { + #[cfg(cfail1)] + use super::ReferencedTrait1 as Trait; + #[cfg(not(cfail1))] + use super::ReferencedTrait2 as Trait; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + enum Enum { + Variant1(T) + } +} + + + +// Change trait bound of type parameter in where clause indirectly by modifying a use statement +mod change_trait_bound_indirectly_where { + #[cfg(cfail1)] + use super::ReferencedTrait1 as Trait; + #[cfg(not(cfail1))] + use super::ReferencedTrait2 as Trait; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + enum Enum where T: Trait { + Variant1(T) + } +} + + diff --git a/src/test/incremental/hashes/function_interfaces.rs b/src/test/incremental/hashes/function_interfaces.rs new file mode 100644 index 0000000000000..93d94cd1a19c4 --- /dev/null +++ b/src/test/incremental/hashes/function_interfaces.rs @@ -0,0 +1,400 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +// This test case tests the incremental compilation hash (ICH) implementation +// for function interfaces. + +// The general pattern followed here is: Change one thing between rev1 and rev2 +// and make sure that the hash has changed, then change nothing between rev2 and +// rev3 and make sure that the hash has not changed. + +// must-compile-successfully +// revisions: cfail1 cfail2 cfail3 +// compile-flags: -Z query-dep-graph + + +#![allow(warnings)] +#![feature(conservative_impl_trait)] +#![feature(intrinsics)] +#![feature(linkage)] +#![feature(rustc_attrs)] +#![crate_type="rlib"] + + +// Add Parameter --------------------------------------------------------------- + +#[cfg(cfail1)] +fn add_parameter() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn add_parameter(p: i32) {} + + +// Add Return Type ------------------------------------------------------------- + +#[cfg(cfail1)] +fn add_return_type() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn add_return_type() -> () {} + + +// Change Parameter Type ------------------------------------------------------- + +#[cfg(cfail1)] +fn type_of_parameter(p: i32) {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn type_of_parameter(p: i64) {} + + +// Change Parameter Type Reference --------------------------------------------- + +#[cfg(cfail1)] +fn type_of_parameter_ref(p: &i32) {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn type_of_parameter_ref(p: &mut i32) {} + + +// Change Parameter Order ------------------------------------------------------ + +#[cfg(cfail1)] +fn order_of_parameters(p1: i32, p2: i64) {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn order_of_parameters(p2: i64, p1: i32) {} + + +// Unsafe ---------------------------------------------------------------------- + +#[cfg(cfail1)] +fn make_unsafe() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +unsafe fn make_unsafe() {} + + +// Extern ---------------------------------------------------------------------- + +#[cfg(cfail1)] +fn make_extern() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +extern fn make_extern() {} + + +// Extern C Extern Rust-Intrinsic ---------------------------------------------- + +#[cfg(cfail1)] +extern "C" fn make_intrinsic() {} + +#[cfg(not(cfail1))] 
+#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +extern "rust-intrinsic" fn make_intrinsic() {} + + +// Type Parameter -------------------------------------------------------------- + +#[cfg(cfail1)] +fn type_parameter() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn type_parameter() {} + + +// Lifetime Parameter ---------------------------------------------------------- + +#[cfg(cfail1)] +fn lifetime_parameter() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn lifetime_parameter<'a>() {} + + +// Trait Bound ----------------------------------------------------------------- + +#[cfg(cfail1)] +fn trait_bound() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn trait_bound() {} + + +// Builtin Bound --------------------------------------------------------------- + +#[cfg(cfail1)] +fn builtin_bound() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn builtin_bound() {} + + +// Lifetime Bound -------------------------------------------------------------- + +#[cfg(cfail1)] +fn lifetime_bound<'a, T>() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn lifetime_bound<'a, T: 'a>() {} + + +// Second Trait Bound ---------------------------------------------------------- + +#[cfg(cfail1)] +fn second_trait_bound() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn second_trait_bound() {} + + +// Second Builtin Bound -------------------------------------------------------- + +#[cfg(cfail1)] +fn second_builtin_bound() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn second_builtin_bound() {} + + +// Second Lifetime Bound ------------------------------------------------------- + +#[cfg(cfail1)] +fn second_lifetime_bound<'a, 'b, T: 'a>() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn second_lifetime_bound<'a, 'b, T: 'a + 'b>() {} + + +// Inline ---------------------------------------------------------------------- + +#[cfg(cfail1)] +fn inline() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[inline] +fn inline() {} + + +// Inline Never ---------------------------------------------------------------- + +#[cfg(cfail1)] +#[inline(always)] +fn inline_never() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", 
cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[inline(never)] +fn inline_never() {} + + +// No Mangle ------------------------------------------------------------------- + +#[cfg(cfail1)] +fn no_mangle() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[no_mangle] +fn no_mangle() {} + + +// Linkage --------------------------------------------------------------------- + +#[cfg(cfail1)] +fn linkage() {} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[linkage="weak_odr"] +fn linkage() {} + + +// Return Impl Trait ----------------------------------------------------------- + +#[cfg(cfail1)] +fn return_impl_trait() -> i32 { + 0 +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn return_impl_trait() -> impl Clone { + 0 +} + + +// Change Return Impl Trait ---------------------------------------------------- + +#[cfg(cfail1)] +fn change_return_impl_trait() -> impl Clone { + 0 +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +fn change_return_impl_trait() -> impl Copy { + 0 +} + + +// Change Return Type Indirectly ----------------------------------------------- + +struct ReferencedType1; +struct ReferencedType2; + +mod change_return_type_indirectly { + #[cfg(cfail1)] + use super::ReferencedType1 as ReturnType; + #[cfg(not(cfail1))] + use super::ReferencedType2 as ReturnType; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + fn indirect_return_type() -> ReturnType { + ReturnType {} + } +} + + +// Change Parameter Type Indirectly -------------------------------------------- + +mod change_parameter_type_indirectly { + #[cfg(cfail1)] + use super::ReferencedType1 as ParameterType; + #[cfg(not(cfail1))] + use super::ReferencedType2 as ParameterType; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + fn indirect_parameter_type(p: ParameterType) {} +} + + +// Change Trait Bound Indirectly ----------------------------------------------- + +trait ReferencedTrait1 {} +trait ReferencedTrait2 {} + +mod change_trait_bound_indirectly { + #[cfg(cfail1)] + use super::ReferencedTrait1 as Trait; + #[cfg(not(cfail1))] + use super::ReferencedTrait2 as Trait; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + fn indirect_trait_bound(p: T) {} +} + + +// Change Trait Bound Indirectly In Where Clause ------------------------------- + +mod change_trait_bound_indirectly_in_where_clause { + #[cfg(cfail1)] + use super::ReferencedTrait1 as Trait; + #[cfg(not(cfail1))] + use super::ReferencedTrait2 as Trait; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + 
#[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + fn indirect_trait_bound_where(p: T) where T: Trait {} +} diff --git a/src/test/incremental/hashes/statics.rs b/src/test/incremental/hashes/statics.rs new file mode 100644 index 0000000000000..ac67e4349013a --- /dev/null +++ b/src/test/incremental/hashes/statics.rs @@ -0,0 +1,185 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +// This test case tests the incremental compilation hash (ICH) implementation +// for statics. + +// The general pattern followed here is: Change one thing between rev1 and rev2 +// and make sure that the hash has changed, then change nothing between rev2 and +// rev3 and make sure that the hash has not changed. + +// must-compile-successfully +// revisions: cfail1 cfail2 cfail3 +// compile-flags: -Z query-dep-graph + +#![allow(warnings)] +#![feature(rustc_attrs)] +#![feature(linkage)] +#![feature(thread_local)] +#![crate_type="rlib"] + + +// Change static visibility --------------------------------------------------- +#[cfg(cfail1)] +static STATIC_VISIBILITY: u8 = 0; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +pub static STATIC_VISIBILITY: u8 = 0; + + +// Change static mutability --------------------------------------------------- +#[cfg(cfail1)] +static STATIC_MUTABILITY: u8 = 0; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +static mut STATIC_MUTABILITY: u8 = 0; + + +// Add linkage attribute ------------------------------------------------------ +#[cfg(cfail1)] +static STATIC_LINKAGE: u8 = 0; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[linkage="weak_odr"] +static STATIC_LINKAGE: u8 = 0; + + +// Add no_mangle attribute ---------------------------------------------------- +#[cfg(cfail1)] +static STATIC_NO_MANGLE: u8 = 0; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[no_mangle] +static STATIC_NO_MANGLE: u8 = 0; + + +// Add thread_local attribute ------------------------------------------------- +#[cfg(cfail1)] +static STATIC_THREAD_LOCAL: u8 = 0; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[thread_local] +static STATIC_THREAD_LOCAL: u8 = 0; + + +// Change type from i16 to u64 ------------------------------------------------ +#[cfg(cfail1)] +static STATIC_CHANGE_TYPE_1: i16 = 0; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +static STATIC_CHANGE_TYPE_1: u64 = 0; + + +// Change type from Option to Option 
--------------------------------- +#[cfg(cfail1)] +static STATIC_CHANGE_TYPE_2: Option = None; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +static STATIC_CHANGE_TYPE_2: Option = None; + + +// Change value between simple literals --------------------------------------- +#[cfg(cfail1)] +static STATIC_CHANGE_VALUE_1: i16 = 1; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +static STATIC_CHANGE_VALUE_1: i16 = 2; + + +// Change value between expressions ------------------------------------------- +#[cfg(cfail1)] +static STATIC_CHANGE_VALUE_2: i16 = 1 + 1; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +static STATIC_CHANGE_VALUE_2: i16 = 1 + 2; + + +#[cfg(cfail1)] +static STATIC_CHANGE_VALUE_3: i16 = 2 + 3; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +static STATIC_CHANGE_VALUE_3: i16 = 2 * 3; + + +#[cfg(cfail1)] +static STATIC_CHANGE_VALUE_4: i16 = 1 + 2 * 3; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +static STATIC_CHANGE_VALUE_4: i16 = 1 + 2 * 4; + + +// Change type indirectly ----------------------------------------------------- +struct ReferencedType1; +struct ReferencedType2; + +mod static_change_type_indirectly { + #[cfg(cfail1)] + use super::ReferencedType1 as Type; + + #[cfg(not(cfail1))] + use super::ReferencedType2 as Type; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + static STATIC_CHANGE_TYPE_INDIRECTLY_1: Type = Type; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + static STATIC_CHANGE_TYPE_INDIRECTLY_2: Option = None; +} diff --git a/src/test/incremental/hashes/struct_defs.rs b/src/test/incremental/hashes/struct_defs.rs new file mode 100644 index 0000000000000..2d79987823f20 --- /dev/null +++ b/src/test/incremental/hashes/struct_defs.rs @@ -0,0 +1,309 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +// This test case tests the incremental compilation hash (ICH) implementation +// for struct definitions. + +// The general pattern followed here is: Change one thing between rev1 and rev2 +// and make sure that the hash has changed, then change nothing between rev2 and +// rev3 and make sure that the hash has not changed. + +// We also test the ICH for struct definitions exported in metadata. 
Same as +// above, we want to make sure that the change between rev1 and rev2 also +// results in a change of the ICH for the struct's metadata, and that it stays +// the same between rev2 and rev3. + +// must-compile-successfully +// revisions: cfail1 cfail2 cfail3 +// compile-flags: -Z query-dep-graph + + +#![allow(warnings)] +#![feature(rustc_attrs)] +#![crate_type="rlib"] + +// Layout ---------------------------------------------------------------------- +#[cfg(cfail1)] +pub struct LayoutPacked; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(packed)] +pub struct LayoutPacked; + +#[cfg(cfail1)] +struct LayoutC; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +#[repr(C)] +struct LayoutC; + + +// Tuple Struct Change Field Type ---------------------------------------------- + +#[cfg(cfail1)] +struct TupleStructFieldType(i32); + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct TupleStructFieldType(u32); + + +// Tuple Struct Add Field ------------------------------------------------------ + +#[cfg(cfail1)] +struct TupleStructAddField(i32); + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct TupleStructAddField(i32, u32); + + +// Tuple Struct Field Visibility ----------------------------------------------- + +#[cfg(cfail1)] +struct TupleStructFieldVisibility(char); + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct TupleStructFieldVisibility(pub char); + + +// Record Struct Field Type ---------------------------------------------------- + +#[cfg(cfail1)] +struct RecordStructFieldType { x: f32 } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct RecordStructFieldType { x: u64 } + + +// Record Struct Field Name ---------------------------------------------------- + +#[cfg(cfail1)] +struct RecordStructFieldName { x: f32 } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct RecordStructFieldName { y: f32 } + + +// Record Struct Add Field ----------------------------------------------------- + +#[cfg(cfail1)] +struct RecordStructAddField { x: f32 } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct RecordStructAddField { x: f32, y: () } + + +// Record Struct Field Visibility ---------------------------------------------- + +#[cfg(cfail1)] +struct RecordStructFieldVisibility { x: f32 } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] 
+struct RecordStructFieldVisibility { pub x: f32 } + + +// Add Lifetime Parameter ------------------------------------------------------ + +#[cfg(cfail1)] +struct AddLifetimeParameter<'a>(&'a f32, &'a f64); + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct AddLifetimeParameter<'a, 'b>(&'a f32, &'b f64); + + +// Add Lifetime Parameter Bound ------------------------------------------------ + +#[cfg(cfail1)] +struct AddLifetimeParameterBound<'a, 'b>(&'a f32, &'b f64); + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct AddLifetimeParameterBound<'a, 'b: 'a>(&'a f32, &'b f64); + +#[cfg(cfail1)] +struct AddLifetimeParameterBoundWhereClause<'a, 'b>(&'a f32, &'b f64); + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct AddLifetimeParameterBoundWhereClause<'a, 'b>(&'a f32, &'b f64) + where 'b: 'a; + + +// Add Type Parameter ---------------------------------------------------------- + +#[cfg(cfail1)] +struct AddTypeParameter(T1, T1); + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct AddTypeParameter(T1, T2); + + +// Add Type Parameter Bound ---------------------------------------------------- + +#[cfg(cfail1)] +struct AddTypeParameterBound(T); + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct AddTypeParameterBound(T); + + +#[cfg(cfail1)] +struct AddTypeParameterBoundWhereClause(T); + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +struct AddTypeParameterBoundWhereClause(T) where T: Sync; + + +// Empty struct ---------------------------------------------------------------- + +#[rustc_clean(label="Hir", cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail2")] +pub struct EmptyStruct; + + +// Visibility ------------------------------------------------------------------ + +#[cfg(cfail1)] +struct Visibility; + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_clean(cfg="cfail3")] +pub struct Visibility; + + + + +struct ReferencedType1; +struct ReferencedType2; + +// Tuple Struct Change Field Type Indirectly ----------------------------------- +mod tuple_struct_change_field_type_indirectly { + #[cfg(cfail1)] + use super::ReferencedType1 as FieldType; + #[cfg(not(cfail1))] + use super::ReferencedType2 as FieldType; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + struct TupleStruct(FieldType); +} + + +// Record Struct Change Field Type Indirectly ----------------------------------- +mod record_struct_change_field_type_indirectly { + #[cfg(cfail1)] + use super::ReferencedType1 as FieldType; + #[cfg(not(cfail1))] + use super::ReferencedType2 as FieldType; + + 
#[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + struct RecordStruct { + _x: FieldType + } +} + + + + +trait ReferencedTrait1 {} +trait ReferencedTrait2 {} + +// Change Trait Bound Indirectly ----------------------------------------------- +mod change_trait_bound_indirectly { + #[cfg(cfail1)] + use super::ReferencedTrait1 as Trait; + #[cfg(not(cfail1))] + use super::ReferencedTrait2 as Trait; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + struct Struct(T); +} + +// Change Trait Bound Indirectly In Where Clause ------------------------------- +mod change_trait_bound_indirectly_in_where_clause { + #[cfg(cfail1)] + use super::ReferencedTrait1 as Trait; + #[cfg(not(cfail1))] + use super::ReferencedTrait2 as Trait; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + struct Struct(T) where T : Trait; +} diff --git a/src/test/incremental/hashes/trait_defs.rs b/src/test/incremental/hashes/trait_defs.rs new file mode 100644 index 0000000000000..391c2e75ba4d5 --- /dev/null +++ b/src/test/incremental/hashes/trait_defs.rs @@ -0,0 +1,1115 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +// This test case tests the incremental compilation hash (ICH) implementation +// for trait definitions. + +// The general pattern followed here is: Change one thing between rev1 and rev2 +// and make sure that the hash has changed, then change nothing between rev2 and +// rev3 and make sure that the hash has not changed. + +// We also test the ICH for trait definitions exported in metadata. Same as +// above, we want to make sure that the change between rev1 and rev2 also +// results in a change of the ICH for the trait's metadata, and that it stays +// the same between rev2 and rev3. 
+ +// must-compile-successfully +// revisions: cfail1 cfail2 cfail3 +// compile-flags: -Z query-dep-graph + +#![allow(warnings)] +#![feature(rustc_attrs)] +#![crate_type="rlib"] +#![feature(associated_type_defaults)] +#![feature(intrinsics)] +#![feature(associated_consts)] + + +// Change trait visibility -------------------------------------------------------- +#[cfg(cfail1)] +trait TraitVisibility { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_clean(cfg="cfail3")] +pub trait TraitVisibility { } + + + +// Change trait unsafety ---------------------------------------------------------- +#[cfg(cfail1)] +trait TraitUnsafety { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +unsafe trait TraitUnsafety { } + + + +// Add method --------------------------------------------------------------------- +#[cfg(cfail1)] +trait TraitAddMethod { +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +pub trait TraitAddMethod { + fn method(); +} + + + +// Change name of method ---------------------------------------------------------- +#[cfg(cfail1)] +trait TraitChangeMethodName { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeMethodName { + fn methodChanged(); +} + + + +// Add return type to method ------------------------------------------------------ +#[cfg(cfail1)] +trait TraitAddReturnType { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddReturnType { + fn method() -> u32; +} + + + +// Change return type of method --------------------------------------------------- +#[cfg(cfail1)] +trait TraitChangeReturnType { + fn method() -> u32; +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeReturnType { + fn method() -> u64; +} + + + +// Add parameter to method -------------------------------------------------------- +#[cfg(cfail1)] +trait TraitAddParameterToMethod { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddParameterToMethod { + fn method(a: u32); +} + + + +// Change name of method parameter ------------------------------------------------ +#[cfg(cfail1)] +trait TraitChangeMethodParameterName { + fn method(a: u32); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeMethodParameterName { + fn method(b: u32); +} + + + +// Change type of method parameter (i32 => i64) ----------------------------------- +#[cfg(cfail1)] +trait TraitChangeMethodParameterType { + fn method(a: i32); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", 
cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeMethodParameterType { + fn method(a: i64); +} + + + +// Change type of method parameter (&i32 => &mut i32) ----------------------------- +#[cfg(cfail1)] +trait TraitChangeMethodParameterTypeRef { + fn method(a: &i32); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeMethodParameterTypeRef { + fn method(a: &mut i32); +} + + + +// Change order of method parameters ---------------------------------------------- +#[cfg(cfail1)] +trait TraitChangeMethodParametersOrder { + fn method(a: i32, b: i64); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeMethodParametersOrder { + fn method(b: i64, a: i32); +} + + + +// Add default implementation to method ------------------------------------------- +#[cfg(cfail1)] +trait TraitAddMethodDefaultImplementation { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddMethodDefaultImplementation { + fn method() { } +} + + + +// Change order of methods -------------------------------------------------------- +#[cfg(cfail1)] +trait TraitChangeOrderOfMethods { + fn method0(); + fn method1(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeOrderOfMethods { + fn method1(); + fn method0(); +} + + + +// Change mode of self parameter -------------------------------------------------- +#[cfg(cfail1)] +trait TraitChangeModeSelfRefToMut { + fn method(&self); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeModeSelfRefToMut { + fn method(&mut self); +} + + + +#[cfg(cfail1)] +trait TraitChangeModeSelfOwnToMut: Sized { + fn method(self) {} +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeModeSelfOwnToMut: Sized { + fn method(mut self) {} +} + + + +#[cfg(cfail1)] +trait TraitChangeModeSelfOwnToRef { + fn method(self); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeModeSelfOwnToRef { + fn method(&self); +} + + + +// Add unsafe modifier to method -------------------------------------------------- +#[cfg(cfail1)] +trait TraitAddUnsafeModifier { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddUnsafeModifier { + unsafe fn method(); +} + + + +// Add extern modifier to method -------------------------------------------------- +#[cfg(cfail1)] +trait 
TraitAddExternModifier { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddExternModifier { + extern fn method(); +} + + + +// Change extern "C" to extern "rust-intrinsic" ----------------------------------- +#[cfg(cfail1)] +trait TraitChangeExternCToRustIntrinsic { + extern "C" fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeExternCToRustIntrinsic { + extern "rust-intrinsic" fn method(); +} + + + +// Add type parameter to method --------------------------------------------------- +#[cfg(cfail1)] +trait TraitAddTypeParameterToMethod { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddTypeParameterToMethod { + fn method(); +} + + + +// Add lifetime parameter to method ----------------------------------------------- +#[cfg(cfail1)] +trait TraitAddLifetimeParameterToMethod { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddLifetimeParameterToMethod { + fn method<'a>(); +} + + + +// dummy trait for bound +trait ReferencedTrait0 { } +trait ReferencedTrait1 { } + +// Add trait bound to method type parameter --------------------------------------- +#[cfg(cfail1)] +trait TraitAddTraitBoundToMethodTypeParameter { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddTraitBoundToMethodTypeParameter { + fn method(); +} + + + +// Add builtin bound to method type parameter ------------------------------------- +#[cfg(cfail1)] +trait TraitAddBuiltinBoundToMethodTypeParameter { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddBuiltinBoundToMethodTypeParameter { + fn method(); +} + + + +// Add lifetime bound to method lifetime parameter ------------------------------------ +#[cfg(cfail1)] +trait TraitAddLifetimeBoundToMethodLifetimeParameter { + fn method<'a, 'b>(a: &'a u32, b: &'b u32); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddLifetimeBoundToMethodLifetimeParameter { + fn method<'a, 'b: 'a>(a: &'a u32, b: &'b u32); +} + + + +// Add second trait bound to method type parameter -------------------------------- +#[cfg(cfail1)] +trait TraitAddSecondTraitBoundToMethodTypeParameter { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondTraitBoundToMethodTypeParameter { + fn method(); +} + + + +// Add second builtin bound to method type parameter ------------------------------ 
+#[cfg(cfail1)] +trait TraitAddSecondBuiltinBoundToMethodTypeParameter { + fn method(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondBuiltinBoundToMethodTypeParameter { + fn method(); +} + + + +// Add second lifetime bound to method lifetime parameter ----------------------------- +#[cfg(cfail1)] +trait TraitAddSecondLifetimeBoundToMethodLifetimeParameter { + fn method<'a, 'b, 'c: 'a>(a: &'a u32, b: &'b u32, c: &'c u32); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondLifetimeBoundToMethodLifetimeParameter { + fn method<'a, 'b, 'c: 'a + 'b>(a: &'a u32, b: &'b u32, c: &'c u32); +} + + + +// Add associated type ------------------------------------------------------------ +#[cfg(cfail1)] +trait TraitAddAssociatedType { + fn mathod(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddAssociatedType { + type Associated; + + fn mathod(); +} + + + +// Add trait bound to associated type --------------------------------------------- +#[cfg(cfail1)] +trait TraitAddTraitBoundToAssociatedType { + type Associated; + + fn mathod(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddTraitBoundToAssociatedType { + type Associated: ReferencedTrait0; + + fn mathod(); +} + + + +// Add lifetime bound to associated type ------------------------------------------ +#[cfg(cfail1)] +trait TraitAddLifetimeBoundToAssociatedType<'a> { + type Associated; + + fn mathod(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddLifetimeBoundToAssociatedType<'a> { + type Associated: 'a; + + fn mathod(); +} + + + +// Add default to associated type ------------------------------------------------- +#[cfg(cfail1)] +trait TraitAddDefaultToAssociatedType { + type Associated; + + fn mathod(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddDefaultToAssociatedType { + type Associated = ReferenceType0; + + fn mathod(); +} + + + +// Add associated constant -------------------------------------------------------- +#[cfg(cfail1)] +trait TraitAddAssociatedConstant { + fn mathod(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddAssociatedConstant { + const Value: u32; + + fn mathod(); +} + + + +// Add initializer to associated constant ----------------------------------------- +#[cfg(cfail1)] +trait TraitAddInitializerToAssociatedConstant { + const Value: u32; + + fn mathod(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] 
+#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddInitializerToAssociatedConstant { + const Value: u32 = 1; + + fn mathod(); +} + + + +// Change type of associated constant --------------------------------------------- +#[cfg(cfail1)] +trait TraitChangeTypeOfAssociatedConstant { + const Value: u32; + + fn mathod(); +} + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitChangeTypeOfAssociatedConstant { + const Value: f64; + + fn mathod(); +} + + + +// Add super trait ---------------------------------------------------------------- +#[cfg(cfail1)] +trait TraitAddSuperTrait { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSuperTrait : ReferencedTrait0 { } + + + +// Add builtin bound (Send or Copy) ----------------------------------------------- +#[cfg(cfail1)] +trait TraitAddBuiltiBound { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddBuiltiBound : Send { } + + + +// Add 'static lifetime bound to trait -------------------------------------------- +#[cfg(cfail1)] +trait TraitAddStaticLifetimeBound { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddStaticLifetimeBound : 'static { } + + + +// Add super trait as second bound ------------------------------------------------ +#[cfg(cfail1)] +trait TraitAddTraitAsSecondBound : ReferencedTrait0 { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddTraitAsSecondBound : ReferencedTrait0 + ReferencedTrait1 { } + +#[cfg(cfail1)] +trait TraitAddTraitAsSecondBoundFromBuiltin : Send { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddTraitAsSecondBoundFromBuiltin : Send + ReferencedTrait0 { } + + + +// Add builtin bound as second bound ---------------------------------------------- +#[cfg(cfail1)] +trait TraitAddBuiltinBoundAsSecondBound : ReferencedTrait0 { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddBuiltinBoundAsSecondBound : ReferencedTrait0 + Send { } + +#[cfg(cfail1)] +trait TraitAddBuiltinBoundAsSecondBoundFromBuiltin : Send { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddBuiltinBoundAsSecondBoundFromBuiltin: Send + Copy { } + + + +// Add 'static bounds as second bound --------------------------------------------- +#[cfg(cfail1)] +trait TraitAddStaticBoundAsSecondBound : ReferencedTrait0 { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] 
+#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddStaticBoundAsSecondBound : ReferencedTrait0 + 'static { } + +#[cfg(cfail1)] +trait TraitAddStaticBoundAsSecondBoundFromBuiltin : Send { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddStaticBoundAsSecondBoundFromBuiltin : Send + 'static { } + + + +// Add type parameter to trait ---------------------------------------------------- +#[cfg(cfail1)] +trait TraitAddTypeParameterToTrait { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddTypeParameterToTrait { } + + + +// Add lifetime parameter to trait ------------------------------------------------ +#[cfg(cfail1)] +trait TraitAddLifetimeParameterToTrait { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddLifetimeParameterToTrait<'a> { } + + + +// Add trait bound to type parameter of trait ------------------------------------- +#[cfg(cfail1)] +trait TraitAddTraitBoundToTypeParameterOfTrait { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddTraitBoundToTypeParameterOfTrait { } + + + +// Add lifetime bound to type parameter of trait ---------------------------------- +#[cfg(cfail1)] +trait TraitAddLifetimeBoundToTypeParameterOfTrait<'a, T> { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddLifetimeBoundToTypeParameterOfTrait<'a, T: 'a> { } + + + +// Add lifetime bound to lifetime parameter of trait ------------------------------ +#[cfg(cfail1)] +trait TraitAddLifetimeBoundToLifetimeParameterOfTrait<'a, 'b> { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddLifetimeBoundToLifetimeParameterOfTrait<'a: 'b, 'b> { } + + + +// Add builtin bound to type parameter of trait ----------------------------------- +#[cfg(cfail1)] +trait TraitAddBuiltinBoundToTypeParameterOfTrait { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddBuiltinBoundToTypeParameterOfTrait { } + + + +// Add second type parameter to trait --------------------------------------------- +#[cfg(cfail1)] +trait TraitAddSecondTypeParameterToTrait { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondTypeParameterToTrait { } + + + +// Add second lifetime parameter to trait ----------------------------------------- +#[cfg(cfail1)] +trait TraitAddSecondLifetimeParameterToTrait<'a> { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", 
cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondLifetimeParameterToTrait<'a, 'b> { } + + + +// Add second trait bound to type parameter of trait ------------------------------ +#[cfg(cfail1)] +trait TraitAddSecondTraitBoundToTypeParameterOfTrait { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondTraitBoundToTypeParameterOfTrait { } + + + +// Add second lifetime bound to type parameter of trait --------------------------- +#[cfg(cfail1)] +trait TraitAddSecondLifetimeBoundToTypeParameterOfTrait<'a, 'b, T: 'a> { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondLifetimeBoundToTypeParameterOfTrait<'a, 'b, T: 'a + 'b> { } + + + +// Add second lifetime bound to lifetime parameter of trait------------------------ +#[cfg(cfail1)] +trait TraitAddSecondLifetimeBoundToLifetimeParameterOfTrait<'a: 'b, 'b, 'c> { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondLifetimeBoundToLifetimeParameterOfTrait<'a: 'b + 'c, 'b, 'c> { } + + + +// Add second builtin bound to type parameter of trait ---------------------------- +#[cfg(cfail1)] +trait TraitAddSecondBuiltinBoundToTypeParameterOfTrait { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondBuiltinBoundToTypeParameterOfTrait { } + + + +// -------------------------------------------------------------------------------- +struct ReferenceType0 {} +struct ReferenceType1 {} + + + +// Add trait bound to type parameter of trait in where clause---------------------- +#[cfg(cfail1)] +trait TraitAddTraitBoundToTypeParameterOfTraitWhere { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddTraitBoundToTypeParameterOfTraitWhere where T: ReferencedTrait0 { } + + + +// Add lifetime bound to type parameter of trait in where clause------------------- +#[cfg(cfail1)] +trait TraitAddLifetimeBoundToTypeParameterOfTraitWhere<'a, T> { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddLifetimeBoundToTypeParameterOfTraitWhere<'a, T> where T: 'a { } + + + +// Add lifetime bound to lifetime parameter of trait in where clause--------------- +#[cfg(cfail1)] +trait TraitAddLifetimeBoundToLifetimeParameterOfTraitWhere<'a, 'b> { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddLifetimeBoundToLifetimeParameterOfTraitWhere<'a, 'b> where 'a: 'b { } + + + +// Add builtin bound to type parameter of trait in where clause-------------------- +#[cfg(cfail1)] +trait TraitAddBuiltinBoundToTypeParameterOfTraitWhere { } + +#[cfg(not(cfail1))] 
+#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddBuiltinBoundToTypeParameterOfTraitWhere where T: Send { } + + + +// Add second trait bound to type parameter of trait in where clause--------------- +#[cfg(cfail1)] +trait TraitAddSecondTraitBoundToTypeParameterOfTraitWhere where T: ReferencedTrait0 { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondTraitBoundToTypeParameterOfTraitWhere + where T: ReferencedTrait0 + ReferencedTrait1 { } + + + +// Add second lifetime bound to type parameter of trait in where clause------------ +#[cfg(cfail1)] +trait TraitAddSecondLifetimeBoundToTypeParameterOfTraitWhere<'a, 'b, T> where T: 'a { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondLifetimeBoundToTypeParameterOfTraitWhere<'a, 'b, T> where T: 'a + 'b { } + + + +// Add second lifetime bound to lifetime parameter of trait in where clause-------- +#[cfg(cfail1)] +trait TraitAddSecondLifetimeBoundToLifetimeParameterOfTraitWhere<'a, 'b, 'c> where 'a: 'b { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondLifetimeBoundToLifetimeParameterOfTraitWhere<'a, 'b, 'c> where 'a: 'b + 'c { } + + + +// Add second builtin bound to type parameter of trait in where clause------------- +#[cfg(cfail1)] +trait TraitAddSecondBuiltinBoundToTypeParameterOfTraitWhere where T: Send { } + +#[cfg(not(cfail1))] +#[rustc_dirty(label="Hir", cfg="cfail2")] +#[rustc_clean(label="Hir", cfg="cfail3")] +#[rustc_metadata_dirty(cfg="cfail2")] +#[rustc_metadata_clean(cfg="cfail3")] +trait TraitAddSecondBuiltinBoundToTypeParameterOfTraitWhere where T: Send + Sync { } + + + +// EDIT: Some more cases ---------------------------------------------------------- + +// Change return type of method indirectly by modifying a use statement------------ +mod change_return_type_of_method_indirectly_use { + #[cfg(cfail1)] + use super::ReferenceType0 as ReturnType; + #[cfg(not(cfail1))] + use super::ReferenceType1 as ReturnType; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + trait TraitChangeReturnType { + fn method() -> ReturnType; + } +} + + + +// Change type of method parameter indirectly by modifying a use statement--------- +mod change_method_parameter_type_indirectly_by_use { + #[cfg(cfail1)] + use super::ReferenceType0 as ArgType; + #[cfg(not(cfail1))] + use super::ReferenceType1 as ArgType; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + trait TraitChangeArgType { + fn method(a: ArgType); + } +} + + + +// Change trait bound of method type parameter indirectly by modifying a use statement +mod change_method_parameter_type_bound_indirectly_by_use { + #[cfg(cfail1)] + use super::ReferencedTrait0 as Bound; + #[cfg(not(cfail1))] + use super::ReferencedTrait1 as Bound; + + #[rustc_dirty(label="Hir", 
cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + trait TraitChangeBoundOfMethodTypeParameter { + fn method(a: T); + } +} + + + +// Change trait bound of method type parameter in where clause indirectly +// by modifying a use statement +mod change_method_parameter_type_bound_indirectly_by_use_where { + #[cfg(cfail1)] + use super::ReferencedTrait0 as Bound; + #[cfg(not(cfail1))] + use super::ReferencedTrait1 as Bound; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + trait TraitChangeBoundOfMethodTypeParameterWhere { + fn method(a: T) where T: Bound; + } +} + + + +// Change trait bound of trait type parameter indirectly by modifying a use statement +mod change_method_type_parameter_bound_indirectly { + #[cfg(cfail1)] + use super::ReferencedTrait0 as Bound; + #[cfg(not(cfail1))] + use super::ReferencedTrait1 as Bound; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + trait TraitChangeTraitBound { + fn method(a: T); + } +} + + + +// Change trait bound of trait type parameter in where clause indirectly +// by modifying a use statement +mod change_method_type_parameter_bound_indirectly_where { + #[cfg(cfail1)] + use super::ReferencedTrait0 as Bound; + #[cfg(not(cfail1))] + use super::ReferencedTrait1 as Bound; + + #[rustc_dirty(label="Hir", cfg="cfail2")] + #[rustc_clean(label="Hir", cfg="cfail3")] + #[rustc_metadata_dirty(cfg="cfail2")] + #[rustc_metadata_clean(cfg="cfail3")] + trait TraitChangeTraitBoundWhere where T: Bound { + fn method(a: T); + } +} diff --git a/src/test/incremental/struct_change_field_name.rs b/src/test/incremental/struct_change_field_name.rs index c27294442e7a4..cb43d12740584 100644 --- a/src/test/incremental/struct_change_field_name.rs +++ b/src/test/incremental/struct_change_field_name.rs @@ -39,13 +39,13 @@ pub fn use_X() -> u32 { let x: X = X { x: 22 }; //[cfail2]~^ ERROR struct `X` has no field named `x` x.x as u32 - //[cfail2]~^ ERROR attempted access of field `x` + //[cfail2]~^ ERROR no field `x` on type `X` } #[rustc_dirty(label="TypeckItemBody", cfg="cfail2")] pub fn use_EmbedX(embed: EmbedX) -> u32 { embed.x.x as u32 - //[cfail2]~^ ERROR attempted access of field `x` + //[cfail2]~^ ERROR no field `x` on type `X` } #[rustc_clean(label="TypeckItemBody", cfg="cfail2")] diff --git a/src/test/mir-opt/deaggregator_test.rs b/src/test/mir-opt/deaggregator_test.rs index e57a9674cf683..f136d74fa517d 100644 --- a/src/test/mir-opt/deaggregator_test.rs +++ b/src/test/mir-opt/deaggregator_test.rs @@ -23,19 +23,19 @@ fn main() {} // END RUST SOURCE // START rustc.node13.Deaggregator.before.mir // bb0: { -// var0 = arg0; // scope 0 at main.rs:8:8: 8:9 -// tmp0 = var0; // scope 1 at main.rs:9:14: 9:15 -// return = Baz { x: tmp0, y: const F32(0), z: const false }; // scope ... 
-// goto -> bb1; // scope 1 at main.rs:8:1: 10:2 +// _2 = _1; +// _3 = _2; +// _0 = Baz { x: _3, y: const F32(0), z: const false }; +// return; // } // END rustc.node13.Deaggregator.before.mir // START rustc.node13.Deaggregator.after.mir // bb0: { -// var0 = arg0; // scope 0 at main.rs:8:8: 8:9 -// tmp0 = var0; // scope 1 at main.rs:9:14: 9:15 -// (return.0: usize) = tmp0; // scope 1 at main.rs:9:5: 9:34 -// (return.1: f32) = const F32(0); // scope 1 at main.rs:9:5: 9:34 -// (return.2: bool) = const false; // scope 1 at main.rs:9:5: 9:34 -// goto -> bb1; // scope 1 at main.rs:8:1: 10:2 +// _2 = _1; +// _3 = _2; +// (_0.0: usize) = _3; +// (_0.1: f32) = const F32(0); +// (_0.2: bool) = const false; +// return; // } -// END rustc.node13.Deaggregator.after.mir \ No newline at end of file +// END rustc.node13.Deaggregator.after.mir diff --git a/src/test/mir-opt/deaggregator_test_enum.rs b/src/test/mir-opt/deaggregator_test_enum.rs index ccfa760a28c76..25fa0e90835c6 100644 --- a/src/test/mir-opt/deaggregator_test_enum.rs +++ b/src/test/mir-opt/deaggregator_test_enum.rs @@ -28,18 +28,18 @@ fn main() { // END RUST SOURCE // START rustc.node10.Deaggregator.before.mir // bb0: { -// var0 = arg0; // scope 0 at main.rs:7:8: 7:9 -// tmp0 = var0; // scope 1 at main.rs:8:19: 8:20 -// return = Baz::Foo { x: tmp0 }; // scope 1 at main.rs:8:5: 8:21 -// goto -> bb1; // scope 1 at main.rs:7:1: 9:2 +// _2 = _1; +// _3 = _2; +// _0 = Baz::Foo { x: _3 }; +// return; // } // END rustc.node10.Deaggregator.before.mir // START rustc.node10.Deaggregator.after.mir // bb0: { -// var0 = arg0; // scope 0 at main.rs:7:8: 7:9 -// tmp0 = var0; // scope 1 at main.rs:8:19: 8:20 -// ((return as Foo).0: usize) = tmp0; // scope 1 at main.rs:8:5: 8:21 -// discriminant(return) = 1; // scope 1 at main.rs:8:5: 8:21 -// goto -> bb1; // scope 1 at main.rs:7:1: 9:2 +// _2 = _1; +// _3 = _2; +// ((_0 as Foo).0: usize) = _3; +// discriminant(_0) = 1; +// return; // } -// END rustc.node10.Deaggregator.after.mir \ No newline at end of file +// END rustc.node10.Deaggregator.after.mir diff --git a/src/test/mir-opt/simplify_if.rs b/src/test/mir-opt/simplify_if.rs index dd6a857960432..7239e32357b95 100644 --- a/src/test/mir-opt/simplify_if.rs +++ b/src/test/mir-opt/simplify_if.rs @@ -17,11 +17,11 @@ fn main() { // END RUST SOURCE // START rustc.node4.SimplifyBranches.initial-before.mir // bb0: { -// if(const false) -> [true: bb1, false: bb2]; // scope 0 at simplify_if.rs:12:5: 14:6 +// if(const false) -> [true: bb1, false: bb2]; // } // END rustc.node4.SimplifyBranches.initial-before.mir // START rustc.node4.SimplifyBranches.initial-after.mir // bb0: { -// goto -> bb2; // scope 0 at simplify_if.rs:12:5: 14:6 +// goto -> bb2; // } -// END rustc.node4.SimplifyBranches.initial-after.mir \ No newline at end of file +// END rustc.node4.SimplifyBranches.initial-after.mir diff --git a/src/test/mir-opt/storage_ranges.rs b/src/test/mir-opt/storage_ranges.rs index f93447b642a20..933bfa8df2ec2 100644 --- a/src/test/mir-opt/storage_ranges.rs +++ b/src/test/mir-opt/storage_ranges.rs @@ -19,29 +19,25 @@ fn main() { } // END RUST SOURCE -// START rustc.node4.PreTrans.after.mir +// START rustc.node4.TypeckMir.before.mir // bb0: { -// StorageLive(var0); // scope 0 at storage_ranges.rs:12:9: 12:10 -// var0 = const 0i32; // scope 0 at storage_ranges.rs:12:13: 12:14 -// StorageLive(var1); // scope 1 at storage_ranges.rs:14:13: 14:14 -// StorageLive(tmp1); // scope 1 at storage_ranges.rs:14:18: 14:25 -// StorageLive(tmp2); // scope 1 at storage_ranges.rs:14:23: 
14:24 -// tmp2 = var0; // scope 1 at storage_ranges.rs:14:23: 14:24 -// tmp1 = std::option::Option::Some(tmp2,); // scope 1 at storage_ranges.rs:14:18: 14:25 -// var1 = &tmp1; // scope 1 at storage_ranges.rs:14:17: 14:25 -// StorageDead(tmp2); // scope 1 at storage_ranges.rs:14:23: 14:24 -// tmp0 = (); // scope 2 at storage_ranges.rs:13:5: 15:6 -// StorageDead(tmp1); // scope 1 at storage_ranges.rs:14:18: 14:25 -// StorageDead(var1); // scope 1 at storage_ranges.rs:14:13: 14:14 -// StorageLive(var2); // scope 1 at storage_ranges.rs:16:9: 16:10 -// var2 = const 1i32; // scope 1 at storage_ranges.rs:16:13: 16:14 -// return = (); // scope 3 at storage_ranges.rs:11:11: 17:2 -// StorageDead(var2); // scope 1 at storage_ranges.rs:16:9: 16:10 -// StorageDead(var0); // scope 0 at storage_ranges.rs:12:9: 12:10 -// goto -> bb1; // scope 0 at storage_ranges.rs:11:1: 17:2 +// StorageLive(_1); +// _1 = const 0i32; +// StorageLive(_3); +// StorageLive(_4); +// StorageLive(_5); +// _5 = _1; +// _4 = std::option::Option::Some(_5,); +// _3 = &_4; +// StorageDead(_5); +// _2 = (); +// StorageDead(_4); +// StorageDead(_3); +// StorageLive(_6); +// _6 = const 1i32; +// _0 = (); +// StorageDead(_6); +// StorageDead(_1); +// return; // } -// -// bb1: { -// return; // scope 0 at storage_ranges.rs:11:1: 17:2 -// } -// END rustc.node4.PreTrans.after.mir +// END rustc.node4.TypeckMir.before.mir diff --git a/src/test/parse-fail/extern-no-fn.rs b/src/test/parse-fail/extern-no-fn.rs index acf7187cf436f..ff3fefde40ece 100644 --- a/src/test/parse-fail/extern-no-fn.rs +++ b/src/test/parse-fail/extern-no-fn.rs @@ -11,7 +11,7 @@ // compile-flags: -Z parse-only extern { - f(); //~ ERROR expected one of `fn`, `pub`, `static`, or `}`, found `f` + f(); //~ ERROR expected one of `!` or `::`, found `(` } fn main() { diff --git a/src/test/parse-fail/issue-10412.rs b/src/test/parse-fail/issue-10412.rs index fc2598d1e9d2f..d723d94c02cc3 100644 --- a/src/test/parse-fail/issue-10412.rs +++ b/src/test/parse-fail/issue-10412.rs @@ -19,7 +19,7 @@ trait Serializable<'self, T> { //~ ERROR lifetimes cannot use keyword names impl<'self> Serializable for &'self str { //~ ERROR lifetimes cannot use keyword names //~^ ERROR lifetimes cannot use keyword names fn serialize(val : &'self str) -> Vec { //~ ERROR lifetimes cannot use keyword names - vec!(1) + vec![1] } fn deserialize(repr: &[u8]) -> &'self str { //~ ERROR lifetimes cannot use keyword names "hi" diff --git a/src/test/parse-fail/issue-21153.rs b/src/test/parse-fail/issue-21153.rs index 76a4687f544da..c03e0ef73217c 100644 --- a/src/test/parse-fail/issue-21153.rs +++ b/src/test/parse-fail/issue-21153.rs @@ -11,5 +11,6 @@ // compile-flags: -Z parse-only trait MyTrait: Iterator { - Item = T; //~ ERROR expected one of `const`, `extern`, `fn`, `type`, or `unsafe`, found `Item` + Item = T; //~ ERROR expected one of `!` or `::`, found `=` + //~| ERROR expected item, found `=` } diff --git a/src/test/parse-fail/issue-37113.rs b/src/test/parse-fail/issue-37113.rs new file mode 100644 index 0000000000000..caf451099d714 --- /dev/null +++ b/src/test/parse-fail/issue-37113.rs @@ -0,0 +1,21 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +macro_rules! 
test_macro { + ( $( $t:ty ),* $(),*) => { + enum SomeEnum { + $( $t, )* //~ ERROR expected identifier, found `String` + }; + }; +} + +fn main() { + test_macro!(String,); +} \ No newline at end of file diff --git a/src/test/parse-fail/recover-enum.rs b/src/test/parse-fail/recover-enum.rs new file mode 100644 index 0000000000000..7de3ed10c90fb --- /dev/null +++ b/src/test/parse-fail/recover-enum.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only -Z continue-parse-after-error + +fn main() { + enum Test { + Very + Bad //~ ERROR found `Bad` + Stuff + } +} diff --git a/src/test/parse-fail/recover-enum2.rs b/src/test/parse-fail/recover-enum2.rs new file mode 100644 index 0000000000000..49380a03e156a --- /dev/null +++ b/src/test/parse-fail/recover-enum2.rs @@ -0,0 +1,43 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only -Z continue-parse-after-error + +fn main() { + enum Test { + Var1, + Var2(String), + Var3 { + abc: {}, //~ ERROR: expected type, found `{` + }, + } + + // recover... + let a = 1; + enum Test2 { + Fine, + } + + enum Test3 { + StillFine { + def: i32, + }, + } + + { + // fail again + enum Test4 { + Nope(i32 {}) //~ ERROR: found `{` + //~^ ERROR: found `{` + } + } + // still recover later + let bad_syntax = _; //~ ERROR: found `_` +} diff --git a/src/test/parse-fail/recover-struct.rs b/src/test/parse-fail/recover-struct.rs new file mode 100644 index 0000000000000..535dd529c0bb4 --- /dev/null +++ b/src/test/parse-fail/recover-struct.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only -Z continue-parse-after-error + +fn main() { + struct Test { + Very + Bad //~ ERROR found `Bad` + Stuff + } +} diff --git a/src/test/parse-fail/removed-syntax-with-2.rs b/src/test/parse-fail/removed-syntax-with-2.rs index c58f42abb58f0..c4354c0760442 100644 --- a/src/test/parse-fail/removed-syntax-with-2.rs +++ b/src/test/parse-fail/removed-syntax-with-2.rs @@ -18,5 +18,5 @@ fn removed_with() { let a = S { foo: (), bar: () }; let b = S { foo: (), with a }; - //~^ ERROR expected `:`, found `a` + //~^ ERROR expected one of `,` or `}`, found `a` } diff --git a/src/test/parse-fail/struct-field-numeric-shorthand.rs b/src/test/parse-fail/struct-field-numeric-shorthand.rs new file mode 100644 index 0000000000000..2a5c25d1868f5 --- /dev/null +++ b/src/test/parse-fail/struct-field-numeric-shorthand.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only + +#![feature(field_init_shorthand)] + +struct Rgb(u8, u8, u8); + +fn main() { + let _ = Rgb { 0, 1, 2 }; //~ ERROR expected identifier, found `0` +} diff --git a/src/test/pretty/block-disambig.rs b/src/test/pretty/block-disambig.rs index 5f88f9036780b..c645a66b70efc 100644 --- a/src/test/pretty/block-disambig.rs +++ b/src/test/pretty/block-disambig.rs @@ -61,9 +61,9 @@ fn test9() { } fn test10() -> isize { - let regs = vec!(0); + let regs = vec![0]; match true { true => { } _ => { } } regs[0] } -fn test11() -> Vec { if true { } vec!(1, 2) } +fn test11() -> Vec { if true { } vec![1, 2] } diff --git a/src/test/pretty/for-comment.rs b/src/test/pretty/for-comment.rs index af76c1d940664..32837fbcf87bc 100644 --- a/src/test/pretty/for-comment.rs +++ b/src/test/pretty/for-comment.rs @@ -17,6 +17,5 @@ fn f(v: &[isize]) -> isize { for e in v { n = *e; // This comment once triggered pretty printer bug } - n } diff --git a/src/test/pretty/issue-4264.pp b/src/test/pretty/issue-4264.pp index 6ce534d52b883..40ff4852e3856 100644 --- a/src/test/pretty/issue-4264.pp +++ b/src/test/pretty/issue-4264.pp @@ -38,7 +38,7 @@ - ((::std::fmt::format as + (($crate::fmt::format as fn(std::fmt::Arguments<'_>) -> std::string::String {std::fmt::format})(((::std::fmt::Arguments::new_v1 as fn(&[&str], &[std::fmt::ArgumentV1<'_>]) -> std::fmt::Arguments<'_> {std::fmt::Arguments<'_>::new_v1})(({ diff --git a/src/test/run-fail-fulldeps/qquote.rs b/src/test/run-fail-fulldeps/qquote.rs index 1458583ff5830..d2a16ac750704 100644 --- a/src/test/run-fail-fulldeps/qquote.rs +++ b/src/test/run-fail-fulldeps/qquote.rs @@ -27,7 +27,7 @@ fn main() { let ps = syntax::parse::ParseSess::new(); let mut resolver = syntax::ext::base::DummyResolver; let mut cx = syntax::ext::base::ExtCtxt::new( - &ps, vec![], + &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); cx.bt_push(syntax::codemap::ExpnInfo { diff --git a/src/test/run-fail/divide-by-zero.rs b/src/test/run-fail/divide-by-zero.rs index c9c4a88c9b53e..1f9e069526ce9 100644 --- a/src/test/run-fail/divide-by-zero.rs +++ b/src/test/run-fail/divide-by-zero.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:attempt to divide by zero fn main() { diff --git a/src/test/run-fail/glob-use-std.rs b/src/test/run-fail/glob-use-std.rs index 6712b3b065908..a7ba283b25af3 100644 --- a/src/test/run-fail/glob-use-std.rs +++ b/src/test/run-fail/glob-use-std.rs @@ -10,10 +10,6 @@ // Issue #7580 -// ignore-pretty -// -// Expanded pretty printing causes resolve conflicts. - // error-pattern:panic works use std::*; diff --git a/src/test/run-fail/mod-zero.rs b/src/test/run-fail/mod-zero.rs index d2b598a7933bc..641d39e2324b8 100644 --- a/src/test/run-fail/mod-zero.rs +++ b/src/test/run-fail/mod-zero.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:attempt to calculate the remainder with a divisor of zero fn main() { diff --git a/src/test/run-fail/overflowing-add.rs b/src/test/run-fail/overflowing-add.rs index acc7676db457e..250f0726dc9d6 100644 --- a/src/test/run-fail/overflowing-add.rs +++ b/src/test/run-fail/overflowing-add.rs @@ -8,12 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to add with overflow' // compile-flags: -C debug-assertions - fn main() { let _x = 200u8 + 200u8 + 200u8; } diff --git a/src/test/run-fail/overflowing-lsh-1.rs b/src/test/run-fail/overflowing-lsh-1.rs index 29ce3b0e6a16d..baa1e05d559b6 100644 --- a/src/test/run-fail/overflowing-lsh-1.rs +++ b/src/test/run-fail/overflowing-lsh-1.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to shift left with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-lsh-2.rs b/src/test/run-fail/overflowing-lsh-2.rs index 62fc9230f353d..3438ed2c77c18 100644 --- a/src/test/run-fail/overflowing-lsh-2.rs +++ b/src/test/run-fail/overflowing-lsh-2.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to shift left with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-lsh-3.rs b/src/test/run-fail/overflowing-lsh-3.rs index 1bc1703a89ce3..ef5c43db6e285 100644 --- a/src/test/run-fail/overflowing-lsh-3.rs +++ b/src/test/run-fail/overflowing-lsh-3.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to shift left with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-lsh-4.rs b/src/test/run-fail/overflowing-lsh-4.rs index 8de44f25e0489..226ece6020db4 100644 --- a/src/test/run-fail/overflowing-lsh-4.rs +++ b/src/test/run-fail/overflowing-lsh-4.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to shift left with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-mul.rs b/src/test/run-fail/overflowing-mul.rs index a09c0f06a5cc6..b47d0fc4136cc 100644 --- a/src/test/run-fail/overflowing-mul.rs +++ b/src/test/run-fail/overflowing-mul.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to multiply with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-neg.rs b/src/test/run-fail/overflowing-neg.rs index 96853fc565b71..836d7e37319e9 100644 --- a/src/test/run-fail/overflowing-neg.rs +++ b/src/test/run-fail/overflowing-neg.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to negate with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-pow.rs b/src/test/run-fail/overflowing-pow-signed.rs similarity index 100% rename from src/test/run-fail/overflowing-pow.rs rename to src/test/run-fail/overflowing-pow-signed.rs diff --git a/src/test/run-fail/overflowing-pow-unsigned.rs b/src/test/run-fail/overflowing-pow-unsigned.rs new file mode 100644 index 0000000000000..d3e7035279fbe --- /dev/null +++ b/src/test/run-fail/overflowing-pow-unsigned.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// error-pattern:thread 'main' panicked at 'attempt to multiply with overflow' +// compile-flags: -C debug-assertions + +fn main() { + let _x = 2u32.pow(1024); +} diff --git a/src/test/run-fail/overflowing-rsh-1.rs b/src/test/run-fail/overflowing-rsh-1.rs index ef4a503cfe425..8f198c887e491 100644 --- a/src/test/run-fail/overflowing-rsh-1.rs +++ b/src/test/run-fail/overflowing-rsh-1.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to shift right with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-rsh-2.rs b/src/test/run-fail/overflowing-rsh-2.rs index da072b5a9a5a9..e4f260b2bbbbb 100644 --- a/src/test/run-fail/overflowing-rsh-2.rs +++ b/src/test/run-fail/overflowing-rsh-2.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to shift right with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-rsh-3.rs b/src/test/run-fail/overflowing-rsh-3.rs index 0b7809402e6db..11aa98a0c3cbc 100644 --- a/src/test/run-fail/overflowing-rsh-3.rs +++ b/src/test/run-fail/overflowing-rsh-3.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to shift right with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-rsh-4.rs b/src/test/run-fail/overflowing-rsh-4.rs index 1e0cc18fbdcd6..742720e83c10c 100644 --- a/src/test/run-fail/overflowing-rsh-4.rs +++ b/src/test/run-fail/overflowing-rsh-4.rs @@ -8,8 +8,6 @@ // option. 
This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to shift right with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-rsh-5.rs b/src/test/run-fail/overflowing-rsh-5.rs index 690901ff0c25b..6106fdcb16a46 100644 --- a/src/test/run-fail/overflowing-rsh-5.rs +++ b/src/test/run-fail/overflowing-rsh-5.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to shift right with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-rsh-6.rs b/src/test/run-fail/overflowing-rsh-6.rs index 6a6ed4f11f20e..d419550fcc570 100644 --- a/src/test/run-fail/overflowing-rsh-6.rs +++ b/src/test/run-fail/overflowing-rsh-6.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to shift right with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/overflowing-sub.rs b/src/test/run-fail/overflowing-sub.rs index 083e8d24467fd..f94cb31b16884 100644 --- a/src/test/run-fail/overflowing-sub.rs +++ b/src/test/run-fail/overflowing-sub.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // error-pattern:thread 'main' panicked at 'attempt to subtract with overflow' // compile-flags: -C debug-assertions diff --git a/src/test/run-fail/panic-task-name-none.rs b/src/test/run-fail/panic-task-name-none.rs index ab50503830534..36e2a4b86aa39 100644 --- a/src/test/run-fail/panic-task-name-none.rs +++ b/src/test/run-fail/panic-task-name-none.rs @@ -9,6 +9,7 @@ // except according to those terms. // error-pattern:thread '' panicked at 'test' +// ignore-emscripten Needs threads use std::thread; diff --git a/src/test/run-fail/panic-task-name-owned.rs b/src/test/run-fail/panic-task-name-owned.rs index 2d2371f5ce77c..4da40c3158b84 100644 --- a/src/test/run-fail/panic-task-name-owned.rs +++ b/src/test/run-fail/panic-task-name-owned.rs @@ -9,6 +9,7 @@ // except according to those terms. // error-pattern:thread 'owned name' panicked at 'test' +// ignore-emscripten Needs threads. use std::thread::Builder; diff --git a/src/test/run-fail/run-unexported-tests.rs b/src/test/run-fail/run-unexported-tests.rs index 8158333ade818..3f75229948df0 100644 --- a/src/test/run-fail/run-unexported-tests.rs +++ b/src/test/run-fail/run-unexported-tests.rs @@ -11,7 +11,6 @@ // error-pattern:runned an unexported test // compile-flags:--test // check-stdout -// ignore-pretty: does not work well with `--test` mod m { pub fn exported() {} diff --git a/src/test/run-fail/task-spawn-barefn.rs b/src/test/run-fail/task-spawn-barefn.rs index ede055acd61ff..108430848b9b2 100644 --- a/src/test/run-fail/task-spawn-barefn.rs +++ b/src/test/run-fail/task-spawn-barefn.rs @@ -9,6 +9,7 @@ // except according to those terms. // error-pattern:Ensure that the child thread runs by panicking +// ignore-emscripten Needs threads. 
use std::thread; diff --git a/src/test/run-fail/test-panic.rs b/src/test/run-fail/test-panic.rs index fa360570253b0..bb6f4abe1fc96 100644 --- a/src/test/run-fail/test-panic.rs +++ b/src/test/run-fail/test-panic.rs @@ -11,7 +11,7 @@ // check-stdout // error-pattern:thread 'test_foo' panicked at // compile-flags: --test -// ignore-pretty: does not work well with `--test` +// ignore-emscripten #[test] fn test_foo() { diff --git a/src/test/run-fail/test-should-fail-bad-message.rs b/src/test/run-fail/test-should-fail-bad-message.rs index e18c5d9631a70..eac9813f180ae 100644 --- a/src/test/run-fail/test-should-fail-bad-message.rs +++ b/src/test/run-fail/test-should-fail-bad-message.rs @@ -11,7 +11,7 @@ // check-stdout // error-pattern:thread 'test_foo' panicked at // compile-flags: --test -// ignore-pretty: does not work well with `--test` +// ignore-emscripten #[test] #[should_panic(expected = "foobar")] diff --git a/src/test/run-fail/test-tasks-invalid-value.rs b/src/test/run-fail/test-tasks-invalid-value.rs index 94ed641c79c93..fcf3559e7da66 100644 --- a/src/test/run-fail/test-tasks-invalid-value.rs +++ b/src/test/run-fail/test-tasks-invalid-value.rs @@ -14,7 +14,7 @@ // error-pattern:should be a positive integer // compile-flags: --test // exec-env:RUST_TEST_THREADS=foo -// ignore-pretty: does not work well with `--test` +// ignore-emscripten #[test] fn do_nothing() {} diff --git a/src/test/run-make/dep-info-spaces/Makefile b/src/test/run-make/dep-info-spaces/Makefile index eda8cb700810d..82686ffdd9d93 100644 --- a/src/test/run-make/dep-info-spaces/Makefile +++ b/src/test/run-make/dep-info-spaces/Makefile @@ -5,9 +5,12 @@ ifneq ($(shell uname),FreeBSD) ifndef IS_WINDOWS all: - $(RUSTC) --emit link,dep-info --crate-type=lib lib.rs + cp lib.rs $(TMPDIR)/ + cp 'foo foo.rs' $(TMPDIR)/ + cp bar.rs $(TMPDIR)/ + $(RUSTC) --emit link,dep-info --crate-type=lib $(TMPDIR)/lib.rs sleep 1 - touch 'foo foo.rs' + touch $(TMPDIR)/'foo foo.rs' -rm -f $(TMPDIR)/done $(MAKE) -drf Makefile.foo rm $(TMPDIR)/done diff --git a/src/test/run-make/dep-info-spaces/Makefile.foo b/src/test/run-make/dep-info-spaces/Makefile.foo index 2f4cc486d8667..80a5d4333cdc2 100644 --- a/src/test/run-make/dep-info-spaces/Makefile.foo +++ b/src/test/run-make/dep-info-spaces/Makefile.foo @@ -1,7 +1,7 @@ -LIB := $(shell $(RUSTC) --print file-names --crate-type=lib lib.rs) +LIB := $(shell $(RUSTC) --print file-names --crate-type=lib $(TMPDIR)/lib.rs) $(TMPDIR)/$(LIB): - $(RUSTC) --emit link,dep-info --crate-type=lib lib.rs + $(RUSTC) --emit link,dep-info --crate-type=lib $(TMPDIR)/lib.rs touch $(TMPDIR)/done -include $(TMPDIR)/lib.d diff --git a/src/test/run-make/issue-19371/foo.rs b/src/test/run-make/issue-19371/foo.rs index 35043bdaddf21..ed127b017b6a9 100644 --- a/src/test/run-make/issue-19371/foo.rs +++ b/src/test/run-make/issue-19371/foo.rs @@ -67,12 +67,6 @@ fn compile(code: String, output: PathBuf, sysroot: PathBuf) { let (sess, cstore) = basic_sess(sysroot); let cfg = build_configuration(&sess, vec![]); let control = CompileController::basic(); - - compile_input(&sess, &cstore, - cfg, - &Input::Str { name: anon_src(), input: code }, - &None, - &Some(output), - None, - &control); + let input = Input::Str { name: anon_src(), input: code }; + compile_input(&sess, &cstore, &input, &None, &Some(output), None, &control); } diff --git a/src/test/run-make/link-arg/Makefile b/src/test/run-make/link-arg/Makefile new file mode 100644 index 0000000000000..0ee239af0fa6c --- /dev/null +++ b/src/test/run-make/link-arg/Makefile @@ -0,0 +1,5 @@ 
+-include ../tools.mk +RUSTC_FLAGS = -C link-arg="-lfoo" -C link-arg="-lbar" -Z print-link-args + +all: + $(RUSTC) $(RUSTC_FLAGS) empty.rs | grep lfoo | grep lbar diff --git a/src/test/run-make/link-arg/empty.rs b/src/test/run-make/link-arg/empty.rs new file mode 100644 index 0000000000000..2b76fb24e5f1e --- /dev/null +++ b/src/test/run-make/link-arg/empty.rs @@ -0,0 +1,11 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { } diff --git a/src/test/run-make/llvm-phase/test.rs b/src/test/run-make/llvm-phase/test.rs index 19e410fef5388..ca58e007852bd 100644 --- a/src/test/run-make/llvm-phase/test.rs +++ b/src/test/run-make/llvm-phase/test.rs @@ -22,6 +22,7 @@ use rustc_driver::driver::CompileController; use rustc_trans::ModuleSource; use rustc::session::Session; use syntax::codemap::FileLoader; +use std::env; use std::io; use std::path::{PathBuf, Path}; @@ -75,12 +76,14 @@ fn main() { path.pop(); path.pop(); - let args: Vec = + let mut args: Vec = format!("_ _ --sysroot {} --crate-type dylib", path.to_str().unwrap()) .split(' ').map(|s| s.to_string()).collect(); + args.push("--out-dir".to_string()); + args.push(env::var("TMPDIR").unwrap()); - let (result, _) = rustc_driver::run_compiler_with_file_loader( - &args, &mut JitCalls, box JitLoader); + let (result, _) = rustc_driver::run_compiler( + &args, &mut JitCalls, Some(box JitLoader), None); if let Err(n) = result { panic!("Error {}", n); } diff --git a/src/test/run-make/rustc-macro-dep-files/Makefile b/src/test/run-make/rustc-macro-dep-files/Makefile new file mode 100644 index 0000000000000..e3a6776c8080b --- /dev/null +++ b/src/test/run-make/rustc-macro-dep-files/Makefile @@ -0,0 +1,6 @@ +-include ../tools.mk + +all: + $(RUSTC) foo.rs + $(RUSTC) bar.rs --emit dep-info + grep "proc-macro source" $(TMPDIR)/bar.d && exit 1 || exit 0 diff --git a/src/test/run-make/rustc-macro-dep-files/bar.rs b/src/test/run-make/rustc-macro-dep-files/bar.rs new file mode 100644 index 0000000000000..a2db98049d287 --- /dev/null +++ b/src/test/run-make/rustc-macro-dep-files/bar.rs @@ -0,0 +1,21 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(proc_macro)] + +#[macro_use] +extern crate foo; + +#[derive(A)] +struct A; + +fn main() { + let _b = B; +} diff --git a/src/test/run-make/rustc-macro-dep-files/foo.rs b/src/test/run-make/rustc-macro-dep-files/foo.rs new file mode 100644 index 0000000000000..bd9e9158c5052 --- /dev/null +++ b/src/test/run-make/rustc-macro-dep-files/foo.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![crate_type = "proc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] + +extern crate proc_macro; + +use proc_macro::TokenStream; + +#[proc_macro_derive(A)] +pub fn derive(input: TokenStream) -> TokenStream { + let input = input.to_string(); + assert!(input.contains("struct A;")); + "struct B;".parse().unwrap() +} diff --git a/src/test/run-make/sepcomp-inlining/Makefile b/src/test/run-make/sepcomp-inlining/Makefile index bc299de0c2d3f..ef43b0d97e417 100644 --- a/src/test/run-make/sepcomp-inlining/Makefile +++ b/src/test/run-make/sepcomp-inlining/Makefile @@ -1,13 +1,14 @@ -include ../tools.mk -# Test that #[inline(always)] functions still get inlined across compilation -# unit boundaries. Compilation should produce three IR files, with each one -# containing a definition of the inlined function. Also, the non-#[inline] -# function should be defined in only one compilation unit. +# Test that #[inline] functions still get inlined across compilation unit +# boundaries. Compilation should produce three IR files, but only the two +# compilation units that have a usage of the #[inline] function should +# contain a definition. Also, the non-#[inline] function should be defined +# in only one compilation unit. all: $(RUSTC) foo.rs --emit=llvm-ir -C codegen-units=3 - [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ i32\ .*inlined)" -eq "1" ] - [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ available_externally\ i32\ .*inlined)" -eq "2" ] + [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ i32\ .*inlined)" -eq "0" ] + [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ internal\ i32\ .*inlined)" -eq "2" ] [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c define\ i32\ .*normal)" -eq "1" ] [ "$$(cat "$(TMPDIR)"/foo.?.ll | grep -c declare\ i32\ .*normal)" -eq "2" ] diff --git a/src/test/run-make/stable-symbol-names/Makefile b/src/test/run-make/stable-symbol-names/Makefile index da96d1b1dcf58..c1b14440faab6 100644 --- a/src/test/run-make/stable-symbol-names/Makefile +++ b/src/test/run-make/stable-symbol-names/Makefile @@ -1,12 +1,16 @@ -include ../tools.mk -# This test case makes sure that monomorphizations of the same function with the -# same set of generic arguments will have the same symbol names when -# instantiated in different crates. +# The following command will: +# 1. dump the symbols of a library using `nm` +# 2. extract only those lines that we are interested in via `grep` +# 3. from those lines, extract just the symbol name via `sed` +# (symbol names always start with "_ZN" and end with "E") +# 4. sort those symbol names for deterministic comparison +# 5. 
write the result into a file dump-symbols = nm "$(TMPDIR)/lib$(1).rlib" \ - | grep "some_test_function" \ - | sed "s/^[0-9a-f]\{8,16\}/00000000/" \ + | grep -E "some_test_function|Bar|bar" \ + | sed "s/.*\(_ZN.*E\).*/\1/" \ | sort \ > "$(TMPDIR)/$(1).nm" diff --git a/src/test/run-make/stable-symbol-names/stable-symbol-names1.rs b/src/test/run-make/stable-symbol-names/stable-symbol-names1.rs index 7b2cd85711301..5c73ff0fab6d0 100644 --- a/src/test/run-make/stable-symbol-names/stable-symbol-names1.rs +++ b/src/test/run-make/stable-symbol-names/stable-symbol-names1.rs @@ -10,6 +10,20 @@ #![crate_type="rlib"] +pub trait Foo { + fn foo(); +} + +pub struct Bar; + +impl Foo for Bar { + fn foo() {} +} + +pub fn bar() { + Bar::foo::(); +} + pub fn some_test_function(t: T) -> T { t } diff --git a/src/test/run-make/stable-symbol-names/stable-symbol-names2.rs b/src/test/run-make/stable-symbol-names/stable-symbol-names2.rs index ff027d6b46ce5..097dce876affc 100644 --- a/src/test/run-make/stable-symbol-names/stable-symbol-names2.rs +++ b/src/test/run-make/stable-symbol-names/stable-symbol-names2.rs @@ -18,3 +18,9 @@ pub fn user() { let x = 2u64; stable_symbol_names1::some_test_function(&x); } + +pub fn trait_impl_test_function() { + use stable_symbol_names1::*; + Bar::foo::(); + bar(); +} diff --git a/src/test/run-make/symbols-are-reasonable/Makefile b/src/test/run-make/symbols-are-reasonable/Makefile index c668ffc5832b6..1c117cf0c9e62 100644 --- a/src/test/run-make/symbols-are-reasonable/Makefile +++ b/src/test/run-make/symbols-are-reasonable/Makefile @@ -1,7 +1,7 @@ -include ../tools.mk # check that the compile generated symbols for strings, binaries, -# vtables, etc. have semisane names (e.g. `str1234`); it's relatively +# vtables, etc. have semisane names (e.g. `str.1234`); it's relatively # easy to accidentally modify the compiler internals to make them # become things like `str"str"(1234)`. @@ -10,6 +10,6 @@ OUT=$(TMPDIR)/lib.s all: $(RUSTC) lib.rs --emit=asm --crate-type=staticlib # just check for symbol declarations with the names we're expecting. - grep 'str[0-9][0-9]*:' $(OUT) - grep 'byte_str[0-9][0-9]*:' $(OUT) - grep 'vtable[0-9][0-9]*' $(OUT) + grep 'str.[0-9][0-9]*:' $(OUT) + grep 'byte_str.[0-9][0-9]*:' $(OUT) + grep 'vtable.[0-9][0-9]*' $(OUT) diff --git a/src/test/run-make/target-without-atomics/Makefile b/src/test/run-make/target-without-atomics/Makefile new file mode 100644 index 0000000000000..48c53a5651114 --- /dev/null +++ b/src/test/run-make/target-without-atomics/Makefile @@ -0,0 +1,5 @@ +-include ../tools.mk + +# The target used below doesn't support atomic operations. 
Verify that's the case +all: + $(RUSTC) --print cfg --target thumbv6m-none-eabi | grep -qv target_has_atomic diff --git a/src/test/run-make/tools.mk b/src/test/run-make/tools.mk index 38afa42a2935a..1db87d474bd72 100644 --- a/src/test/run-make/tools.mk +++ b/src/test/run-make/tools.mk @@ -22,9 +22,6 @@ RLIB_GLOB = lib$(1)*.rlib BIN = $(1) UNAME = $(shell uname) -ifneq (,$(findstring MINGW,$(UNAME))) -IS_WINDOWS=1 -endif ifeq ($(UNAME),Darwin) RUN = $(TARGET_RPATH_ENV) $(RUN_BINFILE) diff --git a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs index 64747002a65b0..a41b34f6a53d0 100644 --- a/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs +++ b/src/test/run-pass-fulldeps/ast_stmt_expr_attr.rs @@ -31,10 +31,7 @@ use std::fmt; // Copied out of syntax::util::parser_testing pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a> { - new_parser_from_source_str(ps, - Vec::new(), - "bogofile".to_string(), - source_str) + new_parser_from_source_str(ps, "bogofile".to_string(), source_str) } fn with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F) -> PResult<'a, T> where diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs index 6aee63e2858e2..48919fe876a22 100644 --- a/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs @@ -12,13 +12,13 @@ #![feature(plugin)] #![feature(plugin_registrar)] #![feature(rustc_private)] -#![plugin(proc_macro)] +#![plugin(proc_macro_plugin)] extern crate rustc_plugin; -extern crate proc_macro; +extern crate proc_macro_tokens; extern crate syntax; -use proc_macro::build::ident_eq; +use proc_macro_tokens::build::ident_eq; use syntax::ext::base::{ExtCtxt, MacResult}; use syntax::ext::proc_macro_shim::build_block_emitter; diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs index 8291c8a1e41c6..0ea4cec75cdda 100644 --- a/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs @@ -12,13 +12,13 @@ #![feature(plugin)] #![feature(plugin_registrar)] #![feature(rustc_private)] -#![plugin(proc_macro)] +#![plugin(proc_macro_plugin)] extern crate rustc_plugin; -extern crate proc_macro; +extern crate proc_macro_tokens; extern crate syntax; -use proc_macro::prelude::*; +use proc_macro_tokens::prelude::*; use rustc_plugin::Registry; diff --git a/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs index 2d92a0ef18199..169c96b438529 100644 --- a/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs @@ -12,14 +12,14 @@ #![feature(plugin)] #![feature(plugin_registrar)] #![feature(rustc_private)] -#![plugin(proc_macro)] +#![plugin(proc_macro_plugin)] extern crate rustc_plugin; -extern crate proc_macro; +extern crate proc_macro_tokens; extern crate syntax; use syntax::ext::proc_macro_shim::prelude::*; -use proc_macro::prelude::*; +use proc_macro_tokens::prelude::*; use rustc_plugin::Registry; diff --git a/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs b/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs index 956f789dab839..e750d1fb1e3e6 100644 --- a/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs +++ 
b/src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs @@ -53,12 +53,12 @@ fn expand_deriving_partial_eq(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, it } let inline = cx.meta_word(span, InternedString::new("inline")); - let attrs = vec!(cx.attribute(span, inline)); + let attrs = vec![cx.attribute(span, inline)]; let methods = vec![MethodDef { name: "eq", generics: LifetimeBounds::empty(), explicit_self: borrowed_explicit_self(), - args: vec!(borrowed_self()), + args: vec![borrowed_self()], ret_ty: Literal(deriving::generic::ty::Path::new_local("bool")), attributes: attrs, is_unsafe: false, diff --git a/src/test/run-pass-fulldeps/auxiliary/dummy_mir_pass.rs b/src/test/run-pass-fulldeps/auxiliary/dummy_mir_pass.rs index f7b046b30cad7..3bc4a40a39c99 100644 --- a/src/test/run-pass-fulldeps/auxiliary/dummy_mir_pass.rs +++ b/src/test/run-pass-fulldeps/auxiliary/dummy_mir_pass.rs @@ -19,7 +19,7 @@ extern crate rustc_const_math; extern crate syntax; use rustc::mir::transform::{self, MirPass, MirSource}; -use rustc::mir::repr::{Mir, Literal, Location}; +use rustc::mir::{Mir, Literal, Location}; use rustc::mir::visit::MutVisitor; use rustc::ty::TyCtxt; use rustc::middle::const_val::ConstVal; diff --git a/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs b/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs index 4885863122c3b..15ec0ccae8fdd 100644 --- a/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs +++ b/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs @@ -60,7 +60,7 @@ fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box Box { // Parse an expression and emit it unchanged. - let mut parser = parse::new_parser_from_tts(cx.parse_sess(), cx.cfg(), tts.to_vec()); + let mut parser = parse::new_parser_from_tts(cx.parse_sess(), tts.to_vec()); let expr = parser.parse_expr().unwrap(); MacEager::expr(quote_expr!(&mut *cx, $expr)) } diff --git a/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs new file mode 100644 index 0000000000000..9fce19f46f65c --- /dev/null +++ b/src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs @@ -0,0 +1,56 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(plugin, plugin_registrar, rustc_private)] + +extern crate proc_macro_tokens; +extern crate rustc_plugin; +extern crate syntax; + +use proc_macro_tokens::prelude::*; +use rustc_plugin::Registry; +use syntax::ext::base::SyntaxExtension; +use syntax::ext::proc_macro_shim::prelude::*; + +#[plugin_registrar] +pub fn plugin_registrar(reg: &mut Registry) { + reg.register_syntax_extension(token::intern("attr_tru"), + SyntaxExtension::AttrProcMacro(Box::new(attr_tru))); + reg.register_syntax_extension(token::intern("attr_identity"), + SyntaxExtension::AttrProcMacro(Box::new(attr_identity))); + reg.register_syntax_extension(token::intern("tru"), + SyntaxExtension::ProcMacro(Box::new(tru))); + reg.register_syntax_extension(token::intern("ret_tru"), + SyntaxExtension::ProcMacro(Box::new(ret_tru))); + reg.register_syntax_extension(token::intern("identity"), + SyntaxExtension::ProcMacro(Box::new(identity))); +} + +fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream { + lex("fn f1() -> bool { true }") +} + +fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream { + let source = item.to_string(); + lex(&source) +} + +fn tru(_ts: TokenStream) -> TokenStream { + lex("true") +} + +fn ret_tru(_ts: TokenStream) -> TokenStream { + lex("return true;") +} + +fn identity(ts: TokenStream) -> TokenStream { + let source = ts.to_string(); + lex(&source) +} diff --git a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs index 2b50c4fe11e93..5229d42f1fdd4 100644 --- a/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs +++ b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs @@ -25,6 +25,7 @@ use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager}; use syntax::ext::build::AstBuilder; use syntax::ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use syntax::ext::tt::macro_parser::{Success, Failure, Error}; +use syntax::ext::tt::macro_parser::parse_failure_msg; use syntax::ptr::P; use syntax_pos::Span; use rustc_plugin::Registry; @@ -58,8 +59,11 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) _ => unreachable!() } } - Failure(_, s) | Error(_, s) => { - panic!("expected Success, but got Error/Failure: {}", s); + Failure(_, tok) => { + panic!("expected Success, but got Failure: {}", parse_failure_msg(tok)); + } + Error(_, s) => { + panic!("expected Success, but got Error: {}", s); } }; diff --git a/src/test/run-pass-fulldeps/compiler-calls.rs b/src/test/run-pass-fulldeps/compiler-calls.rs index 775ba38004e3a..4a397621ceb53 100644 --- a/src/test/run-pass-fulldeps/compiler-calls.rs +++ b/src/test/run-pass-fulldeps/compiler-calls.rs @@ -47,7 +47,6 @@ impl<'a> CompilerCalls<'a> for TestCalls { fn late_callback(&mut self, _: &getopts::Matches, _: &Session, - _: &ast::CrateConfig, _: &Input, _: &Option, _: &Option) @@ -86,6 +85,6 @@ fn main() { let mut tc = TestCalls { count: 1 }; // we should never get use this filename, but lets make sure they are valid args. 
let args = vec!["compiler-calls".to_string(), "foo.rs".to_string()]; - rustc_driver::run_compiler(&args, &mut tc); + rustc_driver::run_compiler(&args, &mut tc, None, None); assert_eq!(tc.count, 30); } diff --git a/src/test/run-pass-fulldeps/custom-derive-partial-eq.rs b/src/test/run-pass-fulldeps/custom-derive-partial-eq.rs index 8cc7ab4219dc5..47f5f8397d1c5 100644 --- a/src/test/run-pass-fulldeps/custom-derive-partial-eq.rs +++ b/src/test/run-pass-fulldeps/custom-derive-partial-eq.rs @@ -10,8 +10,6 @@ // aux-build:custom_derive_partial_eq.rs // ignore-stage1 -// ignore-pretty : (#23623) problems when ending with // comments - #![feature(plugin, custom_derive)] #![plugin(custom_derive_partial_eq)] #![allow(unused)] diff --git a/src/test/run-pass-fulldeps/issue-11881.rs b/src/test/run-pass-fulldeps/issue-11881.rs index 9da04f7235531..914e3dd493248 100644 --- a/src/test/run-pass-fulldeps/issue-11881.rs +++ b/src/test/run-pass-fulldeps/issue-11881.rs @@ -11,7 +11,6 @@ #![feature(rustc_private)] -extern crate rbml; extern crate serialize; use std::io::Cursor; @@ -21,8 +20,7 @@ use std::slice; use serialize::{Encodable, Encoder}; use serialize::json; - -use rbml::writer; +use serialize::opaque; #[derive(Encodable)] struct Foo { @@ -36,15 +34,15 @@ struct Bar { enum WireProtocol { JSON, - RBML, + Opaque, // ... } fn encode_json(val: &T, wr: &mut Cursor>) { write!(wr, "{}", json::as_json(val)); } -fn encode_rbml(val: &T, wr: &mut Cursor>) { - let mut encoder = writer::Encoder::new(wr); +fn encode_opaque(val: &T, wr: &mut Cursor>) { + let mut encoder = opaque::Encoder::new(wr); val.encode(&mut encoder); } @@ -54,6 +52,6 @@ pub fn main() { let proto = WireProtocol::JSON; match proto { WireProtocol::JSON => encode_json(&target, &mut wr), - WireProtocol::RBML => encode_rbml(&target, &mut wr) + WireProtocol::Opaque => encode_opaque(&target, &mut wr) } } diff --git a/src/test/run-pass-fulldeps/issue-16992.rs b/src/test/run-pass-fulldeps/issue-16992.rs index a439e2bb25b8c..3ab7f8429e601 100644 --- a/src/test/run-pass-fulldeps/issue-16992.rs +++ b/src/test/run-pass-fulldeps/issue-16992.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty // ignore-cross-compile #![feature(quote, rustc_private)] diff --git a/src/test/run-pass-fulldeps/issue-18763-quote-token-tree.rs b/src/test/run-pass-fulldeps/issue-18763-quote-token-tree.rs index 829fdb176bd31..03311d76e46d4 100644 --- a/src/test/run-pass-fulldeps/issue-18763-quote-token-tree.rs +++ b/src/test/run-pass-fulldeps/issue-18763-quote-token-tree.rs @@ -9,8 +9,6 @@ // except according to those terms. 
// ignore-cross-compile -// ignore-pretty: does not work well with `--test` - #![feature(quote, rustc_private)] extern crate syntax; diff --git a/src/test/run-pass-fulldeps/lint-group-plugin.rs b/src/test/run-pass-fulldeps/lint-group-plugin.rs index 21942b84bff9c..978a78cee4f1e 100644 --- a/src/test/run-pass-fulldeps/lint-group-plugin.rs +++ b/src/test/run-pass-fulldeps/lint-group-plugin.rs @@ -10,8 +10,6 @@ // aux-build:lint_group_plugin_test.rs // ignore-stage1 -// ignore-pretty - #![feature(plugin)] #![plugin(lint_group_plugin_test)] #![allow(dead_code)] diff --git a/src/test/run-pass-fulldeps/lint-plugin-cmdline-load.rs b/src/test/run-pass-fulldeps/lint-plugin-cmdline-load.rs index 2a6daa5040b71..2e86e11bd6a5f 100644 --- a/src/test/run-pass-fulldeps/lint-plugin-cmdline-load.rs +++ b/src/test/run-pass-fulldeps/lint-plugin-cmdline-load.rs @@ -10,7 +10,6 @@ // aux-build:lint_plugin_test.rs // ignore-stage1 -// ignore-pretty: Random space appears with the pretty test // compile-flags: -Z extra-plugins=lint_plugin_test #![allow(dead_code)] diff --git a/src/test/run-pass-fulldeps/lint-plugin.rs b/src/test/run-pass-fulldeps/lint-plugin.rs index b694a1c332079..753ad33bd01e9 100644 --- a/src/test/run-pass-fulldeps/lint-plugin.rs +++ b/src/test/run-pass-fulldeps/lint-plugin.rs @@ -10,8 +10,6 @@ // aux-build:lint_plugin_test.rs // ignore-stage1 -// ignore-pretty - #![feature(plugin)] #![plugin(lint_plugin_test)] #![allow(dead_code)] diff --git a/src/test/run-pass-fulldeps/macro-crate.rs b/src/test/run-pass-fulldeps/macro-crate.rs index fe2317aabea68..9b2e36c8cea15 100644 --- a/src/test/run-pass-fulldeps/macro-crate.rs +++ b/src/test/run-pass-fulldeps/macro-crate.rs @@ -17,8 +17,8 @@ #[macro_use] #[no_link] extern crate macro_crate_test; -#[into_multi_foo] #[derive(PartialEq, Clone, Debug)] +#[into_multi_foo] fn foo() -> AnotherFakeTypeThatHadBetterGoAway {} // Check that the `#[into_multi_foo]`-generated `foo2` is configured away diff --git a/src/test/run-pass-fulldeps/macro-quote-1.rs b/src/test/run-pass-fulldeps/macro-quote-1.rs index 4ee775dec0cef..914da3f746773 100644 --- a/src/test/run-pass-fulldeps/macro-quote-1.rs +++ b/src/test/run-pass-fulldeps/macro-quote-1.rs @@ -12,10 +12,10 @@ #![feature(plugin)] #![feature(rustc_private)] -#![plugin(proc_macro)] +#![plugin(proc_macro_plugin)] -extern crate proc_macro; -use proc_macro::prelude::*; +extern crate proc_macro_tokens; +use proc_macro_tokens::prelude::*; extern crate syntax; use syntax::ast::Ident; diff --git a/src/test/run-pass-fulldeps/rustc-macro/add-impl.rs b/src/test/run-pass-fulldeps/proc-macro/add-impl.rs similarity index 96% rename from src/test/run-pass-fulldeps/rustc-macro/add-impl.rs rename to src/test/run-pass-fulldeps/proc-macro/add-impl.rs index 226c082564ae4..e82282f3d8c58 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/add-impl.rs +++ b/src/test/run-pass-fulldeps/proc-macro/add-impl.rs @@ -10,7 +10,7 @@ // aux-build:add-impl.rs -#![feature(rustc_macro)] +#![feature(proc_macro)] #[macro_use] extern crate add_impl; diff --git a/src/test/compile-fail-fulldeps/rustc-macro/append-impl.rs b/src/test/run-pass-fulldeps/proc-macro/append-impl.rs similarity index 88% rename from src/test/compile-fail-fulldeps/rustc-macro/append-impl.rs rename to src/test/run-pass-fulldeps/proc-macro/append-impl.rs index 1300fe66585c9..f062111df9daf 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/append-impl.rs +++ b/src/test/run-pass-fulldeps/proc-macro/append-impl.rs @@ -10,7 +10,7 @@ // aux-build:append-impl.rs -#![feature(rustc_macro)] 
+#![feature(proc_macro)] #![allow(warnings)] #[macro_use] @@ -24,7 +24,6 @@ trait Append { Append, Eq)] struct A { -//~^ ERROR: the semantics of constant patterns is not yet settled inner: u32, } diff --git a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/add-impl.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/add-impl.rs similarity index 78% rename from src/test/run-pass-fulldeps/rustc-macro/auxiliary/add-impl.rs rename to src/test/run-pass-fulldeps/proc-macro/auxiliary/add-impl.rs index 8aab423af0a3b..99586b0bb493a 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/add-impl.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/add-impl.rs @@ -10,16 +10,16 @@ // no-prefer-dynamic -#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] +#![crate_type = "proc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(AddImpl)] -// #[cfg(rustc_macro)] +#[proc_macro_derive(AddImpl)] +// #[cfg(proc_macro)] pub fn derive(input: TokenStream) -> TokenStream { (input.to_string() + " impl B { diff --git a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/append-impl.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/append-impl.rs similarity index 80% rename from src/test/compile-fail-fulldeps/rustc-macro/auxiliary/append-impl.rs rename to src/test/run-pass-fulldeps/proc-macro/auxiliary/append-impl.rs index c3d295e02c163..27c3d643ca4ff 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/append-impl.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/append-impl.rs @@ -11,15 +11,15 @@ // force-host // no-prefer-dynamic -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] -#![crate_type = "rustc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] +#![crate_type = "proc-macro"] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(Append)] +#[proc_macro_derive(Append)] pub fn derive_a(input: TokenStream) -> TokenStream { let mut input = input.to_string(); input.push_str(" diff --git a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-a.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-a.rs similarity index 66% rename from src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-a.rs rename to src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-a.rs index 4dd6ad88b757c..c2de173568b48 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-a.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-a.rs @@ -10,18 +10,18 @@ // no-prefer-dynamic -#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] +#![crate_type = "proc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(A)] +#[proc_macro_derive(A)] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); assert!(input.contains("struct A;")); - assert!(input.contains("#[derive(Eq, Copy, Clone)]")); - "#[derive(Eq, Copy, Clone)] struct A;".parse().unwrap() + assert!(input.contains("#[derive(Debug, PartialEq, Eq, Copy, Clone)]")); + "#[derive(Debug, PartialEq, Eq, Copy, Clone)] struct A;".parse().unwrap() } diff --git a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-atob.rs 
b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-atob.rs similarity index 73% rename from src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-atob.rs rename to src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-atob.rs index 5b85e2b2a7c4b..a942adc4c8020 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-atob.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-atob.rs @@ -10,17 +10,17 @@ // no-prefer-dynamic -#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] +#![crate_type = "proc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(AToB)] +#[proc_macro_derive(AToB)] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); - assert_eq!(input, "struct A;\n"); + assert_eq!(input, "#[derive(Copy, Clone)]\nstruct A;\n"); "struct B;".parse().unwrap() } diff --git a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-ctod.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-ctod.rs similarity index 79% rename from src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-ctod.rs rename to src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-ctod.rs index 54f8dff509ab0..50f1a390b2939 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-ctod.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-ctod.rs @@ -10,15 +10,15 @@ // no-prefer-dynamic -#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] +#![crate_type = "proc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(CToD)] +#[proc_macro_derive(CToD)] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); assert_eq!(input, "struct C;\n"); diff --git a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-same-struct.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-same-struct.rs similarity index 80% rename from src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-same-struct.rs rename to src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-same-struct.rs index d83e352e3b175..bd283ca57ebd9 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/derive-same-struct.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/derive-same-struct.rs @@ -9,23 +9,23 @@ // except according to those terms. 
// no-prefer-dynamic -// compile-flags:--crate-type rustc-macro +// compile-flags:--crate-type proc-macro -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(AToB)] +#[proc_macro_derive(AToB)] pub fn derive1(input: TokenStream) -> TokenStream { println!("input1: {:?}", input.to_string()); assert_eq!(input.to_string(), "#[derive(BToC)]\nstruct A;\n"); "#[derive(BToC)] struct B;".parse().unwrap() } -#[rustc_macro_derive(BToC)] +#[proc_macro_derive(BToC)] pub fn derive2(input: TokenStream) -> TokenStream { assert_eq!(input.to_string(), "struct B;\n"); "struct C;".parse().unwrap() diff --git a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/expand-with-a-macro.rs b/src/test/run-pass-fulldeps/proc-macro/auxiliary/expand-with-a-macro.rs similarity index 82% rename from src/test/run-pass-fulldeps/rustc-macro/auxiliary/expand-with-a-macro.rs rename to src/test/run-pass-fulldeps/proc-macro/auxiliary/expand-with-a-macro.rs index 96aea407e6e74..155b125690cc1 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/auxiliary/expand-with-a-macro.rs +++ b/src/test/run-pass-fulldeps/proc-macro/auxiliary/expand-with-a-macro.rs @@ -10,16 +10,16 @@ // no-prefer-dynamic -#![crate_type = "rustc-macro"] -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] +#![crate_type = "proc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] #![deny(warnings)] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(A)] +#[proc_macro_derive(A)] pub fn derive(input: TokenStream) -> TokenStream { let input = input.to_string(); assert!(input.contains("struct A;")); diff --git a/src/test/run-pass-fulldeps/rustc-macro/derive-same-struct.rs b/src/test/run-pass-fulldeps/proc-macro/derive-same-struct.rs similarity index 96% rename from src/test/run-pass-fulldeps/rustc-macro/derive-same-struct.rs rename to src/test/run-pass-fulldeps/proc-macro/derive-same-struct.rs index ee0d594564883..b3edc8f1c310d 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/derive-same-struct.rs +++ b/src/test/run-pass-fulldeps/proc-macro/derive-same-struct.rs @@ -10,7 +10,7 @@ // aux-build:derive-same-struct.rs -#![feature(rustc_macro)] +#![feature(proc_macro)] #[macro_use] extern crate derive_same_struct; diff --git a/src/test/run-pass-fulldeps/rustc-macro/expand-with-a-macro.rs b/src/test/run-pass-fulldeps/proc-macro/expand-with-a-macro.rs similarity index 96% rename from src/test/run-pass-fulldeps/rustc-macro/expand-with-a-macro.rs rename to src/test/run-pass-fulldeps/proc-macro/expand-with-a-macro.rs index cc59be2d75df3..16f3535adae3b 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/expand-with-a-macro.rs +++ b/src/test/run-pass-fulldeps/proc-macro/expand-with-a-macro.rs @@ -11,7 +11,7 @@ // aux-build:expand-with-a-macro.rs // ignore-stage1 -#![feature(rustc_macro)] +#![feature(proc_macro)] #![deny(warnings)] #[macro_use] diff --git a/src/test/run-pass-fulldeps/rustc-macro/load-two.rs b/src/test/run-pass-fulldeps/proc-macro/load-two.rs similarity index 93% rename from src/test/run-pass-fulldeps/rustc-macro/load-two.rs rename to src/test/run-pass-fulldeps/proc-macro/load-two.rs index 1500970f02dad..431c8c5902749 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/load-two.rs +++ b/src/test/run-pass-fulldeps/proc-macro/load-two.rs @@ -11,13 +11,14 @@ // 
aux-build:derive-atob.rs // aux-build:derive-ctod.rs -#![feature(rustc_macro)] +#![feature(proc_macro)] #[macro_use] extern crate derive_atob; #[macro_use] extern crate derive_ctod; +#[derive(Copy, Clone)] #[derive(AToB)] struct A; diff --git a/src/test/run-pass-fulldeps/rustc-macro/smoke.rs b/src/test/run-pass-fulldeps/proc-macro/smoke.rs similarity index 96% rename from src/test/run-pass-fulldeps/rustc-macro/smoke.rs rename to src/test/run-pass-fulldeps/proc-macro/smoke.rs index 588380f1140c9..cd7edb726447b 100644 --- a/src/test/run-pass-fulldeps/rustc-macro/smoke.rs +++ b/src/test/run-pass-fulldeps/proc-macro/smoke.rs @@ -11,7 +11,7 @@ // aux-build:derive-a.rs // ignore-stage1 -#![feature(rustc_macro)] +#![feature(proc_macro)] #[macro_use] extern crate derive_a; diff --git a/src/test/run-pass-fulldeps/proc_macro.rs b/src/test/run-pass-fulldeps/proc_macro.rs new file mode 100644 index 0000000000000..22cc9f0f8d40e --- /dev/null +++ b/src/test/run-pass-fulldeps/proc_macro.rs @@ -0,0 +1,48 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:proc_macro_def.rs +// ignore-stage1 +// ignore-cross-compile + +#![feature(plugin, custom_attribute)] +#![feature(type_macros)] + +#![plugin(proc_macro_def)] + +#[attr_tru] +fn f1() -> bool { + return false; +} + +#[attr_identity] +fn f2() -> bool { + return identity!(true); +} + +fn f3() -> identity!(bool) { + ret_tru!(); +} + +fn f4(x: bool) -> bool { + match x { + identity!(true) => false, + identity!(false) => true, + } +} + +fn main() { + assert!(f1()); + assert!(f2()); + assert!(tru!()); + assert!(f3()); + assert!(identity!(5 == 5)); + assert!(f4(false)); +} diff --git a/src/test/run-pass-fulldeps/qquote.rs b/src/test/run-pass-fulldeps/qquote.rs index 2a53a62a5ab60..7c0c24163fe61 100644 --- a/src/test/run-pass-fulldeps/qquote.rs +++ b/src/test/run-pass-fulldeps/qquote.rs @@ -23,7 +23,7 @@ fn main() { let ps = syntax::parse::ParseSess::new(); let mut resolver = syntax::ext::base::DummyResolver; let mut cx = syntax::ext::base::ExtCtxt::new( - &ps, vec![], + &ps, syntax::ext::expand::ExpansionConfig::default("qquote".to_string()), &mut resolver); cx.bt_push(syntax::codemap::ExpnInfo { diff --git a/src/test/run-pass-fulldeps/quote-tokens.rs b/src/test/run-pass-fulldeps/quote-tokens.rs index 710e2fd1d07a3..9e9b7ce5bf29d 100644 --- a/src/test/run-pass-fulldeps/quote-tokens.rs +++ b/src/test/run-pass-fulldeps/quote-tokens.rs @@ -9,8 +9,6 @@ // except according to those terms. // ignore-cross-compile -// ignore-pretty: does not work well with `--test` - #![feature(quote, rustc_private)] extern crate syntax; diff --git a/src/test/run-pass-fulldeps/quote-unused-sp-no-warning.rs b/src/test/run-pass-fulldeps/quote-unused-sp-no-warning.rs index 460eab998c6fb..d3be1ddcb8c32 100644 --- a/src/test/run-pass-fulldeps/quote-unused-sp-no-warning.rs +++ b/src/test/run-pass-fulldeps/quote-unused-sp-no-warning.rs @@ -9,8 +9,6 @@ // except according to those terms. 
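
The renames above track the switch from `rustc_macro` to `proc_macro` for "Macros 1.1" custom derives. For reference, a minimal sketch of the pattern these tests exercise follows; the crate and type names are illustrative and not taken from the diff, and, as the add-impl and derive-atob tests show, at this stage the tokens returned by the derive function replace the annotated item, which is why the input is echoed back ahead of the generated impl.

    // Hypothetical derive crate `derive_hello`, built as a "proc-macro" crate.
    #![crate_type = "proc-macro"]
    #![feature(proc_macro, proc_macro_lib)]

    extern crate proc_macro;

    use proc_macro::TokenStream;

    #[proc_macro_derive(Hello)]
    pub fn derive_hello(input: TokenStream) -> TokenStream {
        // Echo the annotated item back and append a (hardcoded) impl,
        // in the same style as the add-impl test above.
        (input.to_string() + " impl Unit { fn hello() -> &'static str { \"hello\" } }")
            .parse().unwrap()
    }

    // Hypothetical consumer crate, mirroring the smoke test:
    //
    //     #![feature(proc_macro)]
    //     #[macro_use] extern crate derive_hello;
    //
    //     #[derive(Hello)]
    //     struct Unit;
    //
    //     fn main() { assert_eq!(Unit::hello(), "hello"); }
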
// ignore-cross-compile -// ignore-pretty: does not work well with `--test` - #![feature(quote, rustc_private)] #![deny(unused_variables)] diff --git a/src/test/run-pass-valgrind/cast-enum-with-dtor.rs b/src/test/run-pass-valgrind/cast-enum-with-dtor.rs index 7cf75924a28c0..2815863fe9961 100644 --- a/src/test/run-pass-valgrind/cast-enum-with-dtor.rs +++ b/src/test/run-pass-valgrind/cast-enum-with-dtor.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // no-prefer-dynamic #![allow(dead_code)] diff --git a/src/test/run-pass/allocator-override.rs b/src/test/run-pass/allocator-override.rs index d7a8e79bfbee8..ca2dbdf2b3de3 100644 --- a/src/test/run-pass/allocator-override.rs +++ b/src/test/run-pass/allocator-override.rs @@ -10,6 +10,7 @@ // no-prefer-dynamic // aux-build:allocator-dummy.rs +// ignore-emscripten #![feature(test)] diff --git a/src/test/run-pass/assert-ne-macro-success.rs b/src/test/run-pass/assert-ne-macro-success.rs new file mode 100644 index 0000000000000..ce671d2f1b5a2 --- /dev/null +++ b/src/test/run-pass/assert-ne-macro-success.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[derive(PartialEq, Debug)] +struct Point { x : isize } + +pub fn main() { + assert_ne!(666,14); + assert_ne!("666".to_string(),"abc".to_string()); + assert_ne!(Box::new(Point{x:666}),Box::new(Point{x:34})); + assert_ne!(&Point{x:666},&Point{x:34}); + assert_ne!(666, 42, "no gods no masters"); + assert_ne!(666, 42, "6 {} 6", "6"); + assert_ne!(666, 42, "{x}, {y}, {z}", x = 6, y = 6, z = 6); +} diff --git a/src/test/run-pass/assert-ne-macro-unsized.rs b/src/test/run-pass/assert-ne-macro-unsized.rs new file mode 100644 index 0000000000000..eeac0b6f9f687 --- /dev/null +++ b/src/test/run-pass/assert-ne-macro-unsized.rs @@ -0,0 +1,13 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
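
These new tests exercise the freshly added `assert_ne!` macro. Roughly, and ignoring the custom-message forms, it behaves like the sketch below; taking references before comparing is what lets unsized operands such as `[6, 6, 6][..]` in the test that follows work.

    // Simplified sketch of what assert_ne!(a, b) expands to.
    macro_rules! assert_ne_sketch {
        ($left:expr, $right:expr) => ({
            match (&$left, &$right) {
                (left_val, right_val) => {
                    if *left_val == *right_val {
                        panic!("assertion failed: `(left != right)` (left: `{:?}`, right: `{:?}`)",
                               left_val, right_val);
                    }
                }
            }
        });
    }

    fn main() {
        assert_ne_sketch!(666, 14);
        assert_ne_sketch!([6, 6, 6][..], vec![1, 2, 3][..]);
    }
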
+ +pub fn main() { + assert_ne!([6, 6, 6][..], vec![1, 2, 3][..]); +} diff --git a/src/test/run-pass/assignability-trait.rs b/src/test/run-pass/assignability-trait.rs index c364240f4ad69..bc95d96a8cc9d 100644 --- a/src/test/run-pass/assignability-trait.rs +++ b/src/test/run-pass/assignability-trait.rs @@ -38,7 +38,7 @@ fn length>(x: T) -> usize { } pub fn main() { - let x: Vec = vec!(0,1,2,3); + let x: Vec = vec![0,1,2,3]; // Call a method x.iterate(|y| { assert_eq!(x[*y as usize], *y); true }); // Call a parameterized function diff --git a/src/test/run-pass/associated-types-doubleendediterator-object.rs b/src/test/run-pass/associated-types-doubleendediterator-object.rs index 1661812520b3e..dd194447740b7 100644 --- a/src/test/run-pass/associated-types-doubleendediterator-object.rs +++ b/src/test/run-pass/associated-types-doubleendediterator-object.rs @@ -25,7 +25,7 @@ fn pairwise_sub(mut t: Box>) -> isize { } fn main() { - let v = vec!(1, 2, 3, 4, 5, 6); + let v = vec![1, 2, 3, 4, 5, 6]; // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. let r = pairwise_sub(Box::new(v.into_iter())); assert_eq!(r, 9); diff --git a/src/test/run-pass/associated-types-iterator-binding.rs b/src/test/run-pass/associated-types-iterator-binding.rs index be854f820d4d6..abd4917cae87f 100644 --- a/src/test/run-pass/associated-types-iterator-binding.rs +++ b/src/test/run-pass/associated-types-iterator-binding.rs @@ -22,7 +22,7 @@ fn pairwise_sub>(mut t: T) -> isize { } fn main() { - let v = vec!(1, 2, 3, 4, 5, 6); + let v = vec![1, 2, 3, 4, 5, 6]; let r = pairwise_sub(v.into_iter()); assert_eq!(r, 9); } diff --git a/src/test/run-pass/attr-on-generic-formals.rs b/src/test/run-pass/attr-on-generic-formals.rs new file mode 100644 index 0000000000000..5985284d8496b --- /dev/null +++ b/src/test/run-pass/attr-on-generic-formals.rs @@ -0,0 +1,60 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// This test ensures we can attach attributes to the formals in all +// places where generic parameter lists occur, assuming appropriate +// feature gates are enabled. +// +// (We are prefixing all tested features with `rustc_`, to ensure that +// the attributes themselves won't be rejected by the compiler when +// using `rustc_attrs` feature. There is a separate compile-fail/ test +// ensuring that the attribute feature-gating works in this context.) 
+ +#![feature(generic_param_attrs, rustc_attrs)] +#![allow(dead_code)] + +struct StLt<#[rustc_lt_struct] 'a>(&'a u32); +struct StTy<#[rustc_ty_struct] I>(I); + +enum EnLt<#[rustc_lt_enum] 'b> { A(&'b u32), B } +enum EnTy<#[rustc_ty_enum] J> { A(J), B } + +trait TrLt<#[rustc_lt_trait] 'c> { fn foo(&self, _: &'c [u32]) -> &'c u32; } +trait TrTy<#[rustc_ty_trait] K> { fn foo(&self, _: K); } + +type TyLt<#[rustc_lt_type] 'd> = &'d u32; +type TyTy<#[rustc_ty_type] L> = (L, ); + +impl<#[rustc_lt_inherent] 'e> StLt<'e> { } +impl<#[rustc_ty_inherent] M> StTy { } + +impl<#[rustc_lt_impl_for] 'f> TrLt<'f> for StLt<'f> { + fn foo(&self, _: &'f [u32]) -> &'f u32 { loop { } } +} +impl<#[rustc_ty_impl_for] N> TrTy for StTy { + fn foo(&self, _: N) { } +} + +fn f_lt<#[rustc_lt_fn] 'g>(_: &'g [u32]) -> &'g u32 { loop { } } +fn f_ty<#[rustc_ty_fn] O>(_: O) { } + +impl StTy { + fn m_lt<#[rustc_lt_meth] 'h>(_: &'h [u32]) -> &'h u32 { loop { } } + fn m_ty<#[rustc_ty_meth] P>(_: P) { } +} + +fn hof_lt(_: Q) + where Q: for <#[rustc_lt_hof] 'i> Fn(&'i [u32]) -> &'i u32 +{ +} + +fn main() { + +} diff --git a/src/test/run-pass/auto-loop.rs b/src/test/run-pass/auto-loop.rs index babc0db4c3190..b0afae79c3696 100644 --- a/src/test/run-pass/auto-loop.rs +++ b/src/test/run-pass/auto-loop.rs @@ -11,7 +11,7 @@ pub fn main() { let mut sum = 0; - let xs = vec!(1, 2, 3, 4, 5); + let xs = vec![1, 2, 3, 4, 5]; for x in &xs { sum += *x; } diff --git a/src/test/run-pass/auto-ref-sliceable.rs b/src/test/run-pass/auto-ref-sliceable.rs index 5b12edb427562..f6cb314d06e2d 100644 --- a/src/test/run-pass/auto-ref-sliceable.rs +++ b/src/test/run-pass/auto-ref-sliceable.rs @@ -21,7 +21,7 @@ impl Pushable for Vec { } pub fn main() { - let mut v = vec!(1); + let mut v = vec![1]; v.push_val(2); v.push_val(3); assert_eq!(v, [1, 2, 3]); diff --git a/src/test/run-pass/autobind.rs b/src/test/run-pass/autobind.rs index 1f3d17ad55c08..ed0b9eca0e058 100644 --- a/src/test/run-pass/autobind.rs +++ b/src/test/run-pass/autobind.rs @@ -12,7 +12,7 @@ fn f(x: Vec) -> T { return x.into_iter().next().unwrap(); } -fn g(act: F) -> isize where F: FnOnce(Vec) -> isize { return act(vec!(1, 2, 3)); } +fn g(act: F) -> isize where F: FnOnce(Vec) -> isize { return act(vec![1, 2, 3]); } pub fn main() { assert_eq!(g(f), 1); diff --git a/src/test/run-pass/auxiliary/check_static_recursion_foreign_helper.rs b/src/test/run-pass/auxiliary/check_static_recursion_foreign_helper.rs index c0d81cd8e1bad..cd36a8eedb482 100644 --- a/src/test/run-pass/auxiliary/check_static_recursion_foreign_helper.rs +++ b/src/test/run-pass/auxiliary/check_static_recursion_foreign_helper.rs @@ -12,8 +12,8 @@ #![feature(libc)] -#[crate_id = "check_static_recursion_foreign_helper"] -#[crate_type = "lib"] +#![crate_name = "check_static_recursion_foreign_helper"] +#![crate_type = "lib"] extern crate libc; diff --git a/src/test/run-pass/auxiliary/dropck_eyepatch_extern_crate.rs b/src/test/run-pass/auxiliary/dropck_eyepatch_extern_crate.rs new file mode 100644 index 0000000000000..1266e589b127e --- /dev/null +++ b/src/test/run-pass/auxiliary/dropck_eyepatch_extern_crate.rs @@ -0,0 +1,61 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
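
Among the smaller fixes above, check_static_recursion_foreign_helper.rs switches `#[crate_id]`/`#[crate_type]` to their inner-attribute forms. Crate-level attributes must be inner attributes (`#![...]`), which apply to the enclosing item, here the crate itself, rather than to the item that follows; a minimal illustration with a hypothetical crate name:

    // Inner attributes (note the `!`) annotate the enclosing item; at the top
    // of the root source file that item is the crate.
    #![crate_name = "attr_demo"]   // illustrative name, not from the diff
    #![crate_type = "lib"]

    // Outer attributes annotate the item that follows them.
    #[derive(Debug, Clone)]
    pub struct Point {
        pub x: i32,
        pub y: i32,
    }
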
+ +#![feature(generic_param_attrs)] +#![feature(dropck_eyepatch)] + +// The point of this test is to illustrate that the `#[may_dangle]` +// attribute specifically allows, in the context of a type +// implementing `Drop`, a generic parameter to be instantiated with a +// lifetime that does not strictly outlive the owning type itself, +// and that this attributes effects are preserved when importing +// the type from another crate. +// +// See also dropck-eyepatch.rs for more information about the general +// structure of the test. + +use std::cell::RefCell; + +pub trait Foo { fn foo(&self, _: &str); } + +pub struct Dt(pub &'static str, pub A); +pub struct Dr<'a, B:'a+Foo>(pub &'static str, pub &'a B); +pub struct Pt(pub &'static str, pub A, pub B); +pub struct Pr<'a, 'b, B:'a+'b+Foo>(pub &'static str, pub &'a B, pub &'b B); +pub struct St(pub &'static str, pub A); +pub struct Sr<'a, B:'a+Foo>(pub &'static str, pub &'a B); + +impl Drop for Dt { + fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); } +} +impl<'a, B: Foo> Drop for Dr<'a, B> { + fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); } +} +unsafe impl<#[may_dangle] A, B: Foo> Drop for Pt { + // (unsafe to access self.1 due to #[may_dangle] on A) + fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); } +} +unsafe impl<#[may_dangle] 'a, 'b, B: Foo> Drop for Pr<'a, 'b, B> { + // (unsafe to access self.1 due to #[may_dangle] on 'a) + fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); } +} + +impl Foo for RefCell { + fn foo(&self, s: &str) { + let s2 = format!("{}|{}", *self.borrow(), s); + *self.borrow_mut() = s2; + } +} + +impl<'a, T:Foo> Foo for &'a T { + fn foo(&self, s: &str) { + (*self).foo(s); + } +} diff --git a/src/test/run-pass/auxiliary/issue-17718-aux.rs b/src/test/run-pass/auxiliary/issue-17718-aux.rs index 373fc04217540..cf7fdd7f983f0 100644 --- a/src/test/run-pass/auxiliary/issue-17718-aux.rs +++ b/src/test/run-pass/auxiliary/issue-17718-aux.rs @@ -14,11 +14,10 @@ use std::sync::atomic; pub const C1: usize = 1; pub const C2: atomic::AtomicUsize = atomic::AtomicUsize::new(0); -pub const C3: fn() = foo; +pub const C3: fn() = { fn foo() {} foo }; pub const C4: usize = C1 * C1 + C1 / C1; pub const C5: &'static usize = &C4; pub static S1: usize = 3; pub static S2: atomic::AtomicUsize = atomic::AtomicUsize::new(0); -fn foo() {} diff --git a/src/test/run-pass/backtrace-debuginfo.rs b/src/test/run-pass/backtrace-debuginfo.rs index 838005cbc9119..72cf109fd5974 100644 --- a/src/test/run-pass/backtrace-debuginfo.rs +++ b/src/test/run-pass/backtrace-debuginfo.rs @@ -16,7 +16,7 @@ // "enable" to 0 instead. // compile-flags:-g -Cllvm-args=-enable-tail-merge=0 -// ignore-pretty as this critically relies on line numbers +// ignore-pretty issue #37195 // ignore-emscripten spawning processes is not supported use std::io; diff --git a/src/test/run-pass/backtrace.rs b/src/test/run-pass/backtrace.rs index f26706d1754e0..c438c17f51e3a 100644 --- a/src/test/run-pass/backtrace.rs +++ b/src/test/run-pass/backtrace.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// no-pretty-expanded FIXME #15189 // ignore-android FIXME #17520 // ignore-emscripten spawning processes is not supported // compile-flags:-g diff --git a/src/test/run-pass/block-arg.rs b/src/test/run-pass/block-arg.rs index 2f530331a2bde..7fca4bccab3ff 100644 --- a/src/test/run-pass/block-arg.rs +++ b/src/test/run-pass/block-arg.rs @@ -10,7 +10,7 @@ // Check usage and precedence of block arguments in expressions: pub fn main() { - let v = vec!(-1.0f64, 0.0, 1.0, 2.0, 3.0); + let v = vec![-1.0f64, 0.0, 1.0, 2.0, 3.0]; // Statement form does not require parentheses: for i in &v { diff --git a/src/test/run-pass/borrow-by-val-method-receiver.rs b/src/test/run-pass/borrow-by-val-method-receiver.rs index 052b605393145..44f4a54610a9b 100644 --- a/src/test/run-pass/borrow-by-val-method-receiver.rs +++ b/src/test/run-pass/borrow-by-val-method-receiver.rs @@ -17,6 +17,6 @@ impl<'a> Foo for &'a [isize] { } pub fn main() { - let items = vec!( 3, 5, 1, 2, 4 ); + let items = vec![ 3, 5, 1, 2, 4 ]; items.foo(); } diff --git a/src/test/run-pass/borrowck/borrowck-binding-mutbl.rs b/src/test/run-pass/borrowck/borrowck-binding-mutbl.rs index 187063968f7f9..b6c2a3a61ea4f 100644 --- a/src/test/run-pass/borrowck/borrowck-binding-mutbl.rs +++ b/src/test/run-pass/borrowck/borrowck-binding-mutbl.rs @@ -14,7 +14,7 @@ fn impure(_v: &[isize]) { } pub fn main() { - let mut x = F {f: vec!(3)}; + let mut x = F {f: vec![3]}; match x { F {f: ref mut v} => { diff --git a/src/test/run-pass/borrowck/borrowck-mut-vec-as-imm-slice.rs b/src/test/run-pass/borrowck/borrowck-mut-vec-as-imm-slice.rs index d55517c65d667..4699f376313ba 100644 --- a/src/test/run-pass/borrowck/borrowck-mut-vec-as-imm-slice.rs +++ b/src/test/run-pass/borrowck/borrowck-mut-vec-as-imm-slice.rs @@ -21,5 +21,5 @@ fn has_mut_vec(v: Vec ) -> isize { } pub fn main() { - assert_eq!(has_mut_vec(vec!(1, 2, 3)), 6); + assert_eq!(has_mut_vec(vec![1, 2, 3]), 6); } diff --git a/src/test/run-pass/borrowck/borrowck-pat-enum.rs b/src/test/run-pass/borrowck/borrowck-pat-enum.rs index b29cb63f6fa3b..8de45e4205dfb 100644 --- a/src/test/run-pass/borrowck/borrowck-pat-enum.rs +++ b/src/test/run-pass/borrowck/borrowck-pat-enum.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
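
A large share of the remaining hunks simply rewrite `vec!(...)` as `vec![...]`. Any macro invocation may use parentheses, square brackets, or braces, so the change is purely stylistic; square brackets are the convention for `vec!`:

    fn main() {
        // All three delimiter forms invoke the same macro.
        let a = vec![1, 2, 3];
        let b = vec!(1, 2, 3);
        let c = vec!{1, 2, 3};
        assert_eq!(a, b);
        assert_eq!(b, c);
    }
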
-// ignore-pretty +// ignore-pretty issue #37199 fn match_ref(v: Option) -> isize { match v { diff --git a/src/test/run-pass/break.rs b/src/test/run-pass/break.rs index ea136e2dc4854..9a32fbc103147 100644 --- a/src/test/run-pass/break.rs +++ b/src/test/run-pass/break.rs @@ -26,7 +26,7 @@ pub fn main() { i += 1; if i % 2 == 0 { continue; } assert!((i % 2 != 0)); if i >= 10 { break; } } - let ys = vec!(1, 2, 3, 4, 5, 6); + let ys = vec![1, 2, 3, 4, 5, 6]; for x in &ys { if *x % 2 == 0 { continue; } assert!((*x % 2 != 0)); diff --git a/src/test/run-pass/by-value-self-in-mut-slot.rs b/src/test/run-pass/by-value-self-in-mut-slot.rs index 5bbdec95b1576..846b695c35b3d 100644 --- a/src/test/run-pass/by-value-self-in-mut-slot.rs +++ b/src/test/run-pass/by-value-self-in-mut-slot.rs @@ -14,7 +14,7 @@ struct X { } trait Changer { - fn change(mut self) -> Self; + fn change(self) -> Self; } impl Changer for X { diff --git a/src/test/run-pass/byte-literals.rs b/src/test/run-pass/byte-literals.rs index 9f7b98a57fcec..ad779d26f9e46 100644 --- a/src/test/run-pass/byte-literals.rs +++ b/src/test/run-pass/byte-literals.rs @@ -57,7 +57,7 @@ pub fn main() { _ => panic!(), } - let buf = vec!(97u8, 98, 99, 100); + let buf = vec![97u8, 98, 99, 100]; assert_eq!(match &buf[0..3] { b"def" => 1, b"abc" => 2, diff --git a/src/test/run-pass/cci_no_inline_exe.rs b/src/test/run-pass/cci_no_inline_exe.rs index cc76ed530c4b5..b105411c284a7 100644 --- a/src/test/run-pass/cci_no_inline_exe.rs +++ b/src/test/run-pass/cci_no_inline_exe.rs @@ -21,7 +21,7 @@ pub fn main() { // actually working. //let bt0 = sys::frame_address(); //println!("%?", bt0); - iter(vec!(1, 2, 3), |i| { + iter(vec![1, 2, 3], |i| { println!("{}", i); //let bt1 = sys::frame_address(); diff --git a/src/test/run-pass/cfg-in-crate-1.rs b/src/test/run-pass/cfg-in-crate-1.rs index 06f679b7fca9a..5dd6fa45bb96e 100644 --- a/src/test/run-pass/cfg-in-crate-1.rs +++ b/src/test/run-pass/cfg-in-crate-1.rs @@ -9,8 +9,6 @@ // except according to those terms. 
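
The byte-literals test above matches byte-string literals against byte slices. A standalone sketch of that pattern form, using a hypothetical helper:

    fn classify(bytes: &[u8]) -> u32 {
        match bytes {
            b"abc" => 1,
            b"def" => 2,
            _ => 0,
        }
    }

    fn main() {
        let buf = vec![97u8, 98, 99, 100];   // the bytes of "abcd"
        assert_eq!(classify(&buf[0..3]), 1); // hits the b"abc" arm
        assert_eq!(classify(b"def"), 2);
        assert_eq!(classify(b"xyz"), 0);
    }
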
// compile-flags: --cfg bar -D warnings -// ignore-pretty - #![cfg(bar)] fn main() {} diff --git a/src/test/run-pass/class-poly-methods-cross-crate.rs b/src/test/run-pass/class-poly-methods-cross-crate.rs index 4d247bde190de..7d266181c9eb1 100644 --- a/src/test/run-pass/class-poly-methods-cross-crate.rs +++ b/src/test/run-pass/class-poly-methods-cross-crate.rs @@ -14,12 +14,12 @@ extern crate cci_class_6; use cci_class_6::kitties::cat; pub fn main() { - let mut nyan : cat = cat::(52_usize, 99, vec!('p')); - let mut kitty = cat(1000_usize, 2, vec!("tabby".to_string())); + let mut nyan : cat = cat::(52_usize, 99, vec!['p']); + let mut kitty = cat(1000_usize, 2, vec!["tabby".to_string()]); assert_eq!(nyan.how_hungry, 99); assert_eq!(kitty.how_hungry, 2); - nyan.speak(vec!(1_usize,2_usize,3_usize)); + nyan.speak(vec![1_usize,2_usize,3_usize]); assert_eq!(nyan.meow_count(), 55_usize); - kitty.speak(vec!("meow".to_string(), "mew".to_string(), "purr".to_string(), "chirp".to_string())); + kitty.speak(vec!["meow".to_string(), "mew".to_string(), "purr".to_string(), "chirp".to_string()]); assert_eq!(kitty.meow_count(), 1004_usize); } diff --git a/src/test/run-pass/class-poly-methods.rs b/src/test/run-pass/class-poly-methods.rs index 2528ff5128f9c..5da858e3c4097 100644 --- a/src/test/run-pass/class-poly-methods.rs +++ b/src/test/run-pass/class-poly-methods.rs @@ -33,12 +33,12 @@ fn cat(in_x : usize, in_y : isize, in_info: Vec ) -> cat { } pub fn main() { - let mut nyan : cat = cat::(52, 99, vec!(9)); - let mut kitty = cat(1000, 2, vec!("tabby".to_string())); + let mut nyan : cat = cat::(52, 99, vec![9]); + let mut kitty = cat(1000, 2, vec!["tabby".to_string()]); assert_eq!(nyan.how_hungry, 99); assert_eq!(kitty.how_hungry, 2); - nyan.speak(vec!(1,2,3)); + nyan.speak(vec![1,2,3]); assert_eq!(nyan.meow_count(), 55); - kitty.speak(vec!("meow".to_string(), "mew".to_string(), "purr".to_string(), "chirp".to_string())); + kitty.speak(vec!["meow".to_string(), "mew".to_string(), "purr".to_string(), "chirp".to_string()]); assert_eq!(kitty.meow_count(), 1004); } diff --git a/src/test/run-pass/cleanup-rvalue-temp-during-incomplete-alloc.rs b/src/test/run-pass/cleanup-rvalue-temp-during-incomplete-alloc.rs index 25d3eb3bbe28c..c401b529c30d9 100644 --- a/src/test/run-pass/cleanup-rvalue-temp-during-incomplete-alloc.rs +++ b/src/test/run-pass/cleanup-rvalue-temp-during-incomplete-alloc.rs @@ -41,7 +41,7 @@ fn do_it(x: &[usize]) -> Foo { panic!() } -fn get_bar(x: usize) -> Vec { vec!(x * 2) } +fn get_bar(x: usize) -> Vec { vec![x * 2] } pub fn fails() { let x = 2; diff --git a/src/test/run-pass/coerce-reborrow-imm-vec-rcvr.rs b/src/test/run-pass/coerce-reborrow-imm-vec-rcvr.rs index 4e116ae146691..e86f20694e103 100644 --- a/src/test/run-pass/coerce-reborrow-imm-vec-rcvr.rs +++ b/src/test/run-pass/coerce-reborrow-imm-vec-rcvr.rs @@ -19,7 +19,7 @@ fn bip(v: &[usize]) -> Vec { } pub fn main() { - let mut the_vec = vec!(1, 2, 3, 100); + let mut the_vec = vec![1, 2, 3, 100]; assert_eq!(the_vec.clone(), bar(&mut the_vec)); assert_eq!(the_vec.clone(), bip(&the_vec)); } diff --git a/src/test/run-pass/coerce-reborrow-mut-vec-arg.rs b/src/test/run-pass/coerce-reborrow-mut-vec-arg.rs index ce0bc33905fe5..ca4ee4a97d524 100644 --- a/src/test/run-pass/coerce-reborrow-mut-vec-arg.rs +++ b/src/test/run-pass/coerce-reborrow-mut-vec-arg.rs @@ -21,7 +21,7 @@ fn bar(v: &mut [usize]) { } pub fn main() { - let mut the_vec = vec!(1, 2, 3, 100); + let mut the_vec = vec![1, 2, 3, 100]; bar(&mut the_vec); assert_eq!(the_vec, [100, 3, 2, 1]); 
} diff --git a/src/test/run-pass/coerce-reborrow-mut-vec-rcvr.rs b/src/test/run-pass/coerce-reborrow-mut-vec-rcvr.rs index 066b33e007b89..f35735adbcfe2 100644 --- a/src/test/run-pass/coerce-reborrow-mut-vec-rcvr.rs +++ b/src/test/run-pass/coerce-reborrow-mut-vec-rcvr.rs @@ -17,7 +17,7 @@ fn bar(v: &mut [usize]) { } pub fn main() { - let mut the_vec = vec!(1, 2, 3, 100); + let mut the_vec = vec![1, 2, 3, 100]; bar(&mut the_vec); assert_eq!(the_vec, [100, 3, 2, 1]); } diff --git a/src/test/run-pass/command-exec.rs b/src/test/run-pass/command-exec.rs index 130526e72b19c..5be9b97aac7e6 100644 --- a/src/test/run-pass/command-exec.rs +++ b/src/test/run-pass/command-exec.rs @@ -9,9 +9,8 @@ // except according to those terms. // ignore-windows - this is a unix-specific test +// ignore-pretty issue #37199 // ignore-emscripten -// ignore-pretty - #![feature(process_exec)] use std::env; diff --git a/src/test/run-pass/conditional-compile.rs b/src/test/run-pass/conditional-compile.rs index 5891d9f1aa019..c8e9cbdae1e84 100644 --- a/src/test/run-pass/conditional-compile.rs +++ b/src/test/run-pass/conditional-compile.rs @@ -148,3 +148,6 @@ mod test_methods { fn the(&self); } } + +#[cfg(any())] +mod nonexistent_file; // Check that unconfigured non-inline modules are not loaded or parsed. diff --git a/src/test/run-pass/const-err.rs b/src/test/run-pass/const-err.rs index 9f4ae1ad927d7..a1c9ff8a21edb 100644 --- a/src/test/run-pass/const-err.rs +++ b/src/test/run-pass/const-err.rs @@ -17,4 +17,8 @@ const X: *const u8 = b"" as _; fn main() { let _ = ((-1 as i8) << 8 - 1) as f32; let _ = 0u8 as char; + let _ = true > false; + let _ = true >= false; + let _ = true < false; + let _ = true >= false; } diff --git a/src/test/run-pass/const-negation.rs b/src/test/run-pass/const-negation.rs index 96f4217e4cb80..012fe0d95ec49 100644 --- a/src/test/run-pass/const-negation.rs +++ b/src/test/run-pass/const-negation.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(stmt_expr_attributes)] - #[deny(const_err)] fn main() { diff --git a/src/test/run-pass/core-run-destroy.rs b/src/test/run-pass/core-run-destroy.rs index ffcc1891c579e..c5b5b6b24ab1c 100644 --- a/src/test/run-pass/core-run-destroy.rs +++ b/src/test/run-pass/core-run-destroy.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty // compile-flags:--test // ignore-emscripten diff --git a/src/test/run-pass/cstring-drop.rs b/src/test/run-pass/cstring-drop.rs deleted file mode 100644 index 960391bb8deac..0000000000000 --- a/src/test/run-pass/cstring-drop.rs +++ /dev/null @@ -1,49 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// ignore-emscripten - -// Test that `CString::new("hello").unwrap().as_ptr()` pattern -// leads to failure. - -use std::env; -use std::ffi::{CString, CStr}; -use std::os::raw::c_char; -use std::process::{Command, Stdio}; - -fn main() { - let args: Vec = env::args().collect(); - if args.len() > 1 && args[1] == "child" { - // Repeat several times to be more confident that - // it is `Drop` for `CString` that does the cleanup, - // and not just some lucky UB. 
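
The conditional-compile change above relies on `cfg(any())` never being satisfied: with an empty argument list, `any()` is always false and `all()` is always true, and an item whose `cfg` fails is discarded before its contents are looked at, so an unconfigured `mod foo;` does not even need its file to exist. A small sketch:

    #[cfg(any())]
    mod never_built;   // never_built.rs need not exist; the module is discarded

    #[cfg(all())]
    fn always_built() -> u32 { 42 }

    fn main() {
        assert_eq!(always_built(), 42);
    }
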
- let xs = vec![CString::new("Hello").unwrap(); 10]; - let ys = xs.iter().map(|s| s.as_ptr()).collect::>(); - drop(xs); - assert!(ys.into_iter().any(is_hello)); - return; - } - - let output = Command::new(&args[0]).arg("child").output().unwrap(); - assert!(!output.status.success()); -} - -fn is_hello(s: *const c_char) -> bool { - // `s` is a dangling pointer and reading it is technically - // undefined behavior. But we want to prevent the most diabolical - // kind of UB (apart from nasal demons): reading a value that was - // previously written. - // - // Segfaulting or reading an empty string is Ok, - // reading "Hello" is bad. - let s = unsafe { CStr::from_ptr(s) }; - let hello = CString::new("Hello").unwrap(); - s == hello.as_ref() -} diff --git a/src/test/run-pass/deprecated-macro_escape-inner.rs b/src/test/run-pass/deprecated-macro_escape-inner.rs index 7960a91bdc4fc..1a2be7a719e17 100644 --- a/src/test/run-pass/deprecated-macro_escape-inner.rs +++ b/src/test/run-pass/deprecated-macro_escape-inner.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - mod foo { #![macro_escape] //~ WARNING macro_escape is a deprecated synonym for macro_use //~^ HELP consider an outer attribute diff --git a/src/test/run-pass/deprecated-macro_escape.rs b/src/test/run-pass/deprecated-macro_escape.rs index b03905e1a0d63..b9f756cc79c8f 100644 --- a/src/test/run-pass/deprecated-macro_escape.rs +++ b/src/test/run-pass/deprecated-macro_escape.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - #[macro_escape] //~ WARNING macro_escape is a deprecated synonym for macro_use mod foo { } diff --git a/src/test/run-pass/deriving-cmp-generic-enum.rs b/src/test/run-pass/deriving-cmp-generic-enum.rs index b1cd1877a7667..b2add21dcd487 100644 --- a/src/test/run-pass/deriving-cmp-generic-enum.rs +++ b/src/test/run-pass/deriving-cmp-generic-enum.rs @@ -8,9 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// no-pretty-expanded FIXME #15189 - - #[derive(PartialEq, Eq, PartialOrd, Ord)] enum E { E0, diff --git a/src/test/run-pass/deriving-in-macro.rs b/src/test/run-pass/deriving-in-macro.rs index b23075e6d0af4..adc3e3efd70df 100644 --- a/src/test/run-pass/deriving-in-macro.rs +++ b/src/test/run-pass/deriving-in-macro.rs @@ -19,6 +19,6 @@ macro_rules! define_vec { ) } -define_vec!(); +define_vec![]; pub fn main() {} diff --git a/src/test/run-pass/deriving-meta-empty-trait-list.rs b/src/test/run-pass/deriving-meta-empty-trait-list.rs index ff513325d5e68..ed8a50998daee 100644 --- a/src/test/run-pass/deriving-meta-empty-trait-list.rs +++ b/src/test/run-pass/deriving-meta-empty-trait-list.rs @@ -1,5 +1,3 @@ -// ignore-pretty - // Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. diff --git a/src/test/run-pass/deriving-show.rs b/src/test/run-pass/deriving-show.rs index 1f30f3ecedc96..e858ba8c823b8 100644 --- a/src/test/run-pass/deriving-show.rs +++ b/src/test/run-pass/deriving-show.rs @@ -24,6 +24,9 @@ enum Enum { StructVariant { x: isize, y : usize } } +#[derive(Debug)] +struct Pointers(*const Send, *mut Sync); + macro_rules! 
t { ($x:expr, $expected:expr) => { assert_eq!(format!("{:?}", $x), $expected.to_string()) diff --git a/src/test/run-pass/discriminant_value-wrapper.rs b/src/test/run-pass/discriminant_value-wrapper.rs new file mode 100644 index 0000000000000..2dbda0be18d98 --- /dev/null +++ b/src/test/run-pass/discriminant_value-wrapper.rs @@ -0,0 +1,28 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(discriminant_value)] + +use std::mem; + +enum ADT { + First(u32, u32), + Second(u64) +} + +pub fn main() { + assert!(mem::discriminant(&ADT::First(0,0)) == mem::discriminant(&ADT::First(1,1))); + assert!(mem::discriminant(&ADT::Second(5)) == mem::discriminant(&ADT::Second(6))); + assert!(mem::discriminant(&ADT::First(2,2)) != mem::discriminant(&ADT::Second(2))); + + let _ = mem::discriminant(&10); + let _ = mem::discriminant(&"test"); +} + diff --git a/src/test/run-pass/drop-with-type-ascription-2.rs b/src/test/run-pass/drop-with-type-ascription-2.rs index cb3712dea3224..53005ea5291fd 100644 --- a/src/test/run-pass/drop-with-type-ascription-2.rs +++ b/src/test/run-pass/drop-with-type-ascription-2.rs @@ -12,7 +12,7 @@ #![feature(collections)] fn main() { - let args = vec!("foobie", "asdf::asdf"); + let args = vec!["foobie", "asdf::asdf"]; let arr: Vec<&str> = args[1].split("::").collect(); assert_eq!(arr[0], "asdf"); assert_eq!(arr[0], "asdf"); diff --git a/src/test/run-pass/dropck-eyepatch-extern-crate.rs b/src/test/run-pass/dropck-eyepatch-extern-crate.rs new file mode 100644 index 0000000000000..20f069f77ea15 --- /dev/null +++ b/src/test/run-pass/dropck-eyepatch-extern-crate.rs @@ -0,0 +1,48 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:dropck_eyepatch_extern_crate.rs + +extern crate dropck_eyepatch_extern_crate as other; + +use other::{Dt,Dr,Pt,Pr,St,Sr}; + +fn main() { + use std::cell::RefCell; + + struct CheckOnDrop(RefCell, &'static str); + impl Drop for CheckOnDrop { + fn drop(&mut self) { assert_eq!(*self.0.borrow(), self.1); } + } + + let c_long; + let (c, dt, dr, pt, pr, st, sr) + : (CheckOnDrop, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>); + c_long = CheckOnDrop(RefCell::new("c_long".to_string()), + "c_long|pr|pt|dr|dt"); + c = CheckOnDrop(RefCell::new("c".to_string()), + "c"); + + // No error: sufficiently long-lived state can be referenced in dtors + dt = Dt("dt", &c_long.0); + dr = Dr("dr", &c_long.0); + + // No error: Drop impl asserts .1 (A and &'a _) are not accessed + pt = Pt("pt", &c.0, &c_long.0); + pr = Pr("pr", &c.0, &c_long.0); + + // No error: St and Sr have no destructor. 
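
The new discriminant_value-wrapper test exercises `std::mem::discriminant`, still gated behind the `discriminant_value` feature here. A small sketch of a variant-only comparison built on it; the enum and helper names are illustrative:

    #![feature(discriminant_value)]

    use std::mem;

    enum Shape {
        Circle(f64),
        Rect(f64, f64),
    }

    // True when both values are the same variant, regardless of payload.
    fn same_variant(a: &Shape, b: &Shape) -> bool {
        mem::discriminant(a) == mem::discriminant(b)
    }

    fn main() {
        assert!(same_variant(&Shape::Circle(1.0), &Shape::Circle(2.0)));
        assert!(!same_variant(&Shape::Circle(1.0), &Shape::Rect(3.0, 4.0)));
    }
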
+ st = St("st", &c.0); + sr = Sr("sr", &c.0); + + println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0)); + assert_eq!(*c_long.0.borrow(), "c_long"); + assert_eq!(*c.0.borrow(), "c"); +} diff --git a/src/test/run-pass/dropck-eyepatch-reorder.rs b/src/test/run-pass/dropck-eyepatch-reorder.rs new file mode 100644 index 0000000000000..bbf8bb8c35238 --- /dev/null +++ b/src/test/run-pass/dropck-eyepatch-reorder.rs @@ -0,0 +1,89 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generic_param_attrs)] +#![feature(dropck_eyepatch)] + +// The point of this test is to test uses of `#[may_dangle]` attribute +// where the formal declaration order (in the impl generics) does not +// match the actual usage order (in the type instantiation). +// +// See also dropck-eyepatch.rs for more information about the general +// structure of the test. + +trait Foo { fn foo(&self, _: &str); } + +struct Dt(&'static str, A); +struct Dr<'a, B:'a+Foo>(&'static str, &'a B); +struct Pt(&'static str, A, B); +struct Pr<'a, 'b, B:'a+'b+Foo>(&'static str, &'a B, &'b B); +struct St(&'static str, A); +struct Sr<'a, B:'a+Foo>(&'static str, &'a B); + +impl Drop for Dt { + fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); } +} +impl<'a, B: Foo> Drop for Dr<'a, B> { + fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); } +} +unsafe impl Drop for Pt { + // (unsafe to access self.1 due to #[may_dangle] on A) + fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); } +} +unsafe impl<'b, #[may_dangle] 'a, B: Foo> Drop for Pr<'a, 'b, B> { + // (unsafe to access self.1 due to #[may_dangle] on 'a) + fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); } +} + +fn main() { + use std::cell::RefCell; + + impl Foo for RefCell { + fn foo(&self, s: &str) { + let s2 = format!("{}|{}", *self.borrow(), s); + *self.borrow_mut() = s2; + } + } + + impl<'a, T:Foo> Foo for &'a T { + fn foo(&self, s: &str) { + (*self).foo(s); + } + } + + struct CheckOnDrop(RefCell, &'static str); + impl Drop for CheckOnDrop { + fn drop(&mut self) { assert_eq!(*self.0.borrow(), self.1); } + } + + let c_long; + let (c, dt, dr, pt, pr, st, sr) + : (CheckOnDrop, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>); + c_long = CheckOnDrop(RefCell::new("c_long".to_string()), + "c_long|pr|pt|dr|dt"); + c = CheckOnDrop(RefCell::new("c".to_string()), + "c"); + + // No error: sufficiently long-lived state can be referenced in dtors + dt = Dt("dt", &c_long.0); + dr = Dr("dr", &c_long.0); + + // No error: Drop impl asserts .1 (A and &'a _) are not accessed + pt = Pt("pt", &c.0, &c_long.0); + pr = Pr("pr", &c.0, &c_long.0); + + // No error: St and Sr have no destructor. + st = St("st", &c.0); + sr = Sr("sr", &c.0); + + println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0)); + assert_eq!(*c_long.0.borrow(), "c_long"); + assert_eq!(*c.0.borrow(), "c"); +} diff --git a/src/test/run-pass/dropck-eyepatch.rs b/src/test/run-pass/dropck-eyepatch.rs new file mode 100644 index 0000000000000..4a09ba05dff5e --- /dev/null +++ b/src/test/run-pass/dropck-eyepatch.rs @@ -0,0 +1,112 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generic_param_attrs)] +#![feature(dropck_eyepatch)] + +// The point of this test is to illustrate that the `#[may_dangle]` +// attribute specifically allows, in the context of a type +// implementing `Drop`, a generic parameter to be instantiated with a +// lifetime that does not strictly outlive the owning type itself. +// +// Here we test that a model use of `#[may_dangle]` will compile and run. +// +// The illustration is made concrete by comparison with two variations +// on the type with `#[may_dangle]`: +// +// 1. an analogous type that does not implement `Drop` (and thus +// should exhibit maximal flexibility with respect to dropck), and +// +// 2. an analogous type that does not use `#[may_dangle]` (and thus +// should exhibit the standard limitations imposed by dropck. +// +// The types in this file follow a pattern, {D,P,S}{t,r}, where: +// +// - D means "I implement Drop" +// +// - P means "I implement Drop but guarantee my (first) parameter is +// pure, i.e. not accessed from the destructor"; no other parameters +// are pure. +// +// - S means "I do not implement Drop" +// +// - t suffix is used when the first generic is a type +// +// - r suffix is used when the first generic is a lifetime. + +trait Foo { fn foo(&self, _: &str); } + +struct Dt(&'static str, A); +struct Dr<'a, B:'a+Foo>(&'static str, &'a B); +struct Pt(&'static str, A, B); +struct Pr<'a, 'b, B:'a+'b+Foo>(&'static str, &'a B, &'b B); +struct St(&'static str, A); +struct Sr<'a, B:'a+Foo>(&'static str, &'a B); + +impl Drop for Dt { + fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); } +} +impl<'a, B: Foo> Drop for Dr<'a, B> { + fn drop(&mut self) { println!("drop {}", self.0); self.1.foo(self.0); } +} +unsafe impl<#[may_dangle] A, B: Foo> Drop for Pt { + // (unsafe to access self.1 due to #[may_dangle] on A) + fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); } +} +unsafe impl<#[may_dangle] 'a, 'b, B: Foo> Drop for Pr<'a, 'b, B> { + // (unsafe to access self.1 due to #[may_dangle] on 'a) + fn drop(&mut self) { println!("drop {}", self.0); self.2.foo(self.0); } +} + +fn main() { + use std::cell::RefCell; + + impl Foo for RefCell { + fn foo(&self, s: &str) { + let s2 = format!("{}|{}", *self.borrow(), s); + *self.borrow_mut() = s2; + } + } + + impl<'a, T:Foo> Foo for &'a T { + fn foo(&self, s: &str) { + (*self).foo(s); + } + } + + struct CheckOnDrop(RefCell, &'static str); + impl Drop for CheckOnDrop { + fn drop(&mut self) { assert_eq!(*self.0.borrow(), self.1); } + } + + let c_long; + let (c, dt, dr, pt, pr, st, sr) + : (CheckOnDrop, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>); + c_long = CheckOnDrop(RefCell::new("c_long".to_string()), + "c_long|pr|pt|dr|dt"); + c = CheckOnDrop(RefCell::new("c".to_string()), + "c"); + + // No error: sufficiently long-lived state can be referenced in dtors + dt = Dt("dt", &c_long.0); + dr = Dr("dr", &c_long.0); + + // No error: Drop impl asserts .1 (A and &'a _) are not accessed + pt = Pt("pt", &c.0, &c_long.0); + pr = Pr("pr", &c.0, &c_long.0); + + // No error: St and Sr have no destructor. 
+ st = St("st", &c.0); + sr = Sr("sr", &c.0); + + println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0)); + assert_eq!(*c_long.0.borrow(), "c_long"); + assert_eq!(*c.0.borrow(), "c"); +} diff --git a/src/test/run-pass/enum-discrim-autosizing.rs b/src/test/run-pass/enum-discrim-autosizing.rs index 53c44f2bb24b5..c85ab535fc16b 100644 --- a/src/test/run-pass/enum-discrim-autosizing.rs +++ b/src/test/run-pass/enum-discrim-autosizing.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(stmt_expr_attributes)] - use std::mem::size_of; enum Ei8 { diff --git a/src/test/run-pass/enum-size-variance.rs b/src/test/run-pass/enum-size-variance.rs index 21996c5fabf1a..26deb0ed72ade 100644 --- a/src/test/run-pass/enum-size-variance.rs +++ b/src/test/run-pass/enum-size-variance.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. // -// ignore-pretty - #![warn(variant_size_differences)] #![allow(dead_code)] diff --git a/src/test/run-pass/expr-fn.rs b/src/test/run-pass/expr-fn.rs index aeca388d317b6..cc9a2e60decee 100644 --- a/src/test/run-pass/expr-fn.rs +++ b/src/test/run-pass/expr-fn.rs @@ -16,7 +16,7 @@ fn test_int() { } fn test_vec() { - fn f() -> Vec { vec!(10, 11) } + fn f() -> Vec { vec![10, 11] } let vect = f(); assert_eq!(vect[1], 11); } diff --git a/src/test/run-pass/expr-match-panic.rs b/src/test/run-pass/expr-match-panic.rs index 89dc7b09c7b15..1a6466048d90d 100644 --- a/src/test/run-pass/expr-match-panic.rs +++ b/src/test/run-pass/expr-match-panic.rs @@ -16,7 +16,7 @@ fn test_simple() { } fn test_box() { - let r = match true { true => { vec!(10) } false => { panic!() } }; + let r = match true { true => { vec![10] } false => { panic!() } }; assert_eq!(r[0], 10); } diff --git a/src/test/run-pass/extern-methods.rs b/src/test/run-pass/extern-methods.rs index 421b19f2864fe..2587a97863679 100644 --- a/src/test/run-pass/extern-methods.rs +++ b/src/test/run-pass/extern-methods.rs @@ -8,6 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// ignore-arm +// ignore-aarch64 + trait A { extern "fastcall" fn test1(i: i32); extern fn test2(i: i32); diff --git a/src/test/run-pass/extern-pass-empty.rs b/src/test/run-pass/extern-pass-empty.rs index 21948d2e5ad23..801a3c40ab47d 100644 --- a/src/test/run-pass/extern-pass-empty.rs +++ b/src/test/run-pass/extern-pass-empty.rs @@ -12,6 +12,7 @@ // pretty-expanded FIXME #23616 // ignore-msvc +// ignore-emscripten struct TwoU8s { one: u8, diff --git a/src/test/run-pass/extern-vectorcall.rs b/src/test/run-pass/extern-vectorcall.rs index e8a9f92a93d4a..90c3459036b6b 100644 --- a/src/test/run-pass/extern-vectorcall.rs +++ b/src/test/run-pass/extern-vectorcall.rs @@ -8,6 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
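
The dropck-eyepatch tests above are the fullest illustration of `#[may_dangle]`. A pared-down sketch of the idea follows, with illustrative names: the attribute is an unsafe promise that the destructor never touches data reachable through the marked parameter, which lets a value borrow data that does not strictly outlive it, for example something bound in the same `let`.

    #![feature(generic_param_attrs, dropck_eyepatch)]

    struct Logger<T>(&'static str, T);

    // Unsafe promise: `drop` never reads or writes through the `T` it holds,
    // so the data behind `T` is allowed to dangle by the time it runs.
    unsafe impl<#[may_dangle] T> Drop for Logger<T> {
        fn drop(&mut self) {
            println!("dropping {}", self.0);
        }
    }

    fn main() {
        let (s, logger);
        s = String::from("payload");
        // `s` does not strictly outlive `logger` (same `let`), so without
        // #[may_dangle] dropck would reject this borrow.
        logger = Logger("logger", &s);
        println!("{}: {}", logger.0, logger.1);
    }
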
+// ignore-arm +// ignore-aarch64 + #![feature(abi_vectorcall)] trait A { diff --git a/src/test/run-pass/fds-are-cloexec.rs b/src/test/run-pass/fds-are-cloexec.rs index 7d49bd25309f5..0abe44d82591d 100644 --- a/src/test/run-pass/fds-are-cloexec.rs +++ b/src/test/run-pass/fds-are-cloexec.rs @@ -11,6 +11,7 @@ // ignore-windows // ignore-android // ignore-emscripten +// ignore-haiku #![feature(libc)] diff --git a/src/test/run-pass/for-destruct.rs b/src/test/run-pass/for-destruct.rs index 963d34a2d2fbd..ba78ff4d53937 100644 --- a/src/test/run-pass/for-destruct.rs +++ b/src/test/run-pass/for-destruct.rs @@ -12,7 +12,7 @@ struct Pair { x: isize, y: isize } pub fn main() { - for elt in &(vec!(Pair {x: 10, y: 20}, Pair {x: 30, y: 0})) { + for elt in &(vec![Pair {x: 10, y: 20}, Pair {x: 30, y: 0}]) { assert_eq!(elt.x + elt.y, 30); } } diff --git a/src/test/run-pass/foreach-nested.rs b/src/test/run-pass/foreach-nested.rs index 60068185f5a52..2c4d0cc7648ce 100644 --- a/src/test/run-pass/foreach-nested.rs +++ b/src/test/run-pass/foreach-nested.rs @@ -13,7 +13,7 @@ fn two(mut it: F) where F: FnMut(isize) { it(0); it(1); } pub fn main() { - let mut a: Vec = vec!(-1, -1, -1, -1); + let mut a: Vec = vec![-1, -1, -1, -1]; let mut p: isize = 0; two(|i| { two(|j| { a[p as usize] = 10 * i + j; p += 1; }) diff --git a/src/test/run-pass/format-no-std.rs b/src/test/run-pass/format-no-std.rs index 62d54da56b2ec..1b9b4ab32ca40 100644 --- a/src/test/run-pass/format-no-std.rs +++ b/src/test/run-pass/format-no-std.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// ignore-emscripten missing rust_begin_unwind + #![feature(lang_items, start, collections)] #![no_std] diff --git a/src/test/run-pass/generic-ivec-leak.rs b/src/test/run-pass/generic-ivec-leak.rs index eb0546063f715..d439c62380185 100644 --- a/src/test/run-pass/generic-ivec-leak.rs +++ b/src/test/run-pass/generic-ivec-leak.rs @@ -10,4 +10,4 @@ enum wrapper { wrapped(T), } -pub fn main() { let _w = wrapper::wrapped(vec!(1, 2, 3, 4, 5)); } +pub fn main() { let _w = wrapper::wrapped(vec![1, 2, 3, 4, 5]); } diff --git a/src/test/run-pass/generic-static-methods.rs b/src/test/run-pass/generic-static-methods.rs index 7a496ebf8ce34..ad501ec7e9ba1 100644 --- a/src/test/run-pass/generic-static-methods.rs +++ b/src/test/run-pass/generic-static-methods.rs @@ -25,5 +25,5 @@ impl vec_utils for Vec { } pub fn main() { - assert_eq!(vec_utils::map_(&vec!(1,2,3), |&x| x+1), [2,3,4]); + assert_eq!(vec_utils::map_(&vec![1,2,3], |&x| x+1), [2,3,4]); } diff --git a/src/test/run-pass/getopts_ref.rs b/src/test/run-pass/getopts_ref.rs index c9595d09e21b2..90726c21fac47 100644 --- a/src/test/run-pass/getopts_ref.rs +++ b/src/test/run-pass/getopts_ref.rs @@ -17,7 +17,7 @@ use getopts::{optopt, getopts}; pub fn main() { let args = Vec::new(); - let opts = vec!(optopt("b", "", "something", "SMTHNG")); + let opts = vec![optopt("b", "", "something", "SMTHNG")]; match getopts(&args, &opts) { Ok(ref m) => diff --git a/src/test/run-pass/hashmap-memory.rs b/src/test/run-pass/hashmap-memory.rs index 8efc4cb1b1725..2306fa9afa2ef 100644 --- a/src/test/run-pass/hashmap-memory.rs +++ b/src/test/run-pass/hashmap-memory.rs @@ -99,5 +99,5 @@ mod map_reduce { pub fn main() { map_reduce::map_reduce( - vec!("../src/test/run-pass/hashmap-memory.rs".to_string())); + vec!["../src/test/run-pass/hashmap-memory.rs".to_string()]); } diff --git a/src/test/run-pass/html-literals.rs b/src/test/run-pass/html-literals.rs index 
a9cfe7a380261..1e1fde4d1e2be 100644 --- a/src/test/run-pass/html-literals.rs +++ b/src/test/run-pass/html-literals.rs @@ -40,7 +40,7 @@ macro_rules! parse_node { parse_node!( [$(: $tags ($(:$tag_nodes),*))*]; [$(:$head_nodes,)* :tag(stringify!($head).to_string(), - vec!($($nodes),*))]; + vec![$($nodes),*])]; $($rest)* ) ); diff --git a/src/test/run-pass/hygiene.rs b/src/test/run-pass/hygiene.rs index d72386190ecd2..91648ee579826 100644 --- a/src/test/run-pass/hygiene.rs +++ b/src/test/run-pass/hygiene.rs @@ -22,23 +22,23 @@ fn f() { fn g() { let x = 0; - macro_rules! m { ($x:ident) => { - macro_rules! m2 { () => { ($x, x) } } + macro_rules! m { ($m1:ident, $m2:ident, $x:ident) => { + macro_rules! $m1 { () => { ($x, x) } } let x = 1; - macro_rules! m3 { () => { ($x, x) } } + macro_rules! $m2 { () => { ($x, x) } } } } let x = 2; - m!(x); + m!(m2, m3, x); let x = 3; assert_eq!(m2!(), (2, 0)); assert_eq!(m3!(), (2, 1)); let x = 4; - m!(x); - assert_eq!(m2!(), (4, 0)); - assert_eq!(m3!(), (4, 1)); + m!(m4, m5, x); + assert_eq!(m4!(), (4, 0)); + assert_eq!(m5!(), (4, 1)); } mod foo { diff --git a/src/test/run-pass/hygienic-labels-in-let.rs b/src/test/run-pass/hygienic-labels-in-let.rs index 5b45f1e0d3928..2aa5f59cda62d 100644 --- a/src/test/run-pass/hygienic-labels-in-let.rs +++ b/src/test/run-pass/hygienic-labels-in-let.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty: pprust doesn't print hygiene output - // Test that labels injected by macros do not break hygiene. This // checks cases where the macros invocations are under the rhs of a // let statement. diff --git a/src/test/run-pass/ifmt.rs b/src/test/run-pass/ifmt.rs index 8b5536de12e49..2a7a593d26800 100644 --- a/src/test/run-pass/ifmt.rs +++ b/src/test/run-pass/ifmt.rs @@ -8,13 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// no-pretty-expanded unnecessary unsafe block generated - #![deny(warnings)] #![allow(unused_must_use)] #![allow(unused_features)] #![feature(box_syntax)] -#![feature(question_mark)] use std::fmt::{self, Write}; use std::usize; @@ -240,7 +237,7 @@ fn test_write() { // can do with them just yet (to test the output) fn test_print() { print!("hi"); - print!("{:?}", vec!(0u8)); + print!("{:?}", vec![0u8]); println!("hello"); println!("this is a {}", "test"); println!("{foo}", foo="bar"); diff --git a/src/test/run-pass/impl-trait/example-st.rs b/src/test/run-pass/impl-trait/example-st.rs index 461d4cf4ff053..e9326ed286aff 100644 --- a/src/test/run-pass/impl-trait/example-st.rs +++ b/src/test/run-pass/impl-trait/example-st.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(conservative_impl_trait, question_mark)] +#![feature(conservative_impl_trait)] struct State; type Error = (); diff --git a/src/test/run-pass/import-glob-crate.rs b/src/test/run-pass/import-glob-crate.rs index b2a9b08b01b86..fec46c7e1f82d 100644 --- a/src/test/run-pass/import-glob-crate.rs +++ b/src/test/run-pass/import-glob-crate.rs @@ -8,9 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
- -#![allow(dead_assignment)] - use std::mem::*; pub fn main() { @@ -20,3 +17,12 @@ pub fn main() { assert_eq!(x, 2); assert_eq!(y, 1); } + +#[allow(unused)] +fn f() { + mod foo { pub use *; } + mod bar { pub use ::*; } + + foo::main(); + bar::main(); +} diff --git a/src/test/run-pass/imports.rs b/src/test/run-pass/imports.rs index 9851dfe0262f8..195b99c9788e8 100644 --- a/src/test/run-pass/imports.rs +++ b/src/test/run-pass/imports.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - #![feature(item_like_imports)] #![allow(unused)] diff --git a/src/test/run-pass/issue-11709.rs b/src/test/run-pass/issue-11709.rs index cfff7eb339522..88d74a65813be 100644 --- a/src/test/run-pass/issue-11709.rs +++ b/src/test/run-pass/issue-11709.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37199 // Don't panic on blocks without results // There are several tests in this run-pass that raised diff --git a/src/test/run-pass/issue-13204.rs b/src/test/run-pass/issue-13204.rs index 36f606e5d73c9..13e8fe0e964c4 100644 --- a/src/test/run-pass/issue-13204.rs +++ b/src/test/run-pass/issue-13204.rs @@ -28,6 +28,6 @@ impl Foo for Baz { fn main() { let x = Baz; - let y = vec!((), (), ()); + let y = vec![(), (), ()]; assert_eq!(x.bar(y.iter()), 3); } diff --git a/src/test/run-pass/issue-14936.rs b/src/test/run-pass/issue-14936.rs index 428d4e4dbb12d..8a628b73c0067 100644 --- a/src/test/run-pass/issue-14936.rs +++ b/src/test/run-pass/issue-14936.rs @@ -24,7 +24,7 @@ macro_rules! demo { let mut x: isize = 0; let y: isize = 1; - let mut history: History = vec!(); + let mut history: History = vec![]; unsafe { asm!("mov ($1), $0" : $output_constraint (*wrap(&mut x, "out", &mut history)) diff --git a/src/test/run-pass/issue-15080.rs b/src/test/run-pass/issue-15080.rs index cee0caeb465f5..14e0037884698 100644 --- a/src/test/run-pass/issue-15080.rs +++ b/src/test/run-pass/issue-15080.rs @@ -14,7 +14,7 @@ fn main() { let mut x: &[_] = &[1, 2, 3, 4]; - let mut result = vec!(); + let mut result = vec![]; loop { x = match *x { [1, n, 3, ref rest..] => { diff --git a/src/test/run-pass/issue-15189.rs b/src/test/run-pass/issue-15189.rs index 24340ac3f13d7..54b96d6630749 100644 --- a/src/test/run-pass/issue-15189.rs +++ b/src/test/run-pass/issue-15189.rs @@ -8,14 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - macro_rules! third { ($e:expr) => ({let x = 2; $e[x]}) } fn main() { - let x = vec!(10_usize,11_usize,12_usize,13_usize); + let x = vec![10_usize,11_usize,12_usize,13_usize]; let t = third!(x); assert_eq!(t,12_usize); } diff --git a/src/test/run-pass/issue-15734.rs b/src/test/run-pass/issue-15734.rs index 66b0aeeb988d7..daf14b4c2ffc4 100644 --- a/src/test/run-pass/issue-15734.rs +++ b/src/test/run-pass/issue-15734.rs @@ -54,7 +54,7 @@ impl> Index for Row { } fn main() { - let m = Mat::new(vec!(1, 2, 3, 4, 5, 6), 3); + let m = Mat::new(vec![1, 2, 3, 4, 5, 6], 3); let r = m.row(1); assert_eq!(r.index(2), &6); diff --git a/src/test/run-pass/issue-16492.rs b/src/test/run-pass/issue-16492.rs index 975557726ce8b..177550a0dd4a1 100644 --- a/src/test/run-pass/issue-16492.rs +++ b/src/test/run-pass/issue-16492.rs @@ -8,8 +8,6 @@ // option. 
This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - use std::rc::Rc; use std::cell::Cell; diff --git a/src/test/run-pass/issue-16597-empty.rs b/src/test/run-pass/issue-16597-empty.rs index a6a1c5f16b31c..c51e33c01040b 100644 --- a/src/test/run-pass/issue-16597-empty.rs +++ b/src/test/run-pass/issue-16597-empty.rs @@ -9,7 +9,6 @@ // except according to those terms. // compile-flags:--test -// no-pretty-expanded // This verifies that the test generation doesn't crash when we have // no tests - for more information, see PR #16892. diff --git a/src/test/run-pass/issue-16597.rs b/src/test/run-pass/issue-16597.rs index 7f0a341f14715..583d8d46235d6 100644 --- a/src/test/run-pass/issue-16597.rs +++ b/src/test/run-pass/issue-16597.rs @@ -9,7 +9,6 @@ // except according to those terms. // compile-flags:--test -// ignore-pretty turns out the pretty-printer doesn't handle gensym'd things... mod tests { use super::*; diff --git a/src/test/run-pass/issue-16668.rs b/src/test/run-pass/issue-16668.rs index 0fd9965028489..18861feb1997a 100644 --- a/src/test/run-pass/issue-16668.rs +++ b/src/test/run-pass/issue-16668.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - #![allow(unknown_features)] struct Parser<'a, I, O> { diff --git a/src/test/run-pass/issue-17121.rs b/src/test/run-pass/issue-17121.rs index b3c80041ef88b..dcbcc2d44b5dd 100644 --- a/src/test/run-pass/issue-17121.rs +++ b/src/test/run-pass/issue-17121.rs @@ -10,8 +10,6 @@ // pretty-expanded FIXME #23616 -#![feature(question_mark)] - use std::fs::File; use std::io::{self, BufReader, Read}; diff --git a/src/test/run-pass/issue-18088.rs b/src/test/run-pass/issue-18088.rs new file mode 100644 index 0000000000000..a45256387d6c0 --- /dev/null +++ b/src/test/run-pass/issue-18088.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +pub trait Indexable: std::ops::Index { + fn index2(&self, i: usize) -> &T { + &self[i] + } +} +fn main() {} diff --git a/src/test/run-pass/issue-18464.rs b/src/test/run-pass/issue-18464.rs index 70217868a54b8..dff86bc1b4527 100644 --- a/src/test/run-pass/issue-18464.rs +++ b/src/test/run-pass/issue-18464.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - #![deny(dead_code)] const LOW_RANGE: char = '0'; diff --git a/src/test/run-pass/issue-19404.rs b/src/test/run-pass/issue-19404.rs index 0eea6ba22cae8..c0f13b0b6c7ad 100644 --- a/src/test/run-pass/issue-19404.rs +++ b/src/test/run-pass/issue-19404.rs @@ -8,17 +8,14 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-#![feature(reflect_marker)] - use std::any::TypeId; -use std::marker::Reflect; use std::rc::Rc; type Fp = Rc; struct Engine; -trait Component: 'static + Reflect {} +trait Component: 'static {} impl Component for Engine {} trait Env { diff --git a/src/test/run-pass/issue-20427.rs b/src/test/run-pass/issue-20427.rs index dd3d952224c05..985ca067350a3 100644 --- a/src/test/run-pass/issue-20427.rs +++ b/src/test/run-pass/issue-20427.rs @@ -9,7 +9,7 @@ // except according to those terms. // aux-build:i8.rs -// ignore-pretty (#23623) +// ignore-pretty issue #37201 extern crate i8; use std::string as i16; diff --git a/src/test/run-pass/issue-20797.rs b/src/test/run-pass/issue-20797.rs index de95243665082..e45582dc2d39a 100644 --- a/src/test/run-pass/issue-20797.rs +++ b/src/test/run-pass/issue-20797.rs @@ -10,8 +10,6 @@ // Regression test for #20797. -#![feature(question_mark)] - use std::default::Default; use std::io; use std::fs; diff --git a/src/test/run-pass/issue-20823.rs b/src/test/run-pass/issue-20823.rs index c297998b6493a..4d31d0cedb6d5 100644 --- a/src/test/run-pass/issue-20823.rs +++ b/src/test/run-pass/issue-20823.rs @@ -9,7 +9,6 @@ // except according to those terms. // compile-flags: --test -// no-pretty-expanded #![deny(unstable)] diff --git a/src/test/run-pass/issue-21400.rs b/src/test/run-pass/issue-21400.rs index 0d1be964748bb..6715b71a5f506 100644 --- a/src/test/run-pass/issue-21400.rs +++ b/src/test/run-pass/issue-21400.rs @@ -11,8 +11,6 @@ // Regression test for #21400 which itself was extracted from // stackoverflow.com/questions/28031155/is-my-borrow-checker-drunk/28031580 -#![feature(question_mark)] - fn main() { let mut t = Test; assert_eq!(t.method1("one"), Ok(1)); diff --git a/src/test/run-pass/issue-22546.rs b/src/test/run-pass/issue-22546.rs index b3cb8a78213b8..8516d344e1cdc 100644 --- a/src/test/run-pass/issue-22546.rs +++ b/src/test/run-pass/issue-22546.rs @@ -51,4 +51,10 @@ fn main() { if let None:: = Some(8) { panic!(); } + if let None:: { .. } = Some(8) { + panic!(); + } + if let Option::None:: { .. } = Some(8) { + panic!(); + } } diff --git a/src/test/run-pass/issue-22992.rs b/src/test/run-pass/issue-22992.rs index ca8f804482aa1..dc612fc0bc71c 100644 --- a/src/test/run-pass/issue-22992.rs +++ b/src/test/run-pass/issue-22992.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37201 struct X { val: i32 } impl std::ops::Deref for X { diff --git a/src/test/run-pass/issue-23338-ensure-param-drop-order.rs b/src/test/run-pass/issue-23338-ensure-param-drop-order.rs index fb84e7bae514d..9d0612f2a8daa 100644 --- a/src/test/run-pass/issue-23338-ensure-param-drop-order.rs +++ b/src/test/run-pass/issue-23338-ensure-param-drop-order.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments +// ignore-pretty issue #37201 // This test is ensuring that parameters are indeed dropped after // temporaries in a fn body. 
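The issue-19404 hunk above swaps the deprecated `Reflect` bound for a plain `'static` bound, which is all that `TypeId` requires. A small stand-alone sketch of that requirement; the `Engine` type and `id_of` helper are illustrative only:

```rust
use std::any::{Any, TypeId};

// TypeId::of requires T: 'static, so a bare 'static bound is enough for
// type-id based lookups; no extra marker trait is needed.
fn id_of<T: 'static>() -> TypeId {
    TypeId::of::<T>()
}

struct Engine;

fn main() {
    assert_eq!(id_of::<Engine>(), TypeId::of::<Engine>());
    assert_ne!(TypeId::of::<u32>(), TypeId::of::<i32>());

    // Box<dyn Any> can be checked against a concrete type at runtime.
    let boxed: Box<dyn Any> = Box::new(Engine);
    assert!(boxed.is::<Engine>());
}
```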
diff --git a/src/test/run-pass/issue-2631-b.rs b/src/test/run-pass/issue-2631-b.rs index 365b594c99e36..913b07613e030 100644 --- a/src/test/run-pass/issue-2631-b.rs +++ b/src/test/run-pass/issue-2631-b.rs @@ -19,7 +19,7 @@ use std::collections::HashMap; use std::rc::Rc; pub fn main() { - let v = vec!(Rc::new("hi".to_string())); + let v = vec![Rc::new("hi".to_string())]; let mut m: req::header_map = HashMap::new(); m.insert("METHOD".to_string(), Rc::new(RefCell::new(v))); request::(&m); diff --git a/src/test/run-pass/issue-26873-multifile.rs b/src/test/run-pass/issue-26873-multifile.rs index aa525ae95192e..51bf4bfe0e13e 100644 --- a/src/test/run-pass/issue-26873-multifile.rs +++ b/src/test/run-pass/issue-26873-multifile.rs @@ -7,10 +7,9 @@ // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. -// -// ignore-pretty + +// ignore-pretty issue #37195 mod issue_26873_multifile; fn main() {} - diff --git a/src/test/run-pass/issue-2723-b.rs b/src/test/run-pass/issue-2723-b.rs index bab7b0d24db70..a6ba957a1b110 100644 --- a/src/test/run-pass/issue-2723-b.rs +++ b/src/test/run-pass/issue-2723-b.rs @@ -15,6 +15,6 @@ use issue_2723_a::f; pub fn main() { unsafe { - f(vec!(2)); + f(vec![2]); } } diff --git a/src/test/run-pass/issue-27401-dropflag-reinit.rs b/src/test/run-pass/issue-27401-dropflag-reinit.rs index ab8f22e78be72..37dc060d5d64d 100644 --- a/src/test/run-pass/issue-27401-dropflag-reinit.rs +++ b/src/test/run-pass/issue-27401-dropflag-reinit.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty #27582 +// ignore-pretty issue #37201 // Check that when a `let`-binding occurs in a loop, its associated // drop-flag is reinitialized (to indicate "needs-drop" at the end of diff --git a/src/test/run-pass/issue-27639.rs b/src/test/run-pass/issue-27639.rs index 44c1eb86de8df..ce1abb163d53b 100644 --- a/src/test/run-pass/issue-27639.rs +++ b/src/test/run-pass/issue-27639.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - fn main() { const iter: i32 = 0; diff --git a/src/test/run-pass/issue-28839.rs b/src/test/run-pass/issue-28839.rs index a1012296626e2..2ff4403a42f97 100644 --- a/src/test/run-pass/issue-28839.rs +++ b/src/test/run-pass/issue-28839.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems with newlines before // comments +// ignore-pretty issue #37199 pub struct Foo; diff --git a/src/test/run-pass/issue-28936.rs b/src/test/run-pass/issue-28936.rs index 2a932cd7756d2..992fbdce26898 100644 --- a/src/test/run-pass/issue-28936.rs +++ b/src/test/run-pass/issue-28936.rs @@ -23,7 +23,7 @@ pub fn parse_stream, U, F>( where F: Fn(&mut StreamParser) -> U { panic!(); } pub fn thing(session: &mut Session) { - let mut stream = vec!(1, 2, 3).into_iter(); + let mut stream = vec![1, 2, 3].into_iter(); let _b = parse_stream(session, stream.by_ref(), diff --git a/src/test/run-pass/issue-29740.rs b/src/test/run-pass/issue-29740.rs index 75bcd431ec8e1..eb7b740db6e4b 100644 --- a/src/test/run-pass/issue-29740.rs +++ b/src/test/run-pass/issue-29740.rs @@ -11,8 +11,6 @@ // Regression test for #29740. Inefficient MIR matching algorithms // generated way too much code for this sort of case, leading to OOM. 
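The issue-28936 hunk above hands `stream.by_ref()` to a parser so that the caller keeps ownership of the iterator. A minimal sketch of that borrowing pattern, with made-up names:

```rust
// A consumer that takes any iterator of i32 by value.
fn take_two<I: Iterator<Item = i32>>(it: I) -> Vec<i32> {
    it.take(2).collect()
}

fn main() {
    let mut stream = vec![1, 2, 3, 4].into_iter();
    // by_ref() lends &mut stream, which is itself an Iterator, so the
    // consumer advances the original iterator without consuming it.
    let first = take_two(stream.by_ref());
    assert_eq!(first, vec![1, 2]);
    // The caller resumes exactly where the borrowing consumer stopped.
    assert_eq!(stream.next(), Some(3));
}
```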
-// ignore-pretty - pub mod KeyboardEventConstants { pub const DOM_KEY_LOCATION_STANDARD: u32 = 0; pub const DOM_KEY_LOCATION_LEFT: u32 = 1; diff --git a/src/test/run-pass/issue-2989.rs b/src/test/run-pass/issue-2989.rs index 8b6eb12f102ee..a4342f33402d3 100644 --- a/src/test/run-pass/issue-2989.rs +++ b/src/test/run-pass/issue-2989.rs @@ -32,8 +32,8 @@ fn to_bools(bitv: Storage) -> Vec { struct Storage { storage: Vec } pub fn main() { - let bools = vec!(false, false, true, false, false, true, true, false); - let bools2 = to_bools(Storage{storage: vec!(0b01100100)}); + let bools = vec![false, false, true, false, false, true, true, false]; + let bools2 = to_bools(Storage{storage: vec![0b01100100]}); for i in 0..8 { println!("{} => {} vs {}", i, bools[i], bools2[i]); diff --git a/src/test/run-pass/issue-3389.rs b/src/test/run-pass/issue-3389.rs index 26558bdd30c3b..70e3484a0c57a 100644 --- a/src/test/run-pass/issue-3389.rs +++ b/src/test/run-pass/issue-3389.rs @@ -25,8 +25,8 @@ pub fn main() { content: Vec::new(), children: Vec::new() }; - let v = vec!("123".to_string(), "abc".to_string()); - node.content = vec!("123".to_string(), "abc".to_string()); + let v = vec!["123".to_string(), "abc".to_string()]; + node.content = vec!["123".to_string(), "abc".to_string()]; print_str_vector(v); print_str_vector(node.content.clone()); diff --git a/src/test/run-pass/issue-34427.rs b/src/test/run-pass/issue-34427.rs new file mode 100644 index 0000000000000..6bf8a2ac6a72d --- /dev/null +++ b/src/test/run-pass/issue-34427.rs @@ -0,0 +1,26 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Issue #34427: On ARM, the code in `foo` at one time was generating +// a machine code instruction of the form: `str r0, [r0, rN]!` (for +// some N), which is not legal because the source register and base +// register cannot be identical in the preindexed form signalled by +// the `!`. +// +// See LLVM bug: https://llvm.org/bugs/show_bug.cgi?id=28809 + +#[inline(never)] +fn foo(n: usize) -> Vec> { + (0..n).map(|_| None).collect() +} + +fn main() { + let _ = (foo(10), foo(32)); +} diff --git a/src/test/run-pass/issue-34932.rs b/src/test/run-pass/issue-34932.rs index e83939e7aec6b..dca387dcc2117 100644 --- a/src/test/run-pass/issue-34932.rs +++ b/src/test/run-pass/issue-34932.rs @@ -10,8 +10,6 @@ // compile-flags:--test // rustc-env:RUSTC_BOOTSTRAP_KEY= -// ignore-pretty : (#23623) problems when ending with // comments - #![cfg(any())] // This test should be configured away #![feature(rustc_attrs)] // Test that this is allowed on stable/beta #![feature(iter_arith_traits)] // Test that this is not unused diff --git a/src/test/run-pass/issue-36381.rs b/src/test/run-pass/issue-36381.rs new file mode 100644 index 0000000000000..6cd991bd942df --- /dev/null +++ b/src/test/run-pass/issue-36381.rs @@ -0,0 +1,34 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Regression test for #36381. 
The trans collector was asserting that +// there are no projection types, but the `<&str as +// StreamOnce>::Position` projection contained a late-bound region, +// and we don't currently normalize in that case until the function is +// actually invoked. + +pub trait StreamOnce { + type Position; +} + +impl<'a> StreamOnce for &'a str { + type Position = usize; +} + +pub fn parser(_: F) { +} + +fn follow(_: &str) -> <&str as StreamOnce>::Position { + panic!() +} + +fn main() { + parser(follow); +} diff --git a/src/test/run-pass/issue-36474.rs b/src/test/run-pass/issue-36474.rs new file mode 100644 index 0000000000000..025244ca6648c --- /dev/null +++ b/src/test/run-pass/issue-36474.rs @@ -0,0 +1,40 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + remove_axis(&3, 0); +} + +trait Dimension { + fn slice(&self) -> &[usize]; +} + +impl Dimension for () { + fn slice(&self) -> &[usize] { &[] } +} + +impl Dimension for usize { + fn slice(&self) -> &[usize] { + unsafe { + ::std::slice::from_raw_parts(self, 1) + } + } +} + +fn remove_axis(value: &usize, axis: usize) -> () { + let tup = (); + let mut it = tup.slice().iter(); + for (i, _) in value.slice().iter().enumerate() { + if i == axis { + continue; + } + it.next(); + } +} diff --git a/src/test/run-pass/issue-36744-bitcast-args-if-needed.rs b/src/test/run-pass/issue-36744-bitcast-args-if-needed.rs new file mode 100644 index 0000000000000..1859cc9ca00b5 --- /dev/null +++ b/src/test/run-pass/issue-36744-bitcast-args-if-needed.rs @@ -0,0 +1,32 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// This tests for an ICE (and, if ignored, subsequent LLVM abort) when +// a lifetime-parametric fn is passed into a context whose expected +// type has a differing lifetime parameterization. + +struct A<'a> { + _a: &'a i32, +} + +fn call(s: T, functions: &Vec fn(&'n T)>) { + for function in functions { + function(&s); + } +} + +fn f(a: &A) { println!("a holds {}", a._a); } + +fn main() { + let a = A { _a: &10 }; + + let vec: Vec fn(&'u A<'v>)> = vec![f]; + call(a, &vec); +} diff --git a/src/test/run-pass/issue-36744-without-calls.rs b/src/test/run-pass/issue-36744-without-calls.rs new file mode 100644 index 0000000000000..1766edb06b481 --- /dev/null +++ b/src/test/run-pass/issue-36744-without-calls.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Tests for an LLVM abort when storing a lifetime-parametric fn into +// context that is expecting one that is not lifetime-parametric +// (i.e. has no `for <'_>`). 
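The two issue-36744 tests above revolve around fn pointers with lifetime-parametric types (`for<'a> fn(...)`). A small, self-contained sketch of coercing a function item to such a pointer and calling it; the names are illustrative, not from the tests:

```rust
fn print_len(s: &str) {
    println!("len = {}", s.len());
}

// The higher-ranked type means each call may pick a different lifetime.
fn call_all(fns: &[for<'a> fn(&'a str)], input: &str) {
    for f in fns {
        f(input);
    }
}

fn main() {
    // The fn item coerces to the expected for<'a> fn pointer type.
    let fns: Vec<for<'a> fn(&'a str)> = vec![print_len];
    call_all(&fns, "hello");
    call_all(&fns, String::from("owned").as_str());
}
```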
+ +pub struct A<'a>(&'a ()); +pub struct S(T); + +pub fn bad<'s>(v: &mut S)>, y: S fn(A<'b>)>) { + *v = y; +} + +fn main() {} diff --git a/src/test/run-pass/issue-36768.rs b/src/test/run-pass/issue-36768.rs new file mode 100644 index 0000000000000..bb4d12919a1c2 --- /dev/null +++ b/src/test/run-pass/issue-36768.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags:--test +#![deny(private_in_public)] + +#[test] fn foo() {} +mod foo {} + +#[test] fn core() {} +extern crate core; diff --git a/src/test/run-pass/issue-36786-resolve-call.rs b/src/test/run-pass/issue-36786-resolve-call.rs new file mode 100644 index 0000000000000..0d718c7ba4683 --- /dev/null +++ b/src/test/run-pass/issue-36786-resolve-call.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Ensure that types that rely on obligations are autoderefed +// correctly + +fn main() { + let x : Vec> = vec![Box::new(|| ())]; + x[0]() +} diff --git a/src/test/run-pass/issue-36816.rs b/src/test/run-pass/issue-36816.rs new file mode 100644 index 0000000000000..22f3a52b26e74 --- /dev/null +++ b/src/test/run-pass/issue-36816.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +macro_rules! m { () => { 1 } } +macro_rules! n { () => { 1 + m!() } } + +fn main() { + let _: [u32; n!()] = [0, 0]; +} diff --git a/src/test/run-pass/issue-36856.rs b/src/test/run-pass/issue-36856.rs new file mode 100644 index 0000000000000..91a0dadd65328 --- /dev/null +++ b/src/test/run-pass/issue-36856.rs @@ -0,0 +1,24 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Regression test for #36856. + +// compile-flags:-g + +fn g() -> bool { + false +} + +pub fn main() { + let a = !g(); + if a != !g() { + panic!(); + } +} diff --git a/src/test/run-pass/issue-36936.rs b/src/test/run-pass/issue-36936.rs new file mode 100644 index 0000000000000..34a9c2916683f --- /dev/null +++ b/src/test/run-pass/issue-36936.rs @@ -0,0 +1,35 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
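The issue-36786 test above checks that an element of a `Vec` of boxed closures can be called directly through indexing. A slightly expanded sketch of the same autoderef behavior, written with the modern `dyn` spelling:

```rust
fn main() {
    // Indexing yields a place of type Box<dyn Fn() -> i32>; the call
    // autoderefs through the Box, so the closure runs in place.
    let handlers: Vec<Box<dyn Fn() -> i32>> = vec![
        Box::new(|| 1),
        Box::new(|| 41),
    ];
    assert_eq!(handlers[0](), 1);

    let total: i32 = (0..handlers.len()).map(|i| handlers[i]()).sum();
    assert_eq!(total, 42);
}
```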
+ +// check that casts are not being treated as lexprs. + +fn main() { + let mut a = 0i32; + let b = &(a as i32); + a = 1; + assert_ne!(&a as *const i32, b as *const i32); + assert_eq!(*b, 0); + + assert_eq!(issue_36936(), 1); +} + + +struct A(u32); + +impl Drop for A { + fn drop(&mut self) { + self.0 = 0; + } +} + +fn issue_36936() -> u32 { + let a = &(A(1) as A); + a.0 +} diff --git a/src/test/run-pass/issue-37020.rs b/src/test/run-pass/issue-37020.rs new file mode 100644 index 0000000000000..7d0d20269aba7 --- /dev/null +++ b/src/test/run-pass/issue-37020.rs @@ -0,0 +1,25 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(private_in_public)] + +mod foo { + pub mod bar { + extern crate core; + } +} + +mod baz { + pub use foo::bar::core; +} + +fn main() { + baz::core::cell::Cell::new(0u32); +} diff --git a/src/test/run-pass/issue-37109.rs b/src/test/run-pass/issue-37109.rs new file mode 100644 index 0000000000000..1c893071d5589 --- /dev/null +++ b/src/test/run-pass/issue-37109.rs @@ -0,0 +1,25 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +trait ToRef<'a> { + type Ref: 'a; +} + +impl<'a, U: 'a> ToRef<'a> for U { + type Ref = &'a U; +} + +fn example<'a, T>(value: &'a T) -> (>::Ref, u32) { + (value, 0) +} + +fn main() { + example(&0); +} diff --git a/src/test/run-pass/issue-37175.rs b/src/test/run-pass/issue-37175.rs new file mode 100644 index 0000000000000..0d3613b573e12 --- /dev/null +++ b/src/test/run-pass/issue-37175.rs @@ -0,0 +1,14 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +macro_rules! m { (<$t:ty>) => { stringify!($t) } } +fn main() { + println!("{}", m!(>)); +} diff --git a/src/test/run-pass/issue-37222.rs b/src/test/run-pass/issue-37222.rs new file mode 100644 index 0000000000000..381a5799cc555 --- /dev/null +++ b/src/test/run-pass/issue-37222.rs @@ -0,0 +1,25 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
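issue-36936 above verifies that a cast is not a place expression: borrowing `(a as i32)` borrows a fresh temporary, so the original variable stays free to mutate. A compact restatement (the wider cast target is arbitrary):

```rust
fn main() {
    let mut a = 0i32;
    // `a as i64` produces a temporary value; `b` borrows that temporary,
    // not the variable `a`, so mutating `a` afterwards is allowed.
    let b = &(a as i64);
    a = 1;
    assert_eq!(*b, 0);
    assert_eq!(a, 1);
}
```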
+ +#[derive(Debug, PartialEq)] +enum Bar { + A(i64), + B(i32), + C, +} + +#[derive(Debug, PartialEq)] +struct Foo(Bar, u8); + +static FOO: [Foo; 2] = [Foo(Bar::C, 0), Foo(Bar::C, 0xFF)]; + +fn main() { + assert_eq!(&FOO[1], &Foo(Bar::C, 0xFF)); +} diff --git a/src/test/run-pass/issue-6153.rs b/src/test/run-pass/issue-6153.rs index 16e7060f4b9f1..1b16418ac4259 100644 --- a/src/test/run-pass/issue-6153.rs +++ b/src/test/run-pass/issue-6153.rs @@ -11,7 +11,7 @@ fn swap(f: F) -> Vec where F: FnOnce(Vec) -> Vec { - let x = vec!(1, 2, 3); + let x = vec![1, 2, 3]; f(x) } diff --git a/src/test/run-pass/issue-7911.rs b/src/test/run-pass/issue-7911.rs index 5324ddb49e79f..764d6fa791805 100644 --- a/src/test/run-pass/issue-7911.rs +++ b/src/test/run-pass/issue-7911.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - // (Closes #7911) Test that we can use the same self expression // with different mutability in macro in two methods diff --git a/src/test/run-pass/issue-8460.rs b/src/test/run-pass/issue-8460.rs index 8d15fe30a1b07..5148be5af8307 100644 --- a/src/test/run-pass/issue-8460.rs +++ b/src/test/run-pass/issue-8460.rs @@ -9,9 +9,7 @@ // except according to those terms. // ignore-emscripten no threads support -// ignore-pretty : (#23623) problems when ending with // comments - -#![feature(rustc_attrs, stmt_expr_attributes, zero_one)] +#![feature(rustc_attrs, zero_one)] use std::num::Zero; use std::thread; diff --git a/src/test/run-pass/issue-9129.rs b/src/test/run-pass/issue-9129.rs index 99db47c172e21..c46e8494e73b8 100644 --- a/src/test/run-pass/issue-9129.rs +++ b/src/test/run-pass/issue-9129.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty unreported #![allow(unknown_features)] #![feature(box_syntax)] diff --git a/src/test/run-pass/issue-9837.rs b/src/test/run-pass/issue-9837.rs new file mode 100644 index 0000000000000..f887459836c6b --- /dev/null +++ b/src/test/run-pass/issue-9837.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +const C1: i32 = 0x12345678; +const C2: isize = C1 as i16 as isize; + +enum E { + V = C2 +} + +fn main() { + assert_eq!(C2 as u64, E::V as u64); +} diff --git a/src/test/run-pass/issue36260.rs b/src/test/run-pass/issue36260.rs new file mode 100644 index 0000000000000..08dbbb5c9fe13 --- /dev/null +++ b/src/test/run-pass/issue36260.rs @@ -0,0 +1,22 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
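The new issue-9837 test above depends on how `as` first truncates to `i16` and then widens back when computing an enum discriminant. The arithmetic, spelled out as a sketch separate from the test:

```rust
fn main() {
    const C1: i32 = 0x1234_5678;
    // Truncating to i16 keeps only the low 16 bits (0x5678); since that
    // fits in a positive i16, widening back to isize preserves the value.
    const C2: isize = C1 as i16 as isize;
    assert_eq!(C2, 0x5678);

    // If the low 16 bits have the sign bit set, widening sign-extends:
    // 0x8765 as i16 is -(0x10000 - 0x8765) = -0x789B.
    const D: isize = 0x1234_8765i32 as i16 as isize;
    assert_eq!(D, -0x789B);
}
```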
+ +// Make sure this compiles without getting a linker error because of missing +// drop-glue because the collector missed adding drop-glue for the closure: + +fn create_fn() -> Box { + let text = String::new(); + + Box::new(move || { let _ = &text; }) +} + +fn main() { + let _ = create_fn(); +} diff --git a/src/test/run-pass/iter-zip.rs b/src/test/run-pass/iter-zip.rs new file mode 100644 index 0000000000000..b0503bc2048e7 --- /dev/null +++ b/src/test/run-pass/iter-zip.rs @@ -0,0 +1,112 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test that .zip() specialization preserves side effects +// in sideeffectful iterator adaptors. + +use std::cell::Cell; + +#[derive(Debug)] +struct CountClone(Cell); + +fn count_clone() -> CountClone { CountClone(Cell::new(0)) } + +impl PartialEq for CountClone { + fn eq(&self, rhs: &i32) -> bool { + self.0.get() == *rhs + } +} + +impl Clone for CountClone { + fn clone(&self) -> Self { + let ret = CountClone(self.0.clone()); + let n = self.0.get(); + self.0.set(n + 1); + ret + } +} + +fn test_zip_cloned_sideffectful() { + let xs = [count_clone(), count_clone(), count_clone(), count_clone()]; + let ys = [count_clone(), count_clone()]; + + for _ in xs.iter().cloned().zip(ys.iter().cloned()) { } + + assert_eq!(&xs, &[1, 1, 1, 0][..]); + assert_eq!(&ys, &[1, 1][..]); + + let xs = [count_clone(), count_clone()]; + let ys = [count_clone(), count_clone(), count_clone(), count_clone()]; + + for _ in xs.iter().cloned().zip(ys.iter().cloned()) { } + + assert_eq!(&xs, &[1, 1][..]); + assert_eq!(&ys, &[1, 1, 0, 0][..]); +} + +fn test_zip_map_sideffectful() { + let mut xs = [0; 6]; + let mut ys = [0; 4]; + + for _ in xs.iter_mut().map(|x| *x += 1).zip(ys.iter_mut().map(|y| *y += 1)) { } + + assert_eq!(&xs, &[1, 1, 1, 1, 1, 0]); + assert_eq!(&ys, &[1, 1, 1, 1]); + + let mut xs = [0; 4]; + let mut ys = [0; 6]; + + for _ in xs.iter_mut().map(|x| *x += 1).zip(ys.iter_mut().map(|y| *y += 1)) { } + + assert_eq!(&xs, &[1, 1, 1, 1]); + assert_eq!(&ys, &[1, 1, 1, 1, 0, 0]); +} + +fn test_zip_map_rev_sideffectful() { + let mut xs = [0; 6]; + let mut ys = [0; 4]; + + { + let mut it = xs.iter_mut().map(|x| *x += 1).zip(ys.iter_mut().map(|y| *y += 1)); + it.next_back(); + } + assert_eq!(&xs, &[0, 0, 0, 1, 1, 1]); + assert_eq!(&ys, &[0, 0, 0, 1]); + + let mut xs = [0; 6]; + let mut ys = [0; 4]; + + { + let mut it = xs.iter_mut().map(|x| *x += 1).zip(ys.iter_mut().map(|y| *y += 1)); + (&mut it).take(5).count(); + it.next_back(); + } + assert_eq!(&xs, &[1, 1, 1, 1, 1, 1]); + assert_eq!(&ys, &[1, 1, 1, 1]); +} + +fn test_zip_nested_sideffectful() { + let mut xs = [0; 6]; + let ys = [0; 4]; + + { + // test that it has the side effect nested inside enumerate + let it = xs.iter_mut().map(|x| *x = 1).enumerate().zip(&ys); + it.count(); + } + assert_eq!(&xs, &[1, 1, 1, 1, 1, 0]); +} + +fn main() { + test_zip_cloned_sideffectful(); + test_zip_map_sideffectful(); + test_zip_map_rev_sideffectful(); + test_zip_nested_sideffectful(); +} diff --git a/src/test/run-pass/ivec-pass-by-value.rs b/src/test/run-pass/ivec-pass-by-value.rs index 5b40105a97916..e3b42e60645a3 100644 --- a/src/test/run-pass/ivec-pass-by-value.rs +++ b/src/test/run-pass/ivec-pass-by-value.rs @@ -9,4 +9,4 @@ // except 
according to those terms. fn f(_a: Vec ) { } -pub fn main() { f(vec!(1, 2, 3, 4, 5)); } +pub fn main() { f(vec![1, 2, 3, 4, 5]); } diff --git a/src/test/run-pass/ivec-tag.rs b/src/test/run-pass/ivec-tag.rs index b8238774bc1e0..a511db8e9397d 100644 --- a/src/test/run-pass/ivec-tag.rs +++ b/src/test/run-pass/ivec-tag.rs @@ -17,8 +17,8 @@ use std::sync::mpsc::{channel, Sender}; fn producer(tx: &Sender>) { tx.send( - vec!(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, - 13)).unwrap(); + vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, + 13]).unwrap(); } pub fn main() { diff --git a/src/test/run-pass/lambda-infer-unresolved.rs b/src/test/run-pass/lambda-infer-unresolved.rs index fca700f6e4a84..5109c6fc77726 100644 --- a/src/test/run-pass/lambda-infer-unresolved.rs +++ b/src/test/run-pass/lambda-infer-unresolved.rs @@ -15,7 +15,7 @@ struct Refs { refs: Vec , n: isize } pub fn main() { - let mut e = Refs{refs: vec!(), n: 0}; + let mut e = Refs{refs: vec![], n: 0}; let _f = || println!("{}", e.n); let x: &[isize] = &e.refs; assert_eq!(x.len(), 0); diff --git a/src/test/run-pass/lexer-crlf-line-endings-string-literal-doc-comment.rs b/src/test/run-pass/lexer-crlf-line-endings-string-literal-doc-comment.rs index 5c8db524cc2ed..05f1f1bfea0fb 100644 --- a/src/test/run-pass/lexer-crlf-line-endings-string-literal-doc-comment.rs +++ b/src/test/run-pass/lexer-crlf-line-endings-string-literal-doc-comment.rs @@ -15,7 +15,7 @@ // NB: this file needs CRLF line endings. The .gitattributes file in // this directory should enforce it. -// ignore-pretty +// ignore-pretty issue #37195 /// Doc comment that ends in CRLF pub fn foo() {} diff --git a/src/test/run-pass/linear-for-loop.rs b/src/test/run-pass/linear-for-loop.rs index ddb4e40aea633..fc6d435b034be 100644 --- a/src/test/run-pass/linear-for-loop.rs +++ b/src/test/run-pass/linear-for-loop.rs @@ -8,10 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// no-pretty-expanded FIXME #15189 - pub fn main() { - let x = vec!(1, 2, 3); + let x = vec![1, 2, 3]; let mut y = 0; for i in &x { println!("{}", *i); y += *i; } println!("{}", y); diff --git a/src/test/run-pass/log-poly.rs b/src/test/run-pass/log-poly.rs index d8a69177cafcc..b54b4692a41bf 100644 --- a/src/test/run-pass/log-poly.rs +++ b/src/test/run-pass/log-poly.rs @@ -17,5 +17,5 @@ pub fn main() { println!("{:?}", 1); println!("{:?}", 2.0f64); println!("{:?}", Numbers::Three); - println!("{:?}", vec!(4)); + println!("{:?}", vec![4]); } diff --git a/src/test/run-pass/loop-scope.rs b/src/test/run-pass/loop-scope.rs index 0c1e7916cdb18..6916bfb8c616a 100644 --- a/src/test/run-pass/loop-scope.rs +++ b/src/test/run-pass/loop-scope.rs @@ -10,7 +10,7 @@ pub fn main() { - let x = vec!(10, 20, 30); + let x = vec![10, 20, 30]; let mut sum = 0; for x in &x { sum += *x; } assert_eq!(sum, 60); diff --git a/src/test/run-pass/macro-2.rs b/src/test/run-pass/macro-2.rs index 2cac9226117a2..801d92b6dcb16 100644 --- a/src/test/run-pass/macro-2.rs +++ b/src/test/run-pass/macro-2.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - token trees can't pretty print - pub fn main() { macro_rules! mylambda_tt { diff --git a/src/test/run-pass/macro-attribute-expansion.rs b/src/test/run-pass/macro-attribute-expansion.rs index 60217139cd778..c3de9f736fbe5 100644 --- a/src/test/run-pass/macro-attribute-expansion.rs +++ b/src/test/run-pass/macro-attribute-expansion.rs @@ -8,8 +8,6 @@ // option. 
This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - token trees can't pretty print - macro_rules! descriptions { ($name:ident is $desc:expr) => { // Check that we will correctly expand attributes diff --git a/src/test/run-pass/macro-attributes.rs b/src/test/run-pass/macro-attributes.rs index 2752fc88b456b..839fee3a2d214 100644 --- a/src/test/run-pass/macro-attributes.rs +++ b/src/test/run-pass/macro-attributes.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - token trees can't pretty print - #![feature(custom_attribute)] macro_rules! compiles_fine { diff --git a/src/test/run-pass/macro-include-items.rs b/src/test/run-pass/macro-include-items.rs index 1e31c85afad62..f8728ebb91517 100644 --- a/src/test/run-pass/macro-include-items.rs +++ b/src/test/run-pass/macro-include-items.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37195 fn bar() {} diff --git a/src/test/run-pass/macro-meta-items.rs b/src/test/run-pass/macro-meta-items.rs index 605cade2b3f7e..9c1e1fca3413e 100644 --- a/src/test/run-pass/macro-meta-items.rs +++ b/src/test/run-pass/macro-meta-items.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - token trees can't pretty print // compile-flags: --cfg foo macro_rules! compiles_fine { diff --git a/src/test/run-pass/macro-multiple-items.rs b/src/test/run-pass/macro-multiple-items.rs index f78f93e84810c..190bfc53a9edb 100644 --- a/src/test/run-pass/macro-multiple-items.rs +++ b/src/test/run-pass/macro-multiple-items.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - token trees can't pretty print - macro_rules! make_foo { () => ( struct Foo; diff --git a/src/test/run-pass/macro-stmt.rs b/src/test/run-pass/macro-stmt.rs index 0d8b86012d6e0..027df9f93a88d 100644 --- a/src/test/run-pass/macro-stmt.rs +++ b/src/test/run-pass/macro-stmt.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - token trees can't pretty print - macro_rules! myfn { ( $f:ident, ( $( $x:ident ),* ), $body:block ) => ( fn $f( $( $x : isize),* ) -> isize $body diff --git a/src/test/run-pass/match-byte-array-patterns.rs b/src/test/run-pass/match-byte-array-patterns.rs new file mode 100644 index 0000000000000..dbfe588fb0c6a --- /dev/null +++ b/src/test/run-pass/match-byte-array-patterns.rs @@ -0,0 +1,54 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
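Several of the macro tests above (macro-multiple-items, macro-stmt) rely on `macro_rules!` expanding to whole items. A small sketch in the same spirit; `make_counter` and the struct names are invented for illustration:

```rust
// A macro invoked in item position may expand to several items at once.
macro_rules! make_counter {
    ($name:ident, $start:expr) => {
        struct $name;
        impl $name {
            fn start() -> i32 { $start }
        }
    };
}

make_counter!(Widgets, 10);
make_counter!(Gadgets, 20);

fn main() {
    assert_eq!(Widgets::start(), 10);
    assert_eq!(Gadgets::start(), 20);
}
```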
+ +#![feature(slice_patterns)] + +fn main() { + let buf = &[0u8; 4]; + match buf { + &[0, 1, 0, 0] => unimplemented!(), + b"true" => unimplemented!(), + _ => {} + } + + match buf { + b"true" => unimplemented!(), + &[0, 1, 0, 0] => unimplemented!(), + _ => {} + } + + match buf { + b"true" => unimplemented!(), + &[0, x, 0, 0] => assert_eq!(x, 0), + _ => unimplemented!(), + } + + let buf: &[u8] = buf; + + match buf { + &[0, 1, 0, 0] => unimplemented!(), + &[_] => unimplemented!(), + &[_, _, _, _, _, ..] => unimplemented!(), + b"true" => unimplemented!(), + _ => {} + } + + match buf { + b"true" => unimplemented!(), + &[0, 1, 0, 0] => unimplemented!(), + _ => {} + } + + match buf { + b"true" => unimplemented!(), + &[0, x, 0, 0] => assert_eq!(x, 0), + _ => unimplemented!(), + } +} diff --git a/src/test/run-pass/match-vec-rvalue.rs b/src/test/run-pass/match-vec-rvalue.rs index a10f9b1d7d61b..3d221927b96e1 100644 --- a/src/test/run-pass/match-vec-rvalue.rs +++ b/src/test/run-pass/match-vec-rvalue.rs @@ -13,7 +13,7 @@ pub fn main() { - match vec!(1, 2, 3) { + match vec![1, 2, 3] { x => { assert_eq!(x.len(), 3); assert_eq!(x[0], 1); diff --git a/src/test/run-pass/mir_raw_fat_ptr.rs b/src/test/run-pass/mir_raw_fat_ptr.rs index c9fd88f2fb3cf..846318ec4fd34 100644 --- a/src/test/run-pass/mir_raw_fat_ptr.rs +++ b/src/test/run-pass/mir_raw_fat_ptr.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty : (#23623) problems when ending with // comments - // check raw fat pointer ops in mir // FIXME: please improve this when we get monomorphization support diff --git a/src/test/run-pass/mod_dir_implicit.rs b/src/test/run-pass/mod_dir_implicit.rs index 1b89464c543ce..f8034f9e07386 100644 --- a/src/test/run-pass/mod_dir_implicit.rs +++ b/src/test/run-pass/mod_dir_implicit.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37195 mod mod_dir_implicit_aux; diff --git a/src/test/run-pass/mod_dir_path.rs b/src/test/run-pass/mod_dir_path.rs index e0327a1dcd4ed..e2f33963c4bad 100644 --- a/src/test/run-pass/mod_dir_path.rs +++ b/src/test/run-pass/mod_dir_path.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37195 mod mod_dir_simple { #[path = "test.rs"] @@ -17,4 +17,15 @@ mod mod_dir_simple { pub fn main() { assert_eq!(mod_dir_simple::syrup::foo(), 10); + + #[path = "auxiliary"] + mod foo { + mod two_macros; + } + + #[path = "auxiliary"] + mod bar { + macro_rules! m { () => { mod two_macros; } } + m!(); + } } diff --git a/src/test/run-pass/mod_dir_path2.rs b/src/test/run-pass/mod_dir_path2.rs index 2b5e67a6e8396..b96c1ae072243 100644 --- a/src/test/run-pass/mod_dir_path2.rs +++ b/src/test/run-pass/mod_dir_path2.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37195 #[path = "mod_dir_simple"] mod pancakes { diff --git a/src/test/run-pass/mod_dir_path3.rs b/src/test/run-pass/mod_dir_path3.rs index d6037bef6e576..3160064d7c244 100644 --- a/src/test/run-pass/mod_dir_path3.rs +++ b/src/test/run-pass/mod_dir_path3.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
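The new match-byte-array-patterns test above mixes byte-string literals and explicit slice patterns in one `match`. A stand-alone version that compiles on current stable Rust, where fixed-length slice patterns no longer need the `slice_patterns` gate:

```rust
fn classify(buf: &[u8]) -> &'static str {
    match buf {
        // A byte-string literal pattern matches a &[u8] of the same bytes.
        b"true" => "byte-string literal",
        // An explicit slice pattern matches a slice of exactly that length.
        &[0, 1, 0, 0] => "explicit four-byte pattern",
        &[_] => "any single byte",
        _ => "something else",
    }
}

fn main() {
    assert_eq!(classify(b"true"), "byte-string literal");
    assert_eq!(classify(&[0, 1, 0, 0]), "explicit four-byte pattern");
    assert_eq!(classify(&[7]), "any single byte");
    assert_eq!(classify(b"false"), "something else");
}
```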
-// ignore-pretty +// ignore-pretty issue #37195 #[path = "mod_dir_simple"] mod pancakes { diff --git a/src/test/run-pass/mod_dir_path_multi.rs b/src/test/run-pass/mod_dir_path_multi.rs index f1bf83ed767f7..12b28cf98af0b 100644 --- a/src/test/run-pass/mod_dir_path_multi.rs +++ b/src/test/run-pass/mod_dir_path_multi.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37195 #[path = "mod_dir_simple"] mod biscuits { diff --git a/src/test/run-pass/mod_dir_recursive.rs b/src/test/run-pass/mod_dir_recursive.rs index d7121ef769078..8964d9ccd2527 100644 --- a/src/test/run-pass/mod_dir_recursive.rs +++ b/src/test/run-pass/mod_dir_recursive.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37195 // Testing that the parser for each file tracks its modules // and paths independently. The load_another_mod module should diff --git a/src/test/run-pass/mod_dir_simple.rs b/src/test/run-pass/mod_dir_simple.rs index 41c810b6fddab..429b4ebe63970 100644 --- a/src/test/run-pass/mod_dir_simple.rs +++ b/src/test/run-pass/mod_dir_simple.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37195 mod mod_dir_simple { pub mod test; diff --git a/src/test/run-pass/mod_file.rs b/src/test/run-pass/mod_file.rs index ddda38bafd369..c18fecd7c569e 100644 --- a/src/test/run-pass/mod_file.rs +++ b/src/test/run-pass/mod_file.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37195 // Testing that a plain .rs file can load modules from other source files diff --git a/src/test/run-pass/mod_file_with_path_attr.rs b/src/test/run-pass/mod_file_with_path_attr.rs index c6e51daaaf054..d9f28ceb0ecac 100644 --- a/src/test/run-pass/mod_file_with_path_attr.rs +++ b/src/test/run-pass/mod_file_with_path_attr.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty +// ignore-pretty issue #37195 // Testing that a plain .rs file can load modules from other source files diff --git a/src/test/run-pass/monad.rs b/src/test/run-pass/monad.rs index b28e5ec64de34..211827f92222c 100644 --- a/src/test/run-pass/monad.rs +++ b/src/test/run-pass/monad.rs @@ -45,9 +45,9 @@ fn transform(x: Option) -> Option { pub fn main() { assert_eq!(transform(Some(10)), Some("11".to_string())); assert_eq!(transform(None), None); - assert_eq!((vec!("hi".to_string())) - .bind(|x| vec!(x.clone(), format!("{}!", x)) ) - .bind(|x| vec!(x.clone(), format!("{}?", x)) ), + assert_eq!((vec!["hi".to_string()]) + .bind(|x| vec![x.clone(), format!("{}!", x)] ) + .bind(|x| vec![x.clone(), format!("{}?", x)] ), ["hi".to_string(), "hi?".to_string(), "hi!".to_string(), diff --git a/src/test/run-pass/move-arg-2-unique.rs b/src/test/run-pass/move-arg-2-unique.rs index bed339e158637..0ff5a66adc269 100644 --- a/src/test/run-pass/move-arg-2-unique.rs +++ b/src/test/run-pass/move-arg-2-unique.rs @@ -15,10 +15,10 @@ fn test(foo: Box> ) { assert_eq!((*foo)[0], 10); } pub fn main() { - let x = box vec!(10); + let x = box vec![10]; // Test forgetting a local by move-in test(x); // Test forgetting a temporary by move-in. 
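monad.rs above defines a `bind` helper for vectors; the closest standard-library idiom is `Iterator::flat_map`. A hedged sketch of the correspondence (not part of the test itself):

```rust
fn main() {
    let out: Vec<String> = vec!["hi".to_string()]
        .into_iter()
        // Each element expands into several elements, and the results are
        // flattened into one sequence, like the test's `bind`.
        .flat_map(|x| vec![x.clone(), format!("{}!", x), format!("{}?", x)])
        .collect();
    assert_eq!(out, vec!["hi", "hi!", "hi?"]);
}
```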
- test(box vec!(10)); + test(box vec![10]); } diff --git a/src/test/run-pass/move-arg-2.rs b/src/test/run-pass/move-arg-2.rs index a6a26ab357876..8de487bc3631c 100644 --- a/src/test/run-pass/move-arg-2.rs +++ b/src/test/run-pass/move-arg-2.rs @@ -15,10 +15,10 @@ fn test(foo: Box>) { assert_eq!((*foo)[0], 10); } pub fn main() { - let x = box vec!(10); + let x = box vec![10]; // Test forgetting a local by move-in test(x); // Test forgetting a temporary by move-in. - test(box vec!(10)); + test(box vec![10]); } diff --git a/src/test/run-pass/newtype-polymorphic.rs b/src/test/run-pass/newtype-polymorphic.rs index 91599608ceed6..e7da8d7bf93c7 100644 --- a/src/test/run-pass/newtype-polymorphic.rs +++ b/src/test/run-pass/newtype-polymorphic.rs @@ -24,7 +24,7 @@ fn myvec_elt(mv: myvec) -> X { } pub fn main() { - let mv = myvec(vec!(1, 2, 3)); + let mv = myvec(vec![1, 2, 3]); let mv_clone = mv.clone(); let mv_clone = myvec_deref(mv_clone); assert_eq!(mv_clone[1], 2); diff --git a/src/test/run-pass/nonzero-enum.rs b/src/test/run-pass/nonzero-enum.rs new file mode 100644 index 0000000000000..266506e04b74d --- /dev/null +++ b/src/test/run-pass/nonzero-enum.rs @@ -0,0 +1,39 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::mem::size_of; + +enum E { + A = 1, + B = 2, + C = 3, +} + +struct S { + a: u16, + b: u8, + e: E, +} + +fn main() { + assert_eq!(size_of::(), 1); + assert_eq!(size_of::>(), 1); + assert_eq!(size_of::>(), 1); + assert_eq!(size_of::(), 4); + assert_eq!(size_of::>(), 4); + let enone = None::; + let esome = Some(E::A); + if let Some(..) = enone { + panic!(); + } + if let None = esome { + panic!(); + } +} diff --git a/src/test/run-pass/nullable-pointer-iotareduction.rs b/src/test/run-pass/nullable-pointer-iotareduction.rs index dffdcfe0af562..7e8d082a286a8 100644 --- a/src/test/run-pass/nullable-pointer-iotareduction.rs +++ b/src/test/run-pass/nullable-pointer-iotareduction.rs @@ -76,7 +76,7 @@ pub fn main() { check_type!(&17, &isize); check_type!(box 18, Box); check_type!("foo".to_string(), String); - check_type!(vec!(20, 22), Vec); + check_type!(vec![20, 22], Vec); check_type!(main, fn(), |pthing| { assert_eq!(main as fn(), *pthing as fn()) }); diff --git a/src/test/run-pass/numeric-method-autoexport.rs b/src/test/run-pass/numeric-method-autoexport.rs index b1d71abc78599..15ece09abd80f 100644 --- a/src/test/run-pass/numeric-method-autoexport.rs +++ b/src/test/run-pass/numeric-method-autoexport.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// no-pretty-expanded - // This file is intended to test only that methods are automatically // reachable for each numeric type, for each exported impl, with no imports // necessary. 
Testing the methods of the impls is done within the source diff --git a/src/test/run-pass/objects-owned-object-borrowed-method-headerless.rs b/src/test/run-pass/objects-owned-object-borrowed-method-headerless.rs index 176f67fd3a18a..768f126e4edfe 100644 --- a/src/test/run-pass/objects-owned-object-borrowed-method-headerless.rs +++ b/src/test/run-pass/objects-owned-object-borrowed-method-headerless.rs @@ -32,11 +32,11 @@ impl FooTrait for BarStruct { } pub fn main() { - let foos: Vec> = vec!( + let foos: Vec> = vec![ box BarStruct{ x: 0 } as Box, box BarStruct{ x: 1 } as Box, box BarStruct{ x: 2 } as Box - ); + ]; for i in 0..foos.len() { assert_eq!(i, foos[i].foo()); diff --git a/src/test/run-pass/overloaded-deref.rs b/src/test/run-pass/overloaded-deref.rs index 8541c1c0a8986..e2ca880719a8e 100644 --- a/src/test/run-pass/overloaded-deref.rs +++ b/src/test/run-pass/overloaded-deref.rs @@ -45,7 +45,7 @@ pub fn main() { (*(*p).borrow_mut()).y += 3; assert_eq!(*(*p).borrow(), Point {x: 3, y: 5}); - let v = Rc::new(RefCell::new(vec!(1, 2, 3))); + let v = Rc::new(RefCell::new(vec![1, 2, 3])); (*(*v).borrow_mut())[0] = 3; (*(*v).borrow_mut())[1] += 3; assert_eq!(((*(*v).borrow())[0], diff --git a/src/test/run-pass/packed-struct-layout.rs b/src/test/run-pass/packed-struct-layout.rs index 92308c9fc3e4e..d1e05e5a0184c 100644 --- a/src/test/run-pass/packed-struct-layout.rs +++ b/src/test/run-pass/packed-struct-layout.rs @@ -7,6 +7,7 @@ // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. +// ignore-emscripten Not sure what's happening here. use std::mem; diff --git a/src/test/run-pass/packed-tuple-struct-layout.rs b/src/test/run-pass/packed-tuple-struct-layout.rs index 411c1807a16b7..ee4eb86ed0de3 100644 --- a/src/test/run-pass/packed-tuple-struct-layout.rs +++ b/src/test/run-pass/packed-tuple-struct-layout.rs @@ -7,6 +7,7 @@ // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. +// ignore-emscripten use std::mem; diff --git a/src/test/run-pass/panic-runtime/abort-link-to-unwinding-crates.rs b/src/test/run-pass/panic-runtime/abort-link-to-unwinding-crates.rs index 71c1a61062d10..1c273fcba02da 100644 --- a/src/test/run-pass/panic-runtime/abort-link-to-unwinding-crates.rs +++ b/src/test/run-pass/panic-runtime/abort-link-to-unwinding-crates.rs @@ -11,6 +11,7 @@ // compile-flags:-C panic=abort // aux-build:exit-success-if-unwind.rs // no-prefer-dynamic +// ignore-emscripten Function not implemented extern crate exit_success_if_unwind; diff --git a/src/test/run-pass/panic-runtime/abort.rs b/src/test/run-pass/panic-runtime/abort.rs index 2fc9d6cfd04a1..be38f6ea3643a 100644 --- a/src/test/run-pass/panic-runtime/abort.rs +++ b/src/test/run-pass/panic-runtime/abort.rs @@ -10,6 +10,7 @@ // compile-flags:-C panic=abort // no-prefer-dynamic +// ignore-emscripten Function not implemented. use std::process::Command; use std::env; diff --git a/src/test/run-pass/panic-runtime/lto-abort.rs b/src/test/run-pass/panic-runtime/lto-abort.rs index 09e33b88189fe..e4cd4e809a4c6 100644 --- a/src/test/run-pass/panic-runtime/lto-abort.rs +++ b/src/test/run-pass/panic-runtime/lto-abort.rs @@ -10,6 +10,7 @@ // compile-flags:-C lto -C panic=abort // no-prefer-dynamic +// ignore-emscripten Function not implemented. 
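The nonzero-enum test a little above asserts that `Option` of a small fieldless enum stays one byte. That is the niche optimization; the sketch below restates it with an illustrative `Tag` enum, relying on the layout rustc currently chooses for fieldless enums plus the documented guarantees for `NonZero` types and references:

```rust
use std::mem::size_of;
use std::num::NonZeroU32;

#[allow(dead_code)]
enum Tag { A = 1, B = 2, C = 3 }

fn main() {
    // `Tag` never uses the bit pattern 0, so Option<Tag> can store None
    // in that spare value and stay a single byte.
    assert_eq!(size_of::<Tag>(), 1);
    assert_eq!(size_of::<Option<Tag>>(), 1);

    // The same optimization is guaranteed for NonZero integers and references.
    assert_eq!(size_of::<Option<NonZeroU32>>(), size_of::<u32>());
    assert_eq!(size_of::<Option<&u8>>(), size_of::<&u8>());
}
```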
use std::process::Command; use std::env; diff --git a/src/test/run-pass/panic-runtime/lto-unwind.rs b/src/test/run-pass/panic-runtime/lto-unwind.rs index 10e633b3775b3..768b88fd09e0b 100644 --- a/src/test/run-pass/panic-runtime/lto-unwind.rs +++ b/src/test/run-pass/panic-runtime/lto-unwind.rs @@ -10,6 +10,7 @@ // compile-flags:-C lto -C panic=unwind // no-prefer-dynamic +// ignore-emscripten Function not implemented. use std::process::Command; use std::env; diff --git a/src/test/run-pass/process-status-inherits-stdin.rs b/src/test/run-pass/process-status-inherits-stdin.rs index 2ad47c4f116ae..ff389bec899ef 100644 --- a/src/test/run-pass/process-status-inherits-stdin.rs +++ b/src/test/run-pass/process-status-inherits-stdin.rs @@ -7,6 +7,7 @@ // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. +// ignore-emscripten Function not implemented. use std::env; use std::io; diff --git a/src/test/run-pass/project-cache-issue-37154.rs b/src/test/run-pass/project-cache-issue-37154.rs new file mode 100644 index 0000000000000..29dc6984e234a --- /dev/null +++ b/src/test/run-pass/project-cache-issue-37154.rs @@ -0,0 +1,28 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Regression test for #37154: the problem here was that the cache +// results in a false error because it was caching skolemized results +// even after those skolemized regions had been popped. + +trait Foo { + fn method(&self) {} +} + +struct Wrapper(T); + +impl Foo for Wrapper where for<'a> &'a T: IntoIterator {} + +fn f(x: Wrapper>) { + x.method(); // This works. + x.method(); // error: no method named `method` +} + +fn main() { } diff --git a/src/test/run-pass/range_inclusive.rs b/src/test/run-pass/range_inclusive.rs index aaf129e7b8e4a..cfa9f6e36e9bc 100644 --- a/src/test/run-pass/range_inclusive.rs +++ b/src/test/run-pass/range_inclusive.rs @@ -75,7 +75,7 @@ pub fn main() { // test the size hints and emptying let mut long = 0...255u8; - let mut short = 42...42; + let mut short = 42...42u8; assert_eq!(long.size_hint(), (256, Some(256))); assert_eq!(short.size_hint(), (1, Some(1))); long.next(); diff --git a/src/test/run-pass/rcvr-borrowed-to-slice.rs b/src/test/run-pass/rcvr-borrowed-to-slice.rs index 1ec16747181c2..efa73ad92ce83 100644 --- a/src/test/run-pass/rcvr-borrowed-to-slice.rs +++ b/src/test/run-pass/rcvr-borrowed-to-slice.rs @@ -23,17 +23,17 @@ impl<'a> sum for &'a [isize] { fn call_sum(x: &[isize]) -> isize { x.sum_() } pub fn main() { - let x = vec!(1, 2, 3); + let x = vec![1, 2, 3]; let y = call_sum(&x); println!("y=={}", y); assert_eq!(y, 6); - let x = vec!(1, 2, 3); + let x = vec![1, 2, 3]; let y = x.sum_(); println!("y=={}", y); assert_eq!(y, 6); - let x = vec!(1, 2, 3); + let x = vec![1, 2, 3]; let y = x.sum_(); println!("y=={}", y); assert_eq!(y, 6); diff --git a/src/test/run-pass/reexport-test-harness-main.rs b/src/test/run-pass/reexport-test-harness-main.rs index 309ae1bcc56ec..88e3e6ba4acd3 100644 --- a/src/test/run-pass/reexport-test-harness-main.rs +++ b/src/test/run-pass/reexport-test-harness-main.rs @@ -8,7 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
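range_inclusive.rs above still writes inclusive ranges as `...` behind a feature gate; stable Rust now spells them `..=`. A sketch of the size-hint behavior the test checks, in the stable syntax:

```rust
fn main() {
    // 0..=255 over u8 covers every value, so the hint is 256 even though
    // 256 itself does not fit in a u8.
    let long = 0..=255u8;
    assert_eq!(long.size_hint(), (256, Some(256)));

    let short = 42..=42u8;
    assert_eq!(short.size_hint(), (1, Some(1)));
    assert_eq!(short.collect::<Vec<u8>>(), vec![42]);
}
```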
-// ignore-pretty // compile-flags:--test #![reexport_test_harness_main = "test_main"] diff --git a/src/test/run-pass/regions-borrow-evec-uniq.rs b/src/test/run-pass/regions-borrow-evec-uniq.rs index ec1f4eda28cc5..e61a8d147757c 100644 --- a/src/test/run-pass/regions-borrow-evec-uniq.rs +++ b/src/test/run-pass/regions-borrow-evec-uniq.rs @@ -15,11 +15,11 @@ fn foo(x: &[isize]) -> isize { } pub fn main() { - let p = vec!(1,2,3,4,5); + let p = vec![1,2,3,4,5]; let r = foo(&p); assert_eq!(r, 1); - let p = vec!(5,4,3,2,1); + let p = vec![5,4,3,2,1]; let r = foo(&p); assert_eq!(r, 5); } diff --git a/src/test/run-pass/regions-bound-lists-feature-gate-2.rs b/src/test/run-pass/regions-bound-lists-feature-gate-2.rs index a06e0f6da785a..47d2fe363d369 100644 --- a/src/test/run-pass/regions-bound-lists-feature-gate-2.rs +++ b/src/test/run-pass/regions-bound-lists-feature-gate-2.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - #![feature(issue_5723_bootstrap)] trait Foo { diff --git a/src/test/run-pass/regions-bound-lists-feature-gate.rs b/src/test/run-pass/regions-bound-lists-feature-gate.rs index 996583dc6de93..72db92aa93ce0 100644 --- a/src/test/run-pass/regions-bound-lists-feature-gate.rs +++ b/src/test/run-pass/regions-bound-lists-feature-gate.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - #![feature(issue_5723_bootstrap)] trait Foo { diff --git a/src/test/run-pass/regions-dependent-addr-of.rs b/src/test/run-pass/regions-dependent-addr-of.rs index a6a179c432c3c..e9a3e16438f81 100644 --- a/src/test/run-pass/regions-dependent-addr-of.rs +++ b/src/test/run-pass/regions-dependent-addr-of.rs @@ -90,7 +90,7 @@ fn get_v5_ref(a: &A, _i: usize) -> &isize { pub fn main() { let a = A {value: B {v1: 22, v2: [23, 24, 25], - v3: vec!(26, 27, 28), + v3: vec![26, 27, 28], v4: C { f: 29 }, v5: box C { f: 30 }, v6: Some(C { f: 31 })}}; diff --git a/src/test/run-pass/regions-dependent-autoslice.rs b/src/test/run-pass/regions-dependent-autoslice.rs index 7183937fe8035..cd140f7aa599b 100644 --- a/src/test/run-pass/regions-dependent-autoslice.rs +++ b/src/test/run-pass/regions-dependent-autoslice.rs @@ -18,6 +18,6 @@ fn both<'r>(v: &'r [usize]) -> &'r [usize] { } pub fn main() { - let v = vec!(1,2,3); + let v = vec![1,2,3]; both(&v); } diff --git a/src/test/run-pass/regions-infer-borrow-scope-view.rs b/src/test/run-pass/regions-infer-borrow-scope-view.rs index f9ba8e82ef715..262e936826e51 100644 --- a/src/test/run-pass/regions-infer-borrow-scope-view.rs +++ b/src/test/run-pass/regions-infer-borrow-scope-view.rs @@ -13,7 +13,7 @@ fn view(x: &[T]) -> &[T] {x} pub fn main() { - let v = vec!(1, 2, 3); + let v = vec![1, 2, 3]; let x = view(&v); let y = view(x); assert!((v[0] == x[0]) && (v[0] == y[0])); diff --git a/src/test/run-pass/regions-relate-bound-regions-on-closures-to-inference-variables.rs b/src/test/run-pass/regions-relate-bound-regions-on-closures-to-inference-variables.rs index 465f43e36b94f..8eee54b3fec66 100644 --- a/src/test/run-pass/regions-relate-bound-regions-on-closures-to-inference-variables.rs +++ b/src/test/run-pass/regions-relate-bound-regions-on-closures-to-inference-variables.rs @@ -63,7 +63,7 @@ impl<'a,'tcx> Foo<'a,'tcx> { } fn main() { - let v = vec!(); + let v = vec![]; let cx = Ctxt { x: &v }; let mut foo = Foo { cx: &cx }; assert_eq!(foo.bother(), 22); // just so the code is not dead, basically diff --git 
a/src/test/run-pass/seq-compare.rs b/src/test/run-pass/seq-compare.rs index f1a21d90ab2dc..43612f529772a 100644 --- a/src/test/run-pass/seq-compare.rs +++ b/src/test/run-pass/seq-compare.rs @@ -14,13 +14,13 @@ pub fn main() { assert!(("hello".to_string() < "hellr".to_string())); assert!(("hello ".to_string() > "hello".to_string())); assert!(("hello".to_string() != "there".to_string())); - assert!((vec!(1, 2, 3, 4) > vec!(1, 2, 3))); - assert!((vec!(1, 2, 3) < vec!(1, 2, 3, 4))); - assert!((vec!(1, 2, 4, 4) > vec!(1, 2, 3, 4))); - assert!((vec!(1, 2, 3, 4) < vec!(1, 2, 4, 4))); - assert!((vec!(1, 2, 3) <= vec!(1, 2, 3))); - assert!((vec!(1, 2, 3) <= vec!(1, 2, 3, 3))); - assert!((vec!(1, 2, 3, 4) > vec!(1, 2, 3))); - assert_eq!(vec!(1, 2, 3), vec!(1, 2, 3)); - assert!((vec!(1, 2, 3) != vec!(1, 1, 3))); + assert!((vec![1, 2, 3, 4] > vec![1, 2, 3])); + assert!((vec![1, 2, 3] < vec![1, 2, 3, 4])); + assert!((vec![1, 2, 4, 4] > vec![1, 2, 3, 4])); + assert!((vec![1, 2, 3, 4] < vec![1, 2, 4, 4])); + assert!((vec![1, 2, 3] <= vec![1, 2, 3])); + assert!((vec![1, 2, 3] <= vec![1, 2, 3, 3])); + assert!((vec![1, 2, 3, 4] > vec![1, 2, 3])); + assert_eq!(vec![1, 2, 3], vec![1, 2, 3]); + assert!((vec![1, 2, 3] != vec![1, 1, 3])); } diff --git a/src/test/run-pass/shebang.rs b/src/test/run-pass/shebang.rs index 15ab21bbc8da7..a0947cd49a42d 100644 --- a/src/test/run-pass/shebang.rs +++ b/src/test/run-pass/shebang.rs @@ -9,7 +9,4 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty: `expand` adds some preludes before shebang -// - pub fn main() { println!("Hello World"); } diff --git a/src/test/run-pass/simd-intrinsic-generic-cast.rs b/src/test/run-pass/simd-intrinsic-generic-cast.rs index 2efd9333999b2..d32fa01c7b945 100644 --- a/src/test/run-pass/simd-intrinsic-generic-cast.rs +++ b/src/test/run-pass/simd-intrinsic-generic-cast.rs @@ -7,6 +7,7 @@ // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. 
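Several hunks above (rcvr-borrowed-to-slice, regions-borrow-evec-uniq, regions-infer-borrow-scope-view) pass `&v` for a `Vec` to functions expecting slices, relying on deref coercion. A minimal illustration:

```rust
fn first(xs: &[i32]) -> i32 {
    xs[0]
}

fn main() {
    let v = vec![5, 4, 3, 2, 1];
    // &Vec<i32> coerces to &[i32] at the call site.
    assert_eq!(first(&v), 5);
    // Explicit slicing produces the slice type directly.
    assert_eq!(first(&v[1..]), 4);
}
```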
+// ignore-emscripten linking with emcc failed #![feature(repr_simd, platform_intrinsics, concat_idents, test)] #![allow(non_camel_case_types)] diff --git a/src/test/run-pass/simd-intrinsic-generic-elements.rs b/src/test/run-pass/simd-intrinsic-generic-elements.rs index 5cb57b63ada2c..f0444c2717056 100644 --- a/src/test/run-pass/simd-intrinsic-generic-elements.rs +++ b/src/test/run-pass/simd-intrinsic-generic-elements.rs @@ -10,8 +10,6 @@ #![feature(repr_simd, platform_intrinsics)] -// ignore-pretty : (#23623) problems when ending with // comments - #[repr(simd)] #[derive(Copy, Clone, Debug, PartialEq)] #[allow(non_camel_case_types)] diff --git a/src/test/run-pass/size-and-align.rs b/src/test/run-pass/size-and-align.rs index 007ce52d7c455..13d55e0172e71 100644 --- a/src/test/run-pass/size-and-align.rs +++ b/src/test/run-pass/size-and-align.rs @@ -22,6 +22,6 @@ fn uhoh(v: Vec> ) { } pub fn main() { - let v: Vec> = vec!(clam::b::, clam::b::, clam::a::(42, 17)); + let v: Vec> = vec![clam::b::, clam::b::, clam::a::(42, 17)]; uhoh::(v); } diff --git a/src/test/run-pass/static-impl.rs b/src/test/run-pass/static-impl.rs index 84bb1b871b97e..89fd83ced4c8c 100644 --- a/src/test/run-pass/static-impl.rs +++ b/src/test/run-pass/static-impl.rs @@ -62,10 +62,10 @@ pub fn main() { assert_eq!(10_usize.plus(), 30); assert_eq!(("hi".to_string()).plus(), 200); - assert_eq!((vec!(1)).length_().str(), "1".to_string()); - let vect = vec!(3, 4).map_(|a| *a + 4); + assert_eq!((vec![1]).length_().str(), "1".to_string()); + let vect = vec![3, 4].map_(|a| *a + 4); assert_eq!(vect[0], 7); - let vect = (vec!(3, 4)).map_::(|a| *a as usize + 4_usize); + let vect = (vec![3, 4]).map_::(|a| *a as usize + 4_usize); assert_eq!(vect[0], 7_usize); let mut x = 0_usize; 10_usize.multi(|_n| x += 2_usize ); diff --git a/src/test/run-pass/struct-field-shorthand.rs b/src/test/run-pass/struct-field-shorthand.rs new file mode 100644 index 0000000000000..fe91db572d20e --- /dev/null +++ b/src/test/run-pass/struct-field-shorthand.rs @@ -0,0 +1,37 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(field_init_shorthand)] + +struct Foo { + x: i32, + y: bool, + z: i32 +} + +struct Bar { + x: i32 +} + +pub fn main() { + let (x, y, z) = (1, true, 2); + let a = Foo { x, y: y, z }; + assert_eq!(a.x, x); + assert_eq!(a.y, y); + assert_eq!(a.z, z); + + let b = Bar { x, }; + assert_eq!(b.x, x); + + let c = Foo { z, y, x }; + assert_eq!(c.x, x); + assert_eq!(c.y, y); + assert_eq!(c.z, z); +} diff --git a/src/test/run-pass/struct-path-associated-type.rs b/src/test/run-pass/struct-path-associated-type.rs new file mode 100644 index 0000000000000..b033ed5c80210 --- /dev/null +++ b/src/test/run-pass/struct-path-associated-type.rs @@ -0,0 +1,35 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
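The new struct-field-shorthand test above is gated on `field_init_shorthand`; the shorthand has since become stable. A stand-alone sketch with an illustrative struct:

```rust
struct Point { x: i32, y: i32, label: &'static str }

fn main() {
    let (x, y) = (3, 4);
    let label = "corner";
    // `x` is shorthand for `x: x`; shorthand and explicit forms can be mixed.
    let p = Point { x, y: y, label };
    assert_eq!(p.x, 3);
    assert_eq!(p.y, 4);
    assert_eq!(p.label, "corner");
}
```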
+ +struct S { + a: T, + b: U, +} + +trait Tr { + type A; +} +impl Tr for u8 { + type A = S; +} + +fn f>>() { + let s = T::A { a: 0, b: 1 }; + match s { + T::A { a, b } => { + assert_eq!(a, 0); + assert_eq!(b, 1); + } + } +} + +fn main() { + f::(); +} diff --git a/src/test/run-pass/struct-path-self.rs b/src/test/run-pass/struct-path-self.rs new file mode 100644 index 0000000000000..c7a282c2a2fa0 --- /dev/null +++ b/src/test/run-pass/struct-path-self.rs @@ -0,0 +1,54 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::ops::Add; + +struct S { + a: T, + b: U, +} + +trait Tr { + fn f(&self) -> Self; +} + +impl, U: Default> Tr for S { + fn f(&self) -> Self { + let s = Self { a: Default::default(), b: Default::default() }; + match s { + Self { a, b } => Self { a: a + 1, b: b } + } + } +} + +impl> S { + fn g(&self) -> Self { + let s = Self { a: Default::default(), b: Default::default() }; + match s { + Self { a, b } => Self { a: a, b: b + 1 } + } + } +} + +impl S { + fn new() -> Self { + Self { a: 0, b: 1 } + } +} + +fn main() { + let s0 = S::new(); + let s1 = s0.f(); + assert_eq!(s1.a, 1); + assert_eq!(s1.b, 0); + let s2 = s0.g(); + assert_eq!(s2.a, 0); + assert_eq!(s2.b, 1); +} diff --git a/src/test/run-pass/super-fast-paren-parsing.rs b/src/test/run-pass/super-fast-paren-parsing.rs index b764a983a0c09..a1bbd19021127 100644 --- a/src/test/run-pass/super-fast-paren-parsing.rs +++ b/src/test/run-pass/super-fast-paren-parsing.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty -// // exec-env:RUST_MIN_STACK=16000000 // rustc-env:RUST_MIN_STACK=16000000 // diff --git a/src/test/run-pass/swap-2.rs b/src/test/run-pass/swap-2.rs index 3dbd7f1a60126..4601b7d7cf567 100644 --- a/src/test/run-pass/swap-2.rs +++ b/src/test/run-pass/swap-2.rs @@ -12,7 +12,7 @@ use std::mem::swap; pub fn main() { - let mut a: Vec = vec!(0, 1, 2, 3, 4, 5, 6); + let mut a: Vec = vec![0, 1, 2, 3, 4, 5, 6]; a.swap(2, 4); assert_eq!(a[2], 4); assert_eq!(a[4], 2); diff --git a/src/test/run-pass/syntax-extension-source-utils.rs b/src/test/run-pass/syntax-extension-source-utils.rs index 2f52e424936ea..3b5f033d07b7d 100644 --- a/src/test/run-pass/syntax-extension-source-utils.rs +++ b/src/test/run-pass/syntax-extension-source-utils.rs @@ -8,10 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(core)] - -// This test is brittle! -// ignore-pretty - the pretty tests lose path information, breaking include! +// ignore-pretty issue #37195 pub mod m1 { pub mod m2 { @@ -24,9 +21,9 @@ pub mod m1 { macro_rules! 
indirect_line { () => ( line!() ) } pub fn main() { - assert_eq!(line!(), 27); + assert_eq!(line!(), 24); assert_eq!(column!(), 4); - assert_eq!(indirect_line!(), 29); + assert_eq!(indirect_line!(), 26); assert!((file!().ends_with("syntax-extension-source-utils.rs"))); assert_eq!(stringify!((2*3) + 5).to_string(), "( 2 * 3 ) + 5".to_string()); assert!(include!("syntax-extension-source-utils-files/includeme.\ @@ -43,5 +40,5 @@ pub fn main() { // The Windows tests are wrapped in an extra module for some reason assert!((m1::m2::where_am_i().ends_with("m1::m2"))); - assert_eq!((46, "( 2 * 3 ) + 5"), (line!(), stringify!((2*3) + 5))); + assert_eq!((43, "( 2 * 3 ) + 5"), (line!(), stringify!((2*3) + 5))); } diff --git a/src/test/run-pass/task-comm-16.rs b/src/test/run-pass/task-comm-16.rs index c6d8f3c0d9b0b..0caf21ead3969 100644 --- a/src/test/run-pass/task-comm-16.rs +++ b/src/test/run-pass/task-comm-16.rs @@ -27,7 +27,7 @@ fn test_rec() { fn test_vec() { let (tx, rx) = channel(); - let v0: Vec = vec!(0, 1, 2); + let v0: Vec = vec![0, 1, 2]; tx.send(v0).unwrap(); let v1 = rx.recv().unwrap(); assert_eq!(v1[0], 0); diff --git a/src/test/run-pass/task-comm-3.rs b/src/test/run-pass/task-comm-3.rs index 0e8542bababdd..78f29f46edf4a 100644 --- a/src/test/run-pass/task-comm-3.rs +++ b/src/test/run-pass/task-comm-3.rs @@ -11,7 +11,6 @@ #![feature(std_misc)] // ignore-emscripten no threads support -// no-pretty-expanded FIXME #15189 use std::thread; use std::sync::mpsc::{channel, Sender}; diff --git a/src/test/run-pass/task-stderr.rs b/src/test/run-pass/task-stderr.rs index 1f64f40c5255b..13d5cc989e94d 100644 --- a/src/test/run-pass/task-stderr.rs +++ b/src/test/run-pass/task-stderr.rs @@ -30,7 +30,7 @@ fn main() { let data = Arc::new(Mutex::new(Vec::new())); let sink = Sink(data.clone()); let res = thread::Builder::new().spawn(move|| -> () { - io::set_panic(Box::new(sink)); + io::set_panic(Some(Box::new(sink))); panic!("Hello, world!") }).unwrap().join(); assert!(res.is_err()); diff --git a/src/test/run-pass/test-fn-signature-verification-for-explicit-return-type.rs b/src/test/run-pass/test-fn-signature-verification-for-explicit-return-type.rs index d58b5d3a00fec..10ad838d3cb87 100644 --- a/src/test/run-pass/test-fn-signature-verification-for-explicit-return-type.rs +++ b/src/test/run-pass/test-fn-signature-verification-for-explicit-return-type.rs @@ -11,7 +11,6 @@ #![feature(test)] // compile-flags: --test -// no-pretty-expanded extern crate test; #[bench] diff --git a/src/test/run-pass/test-runner-hides-main.rs b/src/test/run-pass/test-runner-hides-main.rs index 839e91f3793d4..7b696c1f8d27c 100644 --- a/src/test/run-pass/test-runner-hides-main.rs +++ b/src/test/run-pass/test-runner-hides-main.rs @@ -9,8 +9,6 @@ // except according to those terms. // compile-flags:--test -// ignore-pretty: does not work well with `--test` - // Building as a test runner means that a synthetic main will be run, // not ours pub fn main() { panic!(); } diff --git a/src/test/run-pass/test-should-fail-good-message.rs b/src/test/run-pass/test-should-fail-good-message.rs index 28698499303a9..e665fa4fc7b58 100644 --- a/src/test/run-pass/test-should-fail-good-message.rs +++ b/src/test/run-pass/test-should-fail-good-message.rs @@ -9,8 +9,6 @@ // except according to those terms. 
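The renumbered expectations in syntax-extension-source-utils.rs come from the `line!`, `column!`, and `file!` macros, which expand to the location of the invocation, so moving code shifts the expected values. A short sketch of these stable source-location macros:

```rust
fn main() {
    // Each macro expands to a constant describing where it was written.
    println!("{}:{}:{}", file!(), line!(), column!());

    // `stringify!` turns its token argument into a string literal
    // without evaluating it.
    assert_eq!(stringify!(1 + 2), "1 + 2");
}
```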
// compile-flags: --test -// ignore-pretty: does not work well with `--test` - #[test] #[should_panic(expected = "foo")] pub fn test_foo() { diff --git a/src/test/run-pass/trait-bounds-in-arc.rs b/src/test/run-pass/trait-bounds-in-arc.rs index 0de6fbc91cc6b..9877dffe9df02 100644 --- a/src/test/run-pass/trait-bounds-in-arc.rs +++ b/src/test/run-pass/trait-bounds-in-arc.rs @@ -12,8 +12,6 @@ // and shared between threads as long as all types fulfill Send. // ignore-emscripten no threads support -// ignore-pretty - #![allow(unknown_features)] #![feature(box_syntax, std_misc)] @@ -77,10 +75,10 @@ pub fn main() { swim_speed: 998, name: "alec_guinness".to_string(), }; - let arc = Arc::new(vec!(box catte as Box, + let arc = Arc::new(vec![box catte as Box, box dogge1 as Box, box fishe as Box, - box dogge2 as Box)); + box dogge2 as Box]); let (tx1, rx1) = channel(); let arc1 = arc.clone(); let t1 = thread::spawn(move|| { check_legs(arc1); tx1.send(()); }); diff --git a/src/test/run-pass/trait-generic.rs b/src/test/run-pass/trait-generic.rs index 4998236629153..eadda5dfe299c 100644 --- a/src/test/run-pass/trait-generic.rs +++ b/src/test/run-pass/trait-generic.rs @@ -45,9 +45,9 @@ fn bar>(x: T) -> Vec { } pub fn main() { - assert_eq!(foo(vec!(1)), ["hi".to_string()]); - assert_eq!(bar:: >(vec!(4, 5)), ["4".to_string(), "5".to_string()]); - assert_eq!(bar:: >(vec!("x".to_string(), "y".to_string())), + assert_eq!(foo(vec![1]), ["hi".to_string()]); + assert_eq!(bar:: >(vec![4, 5]), ["4".to_string(), "5".to_string()]); + assert_eq!(bar:: >(vec!["x".to_string(), "y".to_string()]), ["x".to_string(), "y".to_string()]); - assert_eq!(bar::<(), Vec<()>>(vec!(())), ["()".to_string()]); + assert_eq!(bar::<(), Vec<()>>(vec![()]), ["()".to_string()]); } diff --git a/src/test/run-pass/trait-to-str.rs b/src/test/run-pass/trait-to-str.rs index f5af05d872bd4..9671e31d7e48c 100644 --- a/src/test/run-pass/trait-to-str.rs +++ b/src/test/run-pass/trait-to-str.rs @@ -30,15 +30,15 @@ impl to_str for Vec { pub fn main() { assert_eq!(1.to_string_(), "1".to_string()); - assert_eq!((vec!(2, 3, 4)).to_string_(), "[2, 3, 4]".to_string()); + assert_eq!((vec![2, 3, 4]).to_string_(), "[2, 3, 4]".to_string()); fn indirect(x: T) -> String { format!("{}!", x.to_string_()) } - assert_eq!(indirect(vec!(10, 20)), "[10, 20]!".to_string()); + assert_eq!(indirect(vec![10, 20]), "[10, 20]!".to_string()); fn indirect2(x: T) -> String { indirect(x) } - assert_eq!(indirect2(vec!(1)), "[1]!".to_string()); + assert_eq!(indirect2(vec![1]), "[1]!".to_string()); } diff --git a/src/test/run-pass/try-operator-hygiene.rs b/src/test/run-pass/try-operator-hygiene.rs index ae622df498f94..53d6185020a02 100644 --- a/src/test/run-pass/try-operator-hygiene.rs +++ b/src/test/run-pass/try-operator-hygiene.rs @@ -18,8 +18,6 @@ // This test verifies that the expansion is hygienic, i.e. it's not affected by other `val` and // `err` bindings that may be in scope. -#![feature(question_mark)] - use std::num::ParseIntError; fn main() { diff --git a/src/test/run-pass/try-operator.rs b/src/test/run-pass/try-operator.rs index de5ccf09c5923..29de6364bf147 100644 --- a/src/test/run-pass/try-operator.rs +++ b/src/test/run-pass/try-operator.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
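The removal of `#![feature(question_mark)]` from the try-operator tests reflects the `?` operator working without a feature gate (it was stabilized in Rust 1.13). A minimal sketch of `?` forwarding a `ParseIntError`, loosely following the shape of those tests; the `double` helper is illustrative:

```rust
use std::num::ParseIntError;

// `?` returns early with the error if parsing fails,
// otherwise it unwraps the `Ok` value.
fn double(input: &str) -> Result<i32, ParseIntError> {
    let n: i32 = input.trim().parse()?;
    Ok(n * 2)
}

fn main() {
    assert_eq!(double("21"), Ok(42));
    assert!(double("not a number").is_err());
}
```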
-#![feature(question_mark)] - use std::fs::File; use std::io::{Read, self}; use std::num::ParseIntError; diff --git a/src/test/run-pass/type-macros-simple.rs b/src/test/run-pass/type-macros-simple.rs index 7d1045cf3f1a8..c61133f795b34 100644 --- a/src/test/run-pass/type-macros-simple.rs +++ b/src/test/run-pass/type-macros-simple.rs @@ -15,3 +15,22 @@ macro_rules! Tuple { fn main() { let x: Tuple!(i32, i32) = (1, 2); } + +fn issue_36540() { + let i32 = 0; + macro_rules! m { () => { i32 } } + struct S(m!(), T) where T: Trait; + + let x: m!() = m!(); + std::cell::Cell::::new(m!()); + impl std::ops::Index for Trait<(m!(), T)> + where T: Trait + { + type Output = m!(); + fn index(&self, i: m!()) -> &m!() { + unimplemented!() + } + } +} + +trait Trait {} diff --git a/src/test/run-pass/typeck-fn-to-unsafe-fn-ptr.rs b/src/test/run-pass/typeck-fn-to-unsafe-fn-ptr.rs new file mode 100644 index 0000000000000..323705f3f955f --- /dev/null +++ b/src/test/run-pass/typeck-fn-to-unsafe-fn-ptr.rs @@ -0,0 +1,21 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// This tests reification from safe function to `unsafe fn` pointer + +fn do_nothing() -> () {} + +unsafe fn call_unsafe(func: unsafe fn() -> ()) -> () { + func() +} + +pub fn main() { + unsafe { call_unsafe(do_nothing); } +} diff --git a/src/test/run-pass/typeid-intrinsic.rs b/src/test/run-pass/typeid-intrinsic.rs index 36650368d57be..54d5415a5539b 100644 --- a/src/test/run-pass/typeid-intrinsic.rs +++ b/src/test/run-pass/typeid-intrinsic.rs @@ -87,4 +87,8 @@ pub fn main() { assert_eq!(other1::id_u32_iterator(), other2::id_u32_iterator()); assert!(other1::id_i32_iterator() != other1::id_u32_iterator()); assert!(TypeId::of::() != TypeId::of::()); + + // Check fn pointer against collisions + assert!(TypeId::of:: A) -> A>() != + TypeId::of:: A, A) -> A>()); } diff --git a/src/test/run-pass/unboxed-closures-counter-not-moved.rs b/src/test/run-pass/unboxed-closures-counter-not-moved.rs index 0b85916d22410..300a0ee63f817 100644 --- a/src/test/run-pass/unboxed-closures-counter-not-moved.rs +++ b/src/test/run-pass/unboxed-closures-counter-not-moved.rs @@ -15,7 +15,7 @@ fn call(f: F) where F : FnOnce() { } fn main() { - let y = vec!(format!("Hello"), format!("World")); + let y = vec![format!("Hello"), format!("World")]; let mut counter = 22_u32; call(|| { diff --git a/src/test/run-pass/unboxed-closures-move-some-upvars-in-by-ref-closure.rs b/src/test/run-pass/unboxed-closures-move-some-upvars-in-by-ref-closure.rs index 99663646254e7..b9a16535c420a 100644 --- a/src/test/run-pass/unboxed-closures-move-some-upvars-in-by-ref-closure.rs +++ b/src/test/run-pass/unboxed-closures-move-some-upvars-in-by-ref-closure.rs @@ -16,8 +16,8 @@ fn call(f: F) where F : FnOnce() { } fn main() { - let mut x = vec!(format!("Hello")); - let y = vec!(format!("World")); + let mut x = vec![format!("Hello")]; + let y = vec![format!("World")]; call(|| { // Here: `x` must be captured with a mutable reference in // order for us to append on it, and `y` must be captured by diff --git a/src/test/run-pass/union/union-backcomp.rs b/src/test/run-pass/union/union-backcomp.rs index 9394b618ddf25..0f8c996bebda8 100644 --- a/src/test/run-pass/union/union-backcomp.rs +++ 
b/src/test/run-pass/union/union-backcomp.rs @@ -10,6 +10,12 @@ #![feature(untagged_unions)] +macro_rules! union { + () => (struct S;) +} + +union!(); + fn union() {} fn main() { diff --git a/src/test/run-pass/union/union-with-drop-fields-lint.rs b/src/test/run-pass/union/union-with-drop-fields-lint.rs index 5a1424830d074..6cb7e82d6b740 100644 --- a/src/test/run-pass/union/union-with-drop-fields-lint.rs +++ b/src/test/run-pass/union/union-with-drop-fields-lint.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// ignore-pretty - #![feature(untagged_unions)] #![allow(dead_code)] #![allow(unions_with_drop_fields)] diff --git a/src/test/run-pass/uniq-self-in-mut-slot.rs b/src/test/run-pass/uniq-self-in-mut-slot.rs index baca157a488b7..7910380abeef0 100644 --- a/src/test/run-pass/uniq-self-in-mut-slot.rs +++ b/src/test/run-pass/uniq-self-in-mut-slot.rs @@ -17,7 +17,7 @@ struct X { } trait Changer { - fn change(mut self: Box) -> Box; + fn change(self: Box) -> Box; } impl Changer for X { diff --git a/src/test/run-pass/unique-autoderef-index.rs b/src/test/run-pass/unique-autoderef-index.rs index c68ff1f0612f5..1ef61008b3c2b 100644 --- a/src/test/run-pass/unique-autoderef-index.rs +++ b/src/test/run-pass/unique-autoderef-index.rs @@ -13,6 +13,6 @@ #![feature(box_syntax)] pub fn main() { - let i: Box<_> = box vec!(100); + let i: Box<_> = box vec![100]; assert_eq!((*i)[0], 100); } diff --git a/src/test/run-pass/unique-create.rs b/src/test/run-pass/unique-create.rs index 8469ae702009a..6d638bbf562c7 100644 --- a/src/test/run-pass/unique-create.rs +++ b/src/test/run-pass/unique-create.rs @@ -18,5 +18,5 @@ pub fn main() { } fn vec() { - vec!(0); + vec![0]; } diff --git a/src/test/run-pass/unique-drop-complex.rs b/src/test/run-pass/unique-drop-complex.rs index 056acd162082b..1910d51bd0bcb 100644 --- a/src/test/run-pass/unique-drop-complex.rs +++ b/src/test/run-pass/unique-drop-complex.rs @@ -14,5 +14,5 @@ #![feature(box_syntax)] pub fn main() { - let _x: Box<_> = box vec!(0,0,0,0,0); + let _x: Box<_> = box vec![0,0,0,0,0]; } diff --git a/src/test/run-pass/unique-in-vec-copy.rs b/src/test/run-pass/unique-in-vec-copy.rs index ab0e3ee809dbd..ece206caa02e5 100644 --- a/src/test/run-pass/unique-in-vec-copy.rs +++ b/src/test/run-pass/unique-in-vec-copy.rs @@ -13,7 +13,7 @@ #![feature(box_syntax)] pub fn main() { - let mut a: Vec> = vec!(box 10); + let mut a: Vec> = vec![box 10]; let b = a.clone(); assert_eq!(*a[0], 10); diff --git a/src/test/run-pass/unique-in-vec.rs b/src/test/run-pass/unique-in-vec.rs index 026bc0435d91c..bd965d41eea2c 100644 --- a/src/test/run-pass/unique-in-vec.rs +++ b/src/test/run-pass/unique-in-vec.rs @@ -13,6 +13,6 @@ #![feature(box_syntax)] pub fn main() { - let vect : Vec> = vec!(box 100); + let vect : Vec> = vec![box 100]; assert_eq!(vect[0], box 100); } diff --git a/src/test/run-pass/unsized2.rs b/src/test/run-pass/unsized2.rs index 5b9fa5230d1e0..50d8d3d27f27b 100644 --- a/src/test/run-pass/unsized2.rs +++ b/src/test/run-pass/unsized2.rs @@ -89,7 +89,7 @@ trait T7 { fn m2(&self, x: &T5); } -// The last field in a struct or variant may be unsized +// The last field in a struct may be unsized struct S2 { f: X, } @@ -97,12 +97,6 @@ struct S3 { f1: isize, f2: X, } -enum E { - V1(X), - V2{x: X}, - V3(isize, X), - V4{u: isize, x: X}, -} pub fn main() { } diff --git a/src/test/run-pass/utf8_chars.rs b/src/test/run-pass/utf8_chars.rs index 0f751501293f5..0a984429fabbf 100644 --- a/src/test/run-pass/utf8_chars.rs +++ 
b/src/test/run-pass/utf8_chars.rs @@ -15,7 +15,7 @@ use std::str; pub fn main() { // Chars of 1, 2, 3, and 4 bytes - let chs: Vec = vec!('e', 'é', '€', '\u{10000}'); + let chs: Vec = vec!['e', 'é', '€', '\u{10000}']; let s: String = chs.iter().cloned().collect(); let schs: Vec = s.chars().collect(); diff --git a/src/test/run-pass/lint-non-uppercase-statics-lowercase-mut-statics.rs b/src/test/run-pass/variance-iterators-in-libcore.rs similarity index 65% rename from src/test/run-pass/lint-non-uppercase-statics-lowercase-mut-statics.rs rename to src/test/run-pass/variance-iterators-in-libcore.rs index aa5b3834c0178..b9677d5ba8598 100644 --- a/src/test/run-pass/lint-non-uppercase-statics-lowercase-mut-statics.rs +++ b/src/test/run-pass/variance-iterators-in-libcore.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -8,12 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![allow(warnings)] -// pretty-expanded FIXME #23616 +use std::iter::Zip; -#![forbid(non_camel_case_types)] -#![forbid(non_upper_case_globals)] +fn zip_covariant<'a, A, B>(iter: Zip<&'static A, &'static B>) -> Zip<&'a A, &'a B> { iter } -static mut bar: isize = 2; - -pub fn main() {} +fn main() { } diff --git a/src/test/run-pass/vec-concat.rs b/src/test/run-pass/vec-concat.rs index 5fe9dd60591ce..8ba8df57e542c 100644 --- a/src/test/run-pass/vec-concat.rs +++ b/src/test/run-pass/vec-concat.rs @@ -11,8 +11,8 @@ use std::vec; pub fn main() { - let a: Vec = vec!(1, 2, 3, 4, 5); - let b: Vec = vec!(6, 7, 8, 9, 0); + let a: Vec = vec![1, 2, 3, 4, 5]; + let b: Vec = vec![6, 7, 8, 9, 0]; let mut v: Vec = a; v.extend_from_slice(&b); println!("{}", v[9]); diff --git a/src/test/run-pass/vec-growth.rs b/src/test/run-pass/vec-growth.rs index e51d898e1d46e..5bf6a457df9b3 100644 --- a/src/test/run-pass/vec-growth.rs +++ b/src/test/run-pass/vec-growth.rs @@ -11,7 +11,7 @@ pub fn main() { - let mut v = vec!(1); + let mut v = vec![1]; v.push(2); v.push(3); v.push(4); diff --git a/src/test/run-pass/vec-late-init.rs b/src/test/run-pass/vec-late-init.rs index 7a8c0739efeb9..420f6a429f1f8 100644 --- a/src/test/run-pass/vec-late-init.rs +++ b/src/test/run-pass/vec-late-init.rs @@ -11,6 +11,6 @@ pub fn main() { let mut later: Vec ; - if true { later = vec!(1); } else { later = vec!(2); } + if true { later = vec![1]; } else { later = vec![2]; } println!("{}", later[0]); } diff --git a/src/test/run-pass/vec-macro-with-trailing-comma.rs b/src/test/run-pass/vec-macro-with-trailing-comma.rs index 35af249ef5fad..135ecb4749845 100644 --- a/src/test/run-pass/vec-macro-with-trailing-comma.rs +++ b/src/test/run-pass/vec-macro-with-trailing-comma.rs @@ -11,6 +11,6 @@ pub fn main() { - assert_eq!(vec!(1), vec!(1,)); - assert_eq!(vec!(1, 2, 3), vec!(1, 2, 3,)); + assert_eq!(vec![1], vec![1,]); + assert_eq!(vec![1, 2, 3], vec![1, 2, 3,]); } diff --git a/src/test/run-pass/vec-matching.rs b/src/test/run-pass/vec-matching.rs index 97006f54cd955..bd0731a555cb6 100644 --- a/src/test/run-pass/vec-matching.rs +++ b/src/test/run-pass/vec-matching.rs @@ -144,6 +144,20 @@ fn e() { assert_eq!(c, 1); } +fn f() { + let x = &[1, 2, 3, 4, 5]; + let [a, [b, [c, ..].., d].., e] = *x; + assert_eq!((a, b, c, d, e), (1, 2, 3, 4, 5)); + + let x: &[isize] = x; + let (a, b, c, d, e) = match *x { + [a, [b, [c, ..].., d].., e] 
=> (a, b, c, d, e), + _ => unimplemented!() + }; + + assert_eq!((a, b, c, d, e), (1, 2, 3, 4, 5)); +} + pub fn main() { a(); b(); @@ -151,4 +165,5 @@ pub fn main() { c(); d(); e(); + f(); } diff --git a/src/test/run-pass/vec-push.rs b/src/test/run-pass/vec-push.rs index 33f01c5bd41c8..14a52cc4b5c55 100644 --- a/src/test/run-pass/vec-push.rs +++ b/src/test/run-pass/vec-push.rs @@ -8,4 +8,4 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -pub fn main() { let mut v = vec!(1, 2, 3); v.push(1); } +pub fn main() { let mut v = vec![1, 2, 3]; v.push(1); } diff --git a/src/test/run-pass/vec-to_str.rs b/src/test/run-pass/vec-to_str.rs index f000ada770a40..1fed6a0be486c 100644 --- a/src/test/run-pass/vec-to_str.rs +++ b/src/test/run-pass/vec-to_str.rs @@ -10,9 +10,9 @@ pub fn main() { - assert_eq!(format!("{:?}", vec!(0, 1)), "[0, 1]".to_string()); + assert_eq!(format!("{:?}", vec![0, 1]), "[0, 1]".to_string()); - let foo = vec!(3, 4); + let foo = vec![3, 4]; let bar: &[isize] = &[4, 5]; assert_eq!(format!("{:?}", foo), "[3, 4]"); diff --git a/src/test/run-pass/vec.rs b/src/test/run-pass/vec.rs index c61b3d56dbfb9..9cacb9db20ea9 100644 --- a/src/test/run-pass/vec.rs +++ b/src/test/run-pass/vec.rs @@ -11,7 +11,7 @@ pub fn main() { - let v: Vec = vec!(10, 20); + let v: Vec = vec![10, 20]; assert_eq!(v[0], 10); assert_eq!(v[1], 20); let mut x: usize = 0; diff --git a/src/test/run-pass/while-with-break.rs b/src/test/run-pass/while-with-break.rs index ed149ad5109db..4c599e9c42898 100644 --- a/src/test/run-pass/while-with-break.rs +++ b/src/test/run-pass/while-with-break.rs @@ -16,7 +16,7 @@ pub fn main() { i = i + 1; if i == 95 { let _v: Vec = - vec!(1, 2, 3, 4, 5); // we check that it is freed by break + vec![1, 2, 3, 4, 5]; // we check that it is freed by break println!("breaking"); break; diff --git a/src/test/rustdoc/line-breaks.rs b/src/test/rustdoc/line-breaks.rs new file mode 100644 index 0000000000000..cc608a2447574 --- /dev/null +++ b/src/test/rustdoc/line-breaks.rs @@ -0,0 +1,21 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_name = "foo"] + +//@count foo/fn.function_with_a_really_long_name.html //pre/br 2 +pub fn function_with_a_really_long_name(parameter_one: i32, + parameter_two: i32) + -> Option { + Some(parameter_one + parameter_two) +} + +//@count foo/fn.short_name.html //pre/br 0 +pub fn short_name(param: i32) -> i32 { param + 1 } diff --git a/src/test/rustdoc/playground-empty.rs b/src/test/rustdoc/playground-empty.rs new file mode 100644 index 0000000000000..00881a62dd0a8 --- /dev/null +++ b/src/test/rustdoc/playground-empty.rs @@ -0,0 +1,21 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_name = "foo"] + +#![doc(html_playground_url = "")] + +//! module docs +//! +//! ``` +//! println!("Hello, world!"); +//! 
``` + +// @!has foo/index.html '//a[@class="test-arrow"]' "Run" diff --git a/src/test/rustdoc/playground-none.rs b/src/test/rustdoc/playground-none.rs new file mode 100644 index 0000000000000..83c312d7ab220 --- /dev/null +++ b/src/test/rustdoc/playground-none.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_name = "foo"] + +//! module docs +//! +//! ``` +//! println!("Hello, world!"); +//! ``` + +// @!has foo/index.html '//a[@class="test-arrow"]' "Run" diff --git a/src/test/rustdoc/playground.rs b/src/test/rustdoc/playground.rs new file mode 100644 index 0000000000000..9eb8dec51a7f2 --- /dev/null +++ b/src/test/rustdoc/playground.rs @@ -0,0 +1,39 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-tidy-linelength + +#![crate_name = "foo"] + +#![doc(html_playground_url = "https://www.example.com/")] + +//! module docs +//! +//! ``` +//! println!("Hello, world!"); +//! ``` +//! +//! ``` +//! fn main() { +//! println!("Hello, world!"); +//! } +//! ``` +//! +//! ``` +//! #![feature(something)] +//! +//! fn main() { +//! println!("Hello, world!"); +//! } +//! ``` + +// @matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=fn%20main()%20%7B%0A%20%20%20%20println!(%22Hello%2C%20world!%22)%3B%0A%7D%0A"]' "Run" +// @matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=fn%20main()%20%7B%0Aprintln!(%22Hello%2C%20world!%22)%3B%0A%7D"]' "Run" +// @matches foo/index.html '//a[@class="test-arrow"][@href="https://www.example.com/?code=%23!%5Bfeature(something)%5D%0A%0Afn%20main()%20%7B%0A%20%20%20%20println!(%22Hello%2C%20world!%22)%3B%0A%7D%0A&version=nightly"]' "Run" diff --git a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-a-2.rs b/src/test/rustdoc/rustc-macro-crate.rs similarity index 68% rename from src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-a-2.rs rename to src/test/rustdoc/rustc-macro-crate.rs index ff00a9d96a30a..fe80a90955045 100644 --- a/src/test/compile-fail-fulldeps/rustc-macro/auxiliary/derive-a-2.rs +++ b/src/test/rustdoc/rustc-macro-crate.rs @@ -8,18 +8,17 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
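The rustdoc playground tests above drive the `#![doc(html_playground_url = "...")]` crate attribute, which controls whether doc examples get a "Run" button and where that button sends the code (an empty URL disables it). A small sketch of a crate root using it; the public playground URL is just an example value:

```rust
// Point the "Run" buttons on doc examples at the public playground.
#![doc(html_playground_url = "https://play.rust-lang.org/")]

//! Crate-level docs.
//!
//! ```
//! println!("Hello, world!");
//! ```

pub fn placeholder() {}
```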
-// force-host // no-prefer-dynamic -#![feature(rustc_macro)] -#![feature(rustc_macro_lib)] -#![crate_type = "rustc-macro"] +#![feature(proc_macro)] +#![feature(proc_macro_lib)] +#![crate_type = "proc-macro"] -extern crate rustc_macro; +extern crate proc_macro; -use rustc_macro::TokenStream; +use proc_macro::TokenStream; -#[rustc_macro_derive(A)] -pub fn derive_a(input: TokenStream) -> TokenStream { +#[proc_macro_derive(Foo)] +pub fn foo(input: TokenStream) -> TokenStream { input } diff --git a/src/test/ui/check_match/issue-35609.rs b/src/test/ui/check_match/issue-35609.rs new file mode 100644 index 0000000000000..6497f69035dec --- /dev/null +++ b/src/test/ui/check_match/issue-35609.rs @@ -0,0 +1,53 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +enum Enum { + A, B, C, D, E, F +} +use Enum::*; + +struct S(Enum, ()); +struct Sd { x: Enum, y: () } + +fn main() { + match (A, ()) { + (A, _) => {} + } + + match (A, A) { + (_, A) => {} + } + + match ((A, ()), ()) { + ((A, ()), _) => {} + } + + match ((A, ()), A) { + ((A, ()), _) => {} + } + + match ((A, ()), ()) { + ((A, _), _) => {} + } + + + match S(A, ()) { + S(A, _) => {} + } + + match (Sd { x: A, y: () }) { + Sd { x: A, y: _ } => {} + } + + match Some(A) { + Some(A) => (), + None => () + } +} diff --git a/src/test/ui/check_match/issue-35609.stderr b/src/test/ui/check_match/issue-35609.stderr new file mode 100644 index 0000000000000..66069c7a86a34 --- /dev/null +++ b/src/test/ui/check_match/issue-35609.stderr @@ -0,0 +1,50 @@ +error[E0004]: non-exhaustive patterns: `(B, _)`, `(C, _)`, `(D, _)` and 2 more not covered + --> $DIR/issue-35609.rs:20:11 + | +20 | match (A, ()) { + | ^^^^^^^ patterns `(B, _)`, `(C, _)`, `(D, _)` and 2 more not covered + +error[E0004]: non-exhaustive patterns: `(A, B)`, `(B, B)`, `(C, B)` and 27 more not covered + --> $DIR/issue-35609.rs:24:11 + | +24 | match (A, A) { + | ^^^^^^ patterns `(A, B)`, `(B, B)`, `(C, B)` and 27 more not covered + +error[E0004]: non-exhaustive patterns: `((B, _), _)`, `((C, _), _)`, `((D, _), _)` and 2 more not covered + --> $DIR/issue-35609.rs:28:11 + | +28 | match ((A, ()), ()) { + | ^^^^^^^^^^^^^ patterns `((B, _), _)`, `((C, _), _)`, `((D, _), _)` and 2 more not covered + +error[E0004]: non-exhaustive patterns: `((B, _), _)`, `((C, _), _)`, `((D, _), _)` and 2 more not covered + --> $DIR/issue-35609.rs:32:11 + | +32 | match ((A, ()), A) { + | ^^^^^^^^^^^^ patterns `((B, _), _)`, `((C, _), _)`, `((D, _), _)` and 2 more not covered + +error[E0004]: non-exhaustive patterns: `((B, _), _)`, `((C, _), _)`, `((D, _), _)` and 2 more not covered + --> $DIR/issue-35609.rs:36:11 + | +36 | match ((A, ()), ()) { + | ^^^^^^^^^^^^^ patterns `((B, _), _)`, `((C, _), _)`, `((D, _), _)` and 2 more not covered + +error[E0004]: non-exhaustive patterns: `S(B, _)`, `S(C, _)`, `S(D, _)` and 2 more not covered + --> $DIR/issue-35609.rs:41:11 + | +41 | match S(A, ()) { + | ^^^^^^^^ patterns `S(B, _)`, `S(C, _)`, `S(D, _)` and 2 more not covered + +error[E0004]: non-exhaustive patterns: `Sd { x: B, .. }`, `Sd { x: C, .. }`, `Sd { x: D, .. }` and 2 more not covered + --> $DIR/issue-35609.rs:45:11 + | +45 | match (Sd { x: A, y: () }) { + | ^^^^^^^^^^^^^^^^^^^^ patterns `Sd { x: B, .. }`, `Sd { x: C, .. 
}`, `Sd { x: D, .. }` and 2 more not covered + +error[E0004]: non-exhaustive patterns: `Some(B)`, `Some(C)`, `Some(D)` and 2 more not covered + --> $DIR/issue-35609.rs:49:11 + | +49 | match Some(A) { + | ^^^^^^^ patterns `Some(B)`, `Some(C)`, `Some(D)` and 2 more not covered + +error: aborting due to 8 previous errors + diff --git a/src/test/ui/codemap_tests/bad-format-args.stderr b/src/test/ui/codemap_tests/bad-format-args.stderr index fab8e2c8ce136..87255dfe77476 100644 --- a/src/test/ui/codemap_tests/bad-format-args.stderr +++ b/src/test/ui/codemap_tests/bad-format-args.stderr @@ -4,7 +4,7 @@ error: requires at least a format string argument 12 | format!(); | ^^^^^^^^^^ | - = note: this error originates in a macro from the standard library + = note: this error originates in a macro outside of the current crate error: expected token: `,` --> $DIR/bad-format-args.rs:13:5 @@ -12,7 +12,7 @@ error: expected token: `,` 13 | format!("" 1); | ^^^^^^^^^^^^^^ | - = note: this error originates in a macro from the standard library + = note: this error originates in a macro outside of the current crate error: expected token: `,` --> $DIR/bad-format-args.rs:14:5 @@ -20,7 +20,7 @@ error: expected token: `,` 14 | format!("", 1 1); | ^^^^^^^^^^^^^^^^^ | - = note: this error originates in a macro from the standard library + = note: this error originates in a macro outside of the current crate error: aborting due to 3 previous errors diff --git a/src/test/ui/codemap_tests/issue-28308.stderr b/src/test/ui/codemap_tests/issue-28308.stderr index 0d51a3f36e923..d65b34f3f4164 100644 --- a/src/test/ui/codemap_tests/issue-28308.stderr +++ b/src/test/ui/codemap_tests/issue-28308.stderr @@ -4,7 +4,7 @@ error: cannot apply unary operator `!` to type `&'static str` 12 | assert!("foo"); | ^^^^^^^^^^^^^^^ | - = note: this error originates in a macro from the standard library + = note: this error originates in a macro outside of the current crate error: aborting due to previous error diff --git a/src/test/ui/codemap_tests/repair_span_std_macros.stderr b/src/test/ui/codemap_tests/repair_span_std_macros.stderr index 1c9cbd63c3388..73a1c5bae85e2 100644 --- a/src/test/ui/codemap_tests/repair_span_std_macros.stderr +++ b/src/test/ui/codemap_tests/repair_span_std_macros.stderr @@ -5,7 +5,7 @@ error[E0282]: unable to infer enough type information about `_` | ^^^^^^ cannot infer type for `_` | = note: type annotations or generic parameter binding required - = note: this error originates in a macro from the standard library + = note: this error originates in a macro outside of the current crate error: aborting due to previous error diff --git a/src/test/ui/codemap_tests/tab.stderr b/src/test/ui/codemap_tests/tab.stderr index 543c02fb701f3..f865f0a5f23cd 100644 --- a/src/test/ui/codemap_tests/tab.stderr +++ b/src/test/ui/codemap_tests/tab.stderr @@ -2,7 +2,7 @@ error[E0425]: unresolved name `bar` --> $DIR/tab.rs:14:2 | 14 | \tbar; - | \t^^^ + | \t^^^ unresolved name error: aborting due to previous error diff --git a/src/test/ui/cross-crate-macro-backtrace/main.stderr b/src/test/ui/cross-crate-macro-backtrace/main.stderr index fceaa70288cf3..84db85ac092db 100644 --- a/src/test/ui/cross-crate-macro-backtrace/main.stderr +++ b/src/test/ui/cross-crate-macro-backtrace/main.stderr @@ -4,7 +4,7 @@ error: invalid reference to argument `0` (no arguments given) 16 | myprintln!("{}"); //~ ERROR in this macro | ^^^^^^^^^^^^^^^^^ | - = note: this error originates in a macro from the standard library + = note: this error originates in a macro 
outside of the current crate error: aborting due to previous error diff --git a/src/test/compile-fail/E560.rs b/src/test/ui/did_you_mean/issue-36798.rs similarity index 84% rename from src/test/compile-fail/E560.rs rename to src/test/ui/did_you_mean/issue-36798.rs index ec9b86ee1f00f..cd0d0951abf8a 100644 --- a/src/test/compile-fail/E560.rs +++ b/src/test/ui/did_you_mean/issue-36798.rs @@ -8,10 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -struct Simba { - mother: u32, +struct Foo { + bar: u8 } fn main() { - let s = Simba { mother: 1, father: 0 }; //~ ERROR E0560 + let f = Foo { bar: 22 }; + f.baz; } diff --git a/src/test/ui/did_you_mean/issue-36798.stderr b/src/test/ui/did_you_mean/issue-36798.stderr new file mode 100644 index 0000000000000..c124747c801d8 --- /dev/null +++ b/src/test/ui/did_you_mean/issue-36798.stderr @@ -0,0 +1,8 @@ +error: no field `baz` on type `Foo` + --> $DIR/issue-36798.rs:17:7 + | +17 | f.baz; + | ^^^ did you mean `bar`? + +error: aborting due to previous error + diff --git a/src/test/ui/did_you_mean/issue-36798_unknown_field.rs b/src/test/ui/did_you_mean/issue-36798_unknown_field.rs new file mode 100644 index 0000000000000..2970a325a6af8 --- /dev/null +++ b/src/test/ui/did_you_mean/issue-36798_unknown_field.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Foo { + bar: u8 +} + +fn main() { + let f = Foo { bar: 22 }; + f.zz; +} diff --git a/src/test/ui/did_you_mean/issue-36798_unknown_field.stderr b/src/test/ui/did_you_mean/issue-36798_unknown_field.stderr new file mode 100644 index 0000000000000..4e02f8bd0cfb2 --- /dev/null +++ b/src/test/ui/did_you_mean/issue-36798_unknown_field.stderr @@ -0,0 +1,8 @@ +error: no field `zz` on type `Foo` + --> $DIR/issue-36798_unknown_field.rs:17:7 + | +17 | f.zz; + | ^^ unknown field + +error: aborting due to previous error + diff --git a/src/test/ui/dropck/auxiliary/dropck_eyepatch_extern_crate.rs b/src/test/ui/dropck/auxiliary/dropck_eyepatch_extern_crate.rs new file mode 100644 index 0000000000000..1b00d88dcb3df --- /dev/null +++ b/src/test/ui/dropck/auxiliary/dropck_eyepatch_extern_crate.rs @@ -0,0 +1,48 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generic_param_attrs)] +#![feature(dropck_eyepatch)] + +// This is a support file for ../dropck-eyepatch-extern-crate.rs +// +// The point of this test is to illustrate that the `#[may_dangle]` +// attribute specifically allows, in the context of a type +// implementing `Drop`, a generic parameter to be instantiated with a +// lifetime that does not strictly outlive the owning type itself, +// and that this attribute's effects are preserved when importing +// the type from another crate. +// +// See also ../dropck-eyepatch.rs for more information about the general +// structure of the test. 
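As the comment above describes, `#[may_dangle]` (the "dropck eyepatch") lets a `Drop` impl promise that it never touches a given type or lifetime parameter from its destructor, so the borrow checker no longer requires that parameter to strictly outlive the value, and it is only accepted on an `unsafe impl`. A minimal nightly-only sketch, independent of the test types in this patch (`PrintOnDrop` is illustrative):

```rust
// Nightly-only: `dropck_eyepatch` is an unstable feature.
#![feature(dropck_eyepatch)]

struct PrintOnDrop<T>(&'static str, T);

// `unsafe` is required: the author promises the destructor never
// accesses the possibly-dangling `T` in `self.1`.
unsafe impl<#[may_dangle] T> Drop for PrintOnDrop<T> {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn main() {
    let p;
    let value = String::from("short-lived");
    // Without `#[may_dangle]`, dropck rejects this: `value` is declared
    // after `p`, so it is dropped first while `p` still borrows it.
    p = PrintOnDrop("p", &value);
    let _ = &p;
}
```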
+ +use std::fmt; + +pub struct Dt(pub &'static str, pub A); +pub struct Dr<'a, B:'a+fmt::Debug>(pub &'static str, pub &'a B); +pub struct Pt(pub &'static str, pub A, pub B); +pub struct Pr<'a, 'b, B:'a+'b+fmt::Debug>(pub &'static str, pub &'a B, pub &'b B); +pub struct St(pub &'static str, pub A); +pub struct Sr<'a, B:'a+fmt::Debug>(pub &'static str, pub &'a B); + +impl Drop for Dt { + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.1); } +} +impl<'a, B: fmt::Debug> Drop for Dr<'a, B> { + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.1); } +} +unsafe impl<#[may_dangle] A, B: fmt::Debug> Drop for Pt { + // (unsafe to access self.1 due to #[may_dangle] on A) + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.2); } +} +unsafe impl<#[may_dangle] 'a, 'b, B: fmt::Debug> Drop for Pr<'a, 'b, B> { + // (unsafe to access self.1 due to #[may_dangle] on 'a) + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.2); } +} diff --git a/src/test/ui/dropck/dropck-eyepatch-extern-crate.rs b/src/test/ui/dropck/dropck-eyepatch-extern-crate.rs new file mode 100644 index 0000000000000..5e200dbdbfa01 --- /dev/null +++ b/src/test/ui/dropck/dropck-eyepatch-extern-crate.rs @@ -0,0 +1,55 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:dropck_eyepatch_extern_crate.rs + +// The point of this test is to illustrate that the `#[may_dangle]` +// attribute specifically allows, in the context of a type +// implementing `Drop`, a generic parameter to be instantiated with a +// lifetime that does not strictly outlive the owning type itself, +// and that this attribute's effects are preserved when importing +// the type from another crate. +// +// See also dropck-eyepatch.rs for more information about the general +// structure of the test. + +extern crate dropck_eyepatch_extern_crate as other; + +use other::{Dt,Dr,Pt,Pr,St,Sr}; + +fn main() { + use std::cell::Cell; + let c_long; + let (c, mut dt, mut dr, mut pt, mut pr, st, sr) + : (Cell<_>, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>); + c_long = Cell::new(1); + c = Cell::new(1); + + // No error: sufficiently long-lived state can be referenced in dtors + dt = Dt("dt", &c_long); + dr = Dr("dr", &c_long); + // Error: destructor order imprecisely modelled + dt = Dt("dt", &c); //~ ERROR `c` does not live long enough + dr = Dr("dr", &c); //~ ERROR `c` does not live long enough + + // No error: Drop impl asserts .1 (A and &'a _) are not accessed + pt = Pt("pt", &c, &c_long); + pr = Pr("pr", &c, &c_long); + + // Error: Drop impl's assertion does not apply to `B` nor `&'b _` + pt = Pt("pt", &c_long, &c); //~ ERROR `c` does not live long enough + pr = Pr("pr", &c_long, &c); //~ ERROR `c` does not live long enough + + // No error: St and Sr have no destructor. 
+ st = St("st", &c); + sr = Sr("sr", &c); + + println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0)); +} diff --git a/src/test/ui/dropck/dropck-eyepatch-extern-crate.stderr b/src/test/ui/dropck/dropck-eyepatch-extern-crate.stderr new file mode 100644 index 0000000000000..2a4ba22ecc467 --- /dev/null +++ b/src/test/ui/dropck/dropck-eyepatch-extern-crate.stderr @@ -0,0 +1,46 @@ +error: `c` does not live long enough + --> $DIR/dropck-eyepatch-extern-crate.rs:55:1 + | +39 | dt = Dt("dt", &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... +55 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c` does not live long enough + --> $DIR/dropck-eyepatch-extern-crate.rs:55:1 + | +40 | dr = Dr("dr", &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... +55 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c` does not live long enough + --> $DIR/dropck-eyepatch-extern-crate.rs:55:1 + | +47 | pt = Pt("pt", &c_long, &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... +55 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c` does not live long enough + --> $DIR/dropck-eyepatch-extern-crate.rs:55:1 + | +48 | pr = Pr("pr", &c_long, &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... +55 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 4 previous errors + diff --git a/src/test/ui/dropck/dropck-eyepatch-implies-unsafe-impl.rs b/src/test/ui/dropck/dropck-eyepatch-implies-unsafe-impl.rs new file mode 100644 index 0000000000000..f92c8703dc927 --- /dev/null +++ b/src/test/ui/dropck/dropck-eyepatch-implies-unsafe-impl.rs @@ -0,0 +1,46 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generic_param_attrs)] +#![feature(dropck_eyepatch)] + +// This test ensures that a use of `#[may_dangle]` is rejected if +// it is not attached to an `unsafe impl`. 
+ +use std::fmt; + +struct Dt(&'static str, A); +struct Dr<'a, B:'a+fmt::Debug>(&'static str, &'a B); +struct Pt(&'static str, A, B); +struct Pr<'a, 'b, B:'a+'b+fmt::Debug>(&'static str, &'a B, &'b B); +struct St(&'static str, A); +struct Sr<'a, B:'a+fmt::Debug>(&'static str, &'a B); + +impl Drop for Dt { + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.1); } +} +impl<'a, B: fmt::Debug> Drop for Dr<'a, B> { + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.1); } +} +impl<#[may_dangle] A, B: fmt::Debug> Drop for Pt { + //~^ ERROR requires an `unsafe impl` declaration due to `#[may_dangle]` attribute + + // (unsafe to access self.1 due to #[may_dangle] on A) + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.2); } +} +impl<#[may_dangle] 'a, 'b, B: fmt::Debug> Drop for Pr<'a, 'b, B> { + //~^ ERROR requires an `unsafe impl` declaration due to `#[may_dangle]` attribute + + // (unsafe to access self.1 due to #[may_dangle] on 'a) + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.2); } +} + +fn main() { +} diff --git a/src/test/ui/dropck/dropck-eyepatch-implies-unsafe-impl.stderr b/src/test/ui/dropck/dropck-eyepatch-implies-unsafe-impl.stderr new file mode 100644 index 0000000000000..c53cf020a9bc5 --- /dev/null +++ b/src/test/ui/dropck/dropck-eyepatch-implies-unsafe-impl.stderr @@ -0,0 +1,14 @@ +error[E0569]: requires an `unsafe impl` declaration due to `#[may_dangle]` attribute + --> $DIR/dropck-eyepatch-implies-unsafe-impl.rs:32:1 + | +32 | impl<#[may_dangle] A, B: fmt::Debug> Drop for Pt { + | ^ + +error[E0569]: requires an `unsafe impl` declaration due to `#[may_dangle]` attribute + --> $DIR/dropck-eyepatch-implies-unsafe-impl.rs:38:1 + | +38 | impl<#[may_dangle] 'a, 'b, B: fmt::Debug> Drop for Pr<'a, 'b, B> { + | ^ + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/dropck/dropck-eyepatch-reorder.rs b/src/test/ui/dropck/dropck-eyepatch-reorder.rs new file mode 100644 index 0000000000000..68b0ff3b5f096 --- /dev/null +++ b/src/test/ui/dropck/dropck-eyepatch-reorder.rs @@ -0,0 +1,73 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generic_param_attrs)] +#![feature(dropck_eyepatch)] + +// The point of this test is to test uses of `#[may_dangle]` attribute +// where the formal declaration order (in the impl generics) does not +// match the actual usage order (in the type instantiation). +// +// See also dropck-eyepatch.rs for more information about the general +// structure of the test. 
+ +use std::fmt; + +struct Dt(&'static str, A); +struct Dr<'a, B:'a+fmt::Debug>(&'static str, &'a B); +struct Pt(&'static str, A, B); +struct Pr<'a, 'b, B:'a+'b+fmt::Debug>(&'static str, &'a B, &'b B); +struct St(&'static str, A); +struct Sr<'a, B:'a+fmt::Debug>(&'static str, &'a B); + +impl Drop for Dt { + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.1); } +} +impl<'a, B: fmt::Debug> Drop for Dr<'a, B> { + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.1); } +} +unsafe impl Drop for Pt { + // (unsafe to access self.1 due to #[may_dangle] on A) + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.2); } +} +unsafe impl<'b, #[may_dangle] 'a, B: fmt::Debug> Drop for Pr<'a, 'b, B> { + // (unsafe to access self.1 due to #[may_dangle] on 'a) + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.2); } +} + +fn main() { + use std::cell::Cell; + let c_long; + let (c, mut dt, mut dr, mut pt, mut pr, st, sr) + : (Cell<_>, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>); + c_long = Cell::new(1); + c = Cell::new(1); + + // No error: sufficiently long-lived state can be referenced in dtors + dt = Dt("dt", &c_long); + dr = Dr("dr", &c_long); + // Error: destructor order imprecisely modelled + dt = Dt("dt", &c); //~ ERROR `c` does not live long enough + dr = Dr("dr", &c); //~ ERROR `c` does not live long enough + + // No error: Drop impl asserts .1 (A and &'a _) are not accessed + pt = Pt("pt", &c, &c_long); + pr = Pr("pr", &c, &c_long); + + // Error: Drop impl's assertion does not apply to `B` nor `&'b _` + pt = Pt("pt", &c_long, &c); //~ ERROR `c` does not live long enough + pr = Pr("pr", &c_long, &c); //~ ERROR `c` does not live long enough + + // No error: St and Sr have no destructor. + st = St("st", &c); + sr = Sr("sr", &c); + + println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0)); +} diff --git a/src/test/ui/dropck/dropck-eyepatch-reorder.stderr b/src/test/ui/dropck/dropck-eyepatch-reorder.stderr new file mode 100644 index 0000000000000..cfcf988f31f7b --- /dev/null +++ b/src/test/ui/dropck/dropck-eyepatch-reorder.stderr @@ -0,0 +1,46 @@ +error: `c` does not live long enough + --> $DIR/dropck-eyepatch-reorder.rs:73:1 + | +57 | dt = Dt("dt", &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... +73 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c` does not live long enough + --> $DIR/dropck-eyepatch-reorder.rs:73:1 + | +58 | dr = Dr("dr", &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... +73 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c` does not live long enough + --> $DIR/dropck-eyepatch-reorder.rs:73:1 + | +65 | pt = Pt("pt", &c_long, &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... +73 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c` does not live long enough + --> $DIR/dropck-eyepatch-reorder.rs:73:1 + | +66 | pr = Pr("pr", &c_long, &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... 
+73 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 4 previous errors + diff --git a/src/test/ui/dropck/dropck-eyepatch.rs b/src/test/ui/dropck/dropck-eyepatch.rs new file mode 100644 index 0000000000000..e442fade19730 --- /dev/null +++ b/src/test/ui/dropck/dropck-eyepatch.rs @@ -0,0 +1,96 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(generic_param_attrs)] +#![feature(dropck_eyepatch)] + +// The point of this test is to illustrate that the `#[may_dangle]` +// attribute specifically allows, in the context of a type +// implementing `Drop`, a generic parameter to be instantiated with a +// lifetime that does not strictly outlive the owning type itself. +// +// Here we test that only the expected errors are issued. +// +// The illustration is made concrete by comparison with two variations +// on the type with `#[may_dangle]`: +// +// 1. an analogous type that does not implement `Drop` (and thus +// should exhibit maximal flexibility with respect to dropck), and +// +// 2. an analogous type that does not use `#[may_dangle]` (and thus +// should exhibit the standard limitations imposed by dropck. +// +// The types in this file follow a pattern, {D,P,S}{t,r}, where: +// +// - D means "I implement Drop" +// +// - P means "I implement Drop but guarantee my (first) parameter is +// pure, i.e. not accessed from the destructor"; no other parameters +// are pure. +// +// - S means "I do not implement Drop" +// +// - t suffix is used when the first generic is a type +// +// - r suffix is used when the first generic is a lifetime. 
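The "values in a scope are dropped in the opposite order they are created" notes in these expected errors follow from Rust's drop order for local bindings; the sketch below just makes that order observable with an illustrative `Loud` type.

```rust
struct Loud(&'static str);

impl Drop for Loud {
    fn drop(&mut self) {
        println!("dropping {}", self.0);
    }
}

fn main() {
    let _first = Loud("first");
    let _second = Loud("second");
    // Locals drop in reverse declaration order, so this prints
    // "dropping second" before "dropping first".
}
```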
+ +use std::fmt; + +struct Dt(&'static str, A); +struct Dr<'a, B:'a+fmt::Debug>(&'static str, &'a B); +struct Pt(&'static str, A, B); +struct Pr<'a, 'b, B:'a+'b+fmt::Debug>(&'static str, &'a B, &'b B); +struct St(&'static str, A); +struct Sr<'a, B:'a+fmt::Debug>(&'static str, &'a B); + +impl Drop for Dt { + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.1); } +} +impl<'a, B: fmt::Debug> Drop for Dr<'a, B> { + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.1); } +} +unsafe impl<#[may_dangle] A, B: fmt::Debug> Drop for Pt { + // (unsafe to access self.1 due to #[may_dangle] on A) + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.2); } +} +unsafe impl<#[may_dangle] 'a, 'b, B: fmt::Debug> Drop for Pr<'a, 'b, B> { + // (unsafe to access self.1 due to #[may_dangle] on 'a) + fn drop(&mut self) { println!("drop {} {:?}", self.0, self.2); } +} + +fn main() { + use std::cell::Cell; + let c_long; + let (c, mut dt, mut dr, mut pt, mut pr, st, sr) + : (Cell<_>, Dt<_>, Dr<_>, Pt<_, _>, Pr<_>, St<_>, Sr<_>); + c_long = Cell::new(1); + c = Cell::new(1); + + // No error: sufficiently long-lived state can be referenced in dtors + dt = Dt("dt", &c_long); + dr = Dr("dr", &c_long); + // Error: destructor order imprecisely modelled + dt = Dt("dt", &c); //~ ERROR `c` does not live long enough + dr = Dr("dr", &c); //~ ERROR `c` does not live long enough + + // No error: Drop impl asserts .1 (A and &'a _) are not accessed + pt = Pt("pt", &c, &c_long); + pr = Pr("pr", &c, &c_long); + + // Error: Drop impl's assertion does not apply to `B` nor `&'b _` + pt = Pt("pt", &c_long, &c); //~ ERROR `c` does not live long enough + pr = Pr("pr", &c_long, &c); //~ ERROR `c` does not live long enough + + // No error: St and Sr have no destructor. + st = St("st", &c); + sr = Sr("sr", &c); + + println!("{:?}", (dt.0, dr.0, pt.0, pr.0, st.0, sr.0)); +} diff --git a/src/test/ui/dropck/dropck-eyepatch.stderr b/src/test/ui/dropck/dropck-eyepatch.stderr new file mode 100644 index 0000000000000..cd420756b444c --- /dev/null +++ b/src/test/ui/dropck/dropck-eyepatch.stderr @@ -0,0 +1,46 @@ +error: `c` does not live long enough + --> $DIR/dropck-eyepatch.rs:96:1 + | +80 | dt = Dt("dt", &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... +96 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c` does not live long enough + --> $DIR/dropck-eyepatch.rs:96:1 + | +81 | dr = Dr("dr", &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... +96 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c` does not live long enough + --> $DIR/dropck-eyepatch.rs:96:1 + | +88 | pt = Pt("pt", &c_long, &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... +96 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c` does not live long enough + --> $DIR/dropck-eyepatch.rs:96:1 + | +89 | pr = Pr("pr", &c_long, &c); //~ ERROR `c` does not live long enough + | - borrow occurs here +... 
+96 | } + | ^ `c` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 4 previous errors + diff --git a/src/test/ui/macros/macro-backtrace-invalid-internals.stderr b/src/test/ui/macros/macro-backtrace-invalid-internals.stderr index 82000a59bfb17..48f9bac906ecf 100644 --- a/src/test/ui/macros/macro-backtrace-invalid-internals.stderr +++ b/src/test/ui/macros/macro-backtrace-invalid-internals.stderr @@ -7,11 +7,11 @@ error: no method named `fake` found for type `{integer}` in the current scope 50 | fake_method_stmt!(); | -------------------- in this macro invocation -error: attempted access of field `fake` on type `{integer}`, but no field with that name was found - --> $DIR/macro-backtrace-invalid-internals.rs:21:11 +error: no field `fake` on type `{integer}` + --> $DIR/macro-backtrace-invalid-internals.rs:21:13 | 21 | 1.fake - | ^^^^^^ + | ^^^^ ... 51 | fake_field_stmt!(); | ------------------- in this macro invocation @@ -34,11 +34,11 @@ error: no method named `fake` found for type `{integer}` in the current scope 54 | let _ = fake_method_expr!(); | ------------------- in this macro invocation -error: attempted access of field `fake` on type `{integer}`, but no field with that name was found - --> $DIR/macro-backtrace-invalid-internals.rs:39:11 +error: no field `fake` on type `{integer}` + --> $DIR/macro-backtrace-invalid-internals.rs:39:13 | 39 | 1.fake - | ^^^^^^ + | ^^^^ ... 55 | let _ = fake_field_expr!(); | ------------------ in this macro invocation diff --git a/src/test/ui/macros/macro-backtrace-nested.stderr b/src/test/ui/macros/macro-backtrace-nested.stderr index e452e8d69bdad..1c7fac894f976 100644 --- a/src/test/ui/macros/macro-backtrace-nested.stderr +++ b/src/test/ui/macros/macro-backtrace-nested.stderr @@ -2,7 +2,7 @@ error[E0425]: unresolved name `fake` --> $DIR/macro-backtrace-nested.rs:15:12 | 15 | () => (fake) - | ^^^^ + | ^^^^ unresolved name ... 27 | 1 + call_nested_expr!(); | ------------------- in this macro invocation @@ -11,7 +11,7 @@ error[E0425]: unresolved name `fake` --> $DIR/macro-backtrace-nested.rs:15:12 | 15 | () => (fake) - | ^^^^ + | ^^^^ unresolved name ... 28 | call_nested_expr_sum!(); | ------------------------ in this macro invocation diff --git a/src/test/ui/span/E0057.rs b/src/test/ui/span/E0057.rs new file mode 100644 index 0000000000000..1fb5498b099c9 --- /dev/null +++ b/src/test/ui/span/E0057.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
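The E0057 test that follows exercises the error for calling a closure with the wrong number of arguments; the "following parameter type was expected: `(_,)`" note reflects that the `Fn*` traits pass arguments as a tuple. A small sketch of the well-formed calls, with an illustrative generic helper:

```rust
// A one-argument closure implements `Fn(i32) -> i32`, i.e. the
// underlying `Fn<(i32,)>` with a one-element argument tuple.
fn apply<F: Fn(i32) -> i32>(f: F, x: i32) -> i32 {
    f(x)
}

fn main() {
    let triple = |x| x * 3;
    assert_eq!(triple(4), 12);
    assert_eq!(apply(triple, 4), 12);
    // `triple()` or `triple(2, 3)` would be rejected with E0057.
}
```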
+ +fn main() { + let f = |x| x * 3; + let a = f(); //~ ERROR E0057 + let b = f(4); + let c = f(2, 3); //~ ERROR E0057 +} diff --git a/src/test/ui/span/E0057.stderr b/src/test/ui/span/E0057.stderr new file mode 100644 index 0000000000000..656fdbe2b29bd --- /dev/null +++ b/src/test/ui/span/E0057.stderr @@ -0,0 +1,18 @@ +error[E0057]: this function takes 1 parameter but 0 parameters were supplied + --> $DIR/E0057.rs:13:13 + | +13 | let a = f(); //~ ERROR E0057 + | ^^^ + | + = note: the following parameter type was expected: (_,) + +error[E0057]: this function takes 1 parameter but 2 parameters were supplied + --> $DIR/E0057.rs:15:15 + | +15 | let c = f(2, 3); //~ ERROR E0057 + | ^^^^ + | + = note: the following parameter type was expected: (_,) + +error: aborting due to 2 previous errors + diff --git a/src/test/compile-fail/borrowck/borrowck-let-suggestion-suffixes.rs b/src/test/ui/span/borrowck-let-suggestion-suffixes.rs similarity index 89% rename from src/test/compile-fail/borrowck/borrowck-let-suggestion-suffixes.rs rename to src/test/ui/span/borrowck-let-suggestion-suffixes.rs index 95c74348e788b..1206d71667264 100644 --- a/src/test/compile-fail/borrowck/borrowck-let-suggestion-suffixes.rs +++ b/src/test/ui/span/borrowck-let-suggestion-suffixes.rs @@ -17,9 +17,7 @@ fn f() { let young = ['y']; // statement 3 v2.push(&young[0]); // statement 4 - //~^ ERROR `young[..]` does not live long enough - //~| NOTE does not live long enough - //~| NOTE values in a scope are dropped in the opposite order they are created + //~^ NOTE borrow occurs here let mut v3 = Vec::new(); // statement 5 @@ -52,7 +50,9 @@ fn f() { v1.push(&old[0]); } -//~^ NOTE borrowed value dropped before borrower +//~^ ERROR `young[..]` does not live long enough +//~| NOTE `young[..]` dropped here while still borrowed +//~| NOTE values in a scope are dropped in the opposite order they are created //~| NOTE temporary value needs to live until here //~| NOTE temporary value needs to live until here diff --git a/src/test/ui/span/borrowck-let-suggestion-suffixes.stderr b/src/test/ui/span/borrowck-let-suggestion-suffixes.stderr new file mode 100644 index 0000000000000..0bba986e437b3 --- /dev/null +++ b/src/test/ui/span/borrowck-let-suggestion-suffixes.stderr @@ -0,0 +1,52 @@ +error: `young[..]` does not live long enough + --> $DIR/borrowck-let-suggestion-suffixes.rs:52:1 + | +19 | v2.push(&young[0]); // statement 4 + | -------- borrow occurs here +... +52 | } + | ^ `young[..]` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: borrowed value does not live long enough + --> $DIR/borrowck-let-suggestion-suffixes.rs:24:14 + | +24 | v3.push(&'x'); // statement 6 + | ^^^ - temporary value only lives until here + | | + | temporary value created here +... +52 | } + | - temporary value needs to live until here + | + = note: consider using a `let` binding to increase its lifetime + +error: borrowed value does not live long enough + --> $DIR/borrowck-let-suggestion-suffixes.rs:34:18 + | +34 | v4.push(&'y'); + | ^^^ - temporary value only lives until here + | | + | temporary value created here +... +40 | } // (statement 7) + | - temporary value needs to live until here + | + = note: consider using a `let` binding to increase its lifetime + +error: borrowed value does not live long enough + --> $DIR/borrowck-let-suggestion-suffixes.rs:45:14 + | +45 | v5.push(&'z'); + | ^^^ - temporary value only lives until here + | | + | temporary value created here +... 
+52 | } + | - temporary value needs to live until here + | + = note: consider using a `let` binding to increase its lifetime + +error: aborting due to 4 previous errors + diff --git a/src/test/compile-fail/dropck-object-cycle.rs b/src/test/ui/span/dropck-object-cycle.rs similarity index 89% rename from src/test/compile-fail/dropck-object-cycle.rs rename to src/test/ui/span/dropck-object-cycle.rs index 5432cbf402a0c..ce9bc17432ef9 100644 --- a/src/test/compile-fail/dropck-object-cycle.rs +++ b/src/test/ui/span/dropck-object-cycle.rs @@ -35,7 +35,7 @@ impl<'t> MakerTrait for Box+'static> { pub fn main() { let m : Box = make_val(); assert_eq!(object_invoke1(&*m), (4,5)); - //~^ ERROR `*m` does not live long enough + //~^ NOTE borrow occurs here // the problem here is that the full type of `m` is // @@ -55,3 +55,7 @@ pub fn main() { // the type of `m` *strictly outlives* `'m`. Hence we get an // error. } +//~^ ERROR `*m` does not live long enough +//~| NOTE `*m` dropped here while still borrowed +//~| NOTE values in a scope are dropped in the opposite order they are created + diff --git a/src/test/ui/span/dropck-object-cycle.stderr b/src/test/ui/span/dropck-object-cycle.stderr new file mode 100644 index 0000000000000..e31c36e83dfca --- /dev/null +++ b/src/test/ui/span/dropck-object-cycle.stderr @@ -0,0 +1,13 @@ +error: `*m` does not live long enough + --> $DIR/dropck-object-cycle.rs:57:1 + | +37 | assert_eq!(object_invoke1(&*m), (4,5)); + | -- borrow occurs here +... +57 | } + | ^ `*m` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to previous error + diff --git a/src/test/compile-fail/dropck_arr_cycle_checked.rs b/src/test/ui/span/dropck_arr_cycle_checked.rs similarity index 84% rename from src/test/compile-fail/dropck_arr_cycle_checked.rs rename to src/test/ui/span/dropck_arr_cycle_checked.rs index 9cfeaca6dfadf..8e8b3c24c76b0 100644 --- a/src/test/compile-fail/dropck_arr_cycle_checked.rs +++ b/src/test/ui/span/dropck_arr_cycle_checked.rs @@ -100,13 +100,19 @@ fn f() { b1 = B::new(); b2 = B::new(); b3 = B::new(); - b1.a[0].v.set(Some(&b2)); //~ ERROR `b2` does not live long enough - b1.a[1].v.set(Some(&b3)); //~ ERROR `b3` does not live long enough - b2.a[0].v.set(Some(&b2)); //~ ERROR `b2` does not live long enough - b2.a[1].v.set(Some(&b3)); //~ ERROR `b3` does not live long enough - b3.a[0].v.set(Some(&b1)); //~ ERROR `b1` does not live long enough - b3.a[1].v.set(Some(&b2)); //~ ERROR `b2` does not live long enough + b1.a[0].v.set(Some(&b2)); + b1.a[1].v.set(Some(&b3)); + b2.a[0].v.set(Some(&b2)); + b2.a[1].v.set(Some(&b3)); + b3.a[0].v.set(Some(&b1)); + b3.a[1].v.set(Some(&b2)); } +//~^ ERROR `b2` does not live long enough +//~| ERROR `b3` does not live long enough +//~| ERROR `b2` does not live long enough +//~| ERROR `b3` does not live long enough +//~| ERROR `b1` does not live long enough +//~| ERROR `b2` does not live long enough fn main() { f(); diff --git a/src/test/ui/span/dropck_arr_cycle_checked.stderr b/src/test/ui/span/dropck_arr_cycle_checked.stderr new file mode 100644 index 0000000000000..c89da0baef233 --- /dev/null +++ b/src/test/ui/span/dropck_arr_cycle_checked.stderr @@ -0,0 +1,67 @@ +error: `b2` does not live long enough + --> $DIR/dropck_arr_cycle_checked.rs:109:1 + | +103 | b1.a[0].v.set(Some(&b2)); + | -- borrow occurs here +... 
+109 | } + | ^ `b2` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `b3` does not live long enough + --> $DIR/dropck_arr_cycle_checked.rs:109:1 + | +104 | b1.a[1].v.set(Some(&b3)); + | -- borrow occurs here +... +109 | } + | ^ `b3` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `b2` does not live long enough + --> $DIR/dropck_arr_cycle_checked.rs:109:1 + | +105 | b2.a[0].v.set(Some(&b2)); + | -- borrow occurs here +... +109 | } + | ^ `b2` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `b3` does not live long enough + --> $DIR/dropck_arr_cycle_checked.rs:109:1 + | +106 | b2.a[1].v.set(Some(&b3)); + | -- borrow occurs here +... +109 | } + | ^ `b3` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `b1` does not live long enough + --> $DIR/dropck_arr_cycle_checked.rs:109:1 + | +107 | b3.a[0].v.set(Some(&b1)); + | -- borrow occurs here +108 | b3.a[1].v.set(Some(&b2)); +109 | } + | ^ `b1` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `b2` does not live long enough + --> $DIR/dropck_arr_cycle_checked.rs:109:1 + | +108 | b3.a[1].v.set(Some(&b2)); + | -- borrow occurs here +109 | } + | ^ `b2` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 6 previous errors + diff --git a/src/test/compile-fail/dropck_direct_cycle_with_drop.rs b/src/test/ui/span/dropck_direct_cycle_with_drop.rs similarity index 92% rename from src/test/compile-fail/dropck_direct_cycle_with_drop.rs rename to src/test/ui/span/dropck_direct_cycle_with_drop.rs index 5db23721259a0..6d13dfc7a794f 100644 --- a/src/test/compile-fail/dropck_direct_cycle_with_drop.rs +++ b/src/test/ui/span/dropck_direct_cycle_with_drop.rs @@ -43,9 +43,11 @@ impl<'a> Drop for D<'a> { fn g() { let (d1, d2) = (D::new(format!("d1")), D::new(format!("d2"))); - d1.p.set(Some(&d2)); //~ ERROR `d2` does not live long enough - d2.p.set(Some(&d1)); //~ ERROR `d1` does not live long enough + d1.p.set(Some(&d2)); + d2.p.set(Some(&d1)); } +//~^ ERROR `d2` does not live long enough +//~| ERROR `d1` does not live long enough fn main() { g(); diff --git a/src/test/ui/span/dropck_direct_cycle_with_drop.stderr b/src/test/ui/span/dropck_direct_cycle_with_drop.stderr new file mode 100644 index 0000000000000..9eb2a21577170 --- /dev/null +++ b/src/test/ui/span/dropck_direct_cycle_with_drop.stderr @@ -0,0 +1,23 @@ +error: `d2` does not live long enough + --> $DIR/dropck_direct_cycle_with_drop.rs:48:1 + | +46 | d1.p.set(Some(&d2)); + | -- borrow occurs here +47 | d2.p.set(Some(&d1)); +48 | } + | ^ `d2` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `d1` does not live long enough + --> $DIR/dropck_direct_cycle_with_drop.rs:48:1 + | +47 | d2.p.set(Some(&d1)); + | -- borrow occurs here +48 | } + | ^ `d1` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 2 previous errors + diff --git a/src/test/compile-fail/dropck_misc_variants.rs b/src/test/ui/span/dropck_misc_variants.rs similarity index 92% rename from 
src/test/compile-fail/dropck_misc_variants.rs rename to src/test/ui/span/dropck_misc_variants.rs index ee957f20d005b..7b94eb10dfc6e 100644 --- a/src/test/compile-fail/dropck_misc_variants.rs +++ b/src/test/ui/span/dropck_misc_variants.rs @@ -31,17 +31,17 @@ fn projection() { let (_w, bomb); bomb = vec![""]; _w = Wrap::<&[&str]>(NoisyDrop(&bomb)); - //~^ ERROR `bomb` does not live long enough } +//~^ ERROR `bomb` does not live long enough fn closure() { let (_w,v); v = vec![""]; _w = { let u = NoisyDrop(&v); - //~^ ERROR `v` does not live long enough move || u.0.len() }; } +//~^ ERROR `v` does not live long enough fn main() { closure(); projection() } diff --git a/src/test/ui/span/dropck_misc_variants.stderr b/src/test/ui/span/dropck_misc_variants.stderr new file mode 100644 index 0000000000000..98c1cbbba7b3f --- /dev/null +++ b/src/test/ui/span/dropck_misc_variants.stderr @@ -0,0 +1,23 @@ +error: `bomb` does not live long enough + --> $DIR/dropck_misc_variants.rs:34:1 + | +33 | _w = Wrap::<&[&str]>(NoisyDrop(&bomb)); + | ---- borrow occurs here +34 | } + | ^ `bomb` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `v` does not live long enough + --> $DIR/dropck_misc_variants.rs:44:1 + | +41 | let u = NoisyDrop(&v); + | - borrow occurs here +... +44 | } + | ^ `v` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 2 previous errors + diff --git a/src/test/compile-fail/dropck_vec_cycle_checked.rs b/src/test/ui/span/dropck_vec_cycle_checked.rs similarity index 85% rename from src/test/compile-fail/dropck_vec_cycle_checked.rs rename to src/test/ui/span/dropck_vec_cycle_checked.rs index caf25e68d568c..65db2a56b7d81 100644 --- a/src/test/compile-fail/dropck_vec_cycle_checked.rs +++ b/src/test/ui/span/dropck_vec_cycle_checked.rs @@ -107,13 +107,19 @@ fn f() { c3.v.push(CheckId(Cell::new(None))); c3.v.push(CheckId(Cell::new(None))); - c1.v[0].v.set(Some(&c2)); //~ ERROR `c2` does not live long enough - c1.v[1].v.set(Some(&c3)); //~ ERROR `c3` does not live long enough - c2.v[0].v.set(Some(&c2)); //~ ERROR `c2` does not live long enough - c2.v[1].v.set(Some(&c3)); //~ ERROR `c3` does not live long enough - c3.v[0].v.set(Some(&c1)); //~ ERROR `c1` does not live long enough - c3.v[1].v.set(Some(&c2)); //~ ERROR `c2` does not live long enough + c1.v[0].v.set(Some(&c2)); + c1.v[1].v.set(Some(&c3)); + c2.v[0].v.set(Some(&c2)); + c2.v[1].v.set(Some(&c3)); + c3.v[0].v.set(Some(&c1)); + c3.v[1].v.set(Some(&c2)); } +//~^ ERROR `c2` does not live long enough +//~| ERROR `c3` does not live long enough +//~| ERROR `c2` does not live long enough +//~| ERROR `c3` does not live long enough +//~| ERROR `c1` does not live long enough +//~| ERROR `c2` does not live long enough fn main() { f(); diff --git a/src/test/ui/span/dropck_vec_cycle_checked.stderr b/src/test/ui/span/dropck_vec_cycle_checked.stderr new file mode 100644 index 0000000000000..961ac81cf8280 --- /dev/null +++ b/src/test/ui/span/dropck_vec_cycle_checked.stderr @@ -0,0 +1,67 @@ +error: `c2` does not live long enough + --> $DIR/dropck_vec_cycle_checked.rs:116:1 + | +110 | c1.v[0].v.set(Some(&c2)); + | -- borrow occurs here +... 
+116 | } + | ^ `c2` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c3` does not live long enough + --> $DIR/dropck_vec_cycle_checked.rs:116:1 + | +111 | c1.v[1].v.set(Some(&c3)); + | -- borrow occurs here +... +116 | } + | ^ `c3` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c2` does not live long enough + --> $DIR/dropck_vec_cycle_checked.rs:116:1 + | +112 | c2.v[0].v.set(Some(&c2)); + | -- borrow occurs here +... +116 | } + | ^ `c2` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c3` does not live long enough + --> $DIR/dropck_vec_cycle_checked.rs:116:1 + | +113 | c2.v[1].v.set(Some(&c3)); + | -- borrow occurs here +... +116 | } + | ^ `c3` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c1` does not live long enough + --> $DIR/dropck_vec_cycle_checked.rs:116:1 + | +114 | c3.v[0].v.set(Some(&c1)); + | -- borrow occurs here +115 | c3.v[1].v.set(Some(&c2)); +116 | } + | ^ `c1` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c2` does not live long enough + --> $DIR/dropck_vec_cycle_checked.rs:116:1 + | +115 | c3.v[1].v.set(Some(&c2)); + | -- borrow occurs here +116 | } + | ^ `c2` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 6 previous errors + diff --git a/src/test/compile-fail/issue-23338-locals-die-before-temps-of-body.rs b/src/test/ui/span/issue-23338-locals-die-before-temps-of-body.rs similarity index 94% rename from src/test/compile-fail/issue-23338-locals-die-before-temps-of-body.rs rename to src/test/ui/span/issue-23338-locals-die-before-temps-of-body.rs index 993893438e536..a04edd99b8b6b 100644 --- a/src/test/compile-fail/issue-23338-locals-die-before-temps-of-body.rs +++ b/src/test/ui/span/issue-23338-locals-die-before-temps-of-body.rs @@ -17,8 +17,9 @@ use std::cell::RefCell; fn foo(x: RefCell<String>) -> String { let y = x; - y.borrow().clone() //~ ERROR `y` does not live long enough + y.borrow().clone() } +//~^ ERROR `y` does not live long enough fn foo2(x: RefCell<String>) -> String { let ret = { diff --git a/src/test/ui/span/issue-23338-locals-die-before-temps-of-body.stderr b/src/test/ui/span/issue-23338-locals-die-before-temps-of-body.stderr new file mode 100644 index 0000000000000..f10ba0bf2210f --- /dev/null +++ b/src/test/ui/span/issue-23338-locals-die-before-temps-of-body.stderr @@ -0,0 +1,22 @@ +error: `y` does not live long enough + --> $DIR/issue-23338-locals-die-before-temps-of-body.rs:21:1 + | +20 | y.borrow().clone() + | - borrow occurs here +21 | } + | ^ `y` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `y` does not live long enough + --> $DIR/issue-23338-locals-die-before-temps-of-body.rs:27:9 + | +27 | y.borrow().clone() //~ ERROR `y` does not live long enough + | ^ does not live long enough +28 | }; + | -- borrowed value needs to live until here + | | + | borrowed value only lives until here + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/span/issue-24690.rs b/src/test/ui/span/issue-24690.rs new file mode 100644 index 0000000000000..def0d9aced3c3 --- /dev/null +++
b/src/test/ui/span/issue-24690.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! A test to ensure that helpful `note` messages aren't emitted more often +//! than necessary. + +// Although there are three errors, we should only get two "lint level defined +// here" notes pointing at the `warnings` span, one for each error type. +#![deny(warnings)] + +fn main() { + let theTwo = 2; + let theOtherTwo = 2; + println!("{}", theTwo); +} diff --git a/src/test/ui/span/issue-24690.stderr b/src/test/ui/span/issue-24690.stderr new file mode 100644 index 0000000000000..0d2a2ef751666 --- /dev/null +++ b/src/test/ui/span/issue-24690.stderr @@ -0,0 +1,32 @@ +error: unused variable: `theOtherTwo` + --> $DIR/issue-24690.rs:20:9 + | +20 | let theOtherTwo = 2; + | ^^^^^^^^^^^ + | +note: lint level defined here + --> $DIR/issue-24690.rs:16:9 + | +16 | #![deny(warnings)] + | ^^^^^^^^ + +error: variable `theTwo` should have a snake case name such as `the_two` + --> $DIR/issue-24690.rs:19:9 + | +19 | let theTwo = 2; + | ^^^^^^ + | +note: lint level defined here + --> $DIR/issue-24690.rs:16:9 + | +16 | #![deny(warnings)] + | ^^^^^^^^ + +error: variable `theOtherTwo` should have a snake case name such as `the_other_two` + --> $DIR/issue-24690.rs:20:9 + | +20 | let theOtherTwo = 2; + | ^^^^^^^^^^^ + +error: aborting due to 3 previous errors + diff --git a/src/test/compile-fail/issue-24805-dropck-child-has-items-via-parent.rs b/src/test/ui/span/issue-24805-dropck-child-has-items-via-parent.rs similarity index 95% rename from src/test/compile-fail/issue-24805-dropck-child-has-items-via-parent.rs rename to src/test/ui/span/issue-24805-dropck-child-has-items-via-parent.rs index 37ef81e6866ef..acd363fc6b052 100644 --- a/src/test/compile-fail/issue-24805-dropck-child-has-items-via-parent.rs +++ b/src/test/ui/span/issue-24805-dropck-child-has-items-via-parent.rs @@ -35,11 +35,12 @@ fn f_child() { d1 = D_Child(1); // ... we store a reference to `d1` within `_d` ... - _d = D_Child(&d1); //~ ERROR `d1` does not live long enough + _d = D_Child(&d1); // ... dropck *should* complain, because Drop of _d could (and // does) access the already dropped `d1` via the `foo` method. } +//~^ ERROR `d1` does not live long enough fn main() { f_child(); diff --git a/src/test/ui/span/issue-24805-dropck-child-has-items-via-parent.stderr b/src/test/ui/span/issue-24805-dropck-child-has-items-via-parent.stderr new file mode 100644 index 0000000000000..a622e7cfb7180 --- /dev/null +++ b/src/test/ui/span/issue-24805-dropck-child-has-items-via-parent.stderr @@ -0,0 +1,13 @@ +error: `d1` does not live long enough + --> $DIR/issue-24805-dropck-child-has-items-via-parent.rs:42:1 + | +38 | _d = D_Child(&d1); + | -- borrow occurs here +... 
+42 | } + | ^ `d1` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to previous error + diff --git a/src/test/compile-fail/issue-24805-dropck-trait-has-items.rs b/src/test/ui/span/issue-24805-dropck-trait-has-items.rs similarity index 88% rename from src/test/compile-fail/issue-24805-dropck-trait-has-items.rs rename to src/test/ui/span/issue-24805-dropck-trait-has-items.rs index 0da1b9fc6e196..3deb71411e84f 100644 --- a/src/test/compile-fail/issue-24805-dropck-trait-has-items.rs +++ b/src/test/ui/span/issue-24805-dropck-trait-has-items.rs @@ -44,18 +44,21 @@ impl_drop!{HasType, D_HasType} fn f_sm() { let (_d, d1); d1 = D_HasSelfMethod(1); - _d = D_HasSelfMethod(&d1); //~ ERROR `d1` does not live long enough + _d = D_HasSelfMethod(&d1); } +//~^ ERROR `d1` does not live long enough fn f_mwsa() { let (_d, d1); d1 = D_HasMethodWithSelfArg(1); - _d = D_HasMethodWithSelfArg(&d1); //~ ERROR `d1` does not live long enough + _d = D_HasMethodWithSelfArg(&d1); } +//~^ ERROR `d1` does not live long enough fn f_t() { let (_d, d1); d1 = D_HasType(1); - _d = D_HasType(&d1); //~ ERROR `d1` does not live long enough + _d = D_HasType(&d1); } +//~^ ERROR `d1` does not live long enough fn main() { f_sm(); diff --git a/src/test/ui/span/issue-24805-dropck-trait-has-items.stderr b/src/test/ui/span/issue-24805-dropck-trait-has-items.stderr new file mode 100644 index 0000000000000..d06c8af62ee60 --- /dev/null +++ b/src/test/ui/span/issue-24805-dropck-trait-has-items.stderr @@ -0,0 +1,32 @@ +error: `d1` does not live long enough + --> $DIR/issue-24805-dropck-trait-has-items.rs:48:1 + | +47 | _d = D_HasSelfMethod(&d1); + | -- borrow occurs here +48 | } + | ^ `d1` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `d1` does not live long enough + --> $DIR/issue-24805-dropck-trait-has-items.rs:54:1 + | +53 | _d = D_HasMethodWithSelfArg(&d1); + | -- borrow occurs here +54 | } + | ^ `d1` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `d1` does not live long enough + --> $DIR/issue-24805-dropck-trait-has-items.rs:60:1 + | +59 | _d = D_HasType(&d1); + | -- borrow occurs here +60 | } + | ^ `d1` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 3 previous errors + diff --git a/src/test/compile-fail/issue-24895-copy-clone-dropck.rs b/src/test/ui/span/issue-24895-copy-clone-dropck.rs similarity index 93% rename from src/test/compile-fail/issue-24895-copy-clone-dropck.rs rename to src/test/ui/span/issue-24895-copy-clone-dropck.rs index 2883511736920..a4207eb0aa949 100644 --- a/src/test/compile-fail/issue-24895-copy-clone-dropck.rs +++ b/src/test/ui/span/issue-24895-copy-clone-dropck.rs @@ -34,5 +34,5 @@ impl Drop for D { fn main() { let (d2, d1); d1 = D(34, "d1"); - d2 = D(S(&d1, "inner"), "d2"); //~ ERROR `d1` does not live long enough -} + d2 = D(S(&d1, "inner"), "d2"); +} //~ ERROR `d1` does not live long enough diff --git a/src/test/ui/span/issue-24895-copy-clone-dropck.stderr b/src/test/ui/span/issue-24895-copy-clone-dropck.stderr new file mode 100644 index 0000000000000..160bfb6390045 --- /dev/null +++ b/src/test/ui/span/issue-24895-copy-clone-dropck.stderr @@ -0,0 +1,12 @@ +error: `d1` does not live long enough + --> $DIR/issue-24895-copy-clone-dropck.rs:38:1 + | +37 | d2 = D(S(&d1, 
"inner"), "d2"); + | -- borrow occurs here +38 | } //~ ERROR `d1` does not live long enough + | ^ `d1` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to previous error + diff --git a/src/test/compile-fail/issue-25199.rs b/src/test/ui/span/issue-25199.rs similarity index 93% rename from src/test/compile-fail/issue-25199.rs rename to src/test/ui/span/issue-25199.rs index 74ea1ca2947f2..b88c58c29ac3d 100644 --- a/src/test/compile-fail/issue-25199.rs +++ b/src/test/ui/span/issue-25199.rs @@ -77,7 +77,9 @@ impl<'a> Drop for Test<'a> { fn main() { let container = Container::new(); - let test = Test{test: &container}; //~ ERROR `container` does not live long enough + let test = Test{test: &container}; println!("container.v[30]: {:?}", container.v.v[30]); - container.store(test); //~ ERROR `container` does not live long enough + container.store(test); } +//~^ ERROR `container` does not live long enough +//~| ERROR `container` does not live long enough diff --git a/src/test/ui/span/issue-25199.stderr b/src/test/ui/span/issue-25199.stderr new file mode 100644 index 0000000000000..3c8ee07a1fdd8 --- /dev/null +++ b/src/test/ui/span/issue-25199.stderr @@ -0,0 +1,23 @@ +error: `container` does not live long enough + --> $DIR/issue-25199.rs:83:1 + | +80 | let test = Test{test: &container}; + | --------- borrow occurs here +... +83 | } + | ^ `container` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `container` does not live long enough + --> $DIR/issue-25199.rs:83:1 + | +82 | container.store(test); + | --------- borrow occurs here +83 | } + | ^ `container` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 2 previous errors + diff --git a/src/test/compile-fail/issue-26656.rs b/src/test/ui/span/issue-26656.rs similarity index 96% rename from src/test/compile-fail/issue-26656.rs rename to src/test/ui/span/issue-26656.rs index e5fa65498de2c..621da477ddcbf 100644 --- a/src/test/compile-fail/issue-26656.rs +++ b/src/test/ui/span/issue-26656.rs @@ -48,5 +48,5 @@ fn main() { trigger: Box::new(()) }; ticking = Bomb { usable: true }; zook.button = B::BigRedButton(&ticking); - //~^ ERROR `ticking` does not live long enough } +//~^ ERROR `ticking` does not live long enough diff --git a/src/test/ui/span/issue-26656.stderr b/src/test/ui/span/issue-26656.stderr new file mode 100644 index 0000000000000..f960844c818ff --- /dev/null +++ b/src/test/ui/span/issue-26656.stderr @@ -0,0 +1,12 @@ +error: `ticking` does not live long enough + --> $DIR/issue-26656.rs:51:1 + | +50 | zook.button = B::BigRedButton(&ticking); + | ------- borrow occurs here +51 | } + | ^ `ticking` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to previous error + diff --git a/src/test/compile-fail/issue-29106.rs b/src/test/ui/span/issue-29106.rs similarity index 83% rename from src/test/compile-fail/issue-29106.rs rename to src/test/ui/span/issue-29106.rs index 1872c62e366de..8d28c64f264c9 100644 --- a/src/test/compile-fail/issue-29106.rs +++ b/src/test/ui/span/issue-29106.rs @@ -23,12 +23,12 @@ fn main() { { let (y, x); x = "alive".to_string(); - y = Arc::new(Foo(&x)); //~ ERROR `x` does not live long enough - } + y = Arc::new(Foo(&x)); + } //~ ERROR `x` does not live long enough { let (y, x); x = 
"alive".to_string(); - y = Rc::new(Foo(&x)); //~ ERROR `x` does not live long enough - } + y = Rc::new(Foo(&x)); + } //~ ERROR `x` does not live long enough } diff --git a/src/test/ui/span/issue-29106.stderr b/src/test/ui/span/issue-29106.stderr new file mode 100644 index 0000000000000..a7d3b84daba46 --- /dev/null +++ b/src/test/ui/span/issue-29106.stderr @@ -0,0 +1,22 @@ +error: `x` does not live long enough + --> $DIR/issue-29106.rs:27:5 + | +26 | y = Arc::new(Foo(&x)); + | - borrow occurs here +27 | } //~ ERROR `x` does not live long enough + | ^ `x` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `x` does not live long enough + --> $DIR/issue-29106.rs:33:5 + | +32 | y = Rc::new(Foo(&x)); + | - borrow occurs here +33 | } //~ ERROR `x` does not live long enough + | ^ `x` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/span/issue-36530.rs b/src/test/ui/span/issue-36530.rs new file mode 100644 index 0000000000000..893c2168c2e16 --- /dev/null +++ b/src/test/ui/span/issue-36530.rs @@ -0,0 +1,14 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[foo] +mod foo { + #![foo] +} diff --git a/src/test/ui/span/issue-36530.stderr b/src/test/ui/span/issue-36530.stderr new file mode 100644 index 0000000000000..dc6190c2e76b0 --- /dev/null +++ b/src/test/ui/span/issue-36530.stderr @@ -0,0 +1,18 @@ +error: The attribute `foo` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642) + --> $DIR/issue-36530.rs:11:1 + | +11 | #[foo] + | ^^^^^^ + | + = help: add #![feature(custom_attribute)] to the crate attributes to enable + +error: The attribute `foo` is currently unknown to the compiler and may have meaning added to it in the future (see issue #29642) + --> $DIR/issue-36530.rs:13:5 + | +13 | #![foo] + | ^^^^^^^ + | + = help: add #![feature(custom_attribute)] to the crate attributes to enable + +error: aborting due to 2 previous errors + diff --git a/src/test/ui/span/issue-36537.rs b/src/test/ui/span/issue-36537.rs new file mode 100644 index 0000000000000..33182e02fa3c4 --- /dev/null +++ b/src/test/ui/span/issue-36537.rs @@ -0,0 +1,18 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +fn main() { + let p; + let a = 42; + p = &a; //~ NOTE borrow occurs here +} +//~^ ERROR `a` does not live long enough +//~| NOTE `a` dropped here while still borrowed +//~| NOTE values in a scope are dropped in the opposite order they are created diff --git a/src/test/ui/span/issue-36537.stderr b/src/test/ui/span/issue-36537.stderr new file mode 100644 index 0000000000000..a335194580d45 --- /dev/null +++ b/src/test/ui/span/issue-36537.stderr @@ -0,0 +1,12 @@ +error: `a` does not live long enough + --> $DIR/issue-36537.rs:15:1 + | +14 | p = &a; //~ NOTE borrow occurs here + | - borrow occurs here +15 | } + | ^ `a` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to previous error + diff --git a/src/test/compile-fail/issue28498-reject-ex1.rs b/src/test/ui/span/issue28498-reject-ex1.rs similarity index 93% rename from src/test/compile-fail/issue28498-reject-ex1.rs rename to src/test/ui/span/issue28498-reject-ex1.rs index cee7c57c2019f..bed768005bf69 100644 --- a/src/test/compile-fail/issue28498-reject-ex1.rs +++ b/src/test/ui/span/issue28498-reject-ex1.rs @@ -42,7 +42,7 @@ fn main() { foo.data.push(Concrete(0, Cell::new(None))); foo.data[0].1.set(Some(&foo.data[1])); - //~^ ERROR `foo.data` does not live long enough foo.data[1].1.set(Some(&foo.data[0])); - //~^ ERROR `foo.data` does not live long enough } +//~^ ERROR `foo.data` does not live long enough +//~| ERROR `foo.data` does not live long enough diff --git a/src/test/ui/span/issue28498-reject-ex1.stderr b/src/test/ui/span/issue28498-reject-ex1.stderr new file mode 100644 index 0000000000000..b5fbe99ec72e8 --- /dev/null +++ b/src/test/ui/span/issue28498-reject-ex1.stderr @@ -0,0 +1,23 @@ +error: `foo.data` does not live long enough + --> $DIR/issue28498-reject-ex1.rs:46:1 + | +44 | foo.data[0].1.set(Some(&foo.data[1])); + | -------- borrow occurs here +45 | foo.data[1].1.set(Some(&foo.data[0])); +46 | } + | ^ `foo.data` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `foo.data` does not live long enough + --> $DIR/issue28498-reject-ex1.rs:46:1 + | +45 | foo.data[1].1.set(Some(&foo.data[0])); + | -------- borrow occurs here +46 | } + | ^ `foo.data` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 2 previous errors + diff --git a/src/test/compile-fail/issue28498-reject-lifetime-param.rs b/src/test/ui/span/issue28498-reject-lifetime-param.rs similarity index 92% rename from src/test/compile-fail/issue28498-reject-lifetime-param.rs rename to src/test/ui/span/issue28498-reject-lifetime-param.rs index 92028c7a818c2..7e7893ac3c864 100644 --- a/src/test/compile-fail/issue28498-reject-lifetime-param.rs +++ b/src/test/ui/span/issue28498-reject-lifetime-param.rs @@ -40,9 +40,9 @@ fn main() { last_dropped = ScribbleOnDrop(format!("last")); first_dropped = ScribbleOnDrop(format!("first")); foo0 = Foo(0, &last_dropped); - //~^ ERROR `last_dropped` does not live long enough foo1 = Foo(1, &first_dropped); - //~^ ERROR `first_dropped` does not live long enough println!("foo0.1: {:?} foo1.1: {:?}", foo0.1, foo1.1); } +//~^ ERROR `last_dropped` does not live long enough +//~| ERROR `first_dropped` does not live long enough diff --git a/src/test/ui/span/issue28498-reject-lifetime-param.stderr b/src/test/ui/span/issue28498-reject-lifetime-param.stderr new file mode 100644 index 
0000000000000..debb8354568cc --- /dev/null +++ b/src/test/ui/span/issue28498-reject-lifetime-param.stderr @@ -0,0 +1,24 @@ +error: `last_dropped` does not live long enough + --> $DIR/issue28498-reject-lifetime-param.rs:46:1 + | +42 | foo0 = Foo(0, &last_dropped); + | ------------ borrow occurs here +... +46 | } + | ^ `last_dropped` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `first_dropped` does not live long enough + --> $DIR/issue28498-reject-lifetime-param.rs:46:1 + | +43 | foo1 = Foo(1, &first_dropped); + | ------------- borrow occurs here +... +46 | } + | ^ `first_dropped` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 2 previous errors + diff --git a/src/test/compile-fail/issue28498-reject-passed-to-fn.rs b/src/test/ui/span/issue28498-reject-passed-to-fn.rs similarity index 93% rename from src/test/compile-fail/issue28498-reject-passed-to-fn.rs rename to src/test/ui/span/issue28498-reject-passed-to-fn.rs index 27378b1e0bec0..54fc20b62f25a 100644 --- a/src/test/compile-fail/issue28498-reject-passed-to-fn.rs +++ b/src/test/ui/span/issue28498-reject-passed-to-fn.rs @@ -42,9 +42,9 @@ fn main() { last_dropped = ScribbleOnDrop(format!("last")); first_dropped = ScribbleOnDrop(format!("first")); foo0 = Foo(0, &last_dropped, Box::new(callback)); - //~^ ERROR `last_dropped` does not live long enough foo1 = Foo(1, &first_dropped, Box::new(callback)); - //~^ ERROR `first_dropped` does not live long enough println!("foo0.1: {:?} foo1.1: {:?}", foo0.1, foo1.1); } +//~^ ERROR `last_dropped` does not live long enough +//~| ERROR `first_dropped` does not live long enough diff --git a/src/test/ui/span/issue28498-reject-passed-to-fn.stderr b/src/test/ui/span/issue28498-reject-passed-to-fn.stderr new file mode 100644 index 0000000000000..7d3ac3e9d0391 --- /dev/null +++ b/src/test/ui/span/issue28498-reject-passed-to-fn.stderr @@ -0,0 +1,24 @@ +error: `last_dropped` does not live long enough + --> $DIR/issue28498-reject-passed-to-fn.rs:48:1 + | +44 | foo0 = Foo(0, &last_dropped, Box::new(callback)); + | ------------ borrow occurs here +... +48 | } + | ^ `last_dropped` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `first_dropped` does not live long enough + --> $DIR/issue28498-reject-passed-to-fn.rs:48:1 + | +45 | foo1 = Foo(1, &first_dropped, Box::new(callback)); + | ------------- borrow occurs here +... 
+48 | } + | ^ `first_dropped` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 2 previous errors + diff --git a/src/test/compile-fail/issue28498-reject-trait-bound.rs b/src/test/ui/span/issue28498-reject-trait-bound.rs similarity index 92% rename from src/test/compile-fail/issue28498-reject-trait-bound.rs rename to src/test/ui/span/issue28498-reject-trait-bound.rs index 3904d68ba19db..6164beaf858dd 100644 --- a/src/test/compile-fail/issue28498-reject-trait-bound.rs +++ b/src/test/ui/span/issue28498-reject-trait-bound.rs @@ -42,9 +42,9 @@ fn main() { last_dropped = ScribbleOnDrop(format!("last")); first_dropped = ScribbleOnDrop(format!("first")); foo0 = Foo(0, &last_dropped); - //~^ ERROR `last_dropped` does not live long enough foo1 = Foo(1, &first_dropped); - //~^ ERROR `first_dropped` does not live long enough println!("foo0.1: {:?} foo1.1: {:?}", foo0.1, foo1.1); } +//~^ ERROR `last_dropped` does not live long enough +//~| ERROR `first_dropped` does not live long enough diff --git a/src/test/ui/span/issue28498-reject-trait-bound.stderr b/src/test/ui/span/issue28498-reject-trait-bound.stderr new file mode 100644 index 0000000000000..ae96cace91c47 --- /dev/null +++ b/src/test/ui/span/issue28498-reject-trait-bound.stderr @@ -0,0 +1,24 @@ +error: `last_dropped` does not live long enough + --> $DIR/issue28498-reject-trait-bound.rs:48:1 + | +44 | foo0 = Foo(0, &last_dropped); + | ------------ borrow occurs here +... +48 | } + | ^ `last_dropped` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `first_dropped` does not live long enough + --> $DIR/issue28498-reject-trait-bound.rs:48:1 + | +45 | foo1 = Foo(1, &first_dropped); + | ------------- borrow occurs here +... 
+48 | } + | ^ `first_dropped` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 2 previous errors + diff --git a/src/test/compile-fail/impl-trait/loan-extend.rs b/src/test/ui/span/loan-extend.rs similarity index 76% rename from src/test/compile-fail/impl-trait/loan-extend.rs rename to src/test/ui/span/loan-extend.rs index 8dfcb08cff339..a4b951daab439 100644 --- a/src/test/compile-fail/impl-trait/loan-extend.rs +++ b/src/test/ui/span/loan-extend.rs @@ -17,7 +17,8 @@ fn main() { let long; let mut short = 0; long = borrow(&mut short); - //~^ ERROR `short` does not live long enough - //~| NOTE does not live long enough - //~| NOTE values in a scope are dropped in the opposite order they are created -} //~ borrowed value dropped before borrower + //~^ NOTE borrow occurs here +} +//~^ ERROR `short` does not live long enough +//~| NOTE `short` dropped here while still borrowed +//~| NOTE values in a scope are dropped in the opposite order they are created diff --git a/src/test/ui/span/loan-extend.stderr b/src/test/ui/span/loan-extend.stderr new file mode 100644 index 0000000000000..b0f191e29d1be --- /dev/null +++ b/src/test/ui/span/loan-extend.stderr @@ -0,0 +1,13 @@ +error: `short` does not live long enough + --> $DIR/loan-extend.rs:21:1 + | +19 | long = borrow(&mut short); + | ----- borrow occurs here +20 | //~^ NOTE borrow occurs here +21 | } + | ^ `short` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to previous error + diff --git a/src/test/ui/span/type-binding.stderr b/src/test/ui/span/type-binding.stderr index 3cd1791a34ff7..dc37acaf3f98e 100644 --- a/src/test/ui/span/type-binding.stderr +++ b/src/test/ui/span/type-binding.stderr @@ -2,7 +2,7 @@ error[E0220]: associated type `Trget` not found for `std::ops::Deref` --> $DIR/type-binding.rs:16:20 | 16 | fn homura>(_: T) {} - | ^^^^^^^^^^^ + | ^^^^^^^^^^^ associated type `Trget` not found error: aborting due to previous error diff --git a/src/test/compile-fail/typo-suggestion.rs b/src/test/ui/span/typo-suggestion.rs similarity index 87% rename from src/test/compile-fail/typo-suggestion.rs rename to src/test/ui/span/typo-suggestion.rs index d5cf6a294ee4d..536bf16142b30 100644 --- a/src/test/compile-fail/typo-suggestion.rs +++ b/src/test/ui/span/typo-suggestion.rs @@ -13,9 +13,7 @@ fn main() { // `foo` shouldn't be suggested, it is too dissimilar from `bar`. println!("Hello {}", bar); - //~^ ERROR: unresolved name `bar` // But this is close enough. println!("Hello {}", fob); - //~^ ERROR: unresolved name `fob`. Did you mean `foo`? } diff --git a/src/test/ui/span/typo-suggestion.stderr b/src/test/ui/span/typo-suggestion.stderr new file mode 100644 index 0000000000000..5446175aa2505 --- /dev/null +++ b/src/test/ui/span/typo-suggestion.stderr @@ -0,0 +1,14 @@ +error[E0425]: unresolved name `bar` + --> $DIR/typo-suggestion.rs:15:26 + | +15 | println!("Hello {}", bar); + | ^^^ unresolved name + +error[E0425]: unresolved name `fob` + --> $DIR/typo-suggestion.rs:18:26 + | +18 | println!("Hello {}", fob); + | ^^^ did you mean `foo`? 
+ +error: aborting due to 2 previous errors + diff --git a/src/test/compile-fail/vec-must-not-hide-type-from-dropck.rs b/src/test/ui/span/vec-must-not-hide-type-from-dropck.rs similarity index 96% rename from src/test/compile-fail/vec-must-not-hide-type-from-dropck.rs rename to src/test/ui/span/vec-must-not-hide-type-from-dropck.rs index c8f4326bb2824..310ab20489a35 100644 --- a/src/test/compile-fail/vec-must-not-hide-type-from-dropck.rs +++ b/src/test/ui/span/vec-must-not-hide-type-from-dropck.rs @@ -124,9 +124,11 @@ fn f() { c1.v.push(CheckId(Cell::new(None))); c2.v.push(CheckId(Cell::new(None))); - c1.v[0].v.set(Some(&c2)); //~ ERROR `c2` does not live long enough - c2.v[0].v.set(Some(&c1)); //~ ERROR `c1` does not live long enough + c1.v[0].v.set(Some(&c2)); + c2.v[0].v.set(Some(&c1)); } +//~^ ERROR `c2` does not live long enough +//~| ERROR `c1` does not live long enough fn main() { f(); diff --git a/src/test/ui/span/vec-must-not-hide-type-from-dropck.stderr b/src/test/ui/span/vec-must-not-hide-type-from-dropck.stderr new file mode 100644 index 0000000000000..11031ee0ab263 --- /dev/null +++ b/src/test/ui/span/vec-must-not-hide-type-from-dropck.stderr @@ -0,0 +1,23 @@ +error: `c2` does not live long enough + --> $DIR/vec-must-not-hide-type-from-dropck.rs:129:1 + | +127 | c1.v[0].v.set(Some(&c2)); + | -- borrow occurs here +128 | c2.v[0].v.set(Some(&c1)); +129 | } + | ^ `c2` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `c1` does not live long enough + --> $DIR/vec-must-not-hide-type-from-dropck.rs:129:1 + | +128 | c2.v[0].v.set(Some(&c1)); + | -- borrow occurs here +129 | } + | ^ `c1` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 2 previous errors + diff --git a/src/test/compile-fail/vec_refs_data_with_early_death.rs b/src/test/ui/span/vec_refs_data_with_early_death.rs similarity index 90% rename from src/test/compile-fail/vec_refs_data_with_early_death.rs rename to src/test/ui/span/vec_refs_data_with_early_death.rs index 0025449a3dbb7..f40a25920bf3a 100644 --- a/src/test/compile-fail/vec_refs_data_with_early_death.rs +++ b/src/test/ui/span/vec_refs_data_with_early_death.rs @@ -24,8 +24,10 @@ fn main() { let x: i8 = 3; let y: i8 = 4; - v.push(&x); //~ ERROR `x` does not live long enough - v.push(&y); //~ ERROR `y` does not live long enough + v.push(&x); + v.push(&y); assert_eq!(v, [&3, &4]); } +//~^ ERROR `x` does not live long enough +//~| ERROR `y` does not live long enough diff --git a/src/test/ui/span/vec_refs_data_with_early_death.stderr b/src/test/ui/span/vec_refs_data_with_early_death.stderr new file mode 100644 index 0000000000000..8cc12c32b9303 --- /dev/null +++ b/src/test/ui/span/vec_refs_data_with_early_death.stderr @@ -0,0 +1,24 @@ +error: `x` does not live long enough + --> $DIR/vec_refs_data_with_early_death.rs:31:1 + | +27 | v.push(&x); + | - borrow occurs here +... +31 | } + | ^ `x` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: `y` does not live long enough + --> $DIR/vec_refs_data_with_early_death.rs:31:1 + | +28 | v.push(&y); + | - borrow occurs here +... 
+31 | } + | ^ `y` dropped here while still borrowed + | + = note: values in a scope are dropped in the opposite order they are created + +error: aborting due to 2 previous errors + diff --git a/src/tools/cargotest/Cargo.lock b/src/tools/cargotest/Cargo.lock deleted file mode 100644 index bafde903baa44..0000000000000 --- a/src/tools/cargotest/Cargo.lock +++ /dev/null @@ -1,4 +0,0 @@ -[root] -name = "cargotest" -version = "0.1.0" - diff --git a/src/tools/cargotest/main.rs b/src/tools/cargotest/main.rs index acf12dab16aff..800186a926df6 100644 --- a/src/tools/cargotest/main.rs +++ b/src/tools/cargotest/main.rs @@ -24,7 +24,7 @@ struct Test { const TEST_REPOS: &'static [Test] = &[Test { name: "cargo", repo: "https://github.com/rust-lang/cargo", - sha: "2d85908217f99a30aa5f68e05a8980704bb71fad", + sha: "806e3c368a15f618244a3b4e918bf77f9c403fd0", lock: None, }, Test { @@ -36,6 +36,20 @@ const TEST_REPOS: &'static [Test] = &[Test { fn main() { + // One of the projects being tested here is Cargo, and when being tested + // Cargo will at some point call `nmake.exe` on Windows MSVC. Unfortunately + // `nmake` will read these two environment variables below and try to + // intepret them. We're likely being run, however, from MSYS `make` which + // uses the same variables. + // + // As a result, to prevent confusion and errors, we remove these variables + // from our environment to prevent passing MSYS make flags to nmake, causing + // it to blow up. + if cfg!(target_env = "msvc") { + env::remove_var("MAKE"); + env::remove_var("MAKEFLAGS"); + } + let args = env::args().collect::>(); let ref cargo = args[1]; let out_dir = Path::new(&args[2]); diff --git a/src/tools/compiletest/Cargo.lock b/src/tools/compiletest/Cargo.lock deleted file mode 100644 index 755697806a00e..0000000000000 --- a/src/tools/compiletest/Cargo.lock +++ /dev/null @@ -1,92 +0,0 @@ -[root] -name = "compiletest" -version = "0.0.0" -dependencies = [ - "env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", - "serialize 0.0.0", -] - -[[package]] -name = "aho-corasick" -version = "0.5.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "env_logger" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", - "regex 0.1.62 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "libc" -version = "0.2.9" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "log" -version = "0.0.0" - -[[package]] -name = "log" -version = "0.3.6" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "memchr" -version = "0.1.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "mempool" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "regex" -version = "0.1.62" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "aho-corasick 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", - "memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", - "mempool 0.3.0 
(registry+https://github.com/rust-lang/crates.io-index)", - "regex-syntax 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", - "utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "regex-syntax" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "serialize" -version = "0.0.0" -dependencies = [ - "log 0.0.0", -] - -[[package]] -name = "utf8-ranges" -version = "0.1.3" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[metadata] -"checksum aho-corasick 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "67077478f0a03952bed2e6786338d400d40c25e9836e08ad50af96607317fd03" -"checksum env_logger 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "aba65b63ffcc17ffacd6cf5aa843da7c5a25e3bd4bbe0b7def8b214e411250e5" -"checksum libc 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "95ca44454e7cfe7f8a2095a41a10c79d96a177c0b1672cbf1a30d901a9c16ee5" -"checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054" -"checksum memchr 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d8b629fb514376c675b98c1421e80b151d3817ac42d7c667717d282761418d20" -"checksum mempool 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f997e65fe3eb7a6f8557a7a477de9ed5c511850c85363d13f7b0145b526ed36a" -"checksum regex 0.1.62 (registry+https://github.com/rust-lang/crates.io-index)" = "22bdab319e36735729aa280752c9293b29ec0053a6810679d697515f80a986fe" -"checksum regex-syntax 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "841591b1e05609a643e3b4d0045fce04f701daba7151ddcd3ad47b080693d5a9" -"checksum utf8-ranges 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a1ca13c08c41c9c3e04224ed9ff80461d97e121589ff27c753a16cb10830ae0f" diff --git a/src/tools/compiletest/Cargo.toml b/src/tools/compiletest/Cargo.toml index 573ee38886693..e05d57365f875 100644 --- a/src/tools/compiletest/Cargo.toml +++ b/src/tools/compiletest/Cargo.toml @@ -12,5 +12,5 @@ opt-level = 2 [dependencies] log = "0.3" -env_logger = "0.3" -serialize = { path = "../../libserialize" } \ No newline at end of file +env_logger = { version = "0.3.5", default-features = false } +serialize = { path = "../../libserialize" } diff --git a/src/tools/compiletest/src/common.rs b/src/tools/compiletest/src/common.rs index 5d522736089ea..81cb927f26b02 100644 --- a/src/tools/compiletest/src/common.rs +++ b/src/tools/compiletest/src/common.rs @@ -183,4 +183,5 @@ pub struct Config { pub cflags: String, pub llvm_components: String, pub llvm_cxxflags: String, + pub nodejs: Option, } diff --git a/src/tools/compiletest/src/header.rs b/src/tools/compiletest/src/header.rs index 899a366a4bb74..503a851676925 100644 --- a/src/tools/compiletest/src/header.rs +++ b/src/tools/compiletest/src/header.rs @@ -182,42 +182,32 @@ pub struct TestProps { // testing harness and used when generating compilation // arguments. (In particular, it propagates to the aux-builds.) pub incremental_dir: Option, + // Specifies that a cfail test must actually compile without errors. 
+ pub must_compile_successfully: bool, } impl TestProps { pub fn new() -> Self { - let error_patterns = Vec::new(); - let aux_builds = Vec::new(); - let exec_env = Vec::new(); - let run_flags = None; - let pp_exact = None; - let check_lines = Vec::new(); - let build_aux_docs = false; - let force_host = false; - let check_stdout = false; - let no_prefer_dynamic = false; - let pretty_expanded = false; - let pretty_compare_only = false; - let forbid_output = Vec::new(); TestProps { - error_patterns: error_patterns, + error_patterns: vec![], compile_flags: vec![], - run_flags: run_flags, - pp_exact: pp_exact, - aux_builds: aux_builds, + run_flags: None, + pp_exact: None, + aux_builds: vec![], revisions: vec![], rustc_env: vec![], - exec_env: exec_env, - check_lines: check_lines, - build_aux_docs: build_aux_docs, - force_host: force_host, - check_stdout: check_stdout, - no_prefer_dynamic: no_prefer_dynamic, - pretty_expanded: pretty_expanded, + exec_env: vec![], + check_lines: vec![], + build_aux_docs: false, + force_host: false, + check_stdout: false, + no_prefer_dynamic: false, + pretty_expanded: false, pretty_mode: format!("normal"), - pretty_compare_only: pretty_compare_only, - forbid_output: forbid_output, + pretty_compare_only: false, + forbid_output: vec![], incremental_dir: None, + must_compile_successfully: false, } } @@ -313,6 +303,10 @@ impl TestProps { if let Some(of) = parse_forbid_output(ln) { self.forbid_output.push(of); } + + if !self.must_compile_successfully { + self.must_compile_successfully = parse_must_compile_successfully(ln); + } }); for key in vec!["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] { @@ -420,6 +414,10 @@ fn parse_pretty_compare_only(line: &str) -> bool { parse_name_directive(line, "pretty-compare-only") } +fn parse_must_compile_successfully(line: &str) -> bool { + parse_name_directive(line, "must-compile-successfully") +} + fn parse_env(line: &str, name: &str) -> Option<(String, String)> { parse_name_value_directive(line, name).map(|nv| { // nv is either FOO or FOO=BAR diff --git a/src/tools/compiletest/src/main.rs b/src/tools/compiletest/src/main.rs index 4afeb3613319b..2dc7cdbf93502 100644 --- a/src/tools/compiletest/src/main.rs +++ b/src/tools/compiletest/src/main.rs @@ -13,9 +13,10 @@ #![feature(box_syntax)] #![feature(rustc_private)] #![feature(test)] -#![feature(question_mark)] #![feature(libc)] +#![cfg_attr(stage0, feature(question_mark))] + #![deny(warnings)] extern crate libc; @@ -72,7 +73,7 @@ fn main() { pub fn parse_config(args: Vec ) -> Config { let groups : Vec = - vec!(reqopt("", "compile-lib-path", "path to host shared libraries", "PATH"), + vec![reqopt("", "compile-lib-path", "path to host shared libraries", "PATH"), reqopt("", "run-lib-path", "path to target shared libraries", "PATH"), reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH"), reqopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH"), @@ -109,7 +110,8 @@ pub fn parse_config(args: Vec ) -> Config { reqopt("", "cflags", "flags for the C compiler", "FLAGS"), reqopt("", "llvm-components", "list of LLVM components built in", "LIST"), reqopt("", "llvm-cxxflags", "C++ flags for LLVM", "FLAGS"), - optflag("h", "help", "show this message")); + optopt("", "nodejs", "the name of nodejs", "PATH"), + optflag("h", "help", "show this message")]; let (argv0, args_) = args.split_first().unwrap(); if args.len() == 1 || args[1] == "-h" || args[1] == "--help" { @@ -190,6 +192,7 @@ pub fn parse_config(args: Vec ) -> Config { cflags: matches.opt_str("cflags").unwrap(), 
llvm_components: matches.opt_str("llvm-components").unwrap(), llvm_cxxflags: matches.opt_str("llvm-cxxflags").unwrap(), + nodejs: matches.opt_str("nodejs"), } } @@ -313,6 +316,7 @@ pub fn test_opts(config: &Config) -> test::TestOpts { }, color: test::AutoColor, test_threads: None, + skip: vec![], } } @@ -430,10 +434,17 @@ pub fn make_test(config: &Config, testpaths: &TestPaths) -> test::TestDescAndFn } }; + // Debugging emscripten code doesn't make sense today + let mut ignore = early_props.ignore; + if (config.mode == DebugInfoGdb || config.mode == DebugInfoLldb) && + config.target.contains("emscripten") { + ignore = true; + } + test::TestDescAndFn { desc: test::TestDesc { name: make_test_name(config, testpaths), - ignore: early_props.ignore, + ignore: ignore, should_panic: should_panic, }, testfn: make_test_closure(config, testpaths), @@ -454,7 +465,7 @@ pub fn make_test_name(config: &Config, testpaths: &TestPaths) -> test::TestName pub fn make_test_closure(config: &Config, testpaths: &TestPaths) -> test::TestFn { let config = config.clone(); let testpaths = testpaths.clone(); - test::DynTestFn(Box::new(move || { + test::DynTestFn(Box::new(move |()| { runtest::run(config, &testpaths) })) } diff --git a/src/tools/compiletest/src/runtest.rs b/src/tools/compiletest/src/runtest.rs index bfb85dd479d56..03c05f919b79e 100644 --- a/src/tools/compiletest/src/runtest.rs +++ b/src/tools/compiletest/src/runtest.rs @@ -129,13 +129,21 @@ impl<'test> TestCx<'test> { fn run_cfail_test(&self) { let proc_res = self.compile_test(); - if proc_res.status.success() { - self.fatal_proc_rec( - &format!("{} test compiled successfully!", self.config.mode)[..], - &proc_res); - } + if self.props.must_compile_successfully { + if !proc_res.status.success() { + self.fatal_proc_rec( + "test compilation failed although it shouldn't!", + &proc_res); + } + } else { + if proc_res.status.success() { + self.fatal_proc_rec( + &format!("{} test compiled successfully!", self.config.mode)[..], + &proc_res); + } - self.check_correct_failure_status(&proc_res); + self.check_correct_failure_status(&proc_res); + } let output_to_check = self.get_output(&proc_res); let expected_errors = errors::load_errors(&self.testpaths.file, self.revision); @@ -147,6 +155,7 @@ impl<'test> TestCx<'test> { } else { self.check_error_patterns(&output_to_check, &proc_res); } + self.check_no_compiler_crash(&proc_res); self.check_forbid_output(&output_to_check, &proc_res); } @@ -244,7 +253,7 @@ impl<'test> TestCx<'test> { let mut src = String::new(); File::open(&self.testpaths.file).unwrap().read_to_string(&mut src).unwrap(); - let mut srcs = vec!(src); + let mut srcs = vec![src]; let mut round = 0; while round < rounds { @@ -326,13 +335,13 @@ impl<'test> TestCx<'test> { -> ProcArgs { let aux_dir = self.aux_output_dir_name(); // FIXME (#9639): This needs to handle non-utf8 paths - let mut args = vec!("-".to_owned(), + let mut args = vec!["-".to_owned(), "-Zunstable-options".to_owned(), "--unpretty".to_owned(), pretty_type, format!("--target={}", self.config.target), "-L".to_owned(), - aux_dir.to_str().unwrap().to_owned()); + aux_dir.to_str().unwrap().to_owned()]; args.extend(self.split_maybe_args(&self.config.target_rustcflags)); args.extend(self.props.compile_flags.iter().cloned()); return ProcArgs { @@ -379,7 +388,7 @@ actual:\n\ self.create_dir_racy(&out_dir); // FIXME (#9639): This needs to handle non-utf8 paths - let mut args = vec!("-".to_owned(), + let mut args = vec!["-".to_owned(), "-Zno-trans".to_owned(), "--out-dir".to_owned(), 
out_dir.to_str().unwrap().to_owned(), @@ -387,7 +396,7 @@ actual:\n\ "-L".to_owned(), self.config.build_base.to_str().unwrap().to_owned(), "-L".to_owned(), - aux_dir.to_str().unwrap().to_owned()); + aux_dir.to_str().unwrap().to_owned()]; if let Some(revision) = self.revision { args.extend(vec![ format!("--cfg"), @@ -478,7 +487,7 @@ actual:\n\ exe_file.to_str().unwrap().to_owned(), self.config.adb_test_dir.clone() ], - vec!(("".to_owned(), "".to_owned())), + vec![("".to_owned(), "".to_owned())], Some("".to_owned())) .expect(&format!("failed to exec `{:?}`", self.config.adb_path)); @@ -490,7 +499,7 @@ actual:\n\ "tcp:5039".to_owned(), "tcp:5039".to_owned() ], - vec!(("".to_owned(), "".to_owned())), + vec![("".to_owned(), "".to_owned())], Some("".to_owned())) .expect(&format!("failed to exec `{:?}`", self.config.adb_path)); @@ -511,8 +520,8 @@ actual:\n\ "shell".to_owned(), adb_arg.clone() ], - vec!(("".to_owned(), - "".to_owned())), + vec![("".to_owned(), + "".to_owned())], Some("".to_owned())) .expect(&format!("failed to exec `{:?}`", self.config.adb_path)); loop { @@ -526,10 +535,10 @@ actual:\n\ let debugger_script = self.make_out_name("debugger.script"); // FIXME (#9639): This needs to handle non-utf8 paths let debugger_opts = - vec!("-quiet".to_owned(), + vec!["-quiet".to_owned(), "-batch".to_owned(), "-nx".to_owned(), - format!("-command={}", debugger_script.to_str().unwrap())); + format!("-command={}", debugger_script.to_str().unwrap())]; let mut gdb_path = tool_path; gdb_path.push_str(&format!("/bin/{}-gdb", self.config.target)); @@ -541,7 +550,7 @@ actual:\n\ &gdb_path, None, &debugger_opts, - vec!(("".to_owned(), "".to_owned())), + vec![("".to_owned(), "".to_owned())], None) .expect(&format!("failed to exec `{:?}`", gdb_path)); let cmdline = { @@ -633,10 +642,10 @@ actual:\n\ // FIXME (#9639): This needs to handle non-utf8 paths let debugger_opts = - vec!("-quiet".to_owned(), + vec!["-quiet".to_owned(), "-batch".to_owned(), "-nx".to_owned(), - format!("-command={}", debugger_script.to_str().unwrap())); + format!("-command={}", debugger_script.to_str().unwrap())]; let proc_args = ProcArgs { prog: debugger().to_owned(), @@ -821,9 +830,9 @@ actual:\n\ let command_directive = format!("{}-command", debugger_prefix); let check_directive = format!("{}-check", debugger_prefix); - let mut breakpoint_lines = vec!(); - let mut commands = vec!(); - let mut check_lines = vec!(); + let mut breakpoint_lines = vec![]; + let mut commands = vec![]; + let mut check_lines = vec![]; let mut counter = 1; let reader = BufReader::new(File::open(&self.testpaths.file).unwrap()); for line in reader.lines() { @@ -943,8 +952,12 @@ actual:\n\ output_to_check: &str, proc_res: &ProcRes) { if self.props.error_patterns.is_empty() { - self.fatal(&format!("no error pattern specified in {:?}", - self.testpaths.file.display())); + if self.props.must_compile_successfully { + return + } else { + self.fatal(&format!("no error pattern specified in {:?}", + self.testpaths.file.display())); + } } let mut next_err_idx = 0; let mut next_err_pat = self.props.error_patterns[next_err_idx].trim(); @@ -978,7 +991,7 @@ actual:\n\ fn check_no_compiler_crash(&self, proc_res: &ProcRes) { for line in proc_res.stderr.lines() { - if line.starts_with("error: internal compiler error:") { + if line.contains("error: internal compiler error") { self.fatal_proc_rec("compiler encountered internal error", proc_res); } } @@ -1107,8 +1120,8 @@ actual:\n\ fn compile_test(&self) -> ProcRes { let aux_dir = self.aux_output_dir_name(); // FIXME (#9639): 
This needs to handle non-utf8 paths - let link_args = vec!("-L".to_owned(), - aux_dir.to_str().unwrap().to_owned()); + let link_args = vec!["-L".to_owned(), + aux_dir.to_str().unwrap().to_owned()]; let args = self.make_compile_args(link_args, &self.testpaths.file, TargetLocation::ThisFile(self.make_exe_name())); @@ -1155,7 +1168,6 @@ actual:\n\ "arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android" => { self._arm_exec_compiled_test(env) } - _=> { let aux_dir = self.aux_output_dir_name(); self.compose_and_run(self.make_run_args(), @@ -1219,9 +1231,9 @@ actual:\n\ if (self.config.target.contains("musl") && !aux_props.force_host) || self.config.target.contains("emscripten") { - vec!("--crate-type=lib".to_owned()) + vec!["--crate-type=lib".to_owned()] } else { - vec!("--crate-type=dylib".to_owned()) + vec!["--crate-type=dylib".to_owned()] } }; crate_type.extend(extra_link_args.clone()); @@ -1303,10 +1315,10 @@ actual:\n\ }; // FIXME (#9639): This needs to handle non-utf8 paths - let mut args = vec!(input_file.to_str().unwrap().to_owned(), + let mut args = vec![input_file.to_str().unwrap().to_owned(), "-L".to_owned(), self.config.build_base.to_str().unwrap().to_owned(), - format!("--target={}", target)); + format!("--target={}", target)]; if let Some(revision) = self.revision { args.extend(vec![ @@ -1408,7 +1420,7 @@ actual:\n\ fn make_exe_name(&self) -> PathBuf { let mut f = self.output_base_name(); // FIXME: This is using the host architecture exe suffix, not target! - if self.config.target == "asmjs-unknown-emscripten" { + if self.config.target.contains("emscripten") { let mut fname = f.file_name().unwrap().to_os_string(); fname.push(".js"); f.set_file_name(&fname); @@ -1426,8 +1438,9 @@ actual:\n\ let mut args = self.split_maybe_args(&self.config.runtool); // If this is emscripten, then run tests under nodejs - if self.config.target == "asmjs-unknown-emscripten" { - args.push("nodejs".to_owned()); + if self.config.target.contains("emscripten") { + let nodejs = self.config.nodejs.clone().unwrap_or("nodejs".to_string()); + args.push(nodejs); } let exe_file = self.make_exe_name(); @@ -1600,7 +1613,7 @@ actual:\n\ args.prog.clone(), self.config.adb_test_dir.clone() ], - vec!(("".to_owned(), "".to_owned())), + vec![("".to_owned(), "".to_owned())], Some("".to_owned())) .expect(&format!("failed to exec `{}`", self.config.adb_path)); @@ -1632,7 +1645,7 @@ actual:\n\ &self.config.adb_path, None, &runargs, - vec!(("".to_owned(), "".to_owned())), Some("".to_owned())) + vec![("".to_owned(), "".to_owned())], Some("".to_owned())) .expect(&format!("failed to exec `{}`", self.config.adb_path)); // get exitcode of result @@ -1646,7 +1659,7 @@ actual:\n\ &self.config.adb_path, None, &runargs, - vec!(("".to_owned(), "".to_owned())), + vec![("".to_owned(), "".to_owned())], Some("".to_owned())) .expect(&format!("failed to exec `{}`", self.config.adb_path)); @@ -1670,7 +1683,7 @@ actual:\n\ &self.config.adb_path, None, &runargs, - vec!(("".to_owned(), "".to_owned())), + vec![("".to_owned(), "".to_owned())], Some("".to_owned())) .expect(&format!("failed to exec `{}`", self.config.adb_path)); @@ -1685,7 +1698,7 @@ actual:\n\ &self.config.adb_path, None, &runargs, - vec!(("".to_owned(), "".to_owned())), + vec![("".to_owned(), "".to_owned())], Some("".to_owned())) .expect(&format!("failed to exec `{}`", self.config.adb_path)); @@ -1717,8 +1730,8 @@ actual:\n\ .to_owned(), self.config.adb_test_dir.to_owned(), ], - vec!(("".to_owned(), - "".to_owned())), + vec![("".to_owned(), + "".to_owned())], 
Some("".to_owned())) .expect(&format!("failed to exec `{}`", self.config.adb_path)); @@ -1736,9 +1749,9 @@ actual:\n\ fn compile_test_and_save_ir(&self) -> ProcRes { let aux_dir = self.aux_output_dir_name(); // FIXME (#9639): This needs to handle non-utf8 paths - let mut link_args = vec!("-L".to_owned(), - aux_dir.to_str().unwrap().to_owned()); - let llvm_args = vec!("--emit=llvm-ir".to_owned(),); + let mut link_args = vec!["-L".to_owned(), + aux_dir.to_str().unwrap().to_owned()]; + let llvm_args = vec!["--emit=llvm-ir".to_owned(),]; link_args.extend(llvm_args); let args = self.make_compile_args(link_args, &self.testpaths.file, @@ -1755,8 +1768,8 @@ actual:\n\ let proc_args = ProcArgs { // FIXME (#9639): This needs to handle non-utf8 paths prog: prog.to_str().unwrap().to_owned(), - args: vec!(format!("-input-file={}", irfile.to_str().unwrap()), - self.testpaths.file.to_str().unwrap().to_owned()) + args: vec![format!("-input-file={}", irfile.to_str().unwrap()), + self.testpaths.file.to_str().unwrap().to_owned()] }; self.compose_and_run(proc_args, Vec::new(), "", None, None) } @@ -2092,12 +2105,17 @@ actual:\n\ .collect::>().join(" "); cmd.env("IS_MSVC", "1") + .env("IS_WINDOWS", "1") .env("MSVC_LIB", format!("'{}' -nologo", lib.display())) .env("CC", format!("'{}' {}", self.config.cc, cflags)) .env("CXX", &self.config.cxx); } else { cmd.env("CC", format!("{} {}", self.config.cc, self.config.cflags)) .env("CXX", format!("{} {}", self.config.cxx, self.config.cflags)); + + if self.config.target.contains("windows") { + cmd.env("IS_WINDOWS", "1"); + } } let output = cmd.output().expect("failed to spawn `make`"); diff --git a/src/tools/compiletest/src/util.rs b/src/tools/compiletest/src/util.rs index 2db53947d881d..cad71c59f0a4a 100644 --- a/src/tools/compiletest/src/util.rs +++ b/src/tools/compiletest/src/util.rs @@ -17,6 +17,7 @@ const OS_TABLE: &'static [(&'static str, &'static str)] = &[("android", "android ("darwin", "macos"), ("dragonfly", "dragonfly"), ("freebsd", "freebsd"), + ("haiku", "haiku"), ("ios", "ios"), ("linux", "linux"), ("mingw32", "windows"), @@ -42,7 +43,8 @@ const ARCH_TABLE: &'static [(&'static str, &'static str)] = &[("aarch64", "aarch ("sparc", "sparc"), ("x86_64", "x86_64"), ("xcore", "xcore"), - ("asmjs", "asmjs")]; + ("asmjs", "asmjs"), + ("wasm32", "wasm32")]; pub fn get_os(triple: &str) -> &'static str { for &(triple_os, os) in OS_TABLE { diff --git a/src/tools/error_index_generator/Cargo.lock b/src/tools/error_index_generator/Cargo.lock deleted file mode 100644 index b7d2cfcaaa1a8..0000000000000 --- a/src/tools/error_index_generator/Cargo.lock +++ /dev/null @@ -1,4 +0,0 @@ -[root] -name = "error_index_generator" -version = "0.0.0" - diff --git a/src/tools/error_index_generator/main.rs b/src/tools/error_index_generator/main.rs index 2c734c8e3e4d4..e33df0dfbc8de 100644 --- a/src/tools/error_index_generator/main.rs +++ b/src/tools/error_index_generator/main.rs @@ -9,7 +9,6 @@ // except according to those terms. 
#![feature(rustc_private, rustdoc)] -#![feature(question_mark)] extern crate syntax; extern crate rustdoc; @@ -25,7 +24,7 @@ use std::path::PathBuf; use syntax::diagnostics::metadata::{get_metadata_dir, ErrorMetadataMap, ErrorMetadata}; -use rustdoc::html::markdown::Markdown; +use rustdoc::html::markdown::{Markdown, PLAYGROUND}; use rustc_serialize::json; enum OutputFormat { @@ -202,6 +201,9 @@ fn parse_args() -> (OutputFormat, PathBuf) { } fn main() { + PLAYGROUND.with(|slot| { + *slot.borrow_mut() = Some((None, String::from("https://play.rust-lang.org/"))); + }); let (format, dst) = parse_args(); if let Err(e) = main_with_result(format, &dst) { panic!("{}", e.description()); diff --git a/src/tools/linkchecker/Cargo.lock b/src/tools/linkchecker/Cargo.lock deleted file mode 100644 index d71df6d3f83a8..0000000000000 --- a/src/tools/linkchecker/Cargo.lock +++ /dev/null @@ -1,50 +0,0 @@ -[root] -name = "linkchecker" -version = "0.1.0" -dependencies = [ - "url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "idna" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)", - "unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "matches" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "unicode-bidi" -version = "0.2.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[[package]] -name = "unicode-normalization" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "url" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", - "matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", -] - -[metadata] -"checksum idna 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1053236e00ce4f668aeca4a769a09b3bf5a682d802abd6f3cb39374f6b162c11" -"checksum matches 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "15305656809ce5a4805b1ff2946892810992197ce1270ff79baded852187942e" -"checksum unicode-bidi 0.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c1f7ceb96afdfeedee42bade65a0d585a6a0106f681b6749c8ff4daa8df30b3f" -"checksum unicode-normalization 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "26643a2f83bac55f1976fb716c10234485f9202dcd65cfbdf9da49867b271172" -"checksum url 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "afe9ec54bc4db14bc8744b7fed060d785ac756791450959b2248443319d5b119" diff --git a/src/tools/linkchecker/main.rs b/src/tools/linkchecker/main.rs index 3e2bc9032a1cc..f79cc76e67d00 100644 --- a/src/tools/linkchecker/main.rs +++ b/src/tools/linkchecker/main.rs @@ -24,8 +24,6 @@ //! A few whitelisted exceptions are allowed as there's known bugs in rustdoc, //! but this should catch the majority of "broken link" cases. 
-#![feature(question_mark)] - extern crate url; use std::env; diff --git a/src/tools/rustbook/Cargo.lock b/src/tools/rustbook/Cargo.lock deleted file mode 100644 index e541ce4b2b807..0000000000000 --- a/src/tools/rustbook/Cargo.lock +++ /dev/null @@ -1,4 +0,0 @@ -[root] -name = "rustbook" -version = "0.0.0" - diff --git a/src/tools/rustbook/book.rs b/src/tools/rustbook/book.rs index 36a37dba1fa0f..c5f72127a9c80 100644 --- a/src/tools/rustbook/book.rs +++ b/src/tools/rustbook/book.rs @@ -94,16 +94,16 @@ pub fn parse_summary(input: &mut Read, src: &Path) -> Result> } } - let mut top_items = vec!(); - let mut stack = vec!(); - let mut errors = vec!(); + let mut top_items = vec![]; + let mut stack = vec![]; + let mut errors = vec![]; // always include the introduction top_items.push(BookItem { title: "Introduction".to_string(), path: PathBuf::from("README.md"), path_to_root: PathBuf::from(""), - children: vec!(), + children: vec![], }); for line_result in BufReader::new(input).lines() { @@ -142,7 +142,7 @@ pub fn parse_summary(input: &mut Read, src: &Path) -> Result> title: title, path: path_from_root, path_to_root: path_to_root, - children: vec!(), + children: vec![], }; let level = indent.chars().map(|c| -> usize { match c { diff --git a/src/tools/rustbook/build.rs b/src/tools/rustbook/build.rs index 09c2d2510e317..d88ff48843a40 100644 --- a/src/tools/rustbook/build.rs +++ b/src/tools/rustbook/build.rs @@ -131,7 +131,6 @@ fn render(book: &Book, tgt: &Path) -> CliResult<()> { { let mut buffer = BufWriter::new(File::create(&postlude)?); writeln!(&mut buffer, "")?; - writeln!(&mut buffer, "")?; writeln!(&mut buffer, "")?; } @@ -143,7 +142,7 @@ fn render(book: &Book, tgt: &Path) -> CliResult<()> { format!("-o{}", out_path.display()), format!("--html-before-content={}", prelude.display()), format!("--html-after-content={}", postlude.display()), - format!("--markdown-playground-url=https://play.rust-lang.org"), + format!("--markdown-playground-url=https://play.rust-lang.org/"), format!("--markdown-css={}", item.path_to_root.join("rustbook.css").display()), "--markdown-no-toc".to_string(), ]; @@ -158,10 +157,6 @@ fn render(book: &Book, tgt: &Path) -> CliResult<()> { // create index.html from the root README fs::copy(&tgt.join("README.html"), &tgt.join("index.html"))?; - // Copy js for playpen - let mut playpen = File::create(tgt.join("playpen.js"))?; - let js = include_bytes!("../../librustdoc/html/static/playpen.js"); - playpen.write_all(js)?; Ok(()) } diff --git a/src/tools/rustbook/main.rs b/src/tools/rustbook/main.rs index 436dc11975336..906251db1c2f2 100644 --- a/src/tools/rustbook/main.rs +++ b/src/tools/rustbook/main.rs @@ -12,7 +12,6 @@ #![feature(rustc_private)] #![feature(rustdoc)] -#![feature(question_mark)] extern crate rustdoc; extern crate rustc_back; diff --git a/src/tools/tidy/Cargo.lock b/src/tools/tidy/Cargo.lock deleted file mode 100644 index acaf9e5550fd3..0000000000000 --- a/src/tools/tidy/Cargo.lock +++ /dev/null @@ -1,4 +0,0 @@ -[root] -name = "tidy" -version = "0.1.0" - diff --git a/src/tools/tidy/src/bins.rs b/src/tools/tidy/src/bins.rs index 876ae404bbaed..ef93b0858b02f 100644 --- a/src/tools/tidy/src/bins.rs +++ b/src/tools/tidy/src/bins.rs @@ -24,37 +24,47 @@ pub fn check(_path: &Path, _bad: &mut bool) {} #[cfg(unix)] pub fn check(path: &Path, bad: &mut bool) { use std::fs; + use std::io::Read; use std::process::{Command, Stdio}; use std::os::unix::prelude::*; + if let Ok(mut file) = fs::File::open("/proc/version") { + let mut contents = String::new(); + 
file.read_to_string(&mut contents).unwrap(); + // Probably on Windows Linux Subsystem, all files will be marked as + // executable, so skip checking. + if contents.contains("Microsoft") { + return; + } + } + super::walk(path, &mut |path| super::filter_dirs(path) || path.ends_with("src/etc"), &mut |file| { let filename = file.file_name().unwrap().to_string_lossy(); let extensions = [".py", ".sh"]; if extensions.iter().any(|e| filename.ends_with(e)) { - return + return; } let metadata = t!(fs::symlink_metadata(&file), &file); if metadata.mode() & 0o111 != 0 { let rel_path = file.strip_prefix(path).unwrap(); let git_friendly_path = rel_path.to_str().unwrap().replace("\\", "/"); - let ret_code = Command::new("git") - .arg("ls-files") - .arg(&git_friendly_path) - .current_dir(path) - .stdout(Stdio::null()) - .stderr(Stdio::null()) - .status() - .unwrap_or_else(|e| { - panic!("could not run git ls-files: {}", e); - }); - if ret_code.success() { + let output = Command::new("git") + .arg("ls-files") + .arg(&git_friendly_path) + .current_dir(path) + .stderr(Stdio::null()) + .output() + .unwrap_or_else(|e| { + panic!("could not run git ls-files: {}", e); + }); + let path_bytes = rel_path.as_os_str().as_bytes(); + if output.status.success() && output.stdout.starts_with(path_bytes) { println!("binary checked into source: {}", file.display()); *bad = true; } } }) } - diff --git a/src/tools/tidy/src/cargo.rs b/src/tools/tidy/src/cargo.rs index 4932fd5147afa..a7784e65c5b1c 100644 --- a/src/tools/tidy/src/cargo.rs +++ b/src/tools/tidy/src/cargo.rs @@ -88,9 +88,11 @@ fn verify(tomlfile: &Path, libfile: &Path, bad: &mut bool) { continue } - // We want the compiler to depend on the proc_macro crate so that it is built and - // included in the end, but we don't want to actually use it in the compiler. - if toml.contains("name = \"rustc_driver\"") && krate == "proc_macro" { + // We want the compiler to depend on the proc_macro_plugin crate so + // that it is built and included in the end, but we don't want to + // actually use it in the compiler. + if toml.contains("name = \"rustc_driver\"") && + krate == "proc_macro_plugin" { continue } diff --git a/src/tools/tidy/src/features.rs b/src/tools/tidy/src/features.rs index 199e8a77df717..4ef07f7e4b896 100644 --- a/src/tools/tidy/src/features.rs +++ b/src/tools/tidy/src/features.rs @@ -18,27 +18,42 @@ //! 
* Library features have at most one `since` value use std::collections::HashMap; +use std::fmt; use std::fs::File; use std::io::prelude::*; use std::path::Path; -const STATUSES: &'static [&'static str] = &[ - "Active", "Deprecated", "Removed", "Accepted", -]; +#[derive(PartialEq)] +enum Status { + Stable, + Unstable, +} + +impl fmt::Display for Status { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + let as_str = match *self { + Status::Stable => "stable", + Status::Unstable => "unstable", + }; + fmt::Display::fmt(as_str, f) + } +} + struct Feature { name: String, + level: Status, since: String, - status: String, } struct LibFeature { - level: String, + level: Status, since: String, } pub fn check(path: &Path, bad: &mut bool) { let features = collect_lang_features(&path.join("libsyntax/feature_gate.rs")); + assert!(!features.is_empty()); let mut lib_features = HashMap::::new(); let mut contents = String::new(); @@ -48,7 +63,7 @@ pub fn check(path: &Path, bad: &mut bool) { let filename = file.file_name().unwrap().to_string_lossy(); if !filename.ends_with(".rs") || filename == "features.rs" || filename == "diagnostic_list.rs" { - return + return; } contents.truncate(0); @@ -60,24 +75,24 @@ pub fn check(path: &Path, bad: &mut bool) { *bad = true; }; let level = if line.contains("[unstable(") { - "unstable" + Status::Unstable } else if line.contains("[stable(") { - "stable" + Status::Stable } else { - continue + continue; }; let feature_name = match find_attr_val(line, "feature") { Some(name) => name, None => { err("malformed stability attribute"); - continue + continue; } }; let since = match find_attr_val(line, "since") { Some(name) => name, - None if level == "stable" => { + None if level == Status::Stable => { err("malformed stability attribute"); - continue + continue; } None => "None", }; @@ -92,27 +107,34 @@ pub fn check(path: &Path, bad: &mut bool) { if s.since != since { err("different `since` than before"); } - continue + continue; } - lib_features.insert(feature_name.to_owned(), LibFeature { - level: level.to_owned(), - since: since.to_owned(), - }); + lib_features.insert(feature_name.to_owned(), + LibFeature { + level: level, + since: since.to_owned(), + }); } }); if *bad { - return + return; } let mut lines = Vec::new(); for feature in features { lines.push(format!("{:<32} {:<8} {:<12} {:<8}", - feature.name, "lang", feature.status, feature.since)); + feature.name, + "lang", + feature.level, + feature.since)); } for (name, feature) in lib_features { lines.push(format!("{:<32} {:<8} {:<12} {:<8}", - name, "lib", feature.level, feature.since)); + name, + "lib", + feature.level, + feature.since)); } lines.sort(); @@ -122,39 +144,32 @@ pub fn check(path: &Path, bad: &mut bool) { } fn find_attr_val<'a>(line: &'a str, attr: &str) -> Option<&'a str> { - line.find(attr).and_then(|i| { - line[i..].find("\"").map(|j| i + j + 1) - }).and_then(|i| { - line[i..].find("\"").map(|j| (i, i + j)) - }).map(|(i, j)| { - &line[i..j] - }) + line.find(attr) + .and_then(|i| line[i..].find('"').map(|j| i + j + 1)) + .and_then(|i| line[i..].find('"').map(|j| (i, i + j))) + .map(|(i, j)| &line[i..j]) } fn collect_lang_features(path: &Path) -> Vec { let mut contents = String::new(); t!(t!(File::open(path)).read_to_string(&mut contents)); - let mut features = Vec::new(); - for line in contents.lines().map(|l| l.trim()) { - if !STATUSES.iter().any(|s| line.starts_with(&format!("({}", s))) { - continue - } - let mut parts = line.split(","); - let status = match 
&parts.next().unwrap().trim().replace("(", "")[..] { - "active" => "unstable", - "removed" => "unstable", - "accepted" => "stable", - s => panic!("unknown status: {}", s), - }; - let name = parts.next().unwrap().trim().to_owned(); - let since = parts.next().unwrap().trim().replace("\"", ""); - - features.push(Feature { - name: name, - since: since, - status: status.to_owned(), - }); - } - return features + contents.lines() + .filter_map(|line| { + let mut parts = line.trim().split(","); + let level = match parts.next().map(|l| l.trim().trim_left_matches('(')) { + Some("active") => Status::Unstable, + Some("removed") => Status::Unstable, + Some("accepted") => Status::Stable, + _ => return None, + }; + let name = parts.next().unwrap().trim(); + let since = parts.next().unwrap().trim().trim_matches('"'); + Some(Feature { + name: name.to_owned(), + level: level, + since: since.to_owned(), + }) + }) + .collect() } diff --git a/src/tools/tidy/src/main.rs b/src/tools/tidy/src/main.rs index 2839bbded1a5f..cabaee5d06005 100644 --- a/src/tools/tidy/src/main.rs +++ b/src/tools/tidy/src/main.rs @@ -36,6 +36,7 @@ mod errors; mod features; mod cargo; mod cargo_lock; +mod pal; fn main() { let path = env::args_os().skip(1).next().expect("need an argument"); @@ -48,6 +49,7 @@ fn main() { cargo::check(&path, &mut bad); features::check(&path, &mut bad); cargo_lock::check(&path, &mut bad); + pal::check(&path, &mut bad); if bad { panic!("some tidy checks failed"); diff --git a/src/tools/tidy/src/pal.rs b/src/tools/tidy/src/pal.rs new file mode 100644 index 0000000000000..1d04e8fc8eb79 --- /dev/null +++ b/src/tools/tidy/src/pal.rs @@ -0,0 +1,230 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Tidy check to enforce rules about platform-specific code in std +//! +//! This is intended to maintain existing standards of code +//! organization in hopes that the standard library will continue to +//! be refactored to isolate platform-specific bits, making porting +//! easier; where "standard library" roughly means "all the +//! dependencies of the std and test crates". +//! +//! This generally means placing restrictions on where `cfg(unix)`, +//! `cfg(windows)`, `cfg(target_os)` and `cfg(target_env)` may appear, +//! the basic objective being to isolate platform-specific code to the +//! platform-specific `std::sys` modules, and to the allocation, +//! unwinding, and libc crates. +//! +//! Following are the basic rules, though there are currently +//! exceptions: +//! +//! - core may not have platform-specific code +//! - liballoc_system may have platform-specific code +//! - liballoc_jemalloc may have platform-specific code +//! - libpanic_abort may have platform-specific code +//! - libpanic_unwind may have platform-specific code +//! - libunwind may have platform-specific code +//! - other crates in the std facade may not +//! - std may have platform-specific code in the following places +//! - sys/unix/ +//! - sys/windows/ +//! - os/ +//! +//! `std/sys_common` should _not_ contain platform-specific code. +//! Finally, because std contains tests with platform-specific +//! `ignore` attributes, once the parser encounters `mod tests`, +//! platform-specific cfgs are allowed. 
Not sure yet how to deal with +//! this in the long term. + +use std::fs::File; +use std::io::Read; +use std::path::Path; +use std::iter::Iterator; + +// Paths that may contain platform-specific code +const EXCEPTION_PATHS: &'static [&'static str] = &[ + // std crates + "src/liballoc_jemalloc", + "src/liballoc_system", + "src/liblibc", + "src/libpanic_abort", + "src/libpanic_unwind", + "src/libunwind", + "src/libstd/sys/unix", // This is where platform-specific code for std should live + "src/libstd/sys/windows", // Ditto + "src/libstd/os", // Platform-specific public interfaces + "src/rtstartup", // Not sure what to do about this. magic stuff for mingw + + // temporary exceptions + "src/libstd/lib.rs", // This could probably be done within the sys directory + "src/libstd/rtdeps.rs", // Until rustbuild replaces make + "src/libstd/path.rs", + "src/libstd/io/stdio.rs", + "src/libstd/num/f32.rs", + "src/libstd/num/f64.rs", + "src/libstd/thread/local.rs", + "src/libstd/sys/common/mod.rs", + "src/libstd/sys/common/net.rs", + "src/libstd/sys/common/util.rs", + "src/libterm", // Not sure how to make this crate portable, but test needs it + "src/libtest", // Probably should defer to unstable std::sys APIs + + // std testing crates, ok for now at least + "src/libcoretest", + + // non-std crates + "src/test", + "src/tools", + "src/librustc", + "src/librustdoc", + "src/libsyntax", + "src/bootstrap", +]; + +pub fn check(path: &Path, bad: &mut bool) { + let ref mut contents = String::new(); + // Sanity check that the complex parsing here works + let ref mut saw_target_arch = false; + let ref mut saw_cfg_bang = false; + super::walk(path, &mut super::filter_dirs, &mut |file| { + let filestr = file.to_string_lossy().replace("\\", "/"); + if !filestr.ends_with(".rs") { return } + + let is_exception_path = EXCEPTION_PATHS.iter().any(|s| filestr.contains(&**s)); + if is_exception_path { return } + + check_cfgs(contents, &file, bad, saw_target_arch, saw_cfg_bang); + }); + + assert!(*saw_target_arch); + assert!(*saw_cfg_bang); +} + +fn check_cfgs(contents: &mut String, file: &Path, + bad: &mut bool, saw_target_arch: &mut bool, saw_cfg_bang: &mut bool) { + contents.truncate(0); + t!(t!(File::open(file), file).read_to_string(contents)); + + // For now it's ok to have platform-specific code after 'mod tests'. 
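+    // `find_test_mod` returns the cut-off offset (the end of the file when
+    // there is no `mod tests`), and `parse_cfgs` yields `(byte_index, cfg_text)`
+    // pairs for every balanced `cfg(...)` / `cfg!(...)` occurrence in that range.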
+ let mod_tests_idx = find_test_mod(contents); + let contents = &contents[..mod_tests_idx]; + // Pull out all "cfg(...)" and "cfg!(...)" strings + let cfgs = parse_cfgs(contents); + + let mut line_numbers: Option<Vec<usize>> = None; + let mut err = |idx: usize, cfg: &str| { + if line_numbers.is_none() { + line_numbers = Some(contents.match_indices('\n').map(|(i, _)| i).collect()); + } + let line_numbers = line_numbers.as_ref().expect(""); + let line = match line_numbers.binary_search(&idx) { + Ok(_) => unreachable!(), + Err(i) => i + 1 + }; + println!("{}:{}: platform-specific cfg: {}", file.display(), line, cfg); + *bad = true; + }; + + for (idx, cfg) in cfgs.into_iter() { + // Sanity check that the parsing here works + if !*saw_target_arch && cfg.contains("target_arch") { *saw_target_arch = true } + if !*saw_cfg_bang && cfg.contains("cfg!") { *saw_cfg_bang = true } + + let contains_platform_specific_cfg = + cfg.contains("target_os") + || cfg.contains("target_env") + || cfg.contains("target_vendor") + || cfg.contains("unix") + || cfg.contains("windows"); + + if !contains_platform_specific_cfg { continue } + + let preceded_by_doc_comment = { + let pre_contents = &contents[..idx]; + let pre_newline = pre_contents.rfind('\n'); + let pre_doc_comment = pre_contents.rfind("///"); + match (pre_newline, pre_doc_comment) { + (Some(n), Some(c)) => n < c, + (None, Some(_)) => true, + (_, None) => false, + } + }; + + if preceded_by_doc_comment { continue } + + err(idx, cfg); + } +} + +fn find_test_mod(contents: &str) -> usize { + if let Some(mod_tests_idx) = contents.find("mod tests") { + // Also capture a previous line indicating "mod tests" in cfg-ed out code + let prev_newline_idx = contents[..mod_tests_idx].rfind('\n').unwrap_or(mod_tests_idx); + let prev_newline_idx = contents[..prev_newline_idx].rfind('\n'); + if let Some(nl) = prev_newline_idx { + let prev_line = &contents[nl + 1 .. mod_tests_idx]; + let emcc_cfg = "cfg(all(test, not(target_os"; + if prev_line.contains(emcc_cfg) { + nl + } else { + mod_tests_idx + } + } else { + mod_tests_idx + } + } else { + contents.len() + } +} + +fn parse_cfgs<'a>(contents: &'a str) -> Vec<(usize, &'a str)> { + let candidate_cfgs = contents.match_indices("cfg"); + let candidate_cfg_idxs = candidate_cfgs.map(|(i, _)| i); + // This is pulling out the indexes of all "cfg" strings + // that appear to be tokens succeeded by a paren. + let cfgs = candidate_cfg_idxs.filter(|i| { + let pre_idx = i.saturating_sub(1); + let succeeds_non_ident = !contents.as_bytes().get(pre_idx) + .cloned() + .map(char::from) + .map(char::is_alphanumeric) + .unwrap_or(false); + let contents_after = &contents[*i..]; + let first_paren = contents_after.find('('); + let paren_idx = first_paren.map(|ip| i + ip); + let precedes_whitespace_and_paren = paren_idx.map(|ip| { + let maybe_space = &contents[*i + "cfg".len() .. ip]; + maybe_space.chars().all(|c| char::is_whitespace(c) || c == '!') + }).unwrap_or(false); + + succeeds_non_ident && precedes_whitespace_and_paren + }); + + cfgs.map(|i| { + let mut depth = 0; + let contents_from = &contents[i..]; + for (j, byte) in contents_from.bytes().enumerate() { + match byte { + b'(' => { + depth += 1; + } + b')' => { + depth -= 1; + if depth == 0 { + return (i, &contents_from[.. j + 1]); + } + } + _ => { } + } + } + + unreachable!() + }).collect() +}
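For illustration, a minimal sketch of a compile-fail test that uses the new `must-compile-successfully` directive handled above (the test body, lint, and warning text are hypothetical, not taken from this patch): the header comment is recognized by `parse_must_compile_successfully` via `parse_name_directive`, so `run_cfail_test` accepts a successful build and goes on to check the annotated diagnostics instead of requiring a failure exit status.

    // must-compile-successfully

    // The build is expected to succeed, but the annotated warning must still
    // be emitted (`unused_variables` is warn-by-default).
    fn main() {
        let unused = 42; //~ WARNING unused variable
    }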