Auto merge of rust-lang#103316 - cuviper:beta-next, r=cuviper
[beta] backports

- Use rebind instead of dummy binder in `SameTypeModuloInfer` relation rust-lang#102059
- Add missing space between notable trait tooltip and where clause rust-lang#102107
- Avoid repeated re-initialization of the BufReader buffer rust-lang#102760
- Ensure enum cast moves rust-lang#103016
- Fix `TyKind::is_simple_path` rust-lang#103176
- Do anonymous lifetimes remapping correctly for nested rpits rust-lang#103205
- [beta] Cargo backport 1.65.0 rust-lang#103303
- linker: Fix weak lang item linking with combination windows-gnu + LLD + LTO rust-lang#103092

r? `@ghost`
bors committed Oct 21, 2022
2 parents da7ffa2 + 731bbc8 commit a317055
Showing 28 changed files with 287 additions and 63 deletions.
2 changes: 1 addition & 1 deletion Cargo.lock
@@ -435,7 +435,7 @@ dependencies = [

[[package]]
name = "cargo-util"
version = "0.2.1"
version = "0.2.2"
dependencies = [
"anyhow",
"core-foundation",
7 changes: 5 additions & 2 deletions compiler/rustc_ast/src/ast.rs
@@ -2060,8 +2060,11 @@ impl TyKind {
}

pub fn is_simple_path(&self) -> Option<Symbol> {
if let TyKind::Path(None, Path { segments, .. }) = &self && segments.len() == 1 {
Some(segments[0].ident.name)
if let TyKind::Path(None, Path { segments, .. }) = &self
&& let [segment] = &segments[..]
&& segment.args.is_none()
{
Some(segment.ident.name)
} else {
None
}
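The fix is visible above: a path only counts as "simple" when it has exactly one segment *and* that segment carries no generic arguments. As a standalone illustration (a sketch with made-up types, not rustc's AST), the check behaves like this:

```rust
// Minimal model of the fixed check; `Segment` is a made-up stand-in for rustc's
// `PathSegment`, not the real type.
struct Segment {
    name: String,
    has_args: bool, // stands in for `segment.args.is_none()` being false
}

fn is_simple_path(segments: &[Segment]) -> Option<String> {
    match segments {
        // Exactly one segment, and no generic arguments on it.
        [segment] if !segment.has_args => Some(segment.name.clone()),
        _ => None,
    }
}

fn main() {
    let plain = [Segment { name: "u8".into(), has_args: false }];
    let generic = [Segment { name: "Vec".into(), has_args: true }];
    assert!(is_simple_path(&plain).is_some());
    assert!(is_simple_path(&generic).is_none()); // before the fix, a path like `Vec<u8>` still matched
}
```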
43 changes: 23 additions & 20 deletions compiler/rustc_ast_lowering/src/lib.rs
@@ -499,6 +499,17 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
start
}

/// Given the id of some node in the AST, finds the `LocalDefId` associated with it by the name
/// resolver (if any).
fn orig_opt_local_def_id(&self, node: NodeId) -> Option<LocalDefId> {
self.resolver.node_id_to_def_id.get(&node).map(|local_def_id| *local_def_id)
}

fn orig_local_def_id(&self, node: NodeId) -> LocalDefId {
self.orig_opt_local_def_id(node)
.unwrap_or_else(|| panic!("no entry for node id: `{:?}`", node))
}

/// Given the id of some node in the AST, finds the `LocalDefId` associated with it by the name
/// resolver (if any), after applying any remapping from `get_remapped_def_id`.
///
@@ -513,10 +524,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
/// we would create an opaque type `type FooReturn<'a1> = impl Debug + 'a1`.
/// When lowering the `Debug + 'a` bounds, we add a remapping to map `'a` to `'a1`.
fn opt_local_def_id(&self, node: NodeId) -> Option<LocalDefId> {
self.resolver
.node_id_to_def_id
.get(&node)
.map(|local_def_id| self.get_remapped_def_id(*local_def_id))
self.orig_opt_local_def_id(node).map(|local_def_id| self.get_remapped_def_id(local_def_id))
}

fn local_def_id(&self, node: NodeId) -> LocalDefId {
@@ -525,9 +533,9 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {

/// Get the previously recorded `to` local def id given the `from` local def id, obtained using
/// `generics_def_id_map` field.
fn get_remapped_def_id(&self, mut local_def_id: LocalDefId) -> LocalDefId {
fn get_remapped_def_id(&self, local_def_id: LocalDefId) -> LocalDefId {
// `generics_def_id_map` is a stack of mappings. As we go deeper in impl traits nesting we
// push new mappings so we need to try first the latest mappings, hence `iter().rev()`.
// push new mappings, so we first need to get the latest (innermost) mappings, hence `iter().rev()`.
//
// Consider:
//
@@ -537,18 +545,13 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
//
// `[[fn#'b -> impl_trait#'b], [fn#'b -> impl_sized#'b]]`
//
// for the opaque type generated on `impl Sized + 'b`, We want the result to be:
// impl_sized#'b, so iterating forward is the wrong thing to do.
for map in self.generics_def_id_map.iter().rev() {
if let Some(r) = map.get(&local_def_id) {
debug!("def_id_remapper: remapping from `{local_def_id:?}` to `{r:?}`");
local_def_id = *r;
} else {
debug!("def_id_remapper: no remapping for `{local_def_id:?}` found in map");
}
}

local_def_id
// for the opaque type generated on `impl Sized + 'b`, we want the result to be: impl_sized#'b.
// So, if we searched from the start (outermost) first, we would get the wrong result, impl_trait#'b.
self.generics_def_id_map
.iter()
.rev()
.find_map(|map| map.get(&local_def_id).map(|local_def_id| *local_def_id))
.unwrap_or(local_def_id)
}

/// Freshen the `LoweringContext` and ready it to lower a nested item.
@@ -1607,7 +1610,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {

LifetimeRes::Fresh { param, binder: _ } => {
debug_assert_eq!(lifetime.ident.name, kw::UnderscoreLifetime);
if let Some(old_def_id) = self.opt_local_def_id(param) && remapping.get(&old_def_id).is_none() {
if let Some(old_def_id) = self.orig_opt_local_def_id(param) && remapping.get(&old_def_id).is_none() {
let node_id = self.next_node_id();

let new_def_id = self.create_def(
@@ -1876,7 +1879,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
let extra_lifetime_params = self.resolver.take_extra_lifetime_params(opaque_ty_node_id);
debug!(?extra_lifetime_params);
for (ident, outer_node_id, outer_res) in extra_lifetime_params {
let outer_def_id = self.local_def_id(outer_node_id);
let outer_def_id = self.orig_local_def_id(outer_node_id);
let inner_node_id = self.next_node_id();

// Add a definition for the in scope lifetime def.
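The replacement collapses the old mutate-in-a-loop version into a single reverse search: the innermost map containing the id wins, and ids with no mapping pass through unchanged. A standalone sketch of that lookup (plain integers instead of `LocalDefId`, so this is an illustration rather than the real code):

```rust
use std::collections::HashMap;

// Innermost-first lookup over a stack of remappings, mirroring the new
// `get_remapped_def_id`. Plain u32 ids stand in for `LocalDefId`.
fn remap(stack: &[HashMap<u32, u32>], id: u32) -> u32 {
    stack
        .iter()
        .rev() // later (inner) maps shadow earlier (outer) ones
        .find_map(|map| map.get(&id).copied())
        .unwrap_or(id)
}

fn main() {
    // Like the nested-impl-trait example in the comment: the outer map sends the
    // lifetime to the outer opaque type, the inner map to the inner one.
    let stack = vec![
        HashMap::from([(1u32, 10u32)]), // outer: fn lifetime -> impl_trait lifetime
        HashMap::from([(1, 20)]),       // inner: fn lifetime -> impl_sized lifetime
    ];
    assert_eq!(remap(&stack, 1), 20); // the innermost mapping wins
    assert_eq!(remap(&stack, 2), 2);  // unmapped ids are returned unchanged
}
```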
2 changes: 1 addition & 1 deletion compiler/rustc_codegen_ssa/src/back/link.rs
@@ -2698,7 +2698,7 @@ fn relevant_lib(sess: &Session, lib: &NativeLib) -> bool {
}
}

fn are_upstream_rust_objects_already_included(sess: &Session) -> bool {
pub(crate) fn are_upstream_rust_objects_already_included(sess: &Session) -> bool {
match sess.lto() {
config::Lto::Fat => true,
config::Lto::Thin => {
11 changes: 8 additions & 3 deletions compiler/rustc_codegen_ssa/src/base.rs
@@ -1,3 +1,4 @@
use crate::back::link::are_upstream_rust_objects_already_included;
use crate::back::metadata::create_compressed_metadata_file;
use crate::back::write::{
compute_per_cgu_lto_type, start_async_codegen, submit_codegened_module_to_llvm,
@@ -854,10 +855,14 @@ impl CrateInfo {

// Handle circular dependencies in the standard library.
// See comment before `add_linked_symbol_object` function for the details.
// With msvc-like linkers it's both unnecessary (they support circular dependencies),
// and causes linking issues (when weak lang item symbols are "privatized" by LTO).
// If global LTO is enabled then almost everything (*) is glued into a single object file,
// so this logic is not necessary and can cause issues on some targets (due to weak lang
// item symbols being "privatized" to that object file), so we disable it.
// (*) Native libs, and `#[compiler_builtins]` and `#[no_builtins]` crates are not glued,
// and we assume that they cannot define weak lang items. This is not currently enforced
// by the compiler, but that's ok because all this stuff is unstable anyway.
let target = &tcx.sess.target;
if !target.is_like_msvc {
if !are_upstream_rust_objects_already_included(tcx.sess) {
let missing_weak_lang_items: FxHashSet<&Symbol> = info
.used_crates
.iter()
2 changes: 1 addition & 1 deletion compiler/rustc_infer/src/infer/error_reporting/mod.rs
@@ -2765,7 +2765,7 @@ impl<'tcx> TypeRelation<'tcx> for SameTypeModuloInfer<'_, 'tcx> {
where
T: relate::Relate<'tcx>,
{
Ok(ty::Binder::dummy(self.relate(a.skip_binder(), b.skip_binder())?))
Ok(a.rebind(self.relate(a.skip_binder(), b.skip_binder())?))
}

fn consts(
4 changes: 2 additions & 2 deletions compiler/rustc_mir_build/src/build/expr/as_rvalue.rs
@@ -197,13 +197,13 @@ impl<'a, 'tcx> Builder<'a, 'tcx> {
// create all the steps directly in MIR with operations all backends need to support anyway.
let (source, ty) = if let ty::Adt(adt_def, ..) = source.ty.kind() && adt_def.is_enum() {
let discr_ty = adt_def.repr().discr_type().to_ty(this.tcx);
let place = unpack!(block = this.as_place(block, source));
let temp = unpack!(block = this.as_temp(block, scope, source, Mutability::Not));
let discr = this.temp(discr_ty, source.span);
this.cfg.push_assign(
block,
source_info,
discr,
Rvalue::Discriminant(place),
Rvalue::Discriminant(temp.into()),
);

(Operand::Move(discr), discr_ty)
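For orientation, the updated `enum_cast` MIR dumps below correspond roughly to functions of the following shape (a reconstruction from the dumps; the actual `src/test/mir-opt/enum_cast.rs` also covers enums with `Drop` impls and non-default discriminant types):

```rust
// Rough reconstruction of the kind of code exercised by the enum_cast MIR tests.
enum Foo {
    A,
}

fn foo(foo: Foo) -> usize {
    // After this change the cast no longer reads the discriminant of `foo` in place:
    // the operand is first moved into a temporary (`_2 = move _1` in the MIR below),
    // the discriminant is read from that temporary, and the temporary's storage is
    // released at the end of the cast expression.
    foo as usize
}

fn main() {
    assert_eq!(foo(Foo::A), 0);
}
```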
8 changes: 8 additions & 0 deletions library/std/src/io/buffered/bufreader.rs
@@ -224,6 +224,14 @@ impl<R> BufReader<R> {
}
}

// This is only used by a test which asserts that the initialization-tracking is correct.
#[cfg(test)]
impl<R> BufReader<R> {
pub fn initialized(&self) -> usize {
self.buf.initialized()
}
}

impl<R: Seek> BufReader<R> {
/// Seeks relative to the current position. If the new position lies within the buffer,
/// the buffer will not be flushed, allowing for more efficient seeks.
19 changes: 16 additions & 3 deletions library/std/src/io/buffered/bufreader/buffer.rs
@@ -20,13 +20,19 @@ pub struct Buffer {
// Each call to `fill_buf` sets `filled` to indicate how many bytes at the start of `buf` are
// initialized with bytes from a read.
filled: usize,
// This is the max number of bytes returned across all `fill_buf` calls. We track this so that we
// can accurately tell `read_buf` how many bytes of buf are initialized, to bypass as much of its
// defensive initialization as possible. Note that while this is often the same as `filled`, it
// doesn't need to be. Calls to `fill_buf` are not required to actually fill the buffer, and
// omitting this is a huge perf regression for `Read` impls that do not.
initialized: usize,
}

impl Buffer {
#[inline]
pub fn with_capacity(capacity: usize) -> Self {
let buf = Box::new_uninit_slice(capacity);
Self { buf, pos: 0, filled: 0 }
Self { buf, pos: 0, filled: 0, initialized: 0 }
}

#[inline]
@@ -51,6 +57,12 @@ impl Buffer {
self.pos
}

// This is only used by a test which asserts that the initialization-tracking is correct.
#[cfg(test)]
pub fn initialized(&self) -> usize {
self.initialized
}

#[inline]
pub fn discard_buffer(&mut self) {
self.pos = 0;
@@ -96,13 +108,14 @@ impl Buffer {
let mut buf = BorrowedBuf::from(&mut *self.buf);
// SAFETY: `self.filled` bytes will always have been initialized.
unsafe {
buf.set_init(self.filled);
buf.set_init(self.initialized);
}

reader.read_buf(buf.unfilled())?;

self.filled = buf.len();
self.pos = 0;
self.filled = buf.len();
self.initialized = buf.init_len();
}
Ok(self.buffer())
}
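The effect of the new `initialized` field can be pictured with a standalone sketch (made-up names and types; the real code works with `Box<[MaybeUninit<u8>]>` and `BorrowedBuf` rather than a `Vec<u8>`): it is a high-water mark that never shrinks, so the amount of defensive zeroing needed can only go down over the reader's lifetime, even when individual reads are short.

```rust
// Standalone sketch of the high-water-mark idea; not the real std implementation.
struct TrackedBuf {
    buf: Vec<u8>,       // stands in for Box<[MaybeUninit<u8>]>
    filled: usize,      // bytes valid after the most recent fill
    initialized: usize, // bytes ever written; never decreases
}

impl TrackedBuf {
    fn fill(&mut self, read: impl FnOnce(&mut [u8]) -> usize) -> &[u8] {
        // The real code passes `initialized` to `BorrowedBuf::set_init`, so only
        // `buf[initialized..]` could ever need zeroing before handing it to a reader.
        let n = read(self.buf.as_mut_slice());
        self.filled = n;
        self.initialized = self.initialized.max(n);
        &self.buf[..self.filled]
    }
}

fn main() {
    let mut b = TrackedBuf { buf: vec![0; 8], filled: 0, initialized: 0 };
    b.fill(|dst| { dst[..3].copy_from_slice(b"abc"); 3 });
    b.fill(|dst| { dst[..1].copy_from_slice(b"x"); 1 });
    // `filled` tracks the latest read; `initialized` keeps the running maximum.
    assert_eq!((b.filled, b.initialized), (1, 3));
}
```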
24 changes: 24 additions & 0 deletions library/std/src/io/buffered/tests.rs
@@ -1039,3 +1039,27 @@ fn single_formatted_write() {
writeln!(&mut writer, "{}, {}!", "hello", "world").unwrap();
assert_eq!(writer.get_ref().events, [RecordedEvent::Write("hello, world!\n".to_string())]);
}

#[test]
fn bufreader_full_initialize() {
struct OneByteReader;
impl Read for OneByteReader {
fn read(&mut self, buf: &mut [u8]) -> crate::io::Result<usize> {
if buf.len() > 0 {
buf[0] = 0;
Ok(1)
} else {
Ok(0)
}
}
}
let mut reader = BufReader::new(OneByteReader);
// Nothing is initialized yet.
assert_eq!(reader.initialized(), 0);

let buf = reader.fill_buf().unwrap();
// We read one byte...
assert_eq!(buf.len(), 1);
// But we initialized the whole buffer!
assert_eq!(reader.initialized(), reader.capacity());
}
2 changes: 1 addition & 1 deletion src/librustdoc/html/format.rs
@@ -371,7 +371,7 @@ pub(crate) fn print_where_clause<'a, 'tcx: 'a>(
format!("<br><span class=\"where\">where{where_preds}</span>")
} else {
let mut clause = br_with_padding;
clause.truncate(clause.len() - 5 * "&nbsp;".len());
clause.truncate(clause.len() - 4 * "&nbsp;".len());
write!(clause, "<span class=\"where\">where{where_preds}</span>")?;
clause
}
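The one-character change above works because each unit of padding is the six-byte entity `&nbsp;`: truncating four of them instead of five leaves one extra non-breaking space at the end of the padding, which becomes the missing space in front of the `where` clause. A small hypothetical check of the arithmetic (the actual amount of padding in `br_with_padding` depends on the indent, so the string here is made up):

```rust
fn main() {
    // Hypothetical padding string: "<br>" followed by six "&nbsp;" entities.
    let br_with_padding = String::from("<br>") + &"&nbsp;".repeat(6);
    let mut clause = br_with_padding;
    // Truncating 4 entities instead of 5 keeps one more "&nbsp;" before the
    // `<span class="where">where…</span>` that gets appended next.
    clause.truncate(clause.len() - 4 * "&nbsp;".len());
    assert_eq!(clause, "<br>&nbsp;&nbsp;");
}
```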
10 changes: 7 additions & 3 deletions src/test/mir-opt/enum_cast.bar.mir_map.0.mir
@@ -3,11 +3,15 @@
fn bar(_1: Bar) -> usize {
debug bar => _1; // in scope 0 at $DIR/enum_cast.rs:+0:8: +0:11
let mut _0: usize; // return place in scope 0 at $DIR/enum_cast.rs:+0:21: +0:26
let mut _2: isize; // in scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
let _2: Bar; // in scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
let mut _3: isize; // in scope 0 at $DIR/enum_cast.rs:+1:5: +1:8

bb0: {
_2 = discriminant(_1); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
_0 = move _2 as usize (Misc); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
StorageLive(_2); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
_2 = move _1; // scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
_3 = discriminant(_2); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
_0 = move _3 as usize (Misc); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
StorageDead(_2); // scope 0 at $DIR/enum_cast.rs:+1:16: +1:17
return; // scope 0 at $DIR/enum_cast.rs:+2:2: +2:2
}
}
10 changes: 7 additions & 3 deletions src/test/mir-opt/enum_cast.boo.mir_map.0.mir
@@ -3,11 +3,15 @@
fn boo(_1: Boo) -> usize {
debug boo => _1; // in scope 0 at $DIR/enum_cast.rs:+0:8: +0:11
let mut _0: usize; // return place in scope 0 at $DIR/enum_cast.rs:+0:21: +0:26
let mut _2: u8; // in scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
let _2: Boo; // in scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
let mut _3: u8; // in scope 0 at $DIR/enum_cast.rs:+1:5: +1:8

bb0: {
_2 = discriminant(_1); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
_0 = move _2 as usize (Misc); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
StorageLive(_2); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
_2 = move _1; // scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
_3 = discriminant(_2); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
_0 = move _3 as usize (Misc); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
StorageDead(_2); // scope 0 at $DIR/enum_cast.rs:+1:16: +1:17
return; // scope 0 at $DIR/enum_cast.rs:+2:2: +2:2
}
}
40 changes: 26 additions & 14 deletions src/test/mir-opt/enum_cast.droppy.mir_map.0.mir
@@ -4,8 +4,9 @@ fn droppy() -> () {
let mut _0: (); // return place in scope 0 at $DIR/enum_cast.rs:+0:13: +0:13
let _1: (); // in scope 0 at $DIR/enum_cast.rs:+1:5: +6:6
let _2: Droppy; // in scope 0 at $DIR/enum_cast.rs:+2:13: +2:14
let mut _4: isize; // in scope 0 at $DIR/enum_cast.rs:+5:17: +5:18
let _5: Droppy; // in scope 0 at $DIR/enum_cast.rs:+7:9: +7:10
let _4: Droppy; // in scope 0 at $DIR/enum_cast.rs:+5:17: +5:18
let mut _5: isize; // in scope 0 at $DIR/enum_cast.rs:+5:17: +5:18
let _6: Droppy; // in scope 0 at $DIR/enum_cast.rs:+7:9: +7:10
scope 1 {
debug x => _2; // in scope 1 at $DIR/enum_cast.rs:+2:13: +2:14
scope 2 {
@@ -16,7 +17,7 @@
}
}
scope 4 {
debug z => _5; // in scope 4 at $DIR/enum_cast.rs:+7:9: +7:10
debug z => _6; // in scope 4 at $DIR/enum_cast.rs:+7:9: +7:10
}

bb0: {
@@ -25,30 +26,41 @@
_2 = Droppy::C; // scope 0 at $DIR/enum_cast.rs:+2:17: +2:26
FakeRead(ForLet(None), _2); // scope 0 at $DIR/enum_cast.rs:+2:13: +2:14
StorageLive(_3); // scope 3 at $DIR/enum_cast.rs:+5:13: +5:14
_4 = discriminant(_2); // scope 3 at $DIR/enum_cast.rs:+5:17: +5:27
_3 = move _4 as usize (Misc); // scope 3 at $DIR/enum_cast.rs:+5:17: +5:27
StorageLive(_4); // scope 3 at $DIR/enum_cast.rs:+5:17: +5:18
_4 = move _2; // scope 3 at $DIR/enum_cast.rs:+5:17: +5:18
_5 = discriminant(_4); // scope 3 at $DIR/enum_cast.rs:+5:17: +5:27
_3 = move _5 as usize (Misc); // scope 3 at $DIR/enum_cast.rs:+5:17: +5:27
drop(_4) -> [return: bb1, unwind: bb4]; // scope 3 at $DIR/enum_cast.rs:+5:26: +5:27
}

bb1: {
StorageDead(_4); // scope 3 at $DIR/enum_cast.rs:+5:26: +5:27
FakeRead(ForLet(None), _3); // scope 3 at $DIR/enum_cast.rs:+5:13: +5:14
_1 = const (); // scope 0 at $DIR/enum_cast.rs:+1:5: +6:6
StorageDead(_3); // scope 1 at $DIR/enum_cast.rs:+6:5: +6:6
drop(_2) -> [return: bb1, unwind: bb3]; // scope 0 at $DIR/enum_cast.rs:+6:5: +6:6
drop(_2) -> [return: bb2, unwind: bb5]; // scope 0 at $DIR/enum_cast.rs:+6:5: +6:6
}

bb1: {
bb2: {
StorageDead(_2); // scope 0 at $DIR/enum_cast.rs:+6:5: +6:6
StorageDead(_1); // scope 0 at $DIR/enum_cast.rs:+6:5: +6:6
StorageLive(_5); // scope 0 at $DIR/enum_cast.rs:+7:9: +7:10
_5 = Droppy::B; // scope 0 at $DIR/enum_cast.rs:+7:13: +7:22
FakeRead(ForLet(None), _5); // scope 0 at $DIR/enum_cast.rs:+7:9: +7:10
StorageLive(_6); // scope 0 at $DIR/enum_cast.rs:+7:9: +7:10
_6 = Droppy::B; // scope 0 at $DIR/enum_cast.rs:+7:13: +7:22
FakeRead(ForLet(None), _6); // scope 0 at $DIR/enum_cast.rs:+7:9: +7:10
_0 = const (); // scope 0 at $DIR/enum_cast.rs:+0:13: +8:2
drop(_5) -> [return: bb2, unwind: bb3]; // scope 0 at $DIR/enum_cast.rs:+8:1: +8:2
drop(_6) -> [return: bb3, unwind: bb5]; // scope 0 at $DIR/enum_cast.rs:+8:1: +8:2
}

bb2: {
StorageDead(_5); // scope 0 at $DIR/enum_cast.rs:+8:1: +8:2
bb3: {
StorageDead(_6); // scope 0 at $DIR/enum_cast.rs:+8:1: +8:2
return; // scope 0 at $DIR/enum_cast.rs:+8:2: +8:2
}

bb3 (cleanup): {
bb4 (cleanup): {
drop(_2) -> bb5; // scope 0 at $DIR/enum_cast.rs:+6:5: +6:6
}

bb5 (cleanup): {
resume; // scope 0 at $DIR/enum_cast.rs:+0:1: +8:2
}
}
10 changes: 7 additions & 3 deletions src/test/mir-opt/enum_cast.foo.mir_map.0.mir
@@ -3,11 +3,15 @@
fn foo(_1: Foo) -> usize {
debug foo => _1; // in scope 0 at $DIR/enum_cast.rs:+0:8: +0:11
let mut _0: usize; // return place in scope 0 at $DIR/enum_cast.rs:+0:21: +0:26
let mut _2: isize; // in scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
let _2: Foo; // in scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
let mut _3: isize; // in scope 0 at $DIR/enum_cast.rs:+1:5: +1:8

bb0: {
_2 = discriminant(_1); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
_0 = move _2 as usize (Misc); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
StorageLive(_2); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
_2 = move _1; // scope 0 at $DIR/enum_cast.rs:+1:5: +1:8
_3 = discriminant(_2); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
_0 = move _3 as usize (Misc); // scope 0 at $DIR/enum_cast.rs:+1:5: +1:17
StorageDead(_2); // scope 0 at $DIR/enum_cast.rs:+1:16: +1:17
return; // scope 0 at $DIR/enum_cast.rs:+2:2: +2:2
}
}