Rollup of 8 pull requests #78176

Closed
wants to merge 29 commits into from
Changes from all commits
Commits
29 commits
64839ee
Add Pin::new_static.
m-ou-se Oct 8, 2020
390883e
Make Pin::new_static const.
m-ou-se Oct 8, 2020
104c0f0
Rename Pin::new_static to Pin::static_ref.
m-ou-se Oct 12, 2020
2c71f68
Add Pin::static_mut.
m-ou-se Oct 12, 2020
f83446b
Reword safety guarantee of Pin::static_{ref,mut}.
m-ou-se Oct 13, 2020
df95dce
Add missing `mut`.
m-ou-se Oct 15, 2020
7b652d3
Haiku: explicitly set CMAKE_SYSTEM_NAME when cross-compiling
nielx Sep 19, 2020
cc0b718
Mark inout asm! operands as used in liveness pass
oliviacrain Oct 15, 2020
fd193f2
Treat InOut variables like other input variables
oliviacrain Oct 17, 2020
7b33ae6
Improve wording of "cannot multiply" type error
camelid Oct 18, 2020
003516f
BTreeMap: split off most code of remove and split_off
ssomers Oct 17, 2020
48060f1
rustdoc: Show the correct source filename, without `.html`
camelid Oct 19, 2020
17c6c59
Mark InOut operands as used in RWU table with write_place
oliviacrain Oct 19, 2020
8f0bced
Refactor liveness-issue-77915 to liveness-asm and improve tests
oliviacrain Oct 19, 2020
cb33f95
remove what seems to be an outdated comment
RalfJung Oct 19, 2020
c1766c6
fix static_ptr_ty for foreign statics, and more comments in check_uns…
RalfJung Oct 19, 2020
153e843
fix Rvalue::ty for ThreadLocalRef
RalfJung Oct 19, 2020
9dd0bb6
Do not print braces again, print_anon_const already does it
spastorino Oct 19, 2020
d641cb8
Allow NtBlock to parse on check inline const next token
spastorino Oct 19, 2020
dcd2d91
Add inline const macro test
spastorino Oct 19, 2020
243c8e9
Apply some review suggestions
camelid Oct 20, 2020
154acf0
Rollup merge of #77726 - fusion-engineering-forks:static-pin, r=dtolnay
JohnTitor Oct 21, 2020
983299f
Rollup merge of #77976 - oliviacrain:issue-77915-fix, r=matthewjasper
JohnTitor Oct 21, 2020
ec025ba
Rollup merge of #78009 - nielx:fix/CMAKE_SYSTEM_NAME, r=Mark-Simulacrum
JohnTitor Oct 21, 2020
1e9e561
Rollup merge of #78056 - ssomers:btree_chop_up_1, r=dtolnay
JohnTitor Oct 21, 2020
76da0e2
Rollup merge of #78063 - camelid:improve-cannot-multiply-error, r=est…
JohnTitor Oct 21, 2020
0d3f068
Rollup merge of #78094 - camelid:rustdoc-fix-source-title, r=jyn514
JohnTitor Oct 21, 2020
dbb36ab
Rollup merge of #78101 - RalfJung:foreign-static, r=oli-obk
JohnTitor Oct 21, 2020
34c2223
Rollup merge of #78118 - spastorino:inline-const-followups, r=petroch…
JohnTitor Oct 21, 2020
2 changes: 0 additions & 2 deletions compiler/rustc_hir_pretty/src/lib.rs
@@ -1138,9 +1138,7 @@ impl<'a> State<'a> {
fn print_expr_anon_const(&mut self, anon_const: &hir::AnonConst) {
self.ibox(INDENT_UNIT);
self.s.word_space("const");
self.s.word("{");
self.print_anon_const(anon_const);
self.s.word("}");
self.end()
}

9 changes: 3 additions & 6 deletions compiler/rustc_middle/src/mir/mod.rs
@@ -821,9 +821,6 @@ pub struct LocalDecl<'tcx> {
/// flag drop flags to avoid triggering this check as they are introduced
/// after typeck.
///
/// Unsafety checking will also ignore dereferences of these locals,
/// so they can be used for raw pointers only used in a desugaring.
///
/// This should be sound because the drop flags are fully algebraic, and
/// therefore don't affect the OIBIT or outlives properties of the
/// generator.
@@ -1010,13 +1007,13 @@ impl<'tcx> LocalDecl<'tcx> {
}

/// Returns `Some` if this is a reference to a static item that is used to
/// access that static
/// access that static.
pub fn is_ref_to_static(&self) -> bool {
matches!(self.local_info, Some(box LocalInfo::StaticRef { .. }))
}

/// Returns `Some` if this is a reference to a static item that is used to
/// access that static
/// Returns `Some` if this is a reference to a thread-local static item that is used to
/// access that static.
pub fn is_ref_to_thread_local(&self) -> bool {
match self.local_info {
Some(box LocalInfo::StaticRef { is_thread_local, .. }) => is_thread_local,
8 changes: 6 additions & 2 deletions compiler/rustc_middle/src/mir/tcx.rs
@@ -152,10 +152,14 @@ impl<'tcx> Rvalue<'tcx> {
tcx.mk_ty(ty::Array(operand.ty(local_decls, tcx), count))
}
Rvalue::ThreadLocalRef(did) => {
let static_ty = tcx.type_of(did);
if tcx.is_mutable_static(did) {
tcx.mk_mut_ptr(tcx.type_of(did))
tcx.mk_mut_ptr(static_ty)
} else if tcx.is_foreign_item(did) {
tcx.mk_imm_ptr(static_ty)
} else {
tcx.mk_imm_ref(tcx.lifetimes.re_static, tcx.type_of(did))
// FIXME: These things don't *really* have 'static lifetime.
tcx.mk_imm_ref(tcx.lifetimes.re_static, static_ty)
}
}
Rvalue::Ref(reg, bk, ref place) => {
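A sketch of the case this branch types, assuming a nightly toolchain with the `thread_local` attribute feature:

```rust
#![feature(thread_local)] // nightly-only attribute, used purely for illustration

#[thread_local]
static PER_THREAD: u32 = 7;

fn main() {
    // Borrowing a `#[thread_local]` static is lowered to MIR's
    // `Rvalue::ThreadLocalRef`; as the FIXME above notes, the `'static`
    // lifetime given to the resulting reference overstates things, since
    // the value only lives as long as its thread.
    let r: &u32 = &PER_THREAD;
    assert_eq!(*r, 7);
}
```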
4 changes: 4 additions & 0 deletions compiler/rustc_middle/src/ty/util.rs
@@ -529,8 +529,12 @@ impl<'tcx> TyCtxt<'tcx> {
// Make sure that any constants in the static's type are evaluated.
let static_ty = self.normalize_erasing_regions(ty::ParamEnv::empty(), self.type_of(def_id));

// Make sure that accesses to unsafe statics end up using raw pointers.
// For thread-locals, this needs to be kept in sync with `Rvalue::ty`.
if self.is_mutable_static(def_id) {
self.mk_mut_ptr(static_ty)
} else if self.is_foreign_item(def_id) {
self.mk_imm_ptr(static_ty)
} else {
self.mk_imm_ref(self.lifetimes.re_erased, static_ty)
}
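To make the three branches concrete, a hedged sketch of the kinds of statics they distinguish (all names are invented, and the foreign symbol is assumed to be supplied by linked C code):

```rust
extern "C" {
    // Foreign static: accesses are typed internally as `*const u8`, so they
    // must happen in `unsafe` code. A real definition must come from C at link time.
    static FOREIGN: u8;
}

// Mutable static: accesses go through `*mut u32`, again requiring `unsafe`.
static mut COUNTER: u32 = 0;

// Ordinary static: accessed through a plain `&'static &str`.
static GREETING: &str = "hello";

fn main() {
    unsafe {
        let _ = FOREIGN;
        COUNTER += 1;
    }
    println!("{}", GREETING);
}
```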
3 changes: 3 additions & 0 deletions compiler/rustc_mir/src/transform/check_unsafety.rs
@@ -204,6 +204,9 @@ impl<'a, 'tcx> Visitor<'tcx> for UnsafetyChecker<'a, 'tcx> {
if let [] = proj_base {
let decl = &self.body.local_decls[place.local];
if decl.internal {
// If the projection root is an artificial local that we introduced when
// desugaring `static`, give a more specific error message
// (avoid the general "raw pointer" clause below, that would only be confusing).
if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
if self.tcx.is_mutable_static(def_id) {
self.require_unsafe(
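The specific diagnostic this aims for is the mutable-static one rather than the generic raw-pointer wording; a small sketch (the exact message text is quoted from memory):

```rust
static mut COUNTER: usize = 0;

fn main() {
    // COUNTER += 1;
    // ^ rejected outside `unsafe`: "use of mutable static is unsafe and
    //   requires unsafe function or block" (E0133), rather than the generic
    //   raw-pointer-dereference wording mentioned in the comment above.
    unsafe {
        COUNTER += 1; // fine: the access is inside an unsafe block
        println!("{}", COUNTER);
    }
}
```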
6 changes: 5 additions & 1 deletion compiler/rustc_parse/src/parser/mod.rs
@@ -548,7 +548,11 @@ impl<'a> Parser<'a> {

fn check_inline_const(&mut self) -> bool {
self.check_keyword(kw::Const)
&& self.look_ahead(1, |t| t == &token::OpenDelim(DelimToken::Brace))
&& self.look_ahead(1, |t| match t.kind {
token::Interpolated(ref nt) => matches!(**nt, token::NtBlock(..)),
token::OpenDelim(DelimToken::Brace) => true,
_ => false,
})
}

/// Checks to see if the next token is either `+` or `+=`.
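A sketch in the spirit of the macro test added later in this rollup: a block captured by a macro as `$b:block` reaches the parser as an interpolated `NtBlock` rather than a literal `{`, and should still be accepted after `const`. Assumes the unstable `inline_const` feature of that era.

```rust
#![feature(inline_const)] // unstable at the time of this rollup

macro_rules! const_block {
    ($b:block) => {
        // `$b` arrives here as an interpolated `NtBlock`; with this change
        // `check_inline_const` accepts it after the `const` keyword.
        const $b
    };
}

fn main() {
    let x = const_block!({ 2 + 2 });
    assert_eq!(x, 4);
}
```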
2 changes: 1 addition & 1 deletion compiler/rustc_passes/src/liveness.rs
@@ -1174,7 +1174,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
}
}
hir::InlineAsmOperand::InOut { expr, .. } => {
succ = self.write_place(expr, succ, ACC_READ | ACC_WRITE);
succ = self.write_place(expr, succ, ACC_READ | ACC_WRITE | ACC_USE);
}
hir::InlineAsmOperand::SplitInOut { out_expr, .. } => {
if let Some(expr) = out_expr {
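A sketch of the kind of code this affects (assumes the nightly `asm!` of late 2020 and an x86_64 target): an `inout` operand both reads and writes its place, so it must count as a use, otherwise the variable can be reported as unused or as assigned but never read.

```rust
#![feature(asm)] // the pre-stabilization asm! feature gate of that era

fn double(mut x: u64) -> u64 {
    unsafe {
        // `inout(reg) x` reads `x` into a register and writes the result back.
        asm!("add {0}, {0}", inout(reg) x);
    }
    x
}

fn main() {
    assert_eq!(double(21), 42);
}
```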
2 changes: 1 addition & 1 deletion compiler/rustc_typeck/src/check/op.rs
@@ -302,7 +302,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
true,
),
hir::BinOpKind::Mul => (
format!("cannot multiply `{}` to `{}`", rhs_ty, lhs_ty),
format!("cannot multiply `{}` by `{}`", lhs_ty, rhs_ty),
Some("std::ops::Mul"),
true,
),
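For illustration, the program below (with the offending line commented out) shows the wording change: the old message named the operands in reverse order and used "to", while the new one reads left to right with "by".

```rust
fn main() {
    // let _ = 2.0_f64 * 3u32;
    // old: error: cannot multiply `u32` to `f64`
    // new: error: cannot multiply `f64` by `u32`
    let _ = 2.0_f64 * 3.0_f64; // a well-typed multiplication, for contrast
}
```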
223 changes: 1 addition & 222 deletions library/alloc/src/collections/btree/map.rs
@@ -13,8 +13,6 @@ use super::node::{self, marker, ForceResult::*, Handle, NodeRef};
use super::search::{self, SearchResult::*};
use super::unwrap_unchecked;

use UnderflowResult::*;

mod entry;
pub use entry::{Entry, OccupiedEntry, VacantEntry};
use Entry::*;
@@ -1154,40 +1152,8 @@ impl<K: Ord, V> BTreeMap<K, V> {

let mut right = Self::new();
let right_root = Self::ensure_is_owned(&mut right.root);
for _ in 0..left_root.height() {
right_root.push_internal_level();
}

{
let mut left_node = left_root.node_as_mut();
let mut right_node = right_root.node_as_mut();

loop {
let mut split_edge = match search::search_node(left_node, key) {
// key is going to the right tree
Found(handle) => handle.left_edge(),
GoDown(handle) => handle,
};

split_edge.move_suffix(&mut right_node);

match (split_edge.force(), right_node.force()) {
(Internal(edge), Internal(node)) => {
left_node = edge.descend();
right_node = node.first_edge().descend();
}
(Leaf(_), Leaf(_)) => {
break;
}
_ => {
unreachable!();
}
}
}
}

left_root.fix_right_border();
right_root.fix_left_border();
left_root.split_off(right_root, key);

if left_root.height() < right_root.height() {
self.length = left_root.node_as_ref().calc_length();
@@ -2250,193 +2216,6 @@ impl<K, V> BTreeMap<K, V> {
}
}

impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>, marker::KV> {
/// Removes a key/value-pair from the map, and returns that pair, as well as
/// the leaf edge corresponding to that former pair.
fn remove_kv_tracking<F: FnOnce()>(
self,
handle_emptied_internal_root: F,
) -> ((K, V), Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>) {
let (old_kv, mut pos, was_internal) = match self.force() {
Leaf(leaf) => {
let (old_kv, pos) = leaf.remove();
(old_kv, pos, false)
}
Internal(mut internal) => {
// Replace the location freed in the internal node with an
// adjacent KV, and remove that adjacent KV from its leaf.
// Always choose the adjacent KV on the left side because
// it is typically faster to pop an element from the end
// of the KV arrays without needing to shift other elements.

let key_loc = internal.kv_mut().0 as *mut K;
let val_loc = internal.kv_mut().1 as *mut V;

let to_remove = internal.left_edge().descend().last_leaf_edge().left_kv().ok();
let to_remove = unsafe { unwrap_unchecked(to_remove) };

let (kv, pos) = to_remove.remove();

let old_key = unsafe { mem::replace(&mut *key_loc, kv.0) };
let old_val = unsafe { mem::replace(&mut *val_loc, kv.1) };

((old_key, old_val), pos, true)
}
};

// Handle underflow
let mut cur_node = unsafe { ptr::read(&pos).into_node().forget_type() };
let mut at_leaf = true;
while cur_node.len() < node::MIN_LEN {
match handle_underfull_node(cur_node) {
AtRoot => break,
Merged(edge, merged_with_left, offset) => {
// If we merged with our right sibling then our tracked
// position has not changed. However if we merged with our
// left sibling then our tracked position is now dangling.
if at_leaf && merged_with_left {
let idx = pos.idx() + offset;
let node = match unsafe { ptr::read(&edge).descend().force() } {
Leaf(leaf) => leaf,
Internal(_) => unreachable!(),
};
pos = unsafe { Handle::new_edge(node, idx) };
}

let parent = edge.into_node();
if parent.len() == 0 {
// The parent that was just emptied must be the root,
// because nodes on a lower level would not have been
// left with a single child.
handle_emptied_internal_root();
break;
} else {
cur_node = parent.forget_type();
at_leaf = false;
}
}
Stole(stole_from_left) => {
// Adjust the tracked position if we stole from a left sibling
if stole_from_left && at_leaf {
// SAFETY: This is safe since we just added an element to our node.
unsafe {
pos.move_next_unchecked();
}
}
break;
}
}
}

// If we deleted from an internal node then we need to compensate for
// the earlier swap and adjust the tracked position to point to the
// next element.
if was_internal {
pos = unsafe { unwrap_unchecked(pos.next_kv().ok()).next_leaf_edge() };
}

(old_kv, pos)
}
}

impl<K, V> node::Root<K, V> {
/// Removes empty levels on the top, but keep an empty leaf if the entire tree is empty.
fn fix_top(&mut self) {
while self.height() > 0 && self.node_as_ref().len() == 0 {
self.pop_internal_level();
}
}

fn fix_right_border(&mut self) {
self.fix_top();

{
let mut cur_node = self.node_as_mut();

while let Internal(node) = cur_node.force() {
let mut last_kv = node.last_kv();

if last_kv.can_merge() {
cur_node = last_kv.merge().descend();
} else {
let right_len = last_kv.reborrow().right_edge().descend().len();
// `MINLEN + 1` to avoid readjust if merge happens on the next level.
if right_len < node::MIN_LEN + 1 {
last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
}
cur_node = last_kv.right_edge().descend();
}
}
}

self.fix_top();
}

/// The symmetric clone of `fix_right_border`.
fn fix_left_border(&mut self) {
self.fix_top();

{
let mut cur_node = self.node_as_mut();

while let Internal(node) = cur_node.force() {
let mut first_kv = node.first_kv();

if first_kv.can_merge() {
cur_node = first_kv.merge().descend();
} else {
let left_len = first_kv.reborrow().left_edge().descend().len();
if left_len < node::MIN_LEN + 1 {
first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
}
cur_node = first_kv.left_edge().descend();
}
}
}

self.fix_top();
}
}

enum UnderflowResult<'a, K, V> {
AtRoot,
Merged(Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge>, bool, usize),
Stole(bool),
}

fn handle_underfull_node<'a, K: 'a, V: 'a>(
node: NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal>,
) -> UnderflowResult<'_, K, V> {
let parent = match node.ascend() {
Ok(parent) => parent,
Err(_) => return AtRoot,
};

// Prefer the left KV if it exists. Merging with the left side is faster,
// since merging happens towards the left and `node` has fewer elements.
// Stealing from the left side is faster, since we can pop from the end of
// the KV arrays.
let (is_left, mut handle) = match parent.left_kv() {
Ok(left) => (true, left),
Err(parent) => {
let right = unsafe { unwrap_unchecked(parent.right_kv().ok()) };
(false, right)
}
};

if handle.can_merge() {
let offset = if is_left { handle.reborrow().left_edge().descend().len() + 1 } else { 0 };
Merged(handle.merge(), is_left, offset)
} else {
if is_left {
handle.steal_left();
} else {
handle.steal_right();
}
Stole(is_left)
}
}

impl<K: Ord, V, I: Iterator<Item = (K, V)>> Iterator for MergeIter<K, V, I> {
type Item = (K, V);

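The user-visible behavior of `BTreeMap::split_off` is unchanged by this refactor; the tree-surgery code above simply moves into the dedicated `remove`/`split` modules added in the next file. A quick stable-API usage example:

```rust
use std::collections::BTreeMap;

fn main() {
    let mut left: BTreeMap<i32, char> = (1..=6).map(|k| (k, 'x')).collect();

    // Everything at or above the given key moves into the returned map.
    let right = left.split_off(&4);

    assert_eq!(left.keys().copied().collect::<Vec<_>>(), [1, 2, 3]);
    assert_eq!(right.keys().copied().collect::<Vec<_>>(), [4, 5, 6]);
}
```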
2 changes: 2 additions & 0 deletions library/alloc/src/collections/btree/mod.rs
@@ -2,8 +2,10 @@ mod borrow;
pub mod map;
mod navigate;
mod node;
mod remove;
mod search;
pub mod set;
mod split;

#[doc(hidden)]
trait Recover<Q: ?Sized> {