Auto merge of #78594 - m-ou-se:rollup-h5c8frs, r=m-ou-se
Rollup of 7 pull requests

Successful merges:

 - #74622 (Add std::panic::panic_any.) -- see the usage sketch after this message
 - #77099 (make exp_m1 and ln_1p examples more representative of use)
 - #78526 (Strip tokens from trait and impl items before printing AST JSON)
 - #78550 (x.py setup: Create config.toml in the current directory, not the top-level directory)
 - #78577 (validator: Extend aliasing check to a call terminator)
 - #78581 (Constantify more BTreeMap and BTreeSet functions)
 - #78587 (parser: Cleanup `LazyTokenStream` and avoid some clones)

Failed merges:

r? `@ghost`
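For context on #74622: `std::panic::panic_any` panics with an arbitrary `'static + Send` payload rather than a formatted message, and the payload can be recovered by downcasting. A minimal usage sketch (the `i32` payload is just an illustration):

```rust
use std::panic;

fn main() {
    // `panic_any` accepts any `'static + Send` payload, not just strings.
    let result = panic::catch_unwind(|| {
        panic::panic_any(42_i32);
    });

    // The payload comes back as `Box<dyn Any + Send>` and can be downcast.
    if let Err(payload) = result {
        if let Some(n) = payload.downcast_ref::<i32>() {
            println!("panicked with payload: {}", n);
        }
    }
}
```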
bors committed Oct 31, 2020
2 parents 3478d7c + 1873ca5 commit 4f7612a
Showing 17 changed files with 219 additions and 113 deletions.
65 changes: 22 additions & 43 deletions compiler/rustc_ast/src/tokenstream.rs
@@ -22,7 +22,7 @@ use rustc_serialize::{Decodable, Decoder, Encodable, Encoder};
use rustc_span::{Span, DUMMY_SP};
use smallvec::{smallvec, SmallVec};

use std::{iter, mem};
use std::{fmt, iter, mem};

/// When the main rust parser encounters a syntax-extension invocation, it
/// parses the arguments to the invocation as a token-tree. This is a very
@@ -120,72 +120,51 @@ where
}
}

// A cloneable callback which produces a `TokenStream`. Each clone
// of this should produce the same `TokenStream`
pub trait CreateTokenStream: sync::Send + sync::Sync + FnOnce() -> TokenStream {
// Workaround for the fact that `Clone` is not object-safe
fn clone_it(&self) -> Box<dyn CreateTokenStream>;
pub trait CreateTokenStream: sync::Send + sync::Sync {
fn create_token_stream(&self) -> TokenStream;
}

impl<F: 'static + Clone + sync::Send + sync::Sync + FnOnce() -> TokenStream> CreateTokenStream
for F
{
fn clone_it(&self) -> Box<dyn CreateTokenStream> {
Box::new(self.clone())
}
}

impl Clone for Box<dyn CreateTokenStream> {
fn clone(&self) -> Self {
let val: &(dyn CreateTokenStream) = &**self;
val.clone_it()
impl CreateTokenStream for TokenStream {
fn create_token_stream(&self) -> TokenStream {
self.clone()
}
}

/// A lazy version of `TokenStream`, which may defer creation
/// A lazy version of `TokenStream`, which defers creation
/// of an actual `TokenStream` until it is needed.
pub type LazyTokenStream = Lrc<LazyTokenStreamInner>;

/// `Box` is here only to reduce the structure size.
#[derive(Clone)]
pub enum LazyTokenStreamInner {
Lazy(Box<dyn CreateTokenStream>),
Ready(TokenStream),
}
pub struct LazyTokenStream(Lrc<Box<dyn CreateTokenStream>>);

impl std::fmt::Debug for LazyTokenStreamInner {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
LazyTokenStreamInner::Lazy(..) => f.debug_struct("LazyTokenStream::Lazy").finish(),
LazyTokenStreamInner::Ready(..) => f.debug_struct("LazyTokenStream::Ready").finish(),
}
impl LazyTokenStream {
pub fn new(inner: impl CreateTokenStream + 'static) -> LazyTokenStream {
LazyTokenStream(Lrc::new(Box::new(inner)))
}

pub fn create_token_stream(&self) -> TokenStream {
self.0.create_token_stream()
}
}

impl LazyTokenStreamInner {
pub fn into_token_stream(&self) -> TokenStream {
match self {
// Note that we do not cache this. If this ever becomes a performance
// problem, we should investigate wrapping `LazyTokenStreamInner`
// in a lock
LazyTokenStreamInner::Lazy(cb) => (cb.clone())(),
LazyTokenStreamInner::Ready(stream) => stream.clone(),
}
impl fmt::Debug for LazyTokenStream {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
fmt::Debug::fmt("LazyTokenStream", f)
}
}

impl<S: Encoder> Encodable<S> for LazyTokenStreamInner {
impl<S: Encoder> Encodable<S> for LazyTokenStream {
fn encode(&self, _s: &mut S) -> Result<(), S::Error> {
panic!("Attempted to encode LazyTokenStream");
}
}

impl<D: Decoder> Decodable<D> for LazyTokenStreamInner {
impl<D: Decoder> Decodable<D> for LazyTokenStream {
fn decode(_d: &mut D) -> Result<Self, D::Error> {
panic!("Attempted to decode LazyTokenStream");
}
}

impl<CTX> HashStable<CTX> for LazyTokenStreamInner {
impl<CTX> HashStable<CTX> for LazyTokenStream {
fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
panic!("Attempted to compute stable hash for LazyTokenStream");
}
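For context on the rewrite above: `Clone` is not object-safe, so the old `CreateTokenStream` bolted a `clone_it` method onto boxed callbacks, while the new design shares one immutable trait object behind a reference-counted pointer. A standalone sketch of both shapes, with simplified stand-in names and `Arc` in place of rustc's `Lrc` (an alias for it):

```rust
use std::sync::Arc;

// Old shape: make a trait "cloneable" despite `Clone` not being object-safe.
trait MakeString: Send + Sync {
    fn make(&self) -> String;
    // Workaround: each implementor knows how to clone itself into a box.
    fn clone_it(&self) -> Box<dyn MakeString>;
}

impl MakeString for String {
    fn make(&self) -> String {
        self.clone()
    }
    fn clone_it(&self) -> Box<dyn MakeString> {
        Box::new(self.clone())
    }
}

impl Clone for Box<dyn MakeString> {
    fn clone(&self) -> Self {
        (**self).clone_it()
    }
}

// New shape: skip cloning entirely by sharing one trait object.
trait CreateString: Send + Sync {
    fn create(&self) -> String;
}

impl CreateString for String {
    fn create(&self) -> String {
        self.clone()
    }
}

#[derive(Clone)]
struct LazyString(Arc<Box<dyn CreateString>>);

impl LazyString {
    fn new(inner: impl CreateString + 'static) -> Self {
        LazyString(Arc::new(Box::new(inner)))
    }
    fn create(&self) -> String {
        self.0.create()
    }
}

fn main() {
    let old: Box<dyn MakeString> = Box::new(String::from("hello"));
    let old_copy = old.clone(); // allocates a fresh box
    assert_eq!(old.make(), old_copy.make());

    let new = LazyString::new(String::from("hello"));
    let new_copy = new.clone(); // cheap: only bumps the refcount
    assert_eq!(new.create(), new_copy.create());
}
```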
13 changes: 6 additions & 7 deletions compiler/rustc_expand/src/config.rs
@@ -4,12 +4,11 @@ use rustc_ast::attr::HasAttrs;
use rustc_ast::mut_visit::*;
use rustc_ast::ptr::P;
use rustc_ast::token::{DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, LazyTokenStreamInner, Spacing, TokenStream, TokenTree};
use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing, TokenStream, TokenTree};
use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem};
use rustc_attr as attr;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::map_in_place::MapInPlace;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{error_code, struct_span_err, Applicability, Handler};
use rustc_feature::{Feature, Features, State as FeatureState};
use rustc_feature::{
@@ -303,7 +302,7 @@ impl<'a> StripUnconfigured<'a> {

// Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
// for `attr` when we expand it to `#[attr]`
let pound_token = orig_tokens.into_token_stream().trees().next().unwrap();
let pound_token = orig_tokens.create_token_stream().trees().next().unwrap();
if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
panic!("Bad tokens for attribute {:?}", attr);
}
@@ -313,16 +312,16 @@ impl<'a> StripUnconfigured<'a> {
DelimSpan::from_single(pound_token.span()),
DelimToken::Bracket,
item.tokens
.clone()
.as_ref()
.unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
.into_token_stream(),
.create_token_stream(),
);

let mut attr = attr::mk_attr_from_item(attr.style, item, span);
attr.tokens = Some(Lrc::new(LazyTokenStreamInner::Ready(TokenStream::new(vec![
attr.tokens = Some(LazyTokenStream::new(TokenStream::new(vec![
(pound_token, Spacing::Alone),
(bracket_group, Spacing::Alone),
]))));
])));
self.process_cfg_attr(attr)
})
.collect()
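At the source level, the token surgery above implements the familiar `cfg_attr` expansion: the `#` from `#[cfg_attr(...)]` is reused as the `#` of the expanded attribute. A sketch of the transformation it performs, using a built-in derive for self-containment:

```rust
// Before expansion: `derive(Debug, PartialEq)` applies only under `cfg(test)`.
#[cfg_attr(test, derive(Debug, PartialEq))]
struct Point {
    x: i32,
    y: i32,
}

// Under `cfg(test)` the item behaves as if written:
// #[derive(Debug, PartialEq)]
// struct Point { x: i32, y: i32 }

fn main() {
    let _p = Point { x: 1, y: 2 };
}
```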
11 changes: 11 additions & 0 deletions compiler/rustc_interface/src/passes.rs
@@ -70,6 +70,17 @@ impl mut_visit::MutVisitor for TokenStripper {
i.tokens = None;
mut_visit::noop_flat_map_foreign_item(i, self)
}
fn flat_map_trait_item(
&mut self,
mut i: P<ast::AssocItem>,
) -> SmallVec<[P<ast::AssocItem>; 1]> {
i.tokens = None;
mut_visit::noop_flat_map_assoc_item(i, self)
}
fn flat_map_impl_item(&mut self, mut i: P<ast::AssocItem>) -> SmallVec<[P<ast::AssocItem>; 1]> {
i.tokens = None;
mut_visit::noop_flat_map_assoc_item(i, self)
}
fn visit_block(&mut self, b: &mut P<ast::Block>) {
b.tokens = None;
mut_visit::noop_visit_block(b, self);
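The two new visitor methods extend token stripping to trait and impl items (both `ast::AssocItem`s), matching #78526's goal of dropping cached tokens before printing AST JSON. A minimal standalone sketch of the idea, with hypothetical stand-in types, walking a tree and clearing cached token fields before serialization:

```rust
#[derive(Debug)]
struct Item {
    name: String,
    tokens: Option<String>, // stand-in for a cached `LazyTokenStream`
    children: Vec<Item>,
}

// Recursively clear every cached token field, analogous to the
// `TokenStripper` visitor setting `i.tokens = None` at each node.
fn strip_tokens(item: &mut Item) {
    item.tokens = None;
    for child in &mut item.children {
        strip_tokens(child);
    }
}

fn main() {
    let mut root = Item {
        name: "trait_item".into(),
        tokens: Some("cached tokens".into()),
        children: vec![Item {
            name: "impl_item".into(),
            tokens: Some("more cached tokens".into()),
            children: vec![],
        }],
    };
    strip_tokens(&mut root);
    assert!(root.tokens.is_none() && root.children[0].tokens.is_none());
}
```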
45 changes: 40 additions & 5 deletions compiler/rustc_mir/src/transform/validate.rs
@@ -10,8 +10,8 @@ use rustc_middle::mir::{
visit::{PlaceContext, Visitor},
};
use rustc_middle::mir::{
AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPhase, Operand, Rvalue,
SourceScope, Statement, StatementKind, Terminator, TerminatorKind, VarDebugInfo,
AggregateKind, BasicBlock, Body, BorrowKind, Local, Location, MirPhase, Operand, PlaceRef,
Rvalue, SourceScope, Statement, StatementKind, Terminator, TerminatorKind, VarDebugInfo,
};
use rustc_middle::ty::relate::{Relate, RelateResult, TypeRelation};
use rustc_middle::ty::{self, ParamEnv, Ty, TyCtxt};
@@ -46,8 +46,16 @@ impl<'tcx> MirPass<'tcx> for Validator {
.iterate_to_fixpoint()
.into_results_cursor(body);

TypeChecker { when: &self.when, body, tcx, param_env, mir_phase, storage_liveness }
.visit_body(body);
TypeChecker {
when: &self.when,
body,
tcx,
param_env,
mir_phase,
storage_liveness,
place_cache: Vec::new(),
}
.visit_body(body);
}
}

@@ -150,6 +158,7 @@ struct TypeChecker<'a, 'tcx> {
param_env: ParamEnv<'tcx>,
mir_phase: MirPhase,
storage_liveness: ResultsCursor<'a, 'tcx, MaybeStorageLive>,
place_cache: Vec<PlaceRef<'tcx>>,
}

impl<'a, 'tcx> TypeChecker<'a, 'tcx> {
@@ -391,7 +400,7 @@ impl<'a, 'tcx> Visitor<'tcx> for TypeChecker<'a, 'tcx> {
self.check_edge(location, *unwind, EdgeKind::Unwind);
}
}
TerminatorKind::Call { func, destination, cleanup, .. } => {
TerminatorKind::Call { func, args, destination, cleanup, .. } => {
let func_ty = func.ty(&self.body.local_decls, self.tcx);
match func_ty.kind() {
ty::FnPtr(..) | ty::FnDef(..) => {}
@@ -406,6 +415,32 @@
if let Some(cleanup) = cleanup {
self.check_edge(location, *cleanup, EdgeKind::Unwind);
}

// The call destination place and Operand::Move place used as an argument might be
// passed by a reference to the callee. Consequently they must be non-overlapping.
// Currently this simply checks for duplicate places.
self.place_cache.clear();
if let Some((destination, _)) = destination {
self.place_cache.push(destination.as_ref());
}
for arg in args {
if let Operand::Move(place) = arg {
self.place_cache.push(place.as_ref());
}
}
let all_len = self.place_cache.len();
self.place_cache.sort_unstable();
self.place_cache.dedup();
let has_duplicates = all_len != self.place_cache.len();
if has_duplicates {
self.fail(
location,
format!(
"encountered overlapping memory in `Call` terminator: {:?}",
terminator.kind,
),
);
}
}
TerminatorKind::Assert { cond, target, cleanup, .. } => {
let cond_ty = cond.ty(&self.body.local_decls, self.tcx);
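The overlap check above reduces to duplicate detection over a small vector of places. A standalone sketch of the sort-and-dedup idiom it relies on, with strings standing in for `PlaceRef`s:

```rust
// Returns true if `items` contains any duplicate, by sorting and
// deduplicating in place and comparing lengths, exactly the shape
// used by the validator's `place_cache` check.
fn has_duplicates<T: Ord>(items: &mut Vec<T>) -> bool {
    let all_len = items.len();
    items.sort_unstable();
    items.dedup();
    all_len != items.len()
}

fn main() {
    // Stand-ins for the places collected from the call destination
    // and the `Operand::Move` arguments.
    let mut places = vec!["_1", "_2", "_1"];
    assert!(has_duplicates(&mut places));

    let mut distinct = vec!["_1", "_2", "_3"];
    assert!(!has_duplicates(&mut distinct));
}
```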
23 changes: 12 additions & 11 deletions compiler/rustc_parse/src/lib.rs
@@ -249,29 +249,30 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream {
// came from. Here we attempt to extract these lossless token streams
// before we fall back to the stringification.

let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream());
let convert_tokens =
|tokens: &Option<LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());

let tokens = match *nt {
Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()),
Nonterminal::NtBlock(ref block) => convert_tokens(&block.tokens),
Nonterminal::NtStmt(ref stmt) => {
// FIXME: We currently only collect tokens for `:stmt`
// matchers in `macro_rules!` macros. When we start collecting
// tokens for attributes on statements, we will need to prepend
// attributes here
convert_tokens(stmt.tokens.clone())
convert_tokens(&stmt.tokens)
}
Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.clone()),
Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.clone()),
Nonterminal::NtPat(ref pat) => convert_tokens(&pat.tokens),
Nonterminal::NtTy(ref ty) => convert_tokens(&ty.tokens),
Nonterminal::NtIdent(ident, is_raw) => {
Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
}
Nonterminal::NtLifetime(ident) => {
Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
}
Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.clone()),
Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.clone()),
Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.clone()),
Nonterminal::NtMeta(ref attr) => convert_tokens(&attr.tokens),
Nonterminal::NtPath(ref path) => convert_tokens(&path.tokens),
Nonterminal::NtVis(ref vis) => convert_tokens(&vis.tokens),
Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
if expr.tokens.is_none() {
@@ -604,7 +605,7 @@ fn prepend_attrs(
attrs: &[ast::Attribute],
tokens: Option<&tokenstream::LazyTokenStream>,
) -> Option<tokenstream::TokenStream> {
let tokens = tokens?.clone().into_token_stream();
let tokens = tokens?.create_token_stream();
if attrs.is_empty() {
return Some(tokens);
}
@@ -617,9 +618,9 @@
);
builder.push(
attr.tokens
.clone()
.as_ref()
.unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
.into_token_stream(),
.create_token_stream(),
);
}
builder.push(tokens);
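The recurring change in this file replaces `option.clone()` followed by a consuming `into_token_stream()` with `option.as_ref()` plus the borrowing `create_token_stream(&self)`, avoiding a clone of the whole `Option` payload. A minimal sketch of that shape, with hypothetical stand-in types:

```rust
#[derive(Clone)]
struct Stream(Vec<u32>);

impl Stream {
    // Borrowing accessor analogous to `create_token_stream(&self)`:
    // the caller gets an owned value without consuming `self`.
    fn create(&self) -> Vec<u32> {
        self.0.clone()
    }
}

fn main() {
    let tokens: Option<Stream> = Some(Stream(vec![1, 2, 3]));

    // Old shape: clone the Option's payload, then consume the clone.
    let _owned = tokens.clone().map(|t| t.0);

    // New shape: borrow the payload and let the method decide what to copy.
    let _borrowed = tokens.as_ref().map(|t| t.create());
}
```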
63 changes: 37 additions & 26 deletions compiler/rustc_parse/src/parser/mod.rs
@@ -16,8 +16,8 @@ pub use path::PathStyle;

use rustc_ast::ptr::P;
use rustc_ast::token::{self, DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, LazyTokenStreamInner, Spacing};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree};
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
@@ -1199,15 +1199,12 @@ impl<'a> Parser<'a> {
f: impl FnOnce(&mut Self) -> PResult<'a, R>,
) -> PResult<'a, (R, Option<LazyTokenStream>)> {
let start_token = (self.token.clone(), self.token_spacing);
let mut cursor_snapshot = self.token_cursor.clone();
let cursor_snapshot = self.token_cursor.clone();

let ret = f(self)?;

let new_calls = self.token_cursor.num_next_calls;
let num_calls = new_calls - cursor_snapshot.num_next_calls;
let desugar_doc_comments = self.desugar_doc_comments;

// We didn't capture any tokens
let num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
if num_calls == 0 {
return Ok((ret, None));
}
@@ -1220,27 +1217,41 @@
//
// This also makes `Parser` very cheap to clone, since
// there is no intermediate collection buffer to clone.
let lazy_cb = move || {
// The token produced by the final call to `next` or `next_desugared`
// was not actually consumed by the callback. The combination
// of chaining the initial token and using `take` produces the desired
// result - we produce an empty `TokenStream` if no calls were made,
// and omit the final token otherwise.
let tokens = std::iter::once(start_token)
.chain((0..num_calls).map(|_| {
if desugar_doc_comments {
cursor_snapshot.next_desugared()
} else {
cursor_snapshot.next()
}
}))
.take(num_calls);
struct LazyTokenStreamImpl {
start_token: (Token, Spacing),
cursor_snapshot: TokenCursor,
num_calls: usize,
desugar_doc_comments: bool,
}
impl CreateTokenStream for LazyTokenStreamImpl {
fn create_token_stream(&self) -> TokenStream {
// The token produced by the final call to `next` or `next_desugared`
// was not actually consumed by the callback. The combination
// of chaining the initial token and using `take` produces the desired
// result - we produce an empty `TokenStream` if no calls were made,
// and omit the final token otherwise.
let mut cursor_snapshot = self.cursor_snapshot.clone();
let tokens = std::iter::once(self.start_token.clone())
.chain((0..self.num_calls).map(|_| {
if self.desugar_doc_comments {
cursor_snapshot.next_desugared()
} else {
cursor_snapshot.next()
}
}))
.take(self.num_calls);

make_token_stream(tokens)
};
let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
make_token_stream(tokens)
}
}

Ok((ret, Some(stream)))
let lazy_impl = LazyTokenStreamImpl {
start_token,
cursor_snapshot,
num_calls,
desugar_doc_comments: self.desugar_doc_comments,
};
Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
}

/// `::{` or `::*`
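The rewrite above trades a boxed closure for a named struct that captures the same state, so the token stream can be re-created any number of times from a cloned cursor snapshot instead of requiring a cloneable `FnOnce`. A standalone sketch of that deferral pattern, with hypothetical stand-in types:

```rust
// Stand-in for the parser's `TokenCursor` snapshot.
#[derive(Clone)]
struct Cursor {
    data: Vec<char>,
    pos: usize,
}

impl Cursor {
    fn next(&mut self) -> Option<char> {
        let c = self.data.get(self.pos).copied();
        self.pos += 1;
        c
    }
}

trait CreateOutput: Send + Sync {
    fn create(&self) -> String;
}

// Named struct instead of a boxed closure: replaying from a cloned
// snapshot lets `create` take `&self` and be called repeatedly,
// producing the same result each time.
struct LazyOutput {
    snapshot: Cursor,
    num_calls: usize,
}

impl CreateOutput for LazyOutput {
    fn create(&self) -> String {
        let mut cursor = self.snapshot.clone();
        (0..self.num_calls).filter_map(|_| cursor.next()).collect()
    }
}

fn main() {
    let lazy = LazyOutput {
        snapshot: Cursor { data: "fn main() {}".chars().collect(), pos: 0 },
        num_calls: 7,
    };
    // Each call replays the snapshot and yields the same output.
    assert_eq!(lazy.create(), "fn main");
    assert_eq!(lazy.create(), "fn main");
}
```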
