proc_macro::Group::span_open and span_close #53902

Merged (3 commits) on Sep 9, 2018
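For orientation, this is roughly what the new surface looks like from a macro author's side: `span()` stays stable and covers the whole group, while `span_open()`/`span_close()` (unstable, tracked in #38356) point at just the delimiter characters. A minimal sketch, assuming a crate compiled as a proc-macro with the `proc_macro_span` feature enabled; the macro name `inspect_delims` is made up for illustration:

```rust
#![feature(proc_macro_span)] // gate for span_open/span_close at the time of this PR
extern crate proc_macro;

use proc_macro::{TokenStream, TokenTree};

#[proc_macro]
pub fn inspect_delims(input: TokenStream) -> TokenStream {
    for tree in input.clone() {
        if let TokenTree::Group(group) = tree {
            let _entire = group.span();      // stable: spans `( ... )` as a whole
            let _open = group.span_open();   // new: just the opening `(`
            let _close = group.span_close(); // new: just the closing `)`
        }
    }
    input
}
```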
37 changes: 32 additions & 5 deletions src/libproc_macro/lib.rs
@@ -63,7 +63,7 @@ use std::str::FromStr;
use syntax::errors::DiagnosticBuilder;
use syntax::parse::{self, token};
use syntax::symbol::Symbol;
use syntax::tokenstream;
use syntax::tokenstream::{self, DelimSpan};
use syntax_pos::{Pos, FileName};

/// The main type provided by this crate, representing an abstract stream of
@@ -609,7 +609,7 @@ impl fmt::Display for TokenTree {
pub struct Group {
delimiter: Delimiter,
stream: TokenStream,
span: Span,
span: DelimSpan,
}

#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
@@ -650,7 +650,7 @@ impl Group {
Group {
delimiter: delimiter,
stream: stream,
span: Span::call_site(),
span: DelimSpan::from_single(Span::call_site().0),
}
}

@@ -671,9 +671,36 @@ impl Group {

/// Returns the span for the delimiters of this token stream, spanning the
/// entire `Group`.
///
/// ```text
/// pub fn span(&self) -> Span {
///            ^^^^^^^
/// ```
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
pub fn span(&self) -> Span {
self.span
Span(self.span.entire())
}

/// Returns the span pointing to the opening delimiter of this group.
///
/// ```text
/// pub fn span_open(&self) -> Span {
///                 ^
/// ```
#[unstable(feature = "proc_macro_span", issue = "38356")]
pub fn span_open(&self) -> Span {
Span(self.span.open)
}

/// Returns the span pointing to the closing delimiter of this group.
///
/// ```text
/// pub fn span_close(&self) -> Span {
///                        ^
/// ```
#[unstable(feature = "proc_macro_span", issue = "38356")]
pub fn span_close(&self) -> Span {
Span(self.span.close)
}

/// Configures the span for this `Group`'s delimiters, but not its internal
@@ -684,7 +711,7 @@ impl Group {
/// tokens at the level of the `Group`.
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
pub fn set_span(&mut self, span: Span) {
self.span = span;
self.span = DelimSpan::from_single(span.0);
}
}

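For context, `DelimSpan` is a pair of spans for a group's opening and closing delimiters, with `entire()` recovering the single span that `Group::span()` exposes above. A simplified, self-contained sketch of that shape under stand-in types (the real type lives in `syntax::tokenstream` and wraps rustc's `Span`, so this is illustrative only):

```rust
// Stand-in span with byte positions; not rustc's Span.
#[derive(Copy, Clone, Debug, PartialEq)]
struct Span { lo: u32, hi: u32 }

#[derive(Copy, Clone, Debug, PartialEq)]
struct DelimSpan { open: Span, close: Span }

impl DelimSpan {
    // Open and close delimiters share one span, e.g. Span::call_site() in Group::new.
    fn from_single(sp: Span) -> Self { DelimSpan { open: sp, close: sp } }
    // Distinct spans for the opening and closing delimiter tokens.
    fn from_pair(open: Span, close: Span) -> Self { DelimSpan { open, close } }
    // From the start of the opening delimiter to the end of the closing one.
    fn entire(&self) -> Span { Span { lo: self.open.lo, hi: self.close.hi } }
}

fn main() {
    let ds = DelimSpan::from_pair(Span { lo: 10, hi: 11 }, Span { lo: 20, hi: 21 });
    assert_eq!(ds.entire(), Span { lo: 10, hi: 21 });

    let single = DelimSpan::from_single(Span { lo: 0, hi: 1 });
    assert_eq!(single.entire(), Span { lo: 0, hi: 1 });
}
```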
4 changes: 2 additions & 2 deletions src/libproc_macro/rustc.rs
@@ -64,7 +64,7 @@ impl TokenTree {
tokenstream::TokenTree::Delimited(span, delimed) => {
let delimiter = Delimiter::from_internal(delimed.delim);
let mut g = Group::new(delimiter, ::TokenStream(delimed.tts.into()));
g.set_span(Span(span));
g.span = span;
return g.into();
}
};
@@ -192,7 +192,7 @@ impl TokenTree {
self::TokenTree::Punct(tt) => (tt.as_char(), tt.spacing(), tt.span()),
self::TokenTree::Group(tt) => {
return TokenTree::Delimited(
tt.span.0,
tt.span,
Delimited {
delim: tt.delimiter.to_internal(),
tts: tt.stream.0.into(),
12 changes: 12 additions & 0 deletions src/librustc/ich/hcx.rs
@@ -28,6 +28,7 @@ use syntax::ast;
use syntax::source_map::SourceMap;
use syntax::ext::hygiene::SyntaxContext;
use syntax::symbol::Symbol;
use syntax::tokenstream::DelimSpan;
use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::hygiene;

@@ -396,6 +397,17 @@ impl<'a> HashStable<StableHashingContext<'a>> for Span {
}
}

impl<'a> HashStable<StableHashingContext<'a>> for DelimSpan {
fn hash_stable<W: StableHasherResult>(
&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>,
) {
self.open.hash_stable(hcx, hasher);
self.close.hash_stable(hcx, hasher);
}
}

pub fn hash_stable_trait_impls<'a, 'gcx, W, R>(
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>,
5 changes: 3 additions & 2 deletions src/librustc_resolve/macros.rs
@@ -35,7 +35,7 @@ use syntax::parse::parser::PathStyle;
use syntax::parse::token::{self, Token};
use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
use syntax::tokenstream::{TokenStream, TokenTree, Delimited};
use syntax::tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::{Span, DUMMY_SP};
use errors::Applicability;
@@ -279,7 +279,8 @@ impl<'a, 'crateloader: 'a> base::Resolver for Resolver<'a, 'crateloader> {
tokens.push(TokenTree::Token(path.span, tok).into());
}
}
attrs[i].tokens = TokenTree::Delimited(attrs[i].span, Delimited {
let delim_span = DelimSpan::from_single(attrs[i].span);
attrs[i].tokens = TokenTree::Delimited(delim_span, Delimited {
delim: token::Paren,
tts: TokenStream::concat(tokens).into(),
}).into();
4 changes: 2 additions & 2 deletions src/libsyntax/attr/mod.rs
@@ -34,7 +34,7 @@ use parse::token::{self, Token};
use ptr::P;
use symbol::Symbol;
use ThinVec;
use tokenstream::{TokenStream, TokenTree, Delimited};
use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
use GLOBALS;

use std::iter;
@@ -535,7 +535,7 @@ impl MetaItemKind {
}
tokens.push(item.node.tokens());
}
TokenTree::Delimited(span, Delimited {
TokenTree::Delimited(DelimSpan::from_single(span), Delimited {
delim: token::Paren,
tts: TokenStream::concat(tokens).into(),
}).into()
22 changes: 13 additions & 9 deletions src/libsyntax/ext/quote.rs
@@ -10,14 +10,14 @@

use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty};
use source_map::respan;
use syntax_pos::Span;
use syntax_pos::{Span, DUMMY_SP};
use ext::base::ExtCtxt;
use ext::base;
use ext::build::AstBuilder;
use parse::parser::{Parser, PathStyle};
use parse::token;
use ptr::P;
use tokenstream::{TokenStream, TokenTree};
use tokenstream::{DelimSpan, TokenStream, TokenTree};

/// Quasiquoting works via token trees.
///
@@ -36,7 +36,7 @@ pub mod rt {
use symbol::Symbol;
use ThinVec;

use tokenstream::{self, TokenTree, TokenStream};
use tokenstream::{self, DelimSpan, TokenTree, TokenStream};

pub use parse::new_parser_from_tts;
pub use syntax_pos::{BytePos, Span, DUMMY_SP, FileName};
@@ -245,7 +245,8 @@ pub mod rt {
}
inner.push(self.tokens.clone());

r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
let delim_span = DelimSpan::from_single(self.span);
r.push(TokenTree::Delimited(delim_span, tokenstream::Delimited {
delim: token::Bracket, tts: TokenStream::concat(inner).into()
}));
r
@@ -261,7 +262,7 @@

impl ToTokens for () {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
vec![TokenTree::Delimited(DelimSpan::dummy(), tokenstream::Delimited {
delim: token::Paren,
tts: TokenStream::empty().into(),
})]
@@ -385,13 +386,16 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {

let mut results = Vec::new();
let mut result = Vec::new();
let mut open_span = DUMMY_SP;
for tree in tts {
match tree {
TokenTree::Token(_, token::OpenDelim(..)) => {
TokenTree::Token(span, token::OpenDelim(..)) => {
open_span = span;
results.push(::std::mem::replace(&mut result, Vec::new()));
}
TokenTree::Token(span, token::CloseDelim(delim)) => {
let tree = TokenTree::Delimited(span, Delimited {
let delim_span = DelimSpan::from_pair(open_span, span);
let tree = TokenTree::Delimited(delim_span, Delimited {
delim,
tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
});
Expand Down Expand Up @@ -756,9 +760,9 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
vec![cx.stmt_expr(e_push)]
},
TokenTree::Delimited(span, ref delimed) => {
let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span.open), false);
stmts.extend(statements_mk_tts(cx, delimed.stream()));
stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span.close), false));
stmts
}
}
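The `unflatten` change above is where distinct open/close spans actually get paired: the span of each `OpenDelim` token is remembered and joined with the span of the matching `CloseDelim` via `DelimSpan::from_pair`. A self-contained sketch of that pairing idea, using a plain byte range as a stand-in span (not rustc's `Span`, and without the token-tree rebuilding the real function does):

```rust
// Stand-in span: a byte range in the source.
type Span = std::ops::Range<usize>;

/// For every balanced delimiter pair, produce the span running from the
/// opening delimiter to the closing one (the analogue of
/// DelimSpan::from_pair(open, close).entire()).
fn delim_spans(tokens: &[(Span, char)]) -> Vec<Span> {
    let mut open_stack: Vec<Span> = Vec::new();
    let mut paired = Vec::new();
    for (sp, ch) in tokens.iter().cloned() {
        match ch {
            '(' | '[' | '{' => open_stack.push(sp),
            ')' | ']' | '}' => {
                if let Some(open) = open_stack.pop() {
                    paired.push(open.start..sp.end);
                }
            }
            _ => {}
        }
    }
    paired
}

fn main() {
    // "(a)" tokenized as: `(` at 0..1, `a` at 1..2, `)` at 2..3.
    let toks = [(0..1, '('), (1..2, 'a'), (2..3, ')')];
    assert_eq!(delim_spans(&toks), vec![0..3]);
}
```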
28 changes: 14 additions & 14 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -85,7 +85,7 @@ pub use self::ParseResult::*;
use self::TokenTreeOrTokenTreeSlice::*;

use ast::Ident;
use syntax_pos::{self, BytePos, Span};
use syntax_pos::{self, Span};
use errors::FatalError;
use ext::tt::quoted::{self, TokenTree};
use parse::{Directory, ParseSess};
@@ -94,7 +94,7 @@ use parse::token::{self, DocComment, Nonterminal, Token};
use print::pprust;
use OneVector;
use symbol::keywords;
use tokenstream::TokenStream;
use tokenstream::{DelimSpan, TokenStream};

use rustc_data_structures::fx::FxHashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
@@ -151,10 +151,10 @@ struct MatcherPos<'a> {
top_elts: TokenTreeOrTokenTreeSlice<'a>,
/// The position of the "dot" in this matcher
idx: usize,
/// The beginning position in the source that the beginning of this matcher corresponds to. In
/// other words, the token in the source at `sp_lo` is matched against the first token of the
/// matcher.
sp_lo: BytePos,
/// The first span of source that the beginning of this matcher corresponds to. In other
/// words, the token in the source whose span is `sp_open` is matched against the first token of
/// the matcher.
sp_open: Span,

/// For each named metavar in the matcher, we keep track of token trees matched against the
/// metavar by the black box parser. In particular, there may be more than one match per
@@ -284,17 +284,17 @@ fn create_matches(len: usize) -> Vec<Rc<Vec<NamedMatch>>> {
}

/// Generate the top-level matcher position in which the "dot" is before the first token of the
/// matcher `ms` and we are going to start matching at position `lo` in the source.
fn initial_matcher_pos(ms: &[TokenTree], lo: BytePos) -> MatcherPos {
/// matcher `ms` and we are going to start matching at the span `open` in the source.
fn initial_matcher_pos(ms: &[TokenTree], open: Span) -> MatcherPos {
let match_idx_hi = count_names(ms);
let matches = create_matches(match_idx_hi);
MatcherPos {
// Start with the top level matcher given to us
top_elts: TtSeq(ms), // "elts" is an abbr. for "elements"
// The "dot" is before the first token of the matcher
idx: 0,
// We start matching with byte `lo` in the source code
sp_lo: lo,
// We start matching at the span `open` in the source code
sp_open: open,

// Initialize `matches` to a bunch of empty `Vec`s -- one for each metavar in `top_elts`.
// `match_lo` for `top_elts` is 0 and `match_hi` is `matches.len()`. `match_cur` is 0 since
Expand Down Expand Up @@ -332,7 +332,7 @@ fn initial_matcher_pos(ms: &[TokenTree], lo: BytePos) -> MatcherPos {
/// token tree it was derived from.
#[derive(Debug, Clone)]
pub enum NamedMatch {
MatchedSeq(Rc<Vec<NamedMatch>>, syntax_pos::Span),
MatchedSeq(Rc<Vec<NamedMatch>>, DelimSpan),
MatchedNonterminal(Rc<Nonterminal>),
}

@@ -488,7 +488,7 @@ fn inner_parse_loop<'a>(
// Add matches from this repetition to the `matches` of `up`
for idx in item.match_lo..item.match_hi {
let sub = item.matches[idx].clone();
let span = span.with_lo(item.sp_lo);
let span = DelimSpan::from_pair(item.sp_open, span);
new_pos.push_match(idx, MatchedSeq(sub, span));
}

@@ -556,7 +556,7 @@ fn inner_parse_loop<'a>(
match_cur: item.match_cur,
match_hi: item.match_cur + seq.num_captures,
up: Some(item),
sp_lo: sp.lo(),
sp_open: sp.open,
top_elts: Tt(TokenTree::Sequence(sp, seq)),
})));
}
@@ -643,7 +643,7 @@ pub fn parse(
//
// This MatcherPos instance is allocated on the stack. All others -- and
// there are frequently *no* others! -- are allocated on the heap.
let mut initial = initial_matcher_pos(ms, parser.span.lo());
let mut initial = initial_matcher_pos(ms, parser.span);
let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
let mut next_items = Vec::new();
