From c4befe1710b3c394018ca65a6e99e109d081f16e Mon Sep 17 00:00:00 2001 From: Mark Mansi Date: Fri, 19 Jan 2018 15:46:15 -0600 Subject: [PATCH 1/4] Run rustfmt and add comments --- src/libsyntax/ext/tt/quoted.rs | 141 ++++++++++++++++++++++++--------- 1 file changed, 103 insertions(+), 38 deletions(-) diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 0e21e3f6b0010..ee87a612345ec 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -10,10 +10,10 @@ use ast; use ext::tt::macro_parser; -use parse::{ParseSess, token}; +use parse::{token, ParseSess}; use print::pprust; use symbol::keywords; -use syntax_pos::{DUMMY_SP, Span, BytePos}; +use syntax_pos::{BytePos, Span, DUMMY_SP}; use tokenstream; use std::rc::Rc; @@ -68,7 +68,9 @@ pub struct SequenceRepetition { /// for token sequences. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)] pub enum KleeneOp { + /// Kleene star (`*`) for zero or more repetitions ZeroOrMore, + /// Kleene star (`+`) for one or more repetitions OneOrMore, } @@ -83,7 +85,11 @@ pub enum TokenTree { /// E.g. `$var` MetaVar(Span, ast::Ident), /// E.g. `$var:expr`. This is only used in the left hand side of MBE macros. - MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */), + MetaVarDecl( + Span, + ast::Ident, /* name to bind */ + ast::Ident, /* kind of nonterminal */ + ), } impl TokenTree { @@ -131,17 +137,20 @@ impl TokenTree { /// Retrieve the `TokenTree`'s span. pub fn span(&self) -> Span { match *self { - TokenTree::Token(sp, _) | - TokenTree::MetaVar(sp, _) | - TokenTree::MetaVarDecl(sp, _, _) | - TokenTree::Delimited(sp, _) | - TokenTree::Sequence(sp, _) => sp, + TokenTree::Token(sp, _) + | TokenTree::MetaVar(sp, _) + | TokenTree::MetaVarDecl(sp, _, _) + | TokenTree::Delimited(sp, _) + | TokenTree::Sequence(sp, _) => sp, } } } -pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &ParseSess) - -> Vec { +pub fn parse( + input: tokenstream::TokenStream, + expect_matchers: bool, + sess: &ParseSess, +) -> Vec { let mut result = Vec::new(); let mut trees = input.trees(); while let Some(tree) = trees.next() { @@ -154,16 +163,24 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars Some(kind) => { let span = end_sp.with_lo(start_sp.lo()); result.push(TokenTree::MetaVarDecl(span, ident, kind)); - continue + continue; } _ => end_sp, }, - tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + tree => tree.as_ref() + .map(tokenstream::TokenTree::span) + .unwrap_or(span), }, - tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp), + tree => tree.as_ref() + .map(tokenstream::TokenTree::span) + .unwrap_or(start_sp), }; sess.missing_fragment_specifiers.borrow_mut().insert(span); - result.push(TokenTree::MetaVarDecl(span, ident, keywords::Invalid.ident())); + result.push(TokenTree::MetaVarDecl( + span, + ident, + keywords::Invalid.ident(), + )); } _ => result.push(tree), } @@ -171,12 +188,14 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars result } -fn parse_tree(tree: tokenstream::TokenTree, - trees: &mut I, - expect_matchers: bool, - sess: &ParseSess) - -> TokenTree - where I: Iterator, +fn parse_tree( + tree: tokenstream::TokenTree, + trees: &mut I, + expect_matchers: bool, + sess: &ParseSess, +) -> TokenTree +where + I: Iterator, { match tree { tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() { @@ 
-189,43 +208,69 @@ fn parse_tree(tree: tokenstream::TokenTree, let sequence = parse(delimited.tts.into(), expect_matchers, sess); let (separator, op) = parse_sep_and_kleene_op(trees, span, sess); let name_captures = macro_parser::count_names(&sequence); - TokenTree::Sequence(span, Rc::new(SequenceRepetition { - tts: sequence, - separator, - op, - num_captures: name_captures, - })) + TokenTree::Sequence( + span, + Rc::new(SequenceRepetition { + tts: sequence, + separator, + op, + num_captures: name_captures, + }), + ) } Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => { let ident = token.ident().unwrap(); let span = ident_span.with_lo(span.lo()); if ident.name == keywords::Crate.name() { - let ident = ast::Ident { name: keywords::DollarCrate.name(), ..ident }; + let ident = ast::Ident { + name: keywords::DollarCrate.name(), + ..ident + }; TokenTree::Token(span, token::Ident(ident)) } else { TokenTree::MetaVar(span, ident) } } Some(tokenstream::TokenTree::Token(span, tok)) => { - let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok)); + let msg = format!( + "expected identifier, found `{}`", + pprust::token_to_string(&tok) + ); sess.span_diagnostic.span_err(span, &msg); TokenTree::MetaVar(span, keywords::Invalid.ident()) } None => TokenTree::Token(span, token::Dollar), }, tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok), - tokenstream::TokenTree::Delimited(span, delimited) => { - TokenTree::Delimited(span, Rc::new(Delimited { + tokenstream::TokenTree::Delimited(span, delimited) => TokenTree::Delimited( + span, + Rc::new(Delimited { delim: delimited.delim, tts: parse(delimited.tts.into(), expect_matchers, sess), - })) - } + }), + ), } } -fn parse_sep_and_kleene_op(input: &mut I, span: Span, sess: &ParseSess) - -> (Option, KleeneOp) - where I: Iterator, +/// Attempt to parse a single Kleene star, possibly with a separator. +/// +/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the +/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing +/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator +/// itself. Note that here we are parsing the _pattern_ itself, rather than trying to match some +/// stream of tokens against the pattern. +/// +/// This function will take some input iterator `input` corresponding to `span` and a parsing +/// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene +/// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an +/// error with the appropriate span is emitted to `sess` and a dummy value is returned. +fn parse_sep_and_kleene_op( + input: &mut I, + span: Span, + sess: &ParseSess, +) -> (Option, KleeneOp) +where + I: Iterator, { fn kleene_op(token: &token::Token) -> Option { match *token { @@ -235,20 +280,40 @@ fn parse_sep_and_kleene_op(input: &mut I, span: Span, sess: &ParseSess) } } + // We attempt to look at the next two token trees in `input`. I will call the first #1 and the + // second #2. If #1 and #2 don't match a valid KleeneOp with/without separator, that is an + // error, and we should emit an error on the most specific span possible. 
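// As a purely illustrative sketch (hypothetical macros, not taken from this patch), the two
// lookahead positions map onto ordinary `macro_rules!` patterns like so: in `$($x:expr),*`,
// token #1 is `,` (a separator) and token #2 is `*`, so this function would return
// `(Some(`,`), KleeneOp::ZeroOrMore)`; in `$($t:tt)*`, token #1 is already `*`, so it would
// return `(None, KleeneOp::ZeroOrMore)`.
macro_rules! sum {
    // separator `,` followed by the Kleene `*`; `sum!(1, 2, 3)` expands to `0 + 1 + 2 + 3`
    ($($x:expr),*) => { 0 $(+ $x)* };
}
macro_rules! count_tts {
    () => { 0usize };
    // no separator, just the Kleene `*`
    ($head:tt $($tail:tt)*) => { 1usize + count_tts!($($tail)*) };
}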
let span = match input.next() { + // #1 is a token Some(tokenstream::TokenTree::Token(span, tok)) => match kleene_op(&tok) { + // #1 is a KleeneOp with no separator Some(op) => return (None, op), + + // #1 is not a KleeneOp, but may be a separator... need to look at #2 None => match input.next() { + // #2 is a token Some(tokenstream::TokenTree::Token(span, tok2)) => match kleene_op(&tok2) { + // #2 is a KleeneOp, so #1 must be a separator Some(op) => return (Some(tok), op), + + // #2 is not a KleeneOp... error None => span, }, - tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), - } + + // #2 is not a token at all... error + tree => tree.as_ref() + .map(tokenstream::TokenTree::span) + .unwrap_or(span), + }, }, - tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span), + + // #1 is not a token at all... error + tree => tree.as_ref() + .map(tokenstream::TokenTree::span) + .unwrap_or(span), }; + // Error... sess.span_diagnostic.span_err(span, "expected `*` or `+`"); (None, KleeneOp::ZeroOrMore) } From 49431d49661af7a3e55743a398346903ef58f20f Mon Sep 17 00:00:00 2001 From: Mark Mansi Date: Fri, 19 Jan 2018 16:39:54 -0600 Subject: [PATCH 2/4] Add a bunch of doc comments --- src/libsyntax/ext/tt/quoted.rs | 75 ++++++++++++++++++++++++++++++++-- 1 file changed, 72 insertions(+), 3 deletions(-) diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index ee87a612345ec..606dfcd58a26c 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -18,6 +18,8 @@ use tokenstream; use std::rc::Rc; +/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note +/// thatthat the delimiter itself might be `NoDelim`. #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Delimited { pub delim: token::DelimToken, @@ -25,14 +27,17 @@ pub struct Delimited { } impl Delimited { + /// Return the opening delimiter (possibly `NoDelim`). pub fn open_token(&self) -> token::Token { token::OpenDelim(self.delim) } + /// Return the closing delimiter (possibly `NoDelim`). pub fn close_token(&self) -> token::Token { token::CloseDelim(self.delim) } + /// Return a `self::TokenTree` witha a `Span` corresponding to the opening delimiter. pub fn open_tt(&self, span: Span) -> TokenTree { let open_span = if span == DUMMY_SP { DUMMY_SP @@ -42,6 +47,7 @@ impl Delimited { TokenTree::Token(open_span, self.open_token()) } + /// Return a `self::TokenTree` witha a `Span` corresponding to the closing delimiter. pub fn close_tt(&self, span: Span) -> TokenTree { let close_span = if span == DUMMY_SP { DUMMY_SP @@ -75,7 +81,7 @@ pub enum KleeneOp { } /// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)` -/// are "first-class" token trees. +/// are "first-class" token trees. Useful for parsing macros. #[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] pub enum TokenTree { Token(Span, token::Token), @@ -93,6 +99,7 @@ pub enum TokenTree { } impl TokenTree { + /// Return the number of tokens in the tree. pub fn len(&self) -> usize { match *self { TokenTree::Delimited(_, ref delimed) => match delimed.delim { @@ -104,6 +111,8 @@ impl TokenTree { } } + /// Returns true if the given token tree contains no other tokens. This is vacuously true for + /// single tokens or metavar/decls, but may be false for delimited trees or sequences. 
pub fn is_empty(&self) -> bool { match *self { TokenTree::Delimited(_, ref delimed) => match delimed.delim { @@ -115,6 +124,7 @@ impl TokenTree { } } + /// Get the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences. pub fn get_tt(&self, index: usize) -> TokenTree { match (self, index) { (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => { @@ -146,15 +156,39 @@ impl TokenTree { } } +/// Takes a `tokenstream::TokenStream` and returns a `Vec`. Specifically, this +/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a +/// collection of `TokenTree` for use in parsing a macro. +/// +/// # Parameters +/// +/// - `input`: a token stream to read from, the contents of which we are parsing. +/// - `expect_matchers`: `parse` can be used to parse either the "patterns" or the "body" of a +/// macro. Both take roughly the same form _except_ that in a pattern, metavars are declared with +/// their "matcher" type. For example `$var:expr` or `$id:ident`. In this example, `expr` and +/// `ident` are "matchers". They are not present in the body of a macro rule -- just in the +/// pattern, so we pass a parameter to indicate whether to expect them or not. +/// - `sess`: the parsing session. Any errors will be emitted to this session. +/// +/// # Returns +/// +/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`. pub fn parse( input: tokenstream::TokenStream, expect_matchers: bool, sess: &ParseSess, ) -> Vec { + // Will contain the final collection of `self::TokenTree` let mut result = Vec::new(); + + // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming + // additional trees if need be. let mut trees = input.trees(); while let Some(tree) = trees.next() { let tree = parse_tree(tree, &mut trees, expect_matchers, sess); + + // Given the parsed tree, if there is a metavar and we are expecting matchers, actually + // parse out the matcher (i.e. in `$id:ident` this would parse the `:` and `ident`). match tree { TokenTree::MetaVar(start_sp, ident) if expect_matchers => { let span = match trees.next() { @@ -182,12 +216,27 @@ pub fn parse( keywords::Invalid.ident(), )); } + + // Not a metavar or no matchers allowed, so just return the tree _ => result.push(tree), } } result } +/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a +/// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree` +/// for use in parsing a macro. +/// +/// Converting the given tree may involve reading more tokens. +/// +/// # Parameters +/// +/// - `tree`: the tree wish to convert. +/// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish +/// converting `tree` +/// - `expect_matchers`: same as for `parse` (see above). +/// - `sess`: the parsing session. Any errors will be emitted to this session. fn parse_tree( tree: tokenstream::TokenTree, trees: &mut I, @@ -197,16 +246,24 @@ fn parse_tree( where I: Iterator, { + // Depending on what `tree` is, we could be parsing different parts of a macro match tree { + // `tree` is a `$` token. Look at the next token in `trees` tokenstream::TokenTree::Token(span, token::Dollar) => match trees.next() { + // `tree` is followed by a delimited set of token trees. This indicates the beginning + // of a repetition sequence in the macro (e.g. `$(pat)*`). 
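// Illustrative sketch (hypothetical macro, not taken from this patch) of how the two `parse`
// modes described above relate to an ordinary rule:
macro_rules! double {
    ($x:expr) => { $x * 2 };
}
// The matcher `($x:expr)` is parsed with `expect_matchers == true`, so `$x:expr` becomes a
// `MetaVarDecl`; the body `{ $x * 2 }` is parsed with `expect_matchers == false`, so the bare
// `$x` becomes a `MetaVar`, and a `$(...)` group on either side would become a `Sequence`.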
Some(tokenstream::TokenTree::Delimited(span, delimited)) => { + // Must have `(` not `{` or `[` if delimited.delim != token::Paren { let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim)); let msg = format!("expected `(`, found `{}`", tok); sess.span_diagnostic.span_err(span, &msg); } + // Parse the contents of the sequence itself let sequence = parse(delimited.tts.into(), expect_matchers, sess); + // Get the Kleen operator and optional separator let (separator, op) = parse_sep_and_kleene_op(trees, span, sess); + // Count the number of captured "names" (i.e. named metavars) let name_captures = macro_parser::count_names(&sequence); TokenTree::Sequence( span, @@ -218,6 +275,9 @@ where }), ) } + + // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate` special + // metavariable that names the crate of the invokation. Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => { let ident = token.ident().unwrap(); let span = ident_span.with_lo(span.lo()); @@ -231,6 +291,8 @@ where TokenTree::MetaVar(span, ident) } } + + // `tree` is followed by a random token. This is an error. Some(tokenstream::TokenTree::Token(span, tok)) => { let msg = format!( "expected identifier, found `{}`", @@ -239,9 +301,16 @@ where sess.span_diagnostic.span_err(span, &msg); TokenTree::MetaVar(span, keywords::Invalid.ident()) } + + // There are no more tokens. Just return the `$` we already have. None => TokenTree::Token(span, token::Dollar), }, + + // `tree` is an arbitrary token. Keep it. tokenstream::TokenTree::Token(span, tok) => TokenTree::Token(span, tok), + + // `tree` is the beginning of a delimited set of tokens (e.g. `(` or `{`). We need to + // descend into the delimited set and further parse it. tokenstream::TokenTree::Delimited(span, delimited) => TokenTree::Delimited( span, Rc::new(Delimited { @@ -257,8 +326,8 @@ where /// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the /// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing /// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator -/// itself. Note that here we are parsing the _pattern_ itself, rather than trying to match some -/// stream of tokens against the pattern. +/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some +/// stream of tokens in an invokation of a macro. /// /// This function will take some input iterator `input` corresponding to `span` and a parsing /// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene From ca0c0805693b08566cf118b676533be776005494 Mon Sep 17 00:00:00 2001 From: Mark Mansi Date: Sun, 21 Jan 2018 16:03:47 -0600 Subject: [PATCH 3/4] Fix typos --- src/libsyntax/ext/tt/quoted.rs | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 606dfcd58a26c..61dc3d32f207b 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -19,7 +19,7 @@ use tokenstream; use std::rc::Rc; /// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note -/// thatthat the delimiter itself might be `NoDelim`. +/// that the delimiter itself might be `NoDelim`. 
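// Illustrative sketch of the `$crate` case handled above (hypothetical items, not taken from
// this patch): in an exported macro, `$crate` names the defining crate, and the parser keeps
// it as a plain token carrying the `DollarCrate` ident rather than treating it as a
// metavariable. This assumes the defining crate exposes a `Thing::new` constructor.
#[macro_export]
macro_rules! new_thing {
    () => { $crate::Thing::new() };
}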
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Delimited { pub delim: token::DelimToken, @@ -76,7 +76,7 @@ pub struct SequenceRepetition { pub enum KleeneOp { /// Kleene star (`*`) for zero or more repetitions ZeroOrMore, - /// Kleene star (`+`) for one or more repetitions + /// Kleene plus (`+`) for one or more repetitions OneOrMore, } @@ -261,7 +261,7 @@ where } // Parse the contents of the sequence itself let sequence = parse(delimited.tts.into(), expect_matchers, sess); - // Get the Kleen operator and optional separator + // Get the Kleene operator and optional separator let (separator, op) = parse_sep_and_kleene_op(trees, span, sess); // Count the number of captured "names" (i.e. named metavars) let name_captures = macro_parser::count_names(&sequence); From 576294237b10fff22bc462398ff7d06fffa05bd0 Mon Sep 17 00:00:00 2001 From: Mark Mansi Date: Mon, 29 Jan 2018 17:08:04 -0600 Subject: [PATCH 4/4] fix typos --- src/libsyntax/ext/tt/quoted.rs | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/libsyntax/ext/tt/quoted.rs b/src/libsyntax/ext/tt/quoted.rs index 61dc3d32f207b..c55dfaba8f6b2 100644 --- a/src/libsyntax/ext/tt/quoted.rs +++ b/src/libsyntax/ext/tt/quoted.rs @@ -37,7 +37,7 @@ impl Delimited { token::CloseDelim(self.delim) } - /// Return a `self::TokenTree` witha a `Span` corresponding to the opening delimiter. + /// Return a `self::TokenTree` with a `Span` corresponding to the opening delimiter. pub fn open_tt(&self, span: Span) -> TokenTree { let open_span = if span == DUMMY_SP { DUMMY_SP @@ -47,7 +47,7 @@ impl Delimited { TokenTree::Token(open_span, self.open_token()) } - /// Return a `self::TokenTree` witha a `Span` corresponding to the closing delimiter. + /// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter. pub fn close_tt(&self, span: Span) -> TokenTree { let close_span = if span == DUMMY_SP { DUMMY_SP @@ -232,7 +232,7 @@ pub fn parse( /// /// # Parameters /// -/// - `tree`: the tree wish to convert. +/// - `tree`: the tree we wish to convert. /// - `trees`: an iterator over trees. We may need to read more tokens from it in order to finish /// converting `tree` /// - `expect_matchers`: same as for `parse` (see above). @@ -327,7 +327,7 @@ where /// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing /// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator /// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some -/// stream of tokens in an invokation of a macro. +/// stream of tokens in an invocation of a macro. /// /// This function will take some input iterator `input` corresponding to `span` and a parsing /// session `sess`. If the next one (or possibly two) tokens in `input` correspond to a Kleene