reduce visibility of a bunch of stuff in ext::tt
matklad committed Sep 22, 2019
1 parent 4ff32c0 commit 645cdca
Showing 7 changed files with 50 additions and 58 deletions.
4 changes: 2 additions & 2 deletions src/librustc_resolve/macros.rs
@@ -18,7 +18,7 @@ use syntax::ext::base::{self, InvocationRes, Indeterminate, SpecialDerives};
use syntax::ext::base::{MacroKind, SyntaxExtension};
use syntax::ext::expand::{AstFragment, AstFragmentKind, Invocation, InvocationKind};
use syntax::ext::hygiene::{self, ExpnId, ExpnData, ExpnKind};
-use syntax::ext::tt::macro_rules;
+use syntax::ext::compile_declarative_macro;
use syntax::feature_gate::{emit_feature_err, is_builtin_attr_name};
use syntax::feature_gate::GateIssue;
use syntax::symbol::{Symbol, kw, sym};
@@ -843,7 +843,7 @@ impl<'a> Resolver<'a> {
/// Compile the macro into a `SyntaxExtension` and possibly replace it with a pre-defined
/// extension partially or entirely for built-in macros and legacy plugin macros.
crate fn compile_macro(&mut self, item: &ast::Item, edition: Edition) -> SyntaxExtension {
-let mut result = macro_rules::compile(
+let mut result = compile_declarative_macro(
&self.session.parse_sess, self.session.features_untracked(), item, edition
);

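
The call-site change above works because `compile_declarative_macro` is re-exported at `syntax::ext` (see the `src/libsyntax/lib.rs` hunk further down), so `librustc_resolve` no longer reaches into `ext::tt::macro_rules` directly. A minimal sketch of that re-export ("facade") pattern, using made-up names rather than rustc's:

```rust
// Minimal sketch of the re-export pattern; `facade`, `inner`, and
// `compile_entry_point` are illustrative names, not rustc's.
mod facade {
    // The only public surface: a single re-export from a private submodule.
    pub use self::inner::do_compile as compile_entry_point;

    mod inner {
        // `pub` here, but reachable from outside only through the re-export
        // above, because the `inner` module itself is private.
        pub fn do_compile(input: &str) -> usize {
            input.len() // stand-in for real compilation work
        }
    }
}

fn main() {
    // Callers name the facade path, not the internal module layout.
    println!("{}", facade::compile_entry_point("macro_rules! m { () => {} }"));
}
```

Because downstream code depends only on the re-exported path, the internal module layout under the facade can change freely.
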
2 changes: 1 addition & 1 deletion src/libsyntax/ext/tt/macro_check.rs
@@ -196,7 +196,7 @@ struct MacroState<'a> {
/// - `node_id` is used to emit lints
/// - `span` is used when no spans are available
/// - `lhses` and `rhses` should have the same length and represent the macro definition
-pub fn check_meta_variables(
+crate fn check_meta_variables(
sess: &ParseSess,
node_id: NodeId,
span: Span,
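
The `pub` → `crate` change here (and throughout this commit) uses the `crate` visibility modifier, the then-unstable `crate_visibility_modifier` spelling used inside rustc; it is equivalent to `pub(crate)`. A small stable-Rust illustration with made-up names:

```rust
// Illustration only (made-up module and function). `crate fn` in this diff
// means the same as `pub(crate) fn`: visible anywhere in this crate, but not
// part of the crate's public API.
mod checker {
    pub(crate) fn check_arm_count(lhses: usize, rhses: usize) -> bool {
        // Mirrors the documented invariant above: `lhses` and `rhses`
        // should have the same length.
        lhses == rhses
    }
}

fn main() {
    assert!(checker::check_arm_count(2, 2));
}
```
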
16 changes: 8 additions & 8 deletions src/libsyntax/ext/tt/macro_parser.rs
@@ -70,8 +70,8 @@
//! eof: [a $( a )* a b ·]
//! ```

-pub use NamedMatch::*;
-pub use ParseResult::*;
+crate use NamedMatch::*;
+crate use ParseResult::*;
use TokenTreeOrTokenTreeSlice::*;

use crate::ast::{Ident, Name};
@@ -267,7 +267,7 @@ impl<'root, 'tt> DerefMut for MatcherPosHandle<'root, 'tt> {
}

/// Represents the possible results of an attempted parse.
-pub enum ParseResult<T> {
+crate enum ParseResult<T> {
/// Parsed successfully.
Success(T),
/// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
@@ -279,10 +279,10 @@ pub enum ParseResult<T> {

/// A `ParseResult` where the `Success` variant contains a mapping of `Ident`s to `NamedMatch`es.
/// This represents the mapping of metavars to the token trees they bind to.
-pub type NamedParseResult = ParseResult<FxHashMap<Ident, NamedMatch>>;
+crate type NamedParseResult = ParseResult<FxHashMap<Ident, NamedMatch>>;

/// Count how many metavars are named in the given matcher `ms`.
-pub fn count_names(ms: &[TokenTree]) -> usize {
+crate fn count_names(ms: &[TokenTree]) -> usize {
ms.iter().fold(0, |count, elt| {
count + match *elt {
TokenTree::Sequence(_, ref seq) => seq.num_captures,
@@ -352,7 +352,7 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP
/// only on the nesting depth of `ast::TTSeq`s in the originating
/// token tree it was derived from.
#[derive(Debug, Clone)]
-pub enum NamedMatch {
+crate enum NamedMatch {
MatchedSeq(Lrc<NamedMatchVec>, DelimSpan),
MatchedNonterminal(Lrc<Nonterminal>),
}
@@ -415,7 +415,7 @@ fn nameize<I: Iterator<Item = NamedMatch>>(

/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
/// other tokens, this is "unexpected token...".
-pub fn parse_failure_msg(tok: &Token) -> String {
+crate fn parse_failure_msg(tok: &Token) -> String {
match tok.kind {
token::Eof => "unexpected end of macro invocation".to_string(),
_ => format!(
@@ -648,7 +648,7 @@ fn inner_parse_loop<'root, 'tt>(
/// - `directory`: Information about the file locations (needed for the black-box parser)
/// - `recurse_into_modules`: Whether or not to recurse into modules (needed for the black-box
/// parser)
-pub fn parse(
+crate fn parse(
sess: &ParseSess,
tts: TokenStream,
ms: &[TokenTree],
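
The items narrowed to `crate` visibility in this file — `ParseResult`, `NamedParseResult`, `NamedMatch`, `count_names`, `parse`, `parse_failure_msg` — form the matcher's interface to the rest of libsyntax. A rough sketch of how a `ParseResult`-style value is consumed; the payload types are simplified placeholders, not the real `Token`/`Span`/`NamedMatch` types:

```rust
use std::collections::HashMap;

// Rough sketch with simplified payload types; the real variants carry
// `Token`, `Span`, and `NamedMatch` values rather than strings.
enum ParseResult<T> {
    /// Parsed successfully.
    Success(T),
    /// The arm failed to match.
    Failure(String),
    /// Fatal error; abort macro expansion.
    Error(String),
}

// Mirrors `NamedParseResult`: metavariable name -> what it captured.
type NamedParseResult = ParseResult<HashMap<String, String>>;

fn expand_or_report(res: NamedParseResult) {
    match res {
        ParseResult::Success(bindings) => {
            // Hand the metavariable bindings off to transcription.
            for (name, tokens) in &bindings {
                println!("${} := {}", name, tokens);
            }
        }
        ParseResult::Failure(msg) => println!("arm did not match: {}", msg),
        ParseResult::Error(msg) => println!("error: {}", msg),
    }
}

fn main() {
    let mut bindings = HashMap::new();
    bindings.insert("e".to_string(), "1 + 1".to_string());
    expand_or_report(ParseResult::Success(bindings));
    expand_or_report(ParseResult::Failure("no arm matched".to_string()));
    expand_or_report(ParseResult::Error("ambiguity error".to_string()));
}
```
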
14 changes: 9 additions & 5 deletions src/libsyntax/ext/tt/macro_rules.rs
@@ -35,7 +35,7 @@ const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
`ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
`literal`, `path`, `meta`, `tt`, `item` and `vis`";

-pub struct ParserAnyMacro<'a> {
+crate struct ParserAnyMacro<'a> {
parser: Parser<'a>,

/// Span of the expansion site of the macro this parser is for
@@ -45,7 +45,11 @@ pub struct ParserAnyMacro<'a> {
arm_span: Span,
}

-pub fn annotate_err_with_kind(err: &mut DiagnosticBuilder<'_>, kind: AstFragmentKind, span: Span) {
+crate fn annotate_err_with_kind(
+err: &mut DiagnosticBuilder<'_>,
+kind: AstFragmentKind,
+span: Span,
+) {
match kind {
AstFragmentKind::Ty => {
err.span_label(span, "this macro call doesn't expand to a type");
@@ -58,7 +62,7 @@ pub fn annotate_err_with_kind(err: &mut DiagnosticBuilder<'_>, kind: AstFragment
}

impl<'a> ParserAnyMacro<'a> {
-pub fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
+crate fn make(mut self: Box<ParserAnyMacro<'a>>, kind: AstFragmentKind) -> AstFragment {
let ParserAnyMacro { site_span, macro_ident, ref mut parser, arm_span } = *self;
let fragment = panictry!(parser.parse_ast_fragment(kind, true).map_err(|mut e| {
if parser.token == token::Eof && e.message().ends_with(", found `<eof>`") {
@@ -284,8 +288,8 @@ fn generic_extension<'cx>(
//
// Holy self-referential!

-/// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile(
+/// Converts a macro item into a syntax extension.
+pub fn compile_declarative_macro(
sess: &ParseSess,
features: &Features,
def: &ast::Item,
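
`compile_declarative_macro` (formerly `macro_rules::compile`) lowers a declarative macro item into a `SyntaxExtension`. For orientation, the kind of item it consumes is an ordinary `macro_rules!` definition, for example:

```rust
// The input shape handled by `compile_declarative_macro`: an ordinary
// `macro_rules!` item with LHS matchers and RHS templates.
macro_rules! double {
    ($e:expr) => {
        $e * 2
    };
}

fn main() {
    assert_eq!(double!(21), 42);
}
```
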
57 changes: 22 additions & 35 deletions src/libsyntax/ext/tt/quoted.rs
@@ -16,14 +16,14 @@ use std::iter::Peekable;
/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
/// that the delimiter itself might be `NoDelim`.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-pub struct Delimited {
-pub delim: token::DelimToken,
-pub tts: Vec<TokenTree>,
+crate struct Delimited {
+crate delim: token::DelimToken,
+crate tts: Vec<TokenTree>,
}

impl Delimited {
/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
-pub fn open_tt(&self, span: Span) -> TokenTree {
+crate fn open_tt(&self, span: Span) -> TokenTree {
let open_span = if span.is_dummy() {
span
} else {
@@ -33,7 +33,7 @@ impl Delimited {
}

/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
-pub fn close_tt(&self, span: Span) -> TokenTree {
+crate fn close_tt(&self, span: Span) -> TokenTree {
let close_span = if span.is_dummy() {
span
} else {
@@ -44,33 +44,33 @@
}

#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-pub struct SequenceRepetition {
+crate struct SequenceRepetition {
/// The sequence of token trees
-pub tts: Vec<TokenTree>,
+crate tts: Vec<TokenTree>,
/// The optional separator
-pub separator: Option<Token>,
+crate separator: Option<Token>,
/// Whether the sequence can be repeated zero (*), or one or more times (+)
-pub kleene: KleeneToken,
+crate kleene: KleeneToken,
/// The number of `Match`s that appear in the sequence (and subsequences)
-pub num_captures: usize,
+crate num_captures: usize,
}

#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
-pub struct KleeneToken {
-pub span: Span,
-pub op: KleeneOp,
+crate struct KleeneToken {
+crate span: Span,
+crate op: KleeneOp,
}

impl KleeneToken {
-pub fn new(op: KleeneOp, span: Span) -> KleeneToken {
+crate fn new(op: KleeneOp, span: Span) -> KleeneToken {
KleeneToken { span, op }
}
}

/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum KleeneOp {
+crate enum KleeneOp {
/// Kleene star (`*`) for zero or more repetitions
ZeroOrMore,
/// Kleene plus (`+`) for one or more repetitions
@@ -82,7 +82,7 @@ pub enum KleeneOp {
/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
/// are "first-class" token trees. Useful for parsing macros.
#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
-pub enum TokenTree {
+crate enum TokenTree {
Token(Token),
Delimited(DelimSpan, Lrc<Delimited>),
/// A kleene-style repetition sequence
@@ -99,7 +99,7 @@

impl TokenTree {
/// Return the number of tokens in the tree.
-pub fn len(&self) -> usize {
+crate fn len(&self) -> usize {
match *self {
TokenTree::Delimited(_, ref delimed) => match delimed.delim {
token::NoDelim => delimed.tts.len(),
@@ -110,37 +110,24 @@ impl TokenTree {
}
}

-/// Returns `true` if the given token tree contains no other tokens. This is vacuously true for
-/// single tokens or metavar/decls, but may be false for delimited trees or sequences.
-pub fn is_empty(&self) -> bool {
-match *self {
-TokenTree::Delimited(_, ref delimed) => match delimed.delim {
-token::NoDelim => delimed.tts.is_empty(),
-_ => false,
-},
-TokenTree::Sequence(_, ref seq) => seq.tts.is_empty(),
-_ => true,
-}
-}

/// Returns `true` if the given token tree is delimited.
-pub fn is_delimited(&self) -> bool {
+crate fn is_delimited(&self) -> bool {
match *self {
TokenTree::Delimited(..) => true,
_ => false,
}
}

/// Returns `true` if the given token tree is a token of the given kind.
-pub fn is_token(&self, expected_kind: &TokenKind) -> bool {
+crate fn is_token(&self, expected_kind: &TokenKind) -> bool {
match self {
TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
_ => false,
}
}

/// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
-pub fn get_tt(&self, index: usize) -> TokenTree {
+crate fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
delimed.tts[index].clone()
@@ -160,7 +147,7 @@ impl TokenTree {
}

/// Retrieves the `TokenTree`'s span.
-pub fn span(&self) -> Span {
+crate fn span(&self) -> Span {
match *self {
TokenTree::Token(Token { span, .. })
| TokenTree::MetaVar(span, _)
@@ -195,7 +182,7 @@ impl TokenTree {
/// # Returns
///
/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
-pub fn parse(
+crate fn parse(
input: tokenstream::TokenStream,
expect_matchers: bool,
sess: &ParseSess,
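
The `quoted` types made `crate`-visible above are the parsed form of a macro matcher: `Delimited` for bracketed groups, `SequenceRepetition` with `KleeneToken`/`KleeneOp` for `$(...)` repetitions, and metavariable declarations such as `$e:expr`. A simplified, self-contained model — illustrative types only, not the rustc definitions — of how a matcher like `$( $e:expr ),* ;` would be represented:

```rust
// Simplified stand-in for the `quoted::TokenTree` family above; the types and
// fields here are illustrative, not rustc's.
#[allow(dead_code)]
#[derive(Debug, Clone, Copy)]
enum KleeneOp {
    ZeroOrMore, // `*`
    OneOrMore,  // `+`
    ZeroOrOne,  // `?`
}

#[derive(Debug)]
enum Matcher {
    // A plain token, e.g. a literal `;` in the matcher.
    Token(char),
    // A metavariable declaration such as `$e:expr`.
    MetaVarDecl { name: &'static str, fragment: &'static str },
    // A `$(...)` repetition with an optional separator and a Kleene operator.
    Sequence { tts: Vec<Matcher>, separator: Option<char>, op: KleeneOp },
}

fn main() {
    // `$( $e:expr ),* ;` -- comma-separated `expr` fragments, then a semicolon.
    let matcher = vec![
        Matcher::Sequence {
            tts: vec![Matcher::MetaVarDecl { name: "e", fragment: "expr" }],
            separator: Some(','),
            op: KleeneOp::ZeroOrMore,
        },
        Matcher::Token(';'),
    ];
    println!("{:#?}", matcher);
}
```
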
13 changes: 7 additions & 6 deletions src/libsyntax/lib.rs
@@ -162,18 +162,19 @@ pub mod ext {
mod proc_macro_server;

pub use syntax_pos::hygiene;
+pub use tt::macro_rules::compile_declarative_macro;
pub mod allocator;
pub mod base;
pub mod build;
pub mod expand;
pub mod proc_macro;

-pub mod tt {
-pub mod transcribe;
-pub mod macro_check;
-pub mod macro_parser;
-pub mod macro_rules;
-pub mod quoted;
+crate mod tt {
+crate mod transcribe;
+crate mod macro_check;
+crate mod macro_parser;
+crate mod macro_rules;
+crate mod quoted;
}
}

2 changes: 1 addition & 1 deletion src/libsyntax/tokenstream.rs
@@ -64,7 +64,7 @@ where

impl TokenTree {
/// Use this token tree as a matcher to parse given tts.
-pub fn parse(cx: &base::ExtCtxt<'_>, mtch: &[quoted::TokenTree], tts: TokenStream)
+crate fn parse(cx: &base::ExtCtxt<'_>, mtch: &[quoted::TokenTree], tts: TokenStream)
-> macro_parser::NamedParseResult {
// `None` is because we're not interpolating
let directory = Directory {
