Skip to content

Commit

Permalink
Merge #18
Browse files Browse the repository at this point in the history
18: Remove dependency on syn r=taiki-e a=taiki-e

a part of #16

Co-authored-by: Taiki Endo <te316e89@gmail.com>
  • Loading branch information
bors[bot] and taiki-e committed Aug 25, 2020
2 parents 7d29667 + 02a2950 commit aa24a39
Show file tree
Hide file tree
Showing 4 changed files with 237 additions and 68 deletions.
2 changes: 0 additions & 2 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,5 +29,3 @@ version_check = "0.9.2"
[dependencies]
proc-macro2 = "1.0"
quote = "1.0"
syn = "1.0"
syn-mid = "0.5"
132 changes: 132 additions & 0 deletions src/ast.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,132 @@
use proc_macro2::{Delimiter, Literal, Span, TokenStream, TokenTree};
use quote::ToTokens;
use std::iter::Peekable;

use crate::utils::{parse_as_empty, tt_span};

// A parsed function item, kept as raw tokens (this crate deliberately avoids
// depending on `syn`, so no full AST is built).
pub(crate) struct Func {
// Outer attributes (`#[...]`) preceding the function.
pub(crate) attrs: Vec<Attribute>,
// Signature tokens: everything from after the attributes up to (but not
// including) the brace-delimited body — visibility, qualifiers, name,
// generics, parameters, return type.
pub(crate) sig: Vec<TokenTree>,
// The brace-delimited function body as a single `Group` token.
pub(crate) body: TokenTree,
// When `false`, `ToTokens` emits the signature without its `const`
// qualifier (used for the `#[cfg(not(...))]` fallback copy).
pub(crate) print_const: bool,
}

pub(crate) fn parse_input(input: TokenStream) -> Result<Func, TokenStream> {
let mut input = input.into_iter().peekable();

let attrs = parse_attrs(&mut input)?;
let sig = parse_signature(&mut input);
let body = input.next();
parse_as_empty(input)?;

if body.is_none()
|| !sig.iter().any(|tt| if let TokenTree::Ident(i) = tt { i == "fn" } else { false })
{
return Err(error!(
Span::call_site(),
"#[const_fn] attribute may only be used on functions"
));
}
if !sig.iter().any(|tt| if let TokenTree::Ident(i) = tt { i == "const" } else { false }) {
let span = sig
.iter()
.position(|tt| if let TokenTree::Ident(i) = tt { i == "fn" } else { false })
.map(|i| sig[i].span())
.unwrap();
return Err(error!(span, "#[const_fn] attribute may only be used on const functions"));
}

Ok(Func { attrs, sig, body: body.unwrap(), print_const: true })
}

impl ToTokens for Func {
    /// Emits attributes, signature, then body. When `print_const` is `false`,
    /// the `const` qualifier is omitted from the signature.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        for attr in &self.attrs {
            attr.to_tokens(tokens);
        }
        if self.print_const {
            for tt in &self.sig {
                tt.to_tokens(tokens);
            }
        } else {
            // Strip only the leading `const` qualifier. Filtering every
            // `const` ident would also delete the `const` of a raw-pointer
            // return type such as `-> *const T`, producing invalid code.
            let mut stripped = false;
            for tt in &self.sig {
                if !stripped {
                    if let TokenTree::Ident(i) = tt {
                        if i == "const" {
                            stripped = true;
                            continue;
                        }
                    }
                }
                tt.to_tokens(tokens);
            }
        }
        self.body.to_tokens(tokens);
    }
}

/// Collects signature tokens from `input` until the brace-delimited function
/// body (or the end of input) is reached. The body group itself is left in
/// the iterator.
fn parse_signature(input: &mut Peekable<impl Iterator<Item = TokenTree>>) -> Vec<TokenTree> {
    let mut sig = Vec::new();
    while let Some(tt) = input.peek() {
        // A `{ ... }` group marks the start of the function body: stop here.
        if let TokenTree::Group(group) = tt {
            if group.delimiter() == Delimiter::Brace {
                break;
            }
        }
        sig.push(input.next().unwrap());
    }
    sig
}

/// Parses leading outer attributes (`#[...]`) from `input`.
///
/// Stops at the first token that is not a `#` punct; errors if a `#` is not
/// followed by a bracketed group.
fn parse_attrs(
    input: &mut Peekable<impl Iterator<Item = TokenTree>>,
) -> Result<Vec<Attribute>, TokenStream> {
    let mut attrs = Vec::new();
    loop {
        // An attribute must start with `#`; anything else ends the list.
        let is_pound = match input.peek() {
            Some(TokenTree::Punct(p)) => p.as_char() == '#',
            _ => false,
        };
        if !is_pound {
            return Ok(attrs);
        }
        let pound_token = input.next().unwrap();
        // The `#` must be followed by `[...]`.
        let group = match input.peek() {
            Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Bracket => {
                input.next().unwrap()
            }
            tt => return Err(error!(tt_span(tt), "expected `[`")),
        };
        attrs.push(Attribute { pound_token, group });
    }
}

// A raw outer attribute, stored as its two constituent tokens rather than a
// parsed AST node.
pub(crate) struct Attribute {
// `#`
pub(crate) pound_token: TokenTree,
// `[...]` — the bracketed group holding the attribute's path and input.
pub(crate) group: TokenTree,
}

impl ToTokens for Attribute {
    /// Re-emits the attribute exactly as parsed: the `#` punct followed by
    /// the bracketed group.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        for tt in [&self.pound_token, &self.group].iter() {
            tt.to_tokens(tokens);
        }
    }
}

// A plain `"..."` string literal, minimal replacement for `syn::LitStr`.
pub(crate) struct LitStr {
// The underlying literal token (kept for its span and re-emission).
token: Literal,
// The literal's source text including the surrounding quotes.
value: String,
}

impl LitStr {
pub(crate) fn new(token: &Literal) -> Result<Self, TokenStream> {
let value = token.to_string();
// unlike `syn::LitStr`, only accepts `"..."`
if value.starts_with('"') && value.ends_with('"') {
Ok(Self { token: token.clone(), value })
} else {
Err(error!(token.span(), "expected string literal"))
}
}

pub(crate) fn value(&self) -> &str {
&self.value[1..self.value.len() - 1]
}

pub(crate) fn span(&self) -> Span {
self.token.span()
}
}

impl ToTokens for LitStr {
    /// Re-emits the original literal token unchanged.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let token = &self.token;
        token.to_tokens(tokens);
    }
}
147 changes: 81 additions & 66 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -47,109 +47,124 @@
#[allow(unused_extern_crates)]
extern crate proc_macro;

#[macro_use]
mod utils;

mod ast;

use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use proc_macro2::{Delimiter, TokenStream as TokenStream2, TokenTree};
use quote::{quote, ToTokens};
use std::str::FromStr;
use syn::{
parse::{Parse, ParseStream},
parse_quote, Error, *,
};
use syn_mid::ItemFn;

use crate::utils::{parse_as_empty, tt_span};

/// An attribute for easy generation of a const function with conditional compilations.
/// See crate level documentation for details.
#[proc_macro_attribute]
pub fn const_fn(args: TokenStream, function: TokenStream) -> TokenStream {
let arg: Arg = syn::parse_macro_input!(args);

let mut item: ItemFn = syn::parse_macro_input!(function);

if item.sig.constness.is_none() {
return Error::new_spanned(
item.sig.fn_token,
"#[const_fn] attribute may only be used on const functions",
)
.to_compile_error()
.into();
}
pub fn const_fn(args: TokenStream, input: TokenStream) -> TokenStream {
let arg = match parse_arg(args.into()) {
Ok(a) => a,
Err(e) => return e.into(),
};
let mut func = match ast::parse_input(input.into()) {
Ok(i) => i,
Err(e) => return e.into(),
};

match arg {
Arg::Cfg(c) => {
let mut tokens = quote!(#[cfg(#c)]);
tokens.extend(item.to_token_stream());
item.attrs.push(parse_quote!(#[cfg(not(#c))]));
item.sig.constness = None;
tokens.extend(item.into_token_stream());
tokens.extend(func.to_token_stream());
tokens.extend(quote!(#[cfg(not(#c))]));
func.print_const = false;
tokens.extend(func.into_token_stream());
tokens.into()
}
Arg::Feature(f, e, s) => {
let mut tokens = quote!(#[cfg(#f #e #s)]);
tokens.extend(item.to_token_stream());
item.attrs.push(parse_quote!(#[cfg(not(#f #e #s))]));
item.sig.constness = None;
tokens.extend(item.into_token_stream());
Arg::Feature(f) => {
let mut tokens = quote!(#[cfg(#f)]);
tokens.extend(func.to_token_stream());
tokens.extend(quote!(#[cfg(not(#f))]));
func.print_const = false;
tokens.extend(func.into_token_stream());
tokens.into()
}
Arg::Version(req) => {
if req.major > 1 || req.minor > VERSION.minor {
item.sig.constness = None;
func.print_const = false;
}
item.into_token_stream().into()
func.into_token_stream().into()
}
Arg::Nightly => {
if !VERSION.nightly {
item.sig.constness = None;
func.print_const = false;
}
item.into_token_stream().into()
func.into_token_stream().into()
}
}
}

mod kw {
syn::custom_keyword!(nightly);
syn::custom_keyword!(feature);
syn::custom_keyword!(cfg);
}

enum Arg {
// `const_fn("1.36")`
// `const_fn("...")`
Version(VersionReq),
// `const_fn(nightly)`
Nightly,
// `const_fn(cfg(...))`
Cfg(TokenStream2),
// `const_fn(feature = "...")`
Feature(kw::feature, Token![=], LitStr),
Feature(TokenStream2),
}

impl Parse for Arg {
fn parse(input: ParseStream<'_>) -> Result<Self> {
let lookahead = input.lookahead1();
if lookahead.peek(kw::nightly) {
let _: kw::nightly = input.parse()?;
Ok(Arg::Nightly)
} else if lookahead.peek(kw::cfg) {
let _: kw::cfg = input.parse()?;
let content;
let _: token::Paren = syn::parenthesized!(content in input);
let t: TokenStream2 = content.parse()?;
Ok(Arg::Cfg(t))
} else if lookahead.peek(kw::feature) {
let f: kw::feature = input.parse()?;
let e: Token![=] = input.parse()?;
let s: LitStr = input.parse()?;
Ok(Arg::Feature(f, e, s))
} else if lookahead.peek(LitStr) {
let s: LitStr = input.parse()?;
match s.value().parse::<VersionReq>() {
Ok(req) => Ok(Arg::Version(req)),
Err(e) => Err(Error::new(s.span(), e)),
fn parse_arg(tokens: TokenStream2) -> Result<Arg, TokenStream2> {
let tokens2 = tokens.clone();
let mut iter = tokens.into_iter();

let next = iter.next();
match &next {
Some(TokenTree::Ident(i)) => match i.to_string().as_str() {
"nightly" => {
parse_as_empty(iter)?;
return Ok(Arg::Nightly);
}
"cfg" => {
return match iter.next().as_ref() {
Some(TokenTree::Group(g)) if g.delimiter() == Delimiter::Parenthesis => {
parse_as_empty(iter)?;
Ok(Arg::Cfg(g.stream()))
}
tt => Err(error!(tt_span(tt), "expected `(`")),
};
}
"feature" => {
return match iter.next().as_ref() {
Some(TokenTree::Punct(p)) if p.as_char() == '=' => match iter.next().as_ref() {
Some(TokenTree::Literal(l)) if l.to_string().starts_with('"') => {
parse_as_empty(iter)?;
Ok(Arg::Feature(tokens2))
}
tt => Err(error!(tt_span(tt), "expected `=`")),
},
tt => Err(error!(tt_span(tt), "expected `=`")),
};
}
_ => {}
},
Some(TokenTree::Literal(l)) => {
if let Ok(l) = ast::LitStr::new(l) {
parse_as_empty(iter)?;
return match l.value().parse::<VersionReq>() {
Ok(req) => Ok(Arg::Version(req)),
Err(e) => Err(error!(l.span(), "{}", e)),
};
}
} else {
Err(lookahead.error())
}
_ => {}
}

Err(error!(
tt_span(next.as_ref()),
"expected one of: `nightly`, `cfg`, `feature`, string literal"
))
}

struct VersionReq {
Expand All @@ -160,7 +175,7 @@ struct VersionReq {
impl FromStr for VersionReq {
type Err = String;

fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
fn from_str(s: &str) -> Result<Self, Self::Err> {
let mut pieces = s.split('.');
let major = pieces
.next()
Expand Down
24 changes: 24 additions & 0 deletions src/utils.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
use proc_macro2::{Span, TokenStream, TokenTree};

// Builds a `compile_error! { "..." }` token stream spanned at `$span`, the
// crate's error-reporting mechanism now that `syn::Error` is gone. The
// result is returned to the compiler as the macro's output.
macro_rules! error {
// Base form: a single message expression.
($span:expr, $msg:expr) => {{
let msg = $msg;
quote::quote_spanned!($span=> compile_error! { #msg })
}};
// Format form: `error!(span, "expected {}", tok)` — delegates to the base
// form via `format!`.
($span:expr, $($tt:tt)*) => {
error!($span, format!($($tt)*))
};
}

/// Span of `tt`, falling back to the call site when there is no token
/// (e.g. an error at end of input).
pub(crate) fn tt_span(tt: Option<&TokenTree>) -> Span {
    match tt {
        Some(tt) => tt.span(),
        None => Span::call_site(),
    }
}

pub(crate) fn parse_as_empty(
mut tokens: impl Iterator<Item = TokenTree>,
) -> Result<(), TokenStream> {
match tokens.next() {
Some(tt) => Err(error!(tt.span(), "unexpected token: {}", tt)),
None => Ok(()),
}
}

0 comments on commit aa24a39

Please sign in to comment.