Skip to content

Commit

Permalink
Update to syn 2
Browse files Browse the repository at this point in the history
  • Loading branch information
dtolnay committed Mar 18, 2023
1 parent a893a73 commit 1259995
Show file tree
Hide file tree
Showing 19 changed files with 299 additions and 342 deletions.
2 changes: 1 addition & 1 deletion gen/build/Cargo.toml
Expand Up @@ -25,7 +25,7 @@ once_cell = "1.9"
proc-macro2 = { version = "1.0.39", default-features = false, features = ["span-locations"] }
quote = { version = "1.0", default-features = false }
scratch = "1.0"
syn = { version = "1.0.95", default-features = false, features = ["parsing", "printing", "clone-impls", "full"] }
syn = { version = "2.0.0", default-features = false, features = ["parsing", "printing", "clone-impls", "full"] }

[dev-dependencies]
cxx = { version = "1.0", path = "../.." }
Expand Down
2 changes: 1 addition & 1 deletion gen/cmd/Cargo.toml
Expand Up @@ -25,7 +25,7 @@ clap = { version = "4", default-features = false, features = ["error-context", "
codespan-reporting = "0.11"
proc-macro2 = { version = "1.0.39", default-features = false, features = ["span-locations"] }
quote = { version = "1.0", default-features = false }
syn = { version = "1.0.95", default-features = false, features = ["parsing", "printing", "clone-impls", "full"] }
syn = { version = "2.0.0", default-features = false, features = ["parsing", "printing", "clone-impls", "full"] }

[package.metadata.docs.rs]
targets = ["x86_64-unknown-linux-gnu"]
2 changes: 1 addition & 1 deletion gen/lib/Cargo.toml
Expand Up @@ -15,7 +15,7 @@ rust-version = "1.60"
codespan-reporting = "0.11"
proc-macro2 = { version = "1.0.39", default-features = false, features = ["span-locations"] }
quote = { version = "1.0", default-features = false }
syn = { version = "1.0.95", default-features = false, features = ["parsing", "printing", "clone-impls", "full"] }
syn = { version = "2.0.0", default-features = false, features = ["parsing", "printing", "clone-impls", "full"] }

[lib]
doc-scrape-examples = false
Expand Down
6 changes: 3 additions & 3 deletions gen/src/file.rs
Expand Up @@ -2,7 +2,7 @@ use crate::syntax::file::Module;
use crate::syntax::namespace::Namespace;
use syn::parse::discouraged::Speculative;
use syn::parse::{Error, Parse, ParseStream, Result};
use syn::{braced, Attribute, Ident, Item, Token, Visibility};
use syn::{braced, Attribute, Ident, Item, Meta, Token, Visibility};

pub struct File {
pub modules: Vec<Module>,
Expand All @@ -23,7 +23,7 @@ fn parse(input: ParseStream, modules: &mut Vec<Module>) -> Result<()> {
let mut namespace = Namespace::ROOT;
let mut attrs = input.call(Attribute::parse_outer)?;
for attr in &attrs {
let path = &attr.path.segments;
let path = &attr.path().segments;
if path.len() == 2 && path[0].ident == "cxx" && path[1].ident == "bridge" {
cxx_bridge = true;
namespace = parse_args(attr)?;
Expand Down Expand Up @@ -64,7 +64,7 @@ fn parse(input: ParseStream, modules: &mut Vec<Module>) -> Result<()> {
}

fn parse_args(attr: &Attribute) -> Result<Namespace> {
if attr.tokens.is_empty() {
if let Meta::Path(_) = attr.meta {
Ok(Namespace::ROOT)
} else {
attr.parse_args_with(Namespace::parse_bridge_attr_namespace)
Expand Down
2 changes: 1 addition & 1 deletion macro/Cargo.toml
Expand Up @@ -23,7 +23,7 @@ experimental-enum-variants-from-header = ["clang-ast", "flate2", "memmap", "serd
[dependencies]
proc-macro2 = "1.0.39"
quote = "1.0.4"
syn = { version = "1.0.95", features = ["full"] }
syn = { version = "2.0.0", features = ["full"] }

# optional dependencies:
clang-ast = { version = "0.1", optional = true }
Expand Down
14 changes: 7 additions & 7 deletions macro/src/expand.rs
Expand Up @@ -1264,7 +1264,7 @@ fn expand_rust_box(key: NamedImplKey, types: &Types, explicit_impl: Option<&Impl
let (impl_generics, ty_generics) = generics::split_for_impl(key, explicit_impl, resolve);

let begin_span = explicit_impl.map_or(key.begin_span, |explicit| explicit.impl_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span.join());
let unsafe_token = format_ident!("unsafe", span = begin_span);
let prevent_unwind_drop_label = format!("::{} as Drop>::drop", ident);

Expand Down Expand Up @@ -1322,7 +1322,7 @@ fn expand_rust_vec(key: NamedImplKey, types: &Types, explicit_impl: Option<&Impl
let (impl_generics, ty_generics) = generics::split_for_impl(key, explicit_impl, resolve);

let begin_span = explicit_impl.map_or(key.begin_span, |explicit| explicit.impl_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span.join());
let unsafe_token = format_ident!("unsafe", span = begin_span);
let prevent_unwind_drop_label = format!("::{} as Drop>::drop", elem);

Expand Down Expand Up @@ -1416,7 +1416,7 @@ fn expand_unique_ptr(
};

let begin_span = explicit_impl.map_or(key.begin_span, |explicit| explicit.impl_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span.join());
let unsafe_token = format_ident!("unsafe", span = begin_span);

quote_spanned! {end_span=>
Expand Down Expand Up @@ -1501,7 +1501,7 @@ fn expand_shared_ptr(
};

let begin_span = explicit_impl.map_or(key.begin_span, |explicit| explicit.impl_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span.join());
let unsafe_token = format_ident!("unsafe", span = begin_span);

quote_spanned! {end_span=>
Expand Down Expand Up @@ -1556,7 +1556,7 @@ fn expand_weak_ptr(key: NamedImplKey, types: &Types, explicit_impl: Option<&Impl
let (impl_generics, ty_generics) = generics::split_for_impl(key, explicit_impl, resolve);

let begin_span = explicit_impl.map_or(key.begin_span, |explicit| explicit.impl_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span.join());
let unsafe_token = format_ident!("unsafe", span = begin_span);

quote_spanned! {end_span=>
Expand Down Expand Up @@ -1629,7 +1629,7 @@ fn expand_cxx_vector(
let (impl_generics, ty_generics) = generics::split_for_impl(key, explicit_impl, resolve);

let begin_span = explicit_impl.map_or(key.begin_span, |explicit| explicit.impl_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span);
let end_span = explicit_impl.map_or(key.end_span, |explicit| explicit.brace_token.span.join());
let unsafe_token = format_ident!("unsafe", span = begin_span);

let can_pass_element_by_value = types.is_maybe_trivial(elem);
Expand Down Expand Up @@ -1810,7 +1810,7 @@ fn expand_extern_type(ty: &Type, types: &Types, proper: bool) -> TokenStream {
}
Type::SliceRef(ty) => {
let span = ty.ampersand.span;
let rust_slice = Ident::new("RustSlice", ty.bracket.span);
let rust_slice = Ident::new("RustSlice", ty.bracket.span.join());
quote_spanned!(span=> ::cxx::private::#rust_slice)
}
_ => quote!(#ty),
Expand Down
151 changes: 85 additions & 66 deletions syntax/attrs.rs
Expand Up @@ -5,8 +5,8 @@ use crate::syntax::Atom::{self, *};
use crate::syntax::{cfg, Derive, Doc, ForeignName};
use proc_macro2::{Ident, TokenStream};
use quote::ToTokens;
use syn::parse::{Nothing, Parse, ParseStream, Parser as _};
use syn::{parenthesized, token, Attribute, Error, LitStr, Path, Result, Token};
use syn::parse::ParseStream;
use syn::{Attribute, Error, Expr, Lit, LitStr, Meta, Path, Result, Token};

// Intended usage:
//
Expand Down Expand Up @@ -47,8 +47,9 @@ pub struct Parser<'a> {
pub fn parse(cx: &mut Errors, attrs: Vec<Attribute>, mut parser: Parser) -> OtherAttrs {
let mut passthrough_attrs = Vec::new();
for attr in attrs {
if attr.path.is_ident("doc") {
match parse_doc_attribute.parse2(attr.tokens.clone()) {
let attr_path = attr.path();
if attr_path.is_ident("doc") {
match parse_doc_attribute(&attr.meta) {
Ok(attr) => {
if let Some(doc) = &mut parser.doc {
match attr {
Expand All @@ -63,7 +64,7 @@ pub fn parse(cx: &mut Errors, attrs: Vec<Attribute>, mut parser: Parser) -> Othe
break;
}
}
} else if attr.path.is_ident("derive") {
} else if attr_path.is_ident("derive") {
match attr.parse_args_with(|attr: ParseStream| parse_derive_attribute(cx, attr)) {
Ok(attr) => {
if let Some(derives) = &mut parser.derives {
Expand All @@ -76,7 +77,7 @@ pub fn parse(cx: &mut Errors, attrs: Vec<Attribute>, mut parser: Parser) -> Othe
break;
}
}
} else if attr.path.is_ident("repr") {
} else if attr_path.is_ident("repr") {
match attr.parse_args_with(parse_repr_attribute) {
Ok(attr) => {
if let Some(repr) = &mut parser.repr {
Expand All @@ -89,8 +90,8 @@ pub fn parse(cx: &mut Errors, attrs: Vec<Attribute>, mut parser: Parser) -> Othe
break;
}
}
} else if attr.path.is_ident("namespace") {
match parse_namespace_attribute.parse2(attr.tokens.clone()) {
} else if attr_path.is_ident("namespace") {
match Namespace::parse_meta(&attr.meta) {
Ok(attr) => {
if let Some(namespace) = &mut parser.namespace {
**namespace = attr;
Expand All @@ -102,8 +103,8 @@ pub fn parse(cx: &mut Errors, attrs: Vec<Attribute>, mut parser: Parser) -> Othe
break;
}
}
} else if attr.path.is_ident("cxx_name") {
match parse_cxx_name_attribute.parse2(attr.tokens.clone()) {
} else if attr_path.is_ident("cxx_name") {
match parse_cxx_name_attribute(&attr.meta) {
Ok(attr) => {
if let Some(cxx_name) = &mut parser.cxx_name {
**cxx_name = Some(attr);
Expand All @@ -115,8 +116,8 @@ pub fn parse(cx: &mut Errors, attrs: Vec<Attribute>, mut parser: Parser) -> Othe
break;
}
}
} else if attr.path.is_ident("rust_name") {
match parse_rust_name_attribute.parse2(attr.tokens.clone()) {
} else if attr_path.is_ident("rust_name") {
match parse_rust_name_attribute(&attr.meta) {
Ok(attr) => {
if let Some(rust_name) = &mut parser.rust_name {
**rust_name = Some(attr);
Expand All @@ -128,8 +129,8 @@ pub fn parse(cx: &mut Errors, attrs: Vec<Attribute>, mut parser: Parser) -> Othe
break;
}
}
} else if attr.path.is_ident("cfg") {
match cfg::parse_attribute.parse2(attr.tokens.clone()) {
} else if attr_path.is_ident("cfg") {
match cfg::parse_attribute(&attr) {
Ok(cfg_expr) => {
if let Some(cfg) = &mut parser.cfg {
cfg.merge(cfg_expr);
Expand All @@ -142,31 +143,31 @@ pub fn parse(cx: &mut Errors, attrs: Vec<Attribute>, mut parser: Parser) -> Othe
break;
}
}
} else if attr.path.is_ident("variants_from_header")
} else if attr_path.is_ident("variants_from_header")
&& cfg!(feature = "experimental-enum-variants-from-header")
{
if let Err(err) = Nothing::parse.parse2(attr.tokens.clone()) {
if let Err(err) = require_empty_attribute(&attr.meta) {
cx.push(err);
}
if let Some(variants_from_header) = &mut parser.variants_from_header {
**variants_from_header = Some(attr);
continue;
}
} else if attr.path.is_ident("allow")
|| attr.path.is_ident("warn")
|| attr.path.is_ident("deny")
|| attr.path.is_ident("forbid")
|| attr.path.is_ident("deprecated")
|| attr.path.is_ident("must_use")
} else if attr_path.is_ident("allow")
|| attr_path.is_ident("warn")
|| attr_path.is_ident("deny")
|| attr_path.is_ident("forbid")
|| attr_path.is_ident("deprecated")
|| attr_path.is_ident("must_use")
{
// https://doc.rust-lang.org/reference/attributes/diagnostics.html
passthrough_attrs.push(attr);
continue;
} else if attr.path.is_ident("serde") {
} else if attr_path.is_ident("serde") {
passthrough_attrs.push(attr);
continue;
} else if attr.path.segments.len() > 1 {
let tool = &attr.path.segments.first().unwrap().ident;
} else if attr_path.segments.len() > 1 {
let tool = &attr_path.segments.first().unwrap().ident;
if tool == "rustfmt" {
// Skip, rustfmt only needs to find it in the pre-expansion source file.
continue;
Expand All @@ -192,24 +193,26 @@ mod kw {
syn::custom_keyword!(hidden);
}

fn parse_doc_attribute(input: ParseStream) -> Result<DocAttribute> {
let lookahead = input.lookahead1();
if lookahead.peek(Token![=]) {
input.parse::<Token![=]>()?;
let lit: LitStr = input.parse()?;
Ok(DocAttribute::Doc(lit))
} else if lookahead.peek(token::Paren) {
let content;
parenthesized!(content in input);
content.parse::<kw::hidden>()?;
Ok(DocAttribute::Hidden)
} else {
Err(lookahead.error())
/// Parse the meta of a `#[doc ...]` attribute.
///
/// Two forms are recognized: `#[doc = "..."]` with a string literal
/// value, and `#[doc(hidden)]`. Anything else produces an error
/// spanned to the attribute's meta.
fn parse_doc_attribute(meta: &Meta) -> Result<DocAttribute> {
    if let Meta::NameValue(name_value) = meta {
        // `#[doc = "..."]`: only a string literal is accepted as the value.
        if let Expr::Lit(expr) = &name_value.value {
            if let Lit::Str(lit) = &expr.lit {
                return Ok(DocAttribute::Doc(lit.clone()));
            }
        }
    } else if let Meta::List(list) = meta {
        // `#[doc(hidden)]`: the parenthesized content must be exactly `hidden`.
        list.parse_args::<kw::hidden>()?;
        return Ok(DocAttribute::Hidden);
    }
    Err(Error::new_spanned(meta, "unsupported doc attribute"))
}

fn parse_derive_attribute(cx: &mut Errors, input: ParseStream) -> Result<Vec<Derive>> {
let paths = input.parse_terminated::<Path, Token![,]>(Path::parse_mod_style)?;
let paths = input.parse_terminated(Path::parse_mod_style, Token![,])?;

let mut derives = Vec::new();
for path in paths {
Expand Down Expand Up @@ -241,31 +244,42 @@ fn parse_repr_attribute(input: ParseStream) -> Result<Atom> {
))
}

/// Parse the `= some::namespace` tail of a `#[namespace = ...]` attribute.
///
/// Consumes the leading `=` token and then delegates to `Namespace`'s
/// own `Parse` impl for the path itself.
fn parse_namespace_attribute(input: ParseStream) -> Result<Namespace> {
    let _: Token![=] = input.parse()?;
    input.parse()
}

fn parse_cxx_name_attribute(input: ParseStream) -> Result<ForeignName> {
input.parse::<Token![=]>()?;
if input.peek(LitStr) {
let lit: LitStr = input.parse()?;
ForeignName::parse(&lit.value(), lit.span())
} else {
let ident: Ident = input.parse()?;
ForeignName::parse(&ident.to_string(), ident.span())
/// Read the value of a `#[cxx_name = ...]` attribute.
///
/// The value may be given as a string literal (`#[cxx_name = "new"]`)
/// or as a bare identifier (`#[cxx_name = new]`); either way it is
/// validated through `ForeignName::parse`.
fn parse_cxx_name_attribute(meta: &Meta) -> Result<ForeignName> {
    if let Meta::NameValue(name_value) = meta {
        let value = &name_value.value;
        if let Expr::Lit(expr) = value {
            // String literal form: `#[cxx_name = "..."]`.
            if let Lit::Str(lit) = &expr.lit {
                return ForeignName::parse(&lit.value(), lit.span());
            }
        } else if let Expr::Path(expr) = value {
            // Bare identifier form: `#[cxx_name = ident]`.
            if let Some(ident) = expr.path.get_ident() {
                return ForeignName::parse(&ident.to_string(), ident.span());
            }
        }
    }
    Err(Error::new_spanned(meta, "unsupported cxx_name attribute"))
}

fn parse_rust_name_attribute(input: ParseStream) -> Result<Ident> {
input.parse::<Token![=]>()?;
if input.peek(LitStr) {
let lit: LitStr = input.parse()?;
lit.parse()
} else {
input.parse()
/// Read the value of a `#[rust_name = ...]` attribute.
///
/// Accepts either a string literal (`#[rust_name = "renamed"]`), which
/// is re-parsed into an `Ident`, or a bare identifier
/// (`#[rust_name = renamed]`), which is used as-is.
fn parse_rust_name_attribute(meta: &Meta) -> Result<Ident> {
    if let Meta::NameValue(name_value) = meta {
        let value = &name_value.value;
        if let Expr::Lit(expr) = value {
            // String literal form: parse the contents back into an Ident.
            if let Lit::Str(lit) = &expr.lit {
                return lit.parse();
            }
        } else if let Expr::Path(expr) = value {
            // Bare identifier form: a single-segment path.
            if let Some(ident) = expr.path.get_ident() {
                return Ok(ident.clone());
            }
        }
    }
    Err(Error::new_spanned(meta, "unsupported rust_name attribute"))
}

#[derive(Clone)]
Expand All @@ -288,15 +302,20 @@ impl ToTokens for OtherAttrs {
pound_token,
style,
bracket_token,
path,
tokens: attr_tokens,
meta,
} = attr;
pound_token.to_tokens(tokens);
let _ = style; // ignore; render outer and inner attrs both as outer
bracket_token.surround(tokens, |tokens| {
path.to_tokens(tokens);
attr_tokens.to_tokens(tokens);
});
bracket_token.surround(tokens, |tokens| meta.to_tokens(tokens));
}
}
}

fn require_empty_attribute(meta: &Meta) -> Result<()> {
let error_span = match meta {
Meta::Path(_) => return Ok(()),
Meta::List(meta) => meta.delimiter.span().open(),
Meta::NameValue(meta) => meta.eq_token.span,
};
Err(Error::new(error_span, "unexpected token in cxx attribute"))
}

0 comments on commit 1259995

Please sign in to comment.