Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix issue where multiple grammars are not always recompiled (#789) #790

Merged
merged 1 commit into from
Feb 9, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
47 changes: 31 additions & 16 deletions generator/src/generator.rs
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,7 @@ use crate::docs::DocComment;
pub(crate) fn generate(
name: Ident,
generics: &Generics,
path: Option<PathBuf>,
paths: Vec<PathBuf>,
rules: Vec<OptimizedRule>,
defaults: Vec<&str>,
doc_comment: &DocComment,
Expand All @@ -32,10 +32,7 @@ pub(crate) fn generate(

let builtins = generate_builtin_rules();
let include_fix = if include_grammar {
match path {
Some(ref path) => generate_include(&name, path.to_str().expect("non-Unicode path")),
None => quote!(),
}
generate_include(&name, paths)
} else {
quote!()
};
Expand Down Expand Up @@ -170,17 +167,33 @@ fn generate_builtin_rules() -> Vec<(&'static str, TokenStream)> {
builtins
}

// Needed because Cargo doesn't watch for changes in grammars.
fn generate_include(name: &Ident, path: &str) -> TokenStream {
/// Generate Rust `include_str!` calls for the grammar files so that Cargo will watch for changes in the grammars.
fn generate_include(name: &Ident, paths: Vec<PathBuf>) -> TokenStream {
let const_name = format_ident!("_PEST_GRAMMAR_{}", name);
// Need to make this relative to the current directory since the path to the file
// is derived from the CARGO_MANIFEST_DIR environment variable
let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
current_dir.push(path);
let relative_path = current_dir.to_str().expect("path contains invalid unicode");
let current_dir = std::env::current_dir().expect("Unable to get current directory");

let include_tokens = paths.iter().map(|path| {
let path = path.to_str().expect("non-Unicode path");

let relative_path = current_dir
.join(path)
.to_str()
.expect("path contains invalid unicode")
.to_string();

quote! {
include_str!(#relative_path)
}
});

let len = include_tokens.len();
quote! {
#[allow(non_upper_case_globals)]
const #const_name: &'static str = include_str!(#relative_path);
const #const_name: [&'static str; #len] = [
#(#include_tokens),*
];
}
}

Expand Down Expand Up @@ -1016,14 +1029,16 @@ mod tests {
let defaults = vec!["ANY"];
let result = result_type();
let box_ty = box_type();
let mut current_dir = std::env::current_dir().expect("Unable to get current directory");
current_dir.push("test.pest");
let test_path = current_dir.to_str().expect("path contains invalid unicode");
let current_dir = std::env::current_dir().expect("Unable to get current directory");

let base_path = current_dir.join("base.pest").to_str().unwrap().to_string();
let test_path = current_dir.join("test.pest").to_str().unwrap().to_string();

assert_eq!(
generate(name, &generics, Some(PathBuf::from("test.pest")), rules, defaults, doc_comment, true).to_string(),
generate(name, &generics, vec![PathBuf::from("base.pest"), PathBuf::from("test.pest")], rules, defaults, doc_comment, true).to_string(),
quote! {
#[allow(non_upper_case_globals)]
const _PEST_GRAMMAR_MyParser: &'static str = include_str!(#test_path);
const _PEST_GRAMMAR_MyParser: [&'static str; 2usize] = [include_str!(#base_path), include_str!(#test_path)];

#[doc = "This is Rule doc\nThis is second line"]
#[allow(dead_code, non_camel_case_types, clippy::upper_case_acronyms)]
Expand Down
9 changes: 5 additions & 4 deletions generator/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
let (name, generics, contents) = parse_derive(ast);

let mut data = String::new();
let mut path = None;
let mut paths = vec![];

for content in contents {
let (_data, _path) = match content {
Expand Down Expand Up @@ -81,8 +81,9 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
};

data.push_str(&_data);
if _path.is_some() {
path = _path;
match _path {
Some(path) => paths.push(path),
None => (),
}
}

Expand All @@ -99,7 +100,7 @@ pub fn derive_parser(input: TokenStream, include_grammar: bool) -> TokenStream {
generator::generate(
name,
&generics,
path,
paths,
optimized,
defaults,
&doc_comment,
Expand Down
1 change: 1 addition & 0 deletions generator/tests/base.pest
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
base = { "base" }