Merge pull request #654 from tomtau/chore/criterion
chore: switch benches to criterion plus MSRV bump to 1.56
CAD97 committed Jul 15, 2022
2 parents f9bdec5 + 9153493 commit eb8a042
Showing 15 changed files with 52 additions and 43 deletions.
10 changes: 5 additions & 5 deletions .github/workflows/ci.yml
@@ -22,7 +22,7 @@ jobs:
- name: Install Rust Stable
uses: actions-rs/toolchain@v1
with:
toolchain: 1.51.0 # Pinned warnings
toolchain: 1.56.1 # Pinned warnings
components: rustfmt, clippy
default: true
- name: Install gcc
@@ -74,7 +74,7 @@ jobs:
- name: Install Rust Nightly
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2021-01-01
toolchain: nightly-2022-01-01
default: true
profile: minimal
- name: Bootstraping Grammars - Building
@@ -101,10 +101,10 @@ jobs:
steps:
- name: Checkout source code
uses: actions/checkout@v3
- name: Install Rust Nightly
- name: Install Rust Stable
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2021-08-01
toolchain: stable
profile: minimal
components: llvm-tools-preview
default: true
@@ -119,7 +119,7 @@ jobs:
command: run
args: --package pest_bootstrap
- name: Install cargo-llvm-cov
run: curl -LsSf https://github.com/taiki-e/cargo-llvm-cov/releases/download/v0.3.3/cargo-llvm-cov-x86_64-unknown-linux-gnu.tar.gz | tar xzf - -C ~/.cargo/bin
uses: taiki-e/install-action@cargo-llvm-cov
- name: Generate code coverage
run: cargo llvm-cov --all-features --workspace --lcov --output-path lcov.info
- name: Upload Results to Codecov
1 change: 1 addition & 0 deletions bootstrap/Cargo.toml
@@ -9,6 +9,7 @@ repository = "https://github.com/pest-parser/pest"
documentation = "https://docs.rs/pest"
publish = false
license = "MIT/Apache-2.0"
rust-version = "1.56"

[dependencies]
pest_generator = "2.1.1" # Use the crates-io version, which (should be) known-good
1 change: 1 addition & 0 deletions derive/Cargo.toml
@@ -11,6 +11,7 @@ keywords = ["pest", "parser", "peg", "grammar"]
categories = ["parsing"]
license = "MIT/Apache-2.0"
readme = "_README.md"
rust-version = "1.56"

[lib]
name = "pest_derive"
1 change: 1 addition & 0 deletions generator/Cargo.toml
@@ -11,6 +11,7 @@ keywords = ["pest", "generator"]
categories = ["parsing"]
license = "MIT/Apache-2.0"
readme = "_README.md"
rust-version = "1.56"

[features]
default = ["std"]
3 changes: 2 additions & 1 deletion grammars/Cargo.toml
@@ -11,13 +11,14 @@ keywords = ["pest", "parser", "peg", "grammar"]
categories = ["parsing"]
license = "MIT/Apache-2.0"
readme = "_README.md"
rust-version = "1.56"

[dependencies]
pest = { path = "../pest", version = "2.1.0" }
pest_derive = { path = "../derive", version = "2.1.0" }

[dev-dependencies]
bencher = "0.1"
criterion = "0.3"

[[bench]]
name = "json"
14 changes: 8 additions & 6 deletions grammars/benches/json.rs
@@ -7,10 +7,10 @@
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.

extern crate bencher;
extern crate criterion;
extern crate pest;
extern crate pest_grammars;
use bencher::{benchmark_group, benchmark_main, Bencher};
use criterion::{criterion_group, criterion_main, Criterion};

use std::fs::File;
use std::io::Read;
@@ -19,14 +19,16 @@ use pest::Parser;

use pest_grammars::json::*;

fn data(b: &mut Bencher) {
fn criterion_benchmark(c: &mut Criterion) {
let mut file = File::open("benches/data.json").unwrap();
let mut data = String::new();

file.read_to_string(&mut data).unwrap();

b.iter(|| JsonParser::parse(Rule::json, &data).unwrap());
c.bench_function("json parser", |b| {
b.iter(|| JsonParser::parse(Rule::json, &data).unwrap())
});
}

benchmark_group!(benches, data);
benchmark_main!(benches);
criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);
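
For reference, a minimal sketch (not part of this commit) of the same benchmark using criterion's black_box, which keeps the optimizer from discarding the input; JsonParser, Rule::json and the benches/data.json path come from the diff above, the rest is illustrative.

use criterion::{black_box, criterion_group, criterion_main, Criterion};
use pest::Parser;
use pest_grammars::json::*;

fn criterion_benchmark(c: &mut Criterion) {
    // Same data file as the benchmark above.
    let data = std::fs::read_to_string("benches/data.json").unwrap();
    c.bench_function("json parser", |b| {
        // black_box hides the input from the optimizer so the parse cannot be elided.
        b.iter(|| JsonParser::parse(Rule::json, black_box(&data)).unwrap())
    });
}

criterion_group!(benches, criterion_benchmark);
criterion_main!(benches);

Both bencher and criterion rely on harness = false for the [[bench]] target, so the collapsed remainder of grammars/Cargo.toml should not need to change; the bench then runs through cargo bench -p pest_grammars.
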
1 change: 1 addition & 0 deletions grammars/fuzz/Cargo.toml
@@ -3,6 +3,7 @@ name = "pest_grammars-fuzz"
version = "0.0.0"
authors = ["Automatically generated"]
publish = false
rust-version = "1.56"

[package.metadata]
cargo-fuzz = true
1 change: 1 addition & 0 deletions meta/Cargo.toml
@@ -13,6 +13,7 @@ license = "MIT/Apache-2.0"
readme = "_README.md"
exclude = ["src/grammar.pest"]
include = ["Cargo.toml", "src/**/*", "src/grammar.rs", "_README.md", "LICENSE-*"]
rust-version = "1.56"

[dependencies]
pest = { path = "../pest", version = "2.1.0" }
1 change: 1 addition & 0 deletions meta/fuzz/Cargo.toml
@@ -3,6 +3,7 @@ name = "pest_meta-fuzz"
version = "0.0.0"
authors = ["Automatically generated"]
publish = false
rust-version = "1.56"

[package.metadata]
cargo-fuzz = true
36 changes: 18 additions & 18 deletions meta/src/validator.rs
@@ -157,7 +157,7 @@ pub fn validate_rust_keywords<'i>(
ErrorVariant::CustomError {
message: format!("{} is a rust keyword", name),
},
definition.clone(),
*definition,
))
}
}
@@ -180,7 +180,7 @@ pub fn validate_pest_keywords<'i>(
ErrorVariant::CustomError {
message: format!("{} is a pest keyword", name),
},
definition.clone(),
*definition,
))
}
}
@@ -201,7 +201,7 @@ pub fn validate_already_defined(definitions: &Vec<Span>) -> Vec<Error<Rule>> {
ErrorVariant::CustomError {
message: format!("rule {} already defined", name),
},
definition.clone(),
*definition,
))
} else {
defined.insert(name);
@@ -228,7 +228,7 @@ pub fn validate_undefined<'i>(
ErrorVariant::CustomError {
message: format!("rule {} is undefined", name),
},
rule.clone(),
*rule,
))
}
}
@@ -342,7 +342,7 @@ fn validate_repetition<'a, 'i: 'a>(rules: &'a [ParserRule<'i>]) -> Vec<Error<Rul
infinitely"
.to_owned()
},
node.span.clone()
node.span
))
} else if is_non_progressing(&other.expr, &map, &mut vec![]) {
Some(Error::new_from_span(
@@ -352,7 +352,7 @@ fn validate_repetition<'a, 'i: 'a>(rules: &'a [ParserRule<'i>]) -> Vec<Error<Rul
infinitely"
.to_owned(),
},
node.span.clone()
node.span
))
} else {
None
@@ -389,7 +389,7 @@ fn validate_choices<'a, 'i: 'a>(rules: &'a [ParserRule<'i>]) -> Vec<Error<Rule>>
"expression cannot fail; following choices cannot be reached"
.to_owned(),
},
node.span.clone(),
node.span,
))
} else {
None
@@ -419,7 +419,7 @@ fn validate_whitespace_comment<'a, 'i: 'a>(rules: &'a [ParserRule<'i>]) -> Vec<E
&rule.name
),
},
rule.node.span.clone(),
rule.node.span,
))
} else if is_non_progressing(&rule.node.expr, &map, &mut vec![]) {
Some(Error::new_from_span(
@@ -429,7 +429,7 @@ fn validate_whitespace_comment<'a, 'i: 'a>(rules: &'a [ParserRule<'i>]) -> Vec<E
&rule.name
),
},
rule.node.span.clone(),
rule.node.span,
))
} else {
None
@@ -475,7 +475,7 @@ fn left_recursion<'a, 'i: 'a>(rules: HashMap<String, &'a ParserNode<'i>>) -> Vec
chain
)
},
node.span.clone()
node.span
));
}

@@ -499,21 +499,21 @@ fn left_recursion<'a, 'i: 'a>(rules: HashMap<String, &'a ParserNode<'i>>) -> Vec
}
}
ParserExpr::Choice(ref lhs, ref rhs) => {
check_expr(&lhs, rules, trace).or_else(|| check_expr(&rhs, rules, trace))
check_expr(lhs, rules, trace).or_else(|| check_expr(rhs, rules, trace))
}
ParserExpr::Rep(ref node) => check_expr(&node, rules, trace),
ParserExpr::RepOnce(ref node) => check_expr(&node, rules, trace),
ParserExpr::Opt(ref node) => check_expr(&node, rules, trace),
ParserExpr::PosPred(ref node) => check_expr(&node, rules, trace),
ParserExpr::NegPred(ref node) => check_expr(&node, rules, trace),
ParserExpr::Push(ref node) => check_expr(&node, rules, trace),
ParserExpr::Rep(ref node) => check_expr(node, rules, trace),
ParserExpr::RepOnce(ref node) => check_expr(node, rules, trace),
ParserExpr::Opt(ref node) => check_expr(node, rules, trace),
ParserExpr::PosPred(ref node) => check_expr(node, rules, trace),
ParserExpr::NegPred(ref node) => check_expr(node, rules, trace),
ParserExpr::Push(ref node) => check_expr(node, rules, trace),
_ => None,
}
}

let mut errors = vec![];

for (ref name, ref node) in &rules {
for (name, node) in &rules {
let name = (*name).clone();

if let Some(error) = check_expr(node, &rules, &mut vec![name]) {
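
The validator.rs hunks above are mechanical cleanups rather than behaviour changes: the span values are Copy (the *definition form would not compile otherwise), so a dereference replaces .clone(), and redundant closures and ref patterns are dropped. A stand-alone sketch of the clone-vs-copy part, using hypothetical simplified types rather than pest's own:

// Hypothetical types, not pest's API, showing the pattern applied above.
#[derive(Clone, Copy)]
struct Span {
    start: usize,
    end: usize,
}

struct CustomError {
    message: String,
    span: Span,
}

fn keyword_error(name: &str, definition: &Span) -> CustomError {
    CustomError {
        message: format!("{} is a rust keyword", name),
        // Previously definition.clone(); for a Copy type the dereference is
        // equivalent, and it is what clippy's clone_on_copy lint suggests.
        span: *definition,
    }
}

fn main() {
    let def = Span { start: 0, end: 4 };
    let err = keyword_error("impl", &def);
    println!("{} at {}..{}", err.message, err.span.start, err.span.end);
}
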
3 changes: 2 additions & 1 deletion pest/Cargo.toml
@@ -11,14 +11,15 @@ keywords = ["pest", "parser", "peg", "grammar"]
categories = ["parsing"]
license = "MIT/Apache-2.0"
readme = "_README.md"
rust-version = "1.56"

[features]
default = ["std"]
# Implements `std::error::Error` for the `Error` type
std = ["ucd-trie/std", "thiserror"]
# Enables the `to_json` function for `Pair` and `Pairs`
pretty-print = ["serde", "serde_json"]
# Enable const fn constructor for `PrecClimber` (requires nightly)
# Enable const fn constructor for `PrecClimber`
const_prec_climber = []

[dependencies]
9 changes: 4 additions & 5 deletions pest/src/error.rs
@@ -354,14 +354,12 @@ impl<R: RuleType> Error<R> {
}

if let Some(end) = end {
underline.push('^');
if end - start > 1 {
underline.push('^');
for _ in 2..(end - start) {
underline.push('-');
}
underline.push('^');
} else {
underline.push('^');
}
} else {
underline.push_str("^---")
@@ -398,13 +396,14 @@ impl<R: RuleType> Error<R> {
1 => f(&rules[0]),
2 => format!("{} or {}", f(&rules[0]), f(&rules[1])),
l => {
let non_separated = f(&rules[l - 1]);
let separated = rules
.iter()
.take(l - 1)
.map(|r| f(r))
.map(f)
.collect::<Vec<_>>()
.join(", ");
format!("{}, or {}", separated, f(&rules[l - 1]))
format!("{}, or {}", separated, non_separated)
}
}
}
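
Extracted as a free function, the underline construction in the first error.rs hunk behaves like the sketch below (start and end are the offsets pest computes for the span; the end == None branch is unchanged and omitted here):

// Stand-alone sketch of the deduplicated underline logic above: a single
// caret for a one-character span, caret-dashes-caret otherwise.
fn underline(start: usize, end: usize) -> String {
    let mut underline = String::new();
    underline.push('^');
    if end - start > 1 {
        for _ in 2..(end - start) {
            underline.push('-');
        }
        underline.push('^');
    }
    underline
}

fn main() {
    assert_eq!(underline(3, 4), "^");
    assert_eq!(underline(3, 8), "^---^");
}

In the second hunk, f(&rules[l - 1]) is hoisted out of the format! call, presumably so the closure can then be handed to .map(f) by value; the rendered output ("a, b, or c") is unchanged.
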
1 change: 0 additions & 1 deletion pest/src/lib.rs
@@ -7,7 +7,6 @@
// option. All files in the project carrying such notice may not be copied,
// modified, or distributed except according to those terms.
#![no_std]
#![cfg_attr(feature = "const_prec_climber", feature(const_fn_trait_bound))]

//! # pest. The Elegant Parser
//!
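
The line removed from lib.rs enabled the nightly const_fn_trait_bound feature whenever the const_prec_climber cargo feature was on, matching the "(requires nightly)" note dropped from pest/Cargo.toml above. On toolchains where trait bounds on const fn are stable, a constructor shaped like the sketch below compiles with no feature gate at all (hypothetical types, not pest's PrecClimber; whether this already holds on the new 1.56 MSRV is not shown by the diff):

// Hypothetical sketch of a trait-bounded const fn constructor, the construct
// that #![feature(const_fn_trait_bound)] used to gate on nightly.
use core::marker::PhantomData;

pub struct Climber<R: Copy> {
    _rule: PhantomData<R>,
}

impl<R: Copy> Climber<R> {
    // const fn with a trait bound inherited from the impl block.
    pub const fn new() -> Self {
        Climber { _rule: PhantomData }
    }
}

pub const CLIMBER: Climber<u32> = Climber::new();

fn main() {
    let _ = &CLIMBER;
}
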
12 changes: 6 additions & 6 deletions pest/src/parser_state.rs
@@ -365,7 +365,7 @@ impl<'i, R: RuleType> ParserState<'i, R> {
F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>,
{
let token_index = self.queue.len();
let initial_pos = self.position.clone();
let initial_pos = self.position;

let result = f(self);

@@ -745,7 +745,7 @@ impl<'i, R: RuleType> ParserState<'i, R> {
}
};

let initial_pos = self.position.clone();
let initial_pos = self.position;

let result = f(self.checkpoint());

@@ -846,13 +846,13 @@ impl<'i, R: RuleType> ParserState<'i, R> {
where
F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>,
{
let start = self.position.clone();
let start = self.position;

let result = f(self);

match result {
Ok(mut state) => {
let end = state.position.clone();
let end = state.position;
state.stack.push(start.span(&end));
Ok(state)
}
@@ -958,7 +958,7 @@ impl<'i, R: RuleType> ParserState<'i, R> {
return Ok(self);
}

let mut position = self.position.clone();
let mut position = self.position;
let result = {
let mut iter_b2t = self.stack[range].iter();
let matcher = |span: &Span| position.match_string(span.as_str());
@@ -1019,7 +1019,7 @@ impl<'i, R: RuleType> ParserState<'i, R> {
/// ```
#[inline]
pub fn stack_match_pop(mut self: Box<Self>) -> ParseResult<Box<Self>> {
let mut position = self.position.clone();
let mut position = self.position;
let mut result = true;
while let Some(span) = self.stack.pop() {
result = position.match_string(span.as_str());
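
All of the parser_state.rs hunks touch the same shape: state methods consume the parser state as Box<Self>, hand it to an FnOnce closure, and copy the Position before and after the call (Position is Copy, hence the dropped .clone() calls). A compilable sketch of that shape with hypothetical, pared-down types rather than pest's actual ParserState:

// Hypothetical, pared-down types illustrating the Box<Self>-threading pattern
// above: copy the start position, run the closure, and on success record the
// matched region on the stack.
type ParseResult<S> = Result<S, S>;

#[derive(Clone, Copy, Debug)]
struct Position(usize);

#[derive(Debug)]
struct State {
    position: Position,
    stack: Vec<(Position, Position)>,
}

impl State {
    fn stack_push<F>(self: Box<Self>, f: F) -> ParseResult<Box<Self>>
    where
        F: FnOnce(Box<Self>) -> ParseResult<Box<Self>>,
    {
        let start = self.position; // Copy, so no .clone() needed
        match f(self) {
            Ok(mut state) => {
                let end = state.position;
                state.stack.push((start, end)); // pest pushes start.span(&end) here
                Ok(state)
            }
            Err(state) => Err(state),
        }
    }
}

fn main() {
    let state = Box::new(State { position: Position(0), stack: vec![] });
    // A trivial "parser" that advances the position by three bytes.
    let state = state
        .stack_push(|mut s| {
            s.position = Position(3);
            Ok(s)
        })
        .unwrap();
    assert_eq!(state.stack.len(), 1);
}
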
1 change: 1 addition & 0 deletions vm/Cargo.toml
@@ -11,6 +11,7 @@ keywords = ["pest", "vm"]
categories = ["parsing"]
license = "MIT/Apache-2.0"
readme = "_README.md"
rust-version = "1.56"

[dependencies]
pest = { path = "../pest", version = "2.1.0" }
