Skip to content

Commit

Permalink
Reimplement variance in terms of new traits.
Browse files Browse the repository at this point in the history
  • Loading branch information
olson-sean-k committed Jan 13, 2024
1 parent ba7d6a6 commit d54bd06
Show file tree
Hide file tree
Showing 7 changed files with 670 additions and 503 deletions.
10 changes: 5 additions & 5 deletions src/encode.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use std::borrow::{Borrow, Cow};
use std::fmt::Display;
use thiserror::Error;

use crate::token::Token;
use crate::token::{Bound, Token};

/// A regular expression that never matches.
///
Expand Down Expand Up @@ -215,7 +215,7 @@ fn encode<'t, A, T>(
},
(position, Repetition(repetition)) => {
let encoding = {
let (lower, upper) = repetition.bounds();
let cardinality = repetition.cardinality();
let mut pattern = String::new();
pattern.push_str("(?:");
encode(
Expand All @@ -224,11 +224,11 @@ fn encode<'t, A, T>(
&mut pattern,
repetition.tokens().iter(),
);
pattern.push_str(&if let Some(upper) = upper {
format!("){{{},{}}}", lower, upper)
pattern.push_str(&if let Bound::Bounded(upper) = cardinality.upper() {
format!("){{{},{}}}", cardinality.lower(), upper)
}
else {
format!("){{{},}}", lower)
format!("){{{},}}", cardinality.lower())
});
pattern
};
Expand Down
8 changes: 4 additions & 4 deletions src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -74,7 +74,7 @@ use thiserror::Error;

use crate::encode::CompileError;
use crate::rule::{Checked, RuleError};
use crate::token::{InvariantText, ParseError, Token, TokenTree, Tokenized};
use crate::token::{ParseError, Text, Token, TokenTree, Tokenized};
#[cfg(feature = "walk")]
use crate::walk::WalkError;

Expand Down Expand Up @@ -217,8 +217,8 @@ impl Variance {
}
}

impl From<token::Variance<InvariantText<'_>>> for Variance {
fn from(variance: token::Variance<InvariantText<'_>>) -> Self {
impl From<token::Variance<Text<'_>>> for Variance {
fn from(variance: token::Variance<Text<'_>>) -> Self {
match variance {
token::Variance::Invariant(text) => {
Variance::Invariant(PathBuf::from(text.to_string().into_owned()))
Expand Down Expand Up @@ -817,7 +817,7 @@ impl<'t> Program<'t> for Any<'t> {
}

fn variance(&self) -> Variance {
self.tree.as_ref().variance::<InvariantText>().into()
self.tree.as_ref().variance::<Text>().into()
}

fn is_exhaustive(&self) -> bool {
Expand Down
54 changes: 29 additions & 25 deletions src/rule.rs
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@ use std::slice;
use thiserror::Error;

use crate::diagnostics::{CompositeSpan, CorrelatedSpan, SpanExt as _};
use crate::token::{self, InvariantSize, Token, TokenKind, TokenTree, Tokenized};
use crate::token::{self, Cardinality, Size, Token, TokenKind, TokenTree, Tokenized};

Check failure on line 27 in src/rule.rs — GitHub Actions / Lint: unresolved import `crate::token::TokenKind`
use crate::{Any, BuildError, Glob, Pattern};

/// Maximum invariant size.
Expand All @@ -35,7 +35,7 @@ use crate::{Any, BuildError, Glob, Pattern};
///
/// This limit is independent of the back end encoding. This code does not rely on errors in the
/// encoder by design, such as size limitations.
const MAX_INVARIANT_SIZE: InvariantSize = InvariantSize::new(0x10000);
const MAX_INVARIANT_SIZE: Size = Size::new(0x10000);

trait IteratorExt: Iterator + Sized {
fn adjacent(self) -> Adjacent<Self>
Expand Down Expand Up @@ -363,7 +363,7 @@ impl<'t> TryFrom<&'t str> for Checked<Tokenized<'t>> {

pub fn check(tokenized: Tokenized) -> Result<Checked<Tokenized>, RuleError> {
boundary(&tokenized)?;
bounds(&tokenized)?;
//cardinality(&tokenized)?; // TODO: See `cardinality` below.
group(&tokenized)?;
size(&tokenized)?;
Ok(Checked { inner: tokenized })
Expand Down Expand Up @@ -518,7 +518,7 @@ fn group<'t>(tokenized: &Tokenized<'t>) -> Result<(), RuleError<'t>> {
let tokens = repetition.tokens();
if let Some(terminals) = tokens.terminals() {
check_group(terminals, outer).map_err(diagnose)?;
check_group_repetition(terminals, outer, repetition.bounds())
check_group_repetition(terminals, outer, repetition.cardinality())
.map_err(diagnose)?;
}
recurse(expression, tokens.iter(), outer)?;
Expand Down Expand Up @@ -656,10 +656,10 @@ fn group<'t>(tokenized: &Tokenized<'t>) -> Result<(), RuleError<'t>> {
fn check_group_repetition<'t>(
terminals: Terminals<&Token>,
outer: Outer<'t, 't>,
bounds: (usize, Option<usize>),
cardinality: Cardinality<usize>,
) -> Result<(), CorrelatedError> {
let Outer { left, .. } = outer;
let (lower, _) = bounds;
let lower = cardinality.lower();
match terminals.map(|token| (token, token.kind())) {
// The repetition is preceded by a termination; disallow rooted sub-globs with a zero
// lower bound.
Expand Down Expand Up @@ -723,24 +723,28 @@ fn group<'t>(tokenized: &Tokenized<'t>) -> Result<(), RuleError<'t>> {
recurse(tokenized.expression(), tokenized.tokens(), Outer::default())
}

/// Rejects repetition tokens whose closed bounds can never match.
///
/// A repetition with an upper bound of zero or an upper bound smaller than
/// its lower bound describes an impossible cardinality; such tokens produce
/// an `IncompatibleBounds` rule error spanned at the offending token.
fn bounds<'t>(tokenized: &Tokenized<'t>) -> Result<(), RuleError<'t>> {
    // Walk the token tree looking for the first repetition with an
    // incoherent closed bound.
    let incompatible = tokenized.walk().find(|(_, token)| {
        if let TokenKind::Repetition(ref repetition) = token.kind() {
            let (lower, upper) = repetition.bounds();
            // An open upper bound (`None`) is always compatible.
            upper.map_or(false, |upper| upper == 0 || upper < lower)
        }
        else {
            false
        }
    });
    match incompatible {
        Some((_, token)) => Err(RuleError::new(
            tokenized.expression().clone(),
            RuleErrorKind::IncompatibleBounds,
            CompositeSpan::spanned("here", *token.annotation()),
        )),
        None => Ok(()),
    }
}
// TODO: `Cardinality` masks this error by ordering its inputs. Either glob expressions allow
// closed repetition bounds to be in any order, or this rule must more directly check the
// relevant fields. This is a great illustration of why differentiating trees (syntactic vs.
// semantic) can be useful!
//fn cardinality<'t>(tokenized: &Tokenized<'t>) -> Result<(), RuleError<'t>> {
// if let Some((_, token)) = tokenized.walk().find(|(_, token)| match token.kind() {
// TokenKind::Repetition(ref repetition) => {
// let (lower, upper) = repetition.bounds();
// upper.map_or(false, |upper| upper < lower || upper == 0)
// },
// _ => false,
// }) {
// Err(RuleError::new(
// tokenized.expression().clone(),
// RuleErrorKind::IncompatibleBounds,
// CompositeSpan::spanned("here", *token.annotation()),
// ))
// }
// else {
// Ok(())
// }
//}

fn size<'t>(tokenized: &Tokenized<'t>) -> Result<(), RuleError<'t>> {
if let Some((_, token)) = tokenized
Expand All @@ -750,7 +754,7 @@ fn size<'t>(tokenized: &Tokenized<'t>) -> Result<(), RuleError<'t>> {
// revisiting the same tokens to recompute their local variance.
.find(|(_, token)| {
token
.variance::<InvariantSize>()
.variance::<Size>()
.as_invariance()
.map_or(false, |size| *size >= MAX_INVARIANT_SIZE)
})
Expand Down
Loading

0 comments on commit d54bd06

Please sign in to comment.