diff --git a/Cargo.toml b/Cargo.toml index 0d88b863..5459a734 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "cssparser" -version = "0.13.7" +version = "0.14.0" authors = [ "Simon Sapin " ] description = "Rust implementation of CSS Syntax Level 3" diff --git a/src/color.rs b/src/color.rs index e716f5e3..d0da405c 100644 --- a/src/color.rs +++ b/src/color.rs @@ -5,7 +5,7 @@ use std::fmt; use std::f32::consts::PI; -use super::{Token, Parser, ToCss}; +use super::{Token, Parser, ToCss, ParseError, BasicParseError}; use tokenizer::NumericValue; #[cfg(feature = "serde")] @@ -141,46 +141,48 @@ impl Color { /// Parse a value, per CSS Color Module Level 3. /// /// FIXME(#2) Deprecated CSS2 System Colors are not supported yet. - pub fn parse(input: &mut Parser) -> Result { - match try!(input.next()) { - Token::Hash(value) | Token::IDHash(value) => { + pub fn parse<'i, 't>(input: &mut Parser<'i, 't>) -> Result> { + let token = try!(input.next()); + match token { + Token::Hash(ref value) | Token::IDHash(ref value) => { Color::parse_hash(value.as_bytes()) }, - Token::Ident(value) => parse_color_keyword(&*value), - Token::Function(name) => { - input.parse_nested_block(|arguments| { + Token::Ident(ref value) => parse_color_keyword(&*value), + Token::Function(ref name) => { + return input.parse_nested_block(|arguments| { parse_color_function(&*name, arguments) - }) + .map_err(|e| ParseError::Basic(e)) + }).map_err(ParseError::<()>::basic); } _ => Err(()) - } + }.map_err(|()| BasicParseError::UnexpectedToken(token)) } /// Parse a color hash, without the leading '#' character. #[inline] pub fn parse_hash(value: &[u8]) -> Result { match value.len() { - 8 => rgba( + 8 => Ok(rgba( try!(from_hex(value[0])) * 16 + try!(from_hex(value[1])), try!(from_hex(value[2])) * 16 + try!(from_hex(value[3])), try!(from_hex(value[4])) * 16 + try!(from_hex(value[5])), - try!(from_hex(value[6])) * 16 + try!(from_hex(value[7])), + try!(from_hex(value[6])) * 16 + try!(from_hex(value[7]))), ), - 6 => rgb( + 6 => Ok(rgb( try!(from_hex(value[0])) * 16 + try!(from_hex(value[1])), try!(from_hex(value[2])) * 16 + try!(from_hex(value[3])), - try!(from_hex(value[4])) * 16 + try!(from_hex(value[5])), + try!(from_hex(value[4])) * 16 + try!(from_hex(value[5]))), ), - 4 => rgba( + 4 => Ok(rgba( try!(from_hex(value[0])) * 17, try!(from_hex(value[1])) * 17, try!(from_hex(value[2])) * 17, - try!(from_hex(value[3])) * 17, + try!(from_hex(value[3])) * 17), ), - 3 => rgb( + 3 => Ok(rgb( try!(from_hex(value[0])) * 17, try!(from_hex(value[1])) * 17, - try!(from_hex(value[2])) * 17, + try!(from_hex(value[2])) * 17), ), _ => Err(()) } @@ -190,13 +192,13 @@ impl Color { #[inline] -fn rgb(red: u8, green: u8, blue: u8) -> Result { +fn rgb(red: u8, green: u8, blue: u8) -> Color { rgba(red, green, blue, 255) } #[inline] -fn rgba(red: u8, green: u8, blue: u8, alpha: u8) -> Result { - Ok(Color::RGBA(RGBA::new(red, green, blue, alpha))) +fn rgba(red: u8, green: u8, blue: u8, alpha: u8) -> Color { + Color::RGBA(RGBA::new(red, green, blue, alpha)) } @@ -410,11 +412,11 @@ fn clamp_floor_256_f32(val: f32) -> u8 { } #[inline] -fn parse_color_function(name: &str, arguments: &mut Parser) -> Result { +fn parse_color_function<'i, 't>(name: &str, arguments: &mut Parser<'i, 't>) -> Result> { let (red, green, blue, uses_commas) = match_ignore_ascii_case! 
{ name, "rgb" | "rgba" => parse_rgb_components_rgb(arguments)?, "hsl" | "hsla" => parse_rgb_components_hsl(arguments)?, - _ => return Err(()) + _ => return Err(BasicParseError::UnexpectedToken(Token::Ident(name.to_owned().into()))), }; let alpha = if !arguments.is_exhausted() { @@ -423,7 +425,7 @@ fn parse_color_function(name: &str, arguments: &mut Parser) -> Result } else { match try!(arguments.next()) { Token::Delim('/') => {}, - _ => return Err(()) + t => return Err(BasicParseError::UnexpectedToken(t)), }; }; let token = try!(arguments.next()); @@ -434,8 +436,8 @@ fn parse_color_function(name: &str, arguments: &mut Parser) -> Result Token::Percentage(ref v) => { clamp_unit_f32(v.unit_value) } - _ => { - return Err(()) + t => { + return Err(BasicParseError::UnexpectedToken(t)) } } } else { @@ -443,12 +445,12 @@ fn parse_color_function(name: &str, arguments: &mut Parser) -> Result }; try!(arguments.expect_exhausted()); - rgba(red, green, blue, alpha) + Ok(rgba(red, green, blue, alpha)) } #[inline] -fn parse_rgb_components_rgb(arguments: &mut Parser) -> Result<(u8, u8, u8, bool), ()> { +fn parse_rgb_components_rgb<'i, 't>(arguments: &mut Parser<'i, 't>) -> Result<(u8, u8, u8, bool), BasicParseError<'i>> { let red: u8; let green: u8; let blue: u8; @@ -465,7 +467,7 @@ fn parse_rgb_components_rgb(arguments: &mut Parser) -> Result<(u8, u8, u8, bool) uses_commas = true; try!(arguments.expect_number()) } - _ => return Err(()) + t => return Err(BasicParseError::UnexpectedToken(t)) }); if uses_commas { try!(arguments.expect_comma()); @@ -480,36 +482,38 @@ fn parse_rgb_components_rgb(arguments: &mut Parser) -> Result<(u8, u8, u8, bool) uses_commas = true; try!(arguments.expect_percentage()) } - _ => return Err(()) + t => return Err(BasicParseError::UnexpectedToken(t)) }); if uses_commas { try!(arguments.expect_comma()); } blue = clamp_unit_f32(try!(arguments.expect_percentage())); } - _ => return Err(()) + t => return Err(BasicParseError::UnexpectedToken(t)) }; return Ok((red, green, blue, uses_commas)); } #[inline] -fn parse_rgb_components_hsl(arguments: &mut Parser) -> Result<(u8, u8, u8, bool), ()> { +fn parse_rgb_components_hsl<'i, 't>(arguments: &mut Parser<'i, 't>) -> Result<(u8, u8, u8, bool), BasicParseError<'i>> { let mut uses_commas = false; // Hue given as an angle // https://drafts.csswg.org/css-values/#angles - let hue_degrees = match try!(arguments.next()) { - Token::Number(NumericValue { value: v, .. }) => v, - Token::Dimension(NumericValue { value: v, .. }, unit) => { + let token = try!(arguments.next()); + let hue_degrees = match token { + Token::Number(NumericValue { value: v, .. }) => Ok(v), + Token::Dimension(NumericValue { value: v, .. }, ref unit) => { match_ignore_ascii_case! { &*unit, - "deg" => v, - "grad" => v * 360. / 400., - "rad" => v * 360. / (2. * PI), - "turn" => v * 360., - _ => return Err(()) + "deg" => Ok(v), + "grad" => Ok(v * 360. / 400.), + "rad" => Ok(v * 360. / (2. * PI)), + "turn" => Ok(v * 360.), + _ => Err(()), } } - _ => return Err(()) + t => return Err(BasicParseError::UnexpectedToken(t)) }; + let hue_degrees = try!(hue_degrees.map_err(|()| BasicParseError::UnexpectedToken(token))); // Subtract an integer before rounding, to avoid some rounding errors: let hue_normalized_degrees = hue_degrees - 360. 
* (hue_degrees / 360.).floor(); let hue = hue_normalized_degrees / 360.; @@ -522,7 +526,7 @@ fn parse_rgb_components_hsl(arguments: &mut Parser) -> Result<(u8, u8, u8, bool) uses_commas = true; try!(arguments.expect_percentage()) } - _ => return Err(()) + t => return Err(BasicParseError::UnexpectedToken(t)) }; let saturation = saturation.max(0.).min(1.); diff --git a/src/lib.rs b/src/lib.rs index 4df99317..e7ab4b7d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -89,7 +89,7 @@ pub use from_bytes::{stylesheet_encoding, EncodingSupport}; pub use color::{RGBA, Color, parse_color_keyword}; pub use nth::parse_nth; pub use serializer::{ToCss, CssStringWriter, serialize_identifier, serialize_string, TokenSerializationType}; -pub use parser::{Parser, Delimiter, Delimiters, SourcePosition}; +pub use parser::{Parser, Delimiter, Delimiters, SourcePosition, ParseError, BasicParseError, ParserInput}; pub use unicode_range::UnicodeRange; // For macros diff --git a/src/nth.rs b/src/nth.rs index ec735bc2..680ab5aa 100644 --- a/src/nth.rs +++ b/src/nth.rs @@ -4,76 +4,98 @@ use std::ascii::AsciiExt; -use super::{Token, Parser}; +use super::{Token, Parser, BasicParseError}; /// Parse the *An+B* notation, as found in the `:nth-child()` selector. /// The input is typically the arguments of a function, /// in which case the caller needs to check if the arguments’ parser is exhausted. /// Return `Ok((A, B))`, or `Err(())` for a syntax error. -pub fn parse_nth(input: &mut Parser) -> Result<(i32, i32), ()> { - match try!(input.next()) { - Token::Number(value) => Ok((0, try!(value.int_value.ok_or(())) as i32)), - Token::Dimension(value, unit) => { - let a = try!(value.int_value.ok_or(())) as i32; - match_ignore_ascii_case! { &unit, - "n" => parse_b(input, a), - "n-" => parse_signless_b(input, a, -1), - _ => Ok((a, try!(parse_n_dash_digits(&*unit)))) +pub fn parse_nth<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(i32, i32), BasicParseError<'i>> { + let token = try!(input.next()); + match token { + Token::Number(ref value) => { + match value.int_value { + Some(v) => Ok((0, v as i32)), + None => Err(()), } } - Token::Ident(value) => { + Token::Dimension(value, ref unit) => { + match value.int_value { + Some(v) => { + let a = v as i32; + match_ignore_ascii_case! { + &unit, + "n" => Ok(try!(parse_b(input, a))), + "n-" => Ok(try!(parse_signless_b(input, a, -1))), + _ => { + parse_n_dash_digits(&*unit).map(|val| (a, val)) + } + } + } + None => Err(()), + } + } + Token::Ident(ref value) => { match_ignore_ascii_case! { &value, "even" => Ok((2, 0)), "odd" => Ok((2, 1)), - "n" => parse_b(input, 1), - "-n" => parse_b(input, -1), - "n-" => parse_signless_b(input, 1, -1), - "-n-" => parse_signless_b(input, -1, -1), + "n" => Ok(try!(parse_b(input, 1))), + "-n" => Ok(try!(parse_b(input, -1))), + "n-" => Ok(try!(parse_signless_b(input, 1, -1))), + "-n-" => Ok(try!(parse_signless_b(input, -1, -1))), _ => if value.starts_with("-") { - Ok((-1, try!(parse_n_dash_digits(&value[1..])))) + parse_n_dash_digits(&value[1..]).map(|v| (-1, v)) } else { - Ok((1, try!(parse_n_dash_digits(&*value)))) + parse_n_dash_digits(&*value).map(|v| (1, v)) } } } Token::Delim('+') => match try!(input.next_including_whitespace()) { Token::Ident(value) => { match_ignore_ascii_case! 
{ &value, - "n" => parse_b(input, 1), - "n-" => parse_signless_b(input, 1, -1), - _ => Ok((1, try!(parse_n_dash_digits(&*value)))) + "n" => Ok(try!(parse_b(input, 1))), + "n-" => Ok(try!(parse_signless_b(input, 1, -1))), + _ => parse_n_dash_digits(&*value).map(|v| (1, v)) } } - _ => Err(()) + t => return Err(BasicParseError::UnexpectedToken(t)), }, - _ => Err(()) - } + _ => Err(()), + }.map_err(|()| BasicParseError::UnexpectedToken(token)) } -fn parse_b(input: &mut Parser, a: i32) -> Result<(i32, i32), ()> { +fn parse_b<'i, 't>(input: &mut Parser<'i, 't>, a: i32) -> Result<(i32, i32), BasicParseError<'i>> { let start_position = input.position(); - match input.next() { - Ok(Token::Delim('+')) => parse_signless_b(input, a, 1), - Ok(Token::Delim('-')) => parse_signless_b(input, a, -1), + let token = input.next(); + match token { + Ok(Token::Delim('+')) => Ok(try!(parse_signless_b(input, a, 1))), + Ok(Token::Delim('-')) => Ok(try!(parse_signless_b(input, a, -1))), Ok(Token::Number(ref value)) if value.has_sign => { - Ok((a, try!(value.int_value.ok_or(())) as i32)) + match value.int_value { + Some(v) => Ok((a, v as i32)), + None => Err(()), + } } _ => { input.reset(start_position); Ok((a, 0)) } - } + }.map_err(|()| BasicParseError::UnexpectedToken(token.unwrap())) } -fn parse_signless_b(input: &mut Parser, a: i32, b_sign: i32) -> Result<(i32, i32), ()> { - match try!(input.next()) { +fn parse_signless_b<'i, 't>(input: &mut Parser<'i, 't>, a: i32, b_sign: i32) -> Result<(i32, i32), BasicParseError<'i>> { + let token = try!(input.next()); + match token { Token::Number(ref value) if !value.has_sign => { - Ok((a, b_sign * (try!(value.int_value.ok_or(())) as i32))) + match value.int_value { + Some(v) => Ok((a, b_sign * v as i32)), + None => Err(()), + } } _ => Err(()) - } + }.map_err(|()| BasicParseError::UnexpectedToken(token)) } fn parse_n_dash_digits(string: &str) -> Result { diff --git a/src/parser.rs b/src/parser.rs index 9c10ea15..a92a3d5b 100644 --- a/src/parser.rs +++ b/src/parser.rs @@ -6,7 +6,6 @@ use std::ops::Range; use std::ascii::AsciiExt; use std::ops::BitOr; use std::borrow::Cow; -use std::ops; use tokenizer::{self, Token, NumericValue, PercentageValue, Tokenizer, SourceLocation}; @@ -21,47 +20,61 @@ pub struct SourcePosition { at_start_of: Option, } - -/// Like std::borrow::Cow, except the borrowed variant contains a mutable -/// reference. -enum MaybeOwned<'a, T: 'a> { - Owned(T), - Borrowed(&'a mut T), +/// The funamental parsing errors that can be triggered by built-in parsing routines. +#[derive(Clone, Debug, PartialEq)] +pub enum BasicParseError<'a> { + /// An unexpected token was encountered. + UnexpectedToken(Token<'a>), + /// A particular token was expected but not found. + ExpectedToken(Token<'a>), + /// The end of the input was encountered unexpectedly. + EndOfInput, + /// An `@` rule was encountered that was invalid. + AtRuleInvalid, + /// A qualified rule was encountered that was invalid. 
+ QualifiedRuleInvalid, } -impl<'a, T> ops::Deref for MaybeOwned<'a, T> { - type Target = T; - - fn deref<'b>(&'b self) -> &'b T { - match *self { - MaybeOwned::Owned(ref t) => t, - MaybeOwned::Borrowed(ref pointer) => &**pointer, - } +impl<'a, T> From> for ParseError<'a, T> { + fn from(this: BasicParseError<'a>) -> ParseError<'a, T> { + ParseError::Basic(this) } } -impl<'a, T> ops::DerefMut for MaybeOwned<'a, T> { - fn deref_mut<'b>(&'b mut self) -> &'b mut T { - match *self { - MaybeOwned::Owned(ref mut t) => t, - MaybeOwned::Borrowed(ref mut pointer) => &mut **pointer, +/// Extensible parse errors that can be encountered by client parsing implementations. +#[derive(Clone, Debug, PartialEq)] +pub enum ParseError<'a, T: 'a> { + /// A fundamental parse error from a built-in parsing routine. + Basic(BasicParseError<'a>), + /// A parse error reported by downstream consumer code. + Custom(T), +} + +impl<'a, T> ParseError<'a, T> { + /// Extract the fundamental parse error from an extensible error. + pub fn basic(self) -> BasicParseError<'a> { + match self { + ParseError::Basic(e) => e, + ParseError::Custom(_) => panic!("Not a basic parse error"), } } } -impl<'a, T> Clone for MaybeOwned<'a, T> where T: Clone { - fn clone(&self) -> MaybeOwned<'a, T> { - MaybeOwned::Owned((**self).clone()) +/// The owned input for a parser. +pub struct ParserInput<'t>(Tokenizer<'t>); + +impl<'t> ParserInput<'t> { + /// Create a new input for a parser. + pub fn new(input: &'t str) -> ParserInput<'t> { + ParserInput(Tokenizer::new(input)) } } - /// A CSS parser that borrows its `&str` input, /// yields `Token`s, /// and keeps track of nested blocks and functions. -#[derive(Clone)] pub struct Parser<'i: 't, 't> { - tokenizer: MaybeOwned<'t, Tokenizer<'i>>, + tokenizer: &'t mut ParserInput<'i>, /// If `Some(_)`, .parse_nested_block() can be called. at_start_of: Option, /// For parsers from `parse_until` or `parse_nested_block` @@ -164,12 +177,12 @@ impl Delimiters { } } -impl<'i, 't> Parser<'i, 't> { +impl<'i: 't, 't> Parser<'i, 't> { /// Create a new parser #[inline] - pub fn new(input: &'i str) -> Parser<'i, 'i> { + pub fn new(input: &'t mut ParserInput<'i>) -> Parser<'i, 't> { Parser { - tokenizer: MaybeOwned::Owned(Tokenizer::new(input)), + tokenizer: input, at_start_of: None, stop_before: Delimiter::None, } @@ -188,13 +201,12 @@ impl<'i, 't> Parser<'i, 't> { /// /// This ignores whitespace and comments. #[inline] - pub fn expect_exhausted(&mut self) -> Result<(), ()> { + pub fn expect_exhausted(&mut self) -> Result<(), BasicParseError<'i>> { let start_position = self.position(); let result = match self.next() { - Err(()) => Ok(()), - Ok(_) => { - Err(()) - } + Err(BasicParseError::EndOfInput) => Ok(()), + Err(e) => unreachable!("Unexpected error encountered: {:?}", e), + Ok(t) => Err(BasicParseError::UnexpectedToken(t)), }; self.reset(start_position); result @@ -206,7 +218,7 @@ impl<'i, 't> Parser<'i, 't> { #[inline] pub fn position(&self) -> SourcePosition { SourcePosition { - position: self.tokenizer.position(), + position: (self.tokenizer.0).position(), at_start_of: self.at_start_of, } } @@ -217,35 +229,35 @@ impl<'i, 't> Parser<'i, 't> { /// Should only be used with `SourcePosition` values from the same `Parser` instance. #[inline] pub fn reset(&mut self, new_position: SourcePosition) { - self.tokenizer.reset(new_position.position); + (self.tokenizer.0).reset(new_position.position); self.at_start_of = new_position.at_start_of; } /// Start looking for `var()` functions. 
(See the `.seen_var_functions()` method.) #[inline] pub fn look_for_var_functions(&mut self) { - self.tokenizer.look_for_var_functions() + (self.tokenizer.0).look_for_var_functions() } /// Return whether a `var()` function has been seen by the tokenizer since /// either `look_for_var_functions` was called, and stop looking. #[inline] pub fn seen_var_functions(&mut self) -> bool { - self.tokenizer.seen_var_functions() + (self.tokenizer.0).seen_var_functions() } /// Start looking for viewport percentage lengths. (See the `seen_viewport_percentages` /// method.) #[inline] pub fn look_for_viewport_percentages(&mut self) { - self.tokenizer.look_for_viewport_percentages() + (self.tokenizer.0).look_for_viewport_percentages() } /// Return whether a `vh`, `vw`, `vmin`, or `vmax` dimension has been seen by the tokenizer /// since `look_for_viewport_percentages` was called, and stop looking. #[inline] pub fn seen_viewport_percentages(&mut self) -> bool { - self.tokenizer.seen_viewport_percentages() + (self.tokenizer.0).seen_viewport_percentages() } /// Execute the given closure, passing it the parser. @@ -266,25 +278,25 @@ impl<'i, 't> Parser<'i, 't> { /// Return a slice of the CSS input #[inline] pub fn slice(&self, range: Range) -> &'i str { - self.tokenizer.slice(range.start.position..range.end.position) + (self.tokenizer.0).slice(range.start.position..range.end.position) } /// Return a slice of the CSS input, from the given position to the current one. #[inline] pub fn slice_from(&self, start_position: SourcePosition) -> &'i str { - self.tokenizer.slice_from(start_position.position) + (self.tokenizer.0).slice_from(start_position.position) } /// Return the line and column number within the input for the current position. #[inline] pub fn current_source_location(&self) -> SourceLocation { - self.tokenizer.current_source_location() + (self.tokenizer.0).current_source_location() } /// Return the line and column number within the input for the given position. #[inline] pub fn source_location(&self, target: SourcePosition) -> SourceLocation { - self.tokenizer.source_location(target.position) + (self.tokenizer.0).source_location(target.position) } /// Return the next token in the input that is neither whitespace or a comment, @@ -298,7 +310,7 @@ impl<'i, 't> Parser<'i, 't> { /// See the `Parser::parse_nested_block` method to parse the content of functions or blocks. /// /// This only returns a closing token when it is unmatched (and therefore an error). - pub fn next(&mut self) -> Result, ()> { + pub fn next(&mut self) -> Result, BasicParseError<'i>> { loop { match self.next_including_whitespace_and_comments() { Ok(Token::WhiteSpace(_)) | Ok(Token::Comment(_)) => {}, @@ -308,7 +320,7 @@ impl<'i, 't> Parser<'i, 't> { } /// Same as `Parser::next`, but does not skip whitespace tokens. - pub fn next_including_whitespace(&mut self) -> Result, ()> { + pub fn next_including_whitespace(&mut self) -> Result, BasicParseError<'i>> { loop { match self.next_including_whitespace_and_comments() { Ok(Token::Comment(_)) => {}, @@ -323,14 +335,15 @@ impl<'i, 't> Parser<'i, 't> { /// where comments are preserved. /// When parsing higher-level values, per the CSS Syntax specification, /// comments should always be ignored between tokens. 
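For reference, a minimal usage sketch of the new `ParserInput`/`Parser` split and of the error type now returned by `next()`; `parse_px` is an illustrative helper, not part of this patch, and the `Token::Dimension` shape is the one used by this version of the crate:

use cssparser::{BasicParseError, Parser, ParserInput, Token};

// Hypothetical helper: read one token and accept only a `px` dimension.
fn parse_px<'i>(css: &'i str) -> Result<f32, BasicParseError<'i>> {
    // The tokenizer state now lives in `ParserInput`; `Parser::new` borrows it mutably.
    let mut input = ParserInput::new(css);
    let mut parser = Parser::new(&mut input);
    match parser.next()? {
        Token::Dimension(ref value, ref unit) if &**unit == "px" => Ok(value.value),
        // `next()` and the `expect_*` helpers now report a `BasicParseError`
        // (carrying the offending token) instead of `()`.
        t => Err(BasicParseError::UnexpectedToken(t)),
    }
}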
- pub fn next_including_whitespace_and_comments(&mut self) -> Result, ()> { + pub fn next_including_whitespace_and_comments(&mut self) -> Result, BasicParseError<'i>> { if let Some(block_type) = self.at_start_of.take() { - consume_until_end_of_block(block_type, &mut *self.tokenizer); + consume_until_end_of_block(block_type, &mut self.tokenizer.0); } - if self.stop_before.contains(Delimiters::from_byte(self.tokenizer.next_byte())) { - return Err(()) + let byte = (self.tokenizer.0).next_byte(); + if self.stop_before.contains(Delimiters::from_byte(byte)) { + return Err(BasicParseError::EndOfInput) } - let token = try!(self.tokenizer.next()); + let token = try!((self.tokenizer.0).next().map_err(|()| BasicParseError::EndOfInput)); if let Some(block_type) = BlockType::opening(&token) { self.at_start_of = Some(block_type); } @@ -342,8 +355,8 @@ impl<'i, 't> Parser<'i, 't> { /// /// This can help tell e.g. `color: green;` from `color: green 4px;` #[inline] - pub fn parse_entirely(&mut self, parse: F) -> Result - where F: FnOnce(&mut Parser<'i, 't>) -> Result { + pub fn parse_entirely(&mut self, parse: F) -> Result> + where F: FnOnce(&mut Parser<'i, 't>) -> Result> { let result = parse(self); try!(self.expect_exhausted()); result @@ -360,13 +373,13 @@ impl<'i, 't> Parser<'i, 't> { /// This method retuns `Err(())` the first time that a closure call does, /// or if a closure call leaves some input before the next comma or the end of the input. #[inline] - pub fn parse_comma_separated(&mut self, mut parse_one: F) -> Result, ()> - where F: FnMut(&mut Parser) -> Result { + pub fn parse_comma_separated(&mut self, mut parse_one: F) -> Result, ParseError<'i, E>> + where F: for<'tt> FnMut(&mut Parser<'i, 'tt>) -> Result> { let mut values = vec![]; loop { - values.push(try!(self.parse_until_before(Delimiter::Comma, |parser| parse_one(parser)))); + values.push(try!(self.parse_until_before(Delimiter::Comma, &mut parse_one))); match self.next() { - Err(()) => return Ok(values), + Err(_) => return Ok(values), Ok(Token::Comma) => continue, Ok(_) => unreachable!(), } @@ -385,33 +398,9 @@ impl<'i, 't> Parser<'i, 't> { /// /// The result is overridden to `Err(())` if the closure leaves some input before that point. #[inline] - pub fn parse_nested_block(&mut self, parse: F) -> Result - where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result { - let block_type = self.at_start_of.take().expect("\ - A nested parser can only be created when a Function, \ - ParenthesisBlock, SquareBracketBlock, or CurlyBracketBlock \ - token was just consumed.\ - "); - let closing_delimiter = match block_type { - BlockType::CurlyBracket => ClosingDelimiter::CloseCurlyBracket, - BlockType::SquareBracket => ClosingDelimiter::CloseSquareBracket, - BlockType::Parenthesis => ClosingDelimiter::CloseParenthesis, - }; - let result; - // Introduce a new scope to limit duration of nested_parser’s borrow - { - let mut nested_parser = Parser { - tokenizer: MaybeOwned::Borrowed(&mut *self.tokenizer), - at_start_of: None, - stop_before: closing_delimiter, - }; - result = nested_parser.parse_entirely(parse); - if let Some(block_type) = nested_parser.at_start_of { - consume_until_end_of_block(block_type, &mut *nested_parser.tokenizer); - } - } - consume_until_end_of_block(block_type, &mut *self.tokenizer); - result + pub fn parse_nested_block(&mut self, parse: F) -> Result > + where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result> { + parse_nested_block(self, parse) } /// Limit parsing to until a given delimiter. (E.g. 
a semicolon for a property value.) @@ -422,37 +411,10 @@ impl<'i, 't> Parser<'i, 't> { /// /// The result is overridden to `Err(())` if the closure leaves some input before that point. #[inline] - pub fn parse_until_before(&mut self, delimiters: Delimiters, parse: F) - -> Result - where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result { - let delimiters = self.stop_before | delimiters; - let result; - // Introduce a new scope to limit duration of nested_parser’s borrow - { - let mut delimited_parser = Parser { - tokenizer: MaybeOwned::Borrowed(&mut *self.tokenizer), - at_start_of: self.at_start_of.take(), - stop_before: delimiters, - }; - result = delimited_parser.parse_entirely(parse); - if let Some(block_type) = delimited_parser.at_start_of { - consume_until_end_of_block(block_type, &mut *delimited_parser.tokenizer); - } - } - // FIXME: have a special-purpose tokenizer method for this that does less work. - loop { - if delimiters.contains(Delimiters::from_byte(self.tokenizer.next_byte())) { - break - } - if let Ok(token) = self.tokenizer.next() { - if let Some(block_type) = BlockType::opening(&token) { - consume_until_end_of_block(block_type, &mut *self.tokenizer); - } - } else { - break - } - } - result + pub fn parse_until_before(&mut self, delimiters: Delimiters, parse: F) + -> Result > + where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result> { + parse_until_before(self, delimiters, parse) } /// Like `parse_until_before`, but also consume the delimiter token. @@ -461,153 +423,150 @@ impl<'i, 't> Parser<'i, 't> { /// (e.g. if these is only one in the given set) /// or if it was there at all (as opposed to reaching the end of the input). #[inline] - pub fn parse_until_after(&mut self, delimiters: Delimiters, parse: F) - -> Result - where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result { - let result = self.parse_until_before(delimiters, parse); - let next_byte = self.tokenizer.next_byte(); - if next_byte.is_some() && !self.stop_before.contains(Delimiters::from_byte(next_byte)) { - debug_assert!(delimiters.contains(Delimiters::from_byte(next_byte))); - self.tokenizer.advance(1); - if next_byte == Some(b'{') { - consume_until_end_of_block(BlockType::CurlyBracket, &mut *self.tokenizer); - } - } - result + pub fn parse_until_after(&mut self, delimiters: Delimiters, parse: F) + -> Result > + where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result> { + parse_until_after(self, delimiters, parse) } /// Parse a and return its value. #[inline] - pub fn expect_whitespace(&mut self) -> Result<&'i str, ()> { + pub fn expect_whitespace(&mut self) -> Result<&'i str, BasicParseError<'i>> { match try!(self.next_including_whitespace()) { Token::WhiteSpace(value) => Ok(value), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a and return the unescaped value. #[inline] - pub fn expect_ident(&mut self) -> Result, ()> { + pub fn expect_ident(&mut self) -> Result, BasicParseError<'i>> { match try!(self.next()) { Token::Ident(value) => Ok(value), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a whose unescaped value is an ASCII-insensitive match for the given value. 
#[inline] - pub fn expect_ident_matching(&mut self, expected_value: &str) -> Result<(), ()> { + pub fn expect_ident_matching(&mut self, expected_value: &str) -> Result<(), BasicParseError<'i>> { match try!(self.next()) { Token::Ident(ref value) if value.eq_ignore_ascii_case(expected_value) => Ok(()), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a and return the unescaped value. #[inline] - pub fn expect_string(&mut self) -> Result, ()> { + pub fn expect_string(&mut self) -> Result, BasicParseError<'i>> { match try!(self.next()) { Token::QuotedString(value) => Ok(value), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse either a or a , and return the unescaped value. #[inline] - pub fn expect_ident_or_string(&mut self) -> Result, ()> { + pub fn expect_ident_or_string(&mut self) -> Result, BasicParseError<'i>> { match try!(self.next()) { Token::Ident(value) => Ok(value), Token::QuotedString(value) => Ok(value), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a and return the unescaped value. #[inline] - pub fn expect_url(&mut self) -> Result, ()> { + pub fn expect_url(&mut self) -> Result, BasicParseError<'i>> { match try!(self.next()) { Token::UnquotedUrl(value) => Ok(value), Token::Function(ref name) if name.eq_ignore_ascii_case("url") => { - self.parse_nested_block(|input| input.expect_string()) + self.parse_nested_block(|input| input.expect_string() + .map_err(|e| ParseError::Basic(e))) + .map_err(ParseError::<()>::basic) }, - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse either a or a , and return the unescaped value. #[inline] - pub fn expect_url_or_string(&mut self) -> Result, ()> { + pub fn expect_url_or_string(&mut self) -> Result, BasicParseError<'i>> { match try!(self.next()) { Token::UnquotedUrl(value) => Ok(value), Token::QuotedString(value) => Ok(value), Token::Function(ref name) if name.eq_ignore_ascii_case("url") => { - self.parse_nested_block(|input| input.expect_string()) + self.parse_nested_block(|input| input.expect_string().map_err(|e| ParseError::Basic(e))) + .map_err(ParseError::<()>::basic) }, - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a and return the integer value. #[inline] - pub fn expect_number(&mut self) -> Result { + pub fn expect_number(&mut self) -> Result> { match try!(self.next()) { Token::Number(NumericValue { value, .. }) => Ok(value), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a that does not have a fractional part, and return the integer value. #[inline] - pub fn expect_integer(&mut self) -> Result { - match try!(self.next()) { - Token::Number(NumericValue { int_value, .. }) => int_value.ok_or(()), - _ => Err(()) + pub fn expect_integer(&mut self) -> Result> { + let token = try!(self.next()); + match token { + Token::Number(NumericValue { int_value: Some(int_value), .. }) => { + Ok(int_value) + } + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a and return the value. /// `0%` and `100%` map to `0.0` and `1.0` (not `100.0`), respectively. #[inline] - pub fn expect_percentage(&mut self) -> Result { + pub fn expect_percentage(&mut self) -> Result> { match try!(self.next()) { Token::Percentage(PercentageValue { unit_value, .. }) => Ok(unit_value), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a `:` . 
#[inline] - pub fn expect_colon(&mut self) -> Result<(), ()> { + pub fn expect_colon(&mut self) -> Result<(), BasicParseError<'i>> { match try!(self.next()) { Token::Colon => Ok(()), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a `;` . #[inline] - pub fn expect_semicolon(&mut self) -> Result<(), ()> { + pub fn expect_semicolon(&mut self) -> Result<(), BasicParseError<'i>> { match try!(self.next()) { Token::Semicolon => Ok(()), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a `,` . #[inline] - pub fn expect_comma(&mut self) -> Result<(), ()> { + pub fn expect_comma(&mut self) -> Result<(), BasicParseError<'i>> { match try!(self.next()) { Token::Comma => Ok(()), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } /// Parse a with the given value. #[inline] - pub fn expect_delim(&mut self, expected_value: char) -> Result<(), ()> { + pub fn expect_delim(&mut self, expected_value: char) -> Result<(), BasicParseError<'i>> { match try!(self.next()) { Token::Delim(value) if value == expected_value => Ok(()), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } @@ -615,10 +574,10 @@ impl<'i, 't> Parser<'i, 't> { /// /// If the result is `Ok`, you can then call the `Parser::parse_nested_block` method. #[inline] - pub fn expect_curly_bracket_block(&mut self) -> Result<(), ()> { + pub fn expect_curly_bracket_block(&mut self) -> Result<(), BasicParseError<'i>> { match try!(self.next()) { Token::CurlyBracketBlock => Ok(()), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } @@ -626,10 +585,10 @@ impl<'i, 't> Parser<'i, 't> { /// /// If the result is `Ok`, you can then call the `Parser::parse_nested_block` method. #[inline] - pub fn expect_square_bracket_block(&mut self) -> Result<(), ()> { + pub fn expect_square_bracket_block(&mut self) -> Result<(), BasicParseError<'i>> { match try!(self.next()) { Token::SquareBracketBlock => Ok(()), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } @@ -637,10 +596,10 @@ impl<'i, 't> Parser<'i, 't> { /// /// If the result is `Ok`, you can then call the `Parser::parse_nested_block` method. #[inline] - pub fn expect_parenthesis_block(&mut self) -> Result<(), ()> { + pub fn expect_parenthesis_block(&mut self) -> Result<(), BasicParseError<'i>> { match try!(self.next()) { Token::ParenthesisBlock => Ok(()), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } @@ -648,10 +607,10 @@ impl<'i, 't> Parser<'i, 't> { /// /// If the result is `Ok`, you can then call the `Parser::parse_nested_block` method. #[inline] - pub fn expect_function(&mut self) -> Result, ()> { + pub fn expect_function(&mut self) -> Result, BasicParseError<'i>> { match try!(self.next()) { Token::Function(name) => Ok(name), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } @@ -659,10 +618,10 @@ impl<'i, 't> Parser<'i, 't> { /// /// If the result is `Ok`, you can then call the `Parser::parse_nested_block` method. #[inline] - pub fn expect_function_matching(&mut self, expected_name: &str) -> Result<(), ()> { + pub fn expect_function_matching(&mut self, expected_name: &str) -> Result<(), BasicParseError<'i>> { match try!(self.next()) { Token::Function(ref name) if name.eq_ignore_ascii_case(expected_name) => Ok(()), - _ => Err(()) + t => Err(BasicParseError::UnexpectedToken(t)) } } @@ -670,24 +629,108 @@ impl<'i, 't> Parser<'i, 't> { /// /// See `Token::is_parse_error`. This also checks nested blocks and functions recursively. 
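The nested-block helpers are now generic over the caller's error type, so closures passed to `parse_nested_block` must return `ParseError` even when only built-in errors can occur. A hedged sketch of the resulting conversion dance, mirroring the `expect_url` change above (`first_function_arg_ident` is an illustrative helper, not part of the crate):

use std::borrow::Cow;
use cssparser::{BasicParseError, ParseError, Parser};

// Hypothetical helper: parse `name(ident)` and return the inner identifier.
fn first_function_arg_ident<'i, 't>(parser: &mut Parser<'i, 't>)
                                    -> Result<Cow<'i, str>, BasicParseError<'i>> {
    parser.expect_function()?;
    parser.parse_nested_block(|args| {
        // Inside the closure the error type is `ParseError<'i, E>`, so basic
        // errors have to be lifted with `ParseError::Basic` (or the `From` impl)...
        args.expect_ident().map_err(|e| ParseError::Basic(e))
    })
    // ...and unwrapped again with `ParseError::<()>::basic` when the caller
    // only deals in `BasicParseError`.
    .map_err(ParseError::<()>::basic)
}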
#[inline] - pub fn expect_no_error_token(&mut self) -> Result<(), ()> { + pub fn expect_no_error_token(&mut self) -> Result<(), BasicParseError<'i>> { loop { match self.next_including_whitespace_and_comments() { Ok(Token::Function(_)) | Ok(Token::ParenthesisBlock) | Ok(Token::SquareBracketBlock) | Ok(Token::CurlyBracketBlock) => { - try!(self.parse_nested_block(|input| input.expect_no_error_token())) + let result = self.parse_nested_block(|input| input.expect_no_error_token() + .map_err(|e| ParseError::Basic(e))); + try!(result.map_err(ParseError::<()>::basic)) } Ok(token) => { if token.is_parse_error() { - return Err(()) + //FIXME: maybe these should be separate variants of BasicParseError instead? + return Err(BasicParseError::UnexpectedToken(token)) } } - Err(()) => return Ok(()) + Err(_) => return Ok(()) + } + } + } +} + +pub fn parse_until_before<'i: 't, 't, F, T, E>(parser: &mut Parser<'i, 't>, + delimiters: Delimiters, + parse: F) + -> Result > + where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result> { + let delimiters = parser.stop_before | delimiters; + let result; + // Introduce a new scope to limit duration of nested_parser’s borrow + { + let mut delimited_parser = Parser { + tokenizer: parser.tokenizer, + at_start_of: parser.at_start_of.take(), + stop_before: delimiters, + }; + result = delimited_parser.parse_entirely(parse); + if let Some(block_type) = delimited_parser.at_start_of { + consume_until_end_of_block(block_type, &mut delimited_parser.tokenizer.0); + } + } + // FIXME: have a special-purpose tokenizer method for this that does less work. + loop { + if delimiters.contains(Delimiters::from_byte((parser.tokenizer.0).next_byte())) { + break + } + if let Ok(token) = (parser.tokenizer.0).next() { + if let Some(block_type) = BlockType::opening(&token) { + consume_until_end_of_block(block_type, &mut parser.tokenizer.0); } + } else { + break } } + result } +pub fn parse_until_after<'i: 't, 't, F, T, E>(parser: &mut Parser<'i, 't>, + delimiters: Delimiters, + parse: F) + -> Result > + where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result> { + let result = parser.parse_until_before(delimiters, parse); + let next_byte = (parser.tokenizer.0).next_byte(); + if next_byte.is_some() && !parser.stop_before.contains(Delimiters::from_byte(next_byte)) { + debug_assert!(delimiters.contains(Delimiters::from_byte(next_byte))); + (parser.tokenizer.0).advance(1); + if next_byte == Some(b'{') { + consume_until_end_of_block(BlockType::CurlyBracket, &mut parser.tokenizer.0); + } + } + result +} + +pub fn parse_nested_block<'i: 't, 't, F, T, E>(parser: &mut Parser<'i, 't>, parse: F) + -> Result > + where F: for<'tt> FnOnce(&mut Parser<'i, 'tt>) -> Result> { + let block_type = parser.at_start_of.take().expect("\ + A nested parser can only be created when a Function, \ + ParenthesisBlock, SquareBracketBlock, or CurlyBracketBlock \ + token was just consumed.\ + "); + let closing_delimiter = match block_type { + BlockType::CurlyBracket => ClosingDelimiter::CloseCurlyBracket, + BlockType::SquareBracket => ClosingDelimiter::CloseSquareBracket, + BlockType::Parenthesis => ClosingDelimiter::CloseParenthesis, + }; + let result; + // Introduce a new scope to limit duration of nested_parser’s borrow + { + let mut nested_parser = Parser { + tokenizer: parser.tokenizer, + at_start_of: None, + stop_before: closing_delimiter, + }; + result = nested_parser.parse_entirely(parse); + if let Some(block_type) = nested_parser.at_start_of { + consume_until_end_of_block(block_type, &mut nested_parser.tokenizer.0); 
+ } + } + consume_until_end_of_block(block_type, &mut parser.tokenizer.0); + result +} fn consume_until_end_of_block(block_type: BlockType, tokenizer: &mut Tokenizer) { let mut stack = vec![block_type]; diff --git a/src/rules_and_declarations.rs b/src/rules_and_declarations.rs index 9ccc140e..6d8470de 100644 --- a/src/rules_and_declarations.rs +++ b/src/rules_and_declarations.rs @@ -4,17 +4,18 @@ // https://drafts.csswg.org/css-syntax/#parsing +use parser::{parse_until_before, parse_until_after, parse_nested_block}; use std::ascii::AsciiExt; use std::ops::Range; use std::borrow::Cow; -use super::{Token, Parser, Delimiter, SourcePosition}; +use super::{Token, Parser, Delimiter, SourcePosition, ParseError, BasicParseError}; /// Parse `!important`. /// /// Typical usage is `input.try(parse_important).is_ok()` /// at the end of a `DeclarationParser::parse_value` implementation. -pub fn parse_important(input: &mut Parser) -> Result<(), ()> { +pub fn parse_important<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(), BasicParseError<'i>> { try!(input.expect_delim('!')); input.expect_ident_matching("important") } @@ -43,15 +44,17 @@ pub enum AtRuleType { OptionalBlock(P), } - /// A trait to provide various parsing of declaration values. /// /// For example, there could be different implementations for property declarations in style rules /// and for descriptors in `@font-face` rules. -pub trait DeclarationParser { +pub trait DeclarationParser<'i> { /// The finished representation of a declaration. type Declaration; + /// The error type that is included in the ParseError value that can be returned. + type Error: 'i; + /// Parse the value of a declaration with the given `name`. /// /// Return the finished representation for the declaration @@ -69,10 +72,10 @@ pub trait DeclarationParser { /// If `!important` can be used in a given context, /// `input.try(parse_important).is_ok()` should be used at the end /// of the implementation of this method and the result should be part of the return value. - fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result; + fn parse_value<'t>(&mut self, name: Cow<'i, str>, input: &mut Parser<'i, 't>) + -> Result>; } - /// A trait to provide various parsing of at-rules. /// /// For example, there could be different implementations for top-level at-rules @@ -82,13 +85,16 @@ pub trait DeclarationParser { /// Default implementations that reject all at-rules are provided, /// so that `impl AtRuleParser<(), ()> for ... {}` can be used /// for using `DeclarationListParser` to parse a declartions list with only qualified rules. -pub trait AtRuleParser { +pub trait AtRuleParser<'i> { /// The intermediate representation of an at-rule prelude. type Prelude; /// The finished representation of an at-rule. type AtRule; + /// The error type that is included in the ParseError value that can be returned. + type Error: 'i; + /// Parse the prelude of an at-rule with the given `name`. /// /// Return the representation of the prelude and the type of at-rule, @@ -106,11 +112,11 @@ pub trait AtRuleParser { /// The given `input` is a "delimited" parser /// that ends wherever the prelude should end. /// (Before the next semicolon, the next `{`, or the end of the current block.) 
- fn parse_prelude(&mut self, name: &str, input: &mut Parser) - -> Result, ()> { + fn parse_prelude<'t>(&mut self, name: Cow<'i, str>, input: &mut Parser<'i, 't>) + -> Result, ParseError<'i, Self::Error>> { let _ = name; let _ = input; - Err(()) + Err(ParseError::Basic(BasicParseError::AtRuleInvalid)) } /// Parse the content of a `{ /* ... */ }` block for the body of the at-rule. @@ -121,11 +127,11 @@ pub trait AtRuleParser { /// /// This is only called when `parse_prelude` returned `WithBlock` or `OptionalBlock`, /// and a block was indeed found following the prelude. - fn parse_block(&mut self, prelude: Self::Prelude, input: &mut Parser) - -> Result { + fn parse_block<'t>(&mut self, prelude: Self::Prelude, input: &mut Parser<'i, 't>) + -> Result> { let _ = prelude; let _ = input; - Err(()) + Err(ParseError::Basic(BasicParseError::AtRuleInvalid)) } /// An `OptionalBlock` prelude was followed by `;`. @@ -139,7 +145,6 @@ pub trait AtRuleParser { } } - /// A trait to provide various parsing of qualified rules. /// /// For example, there could be different implementations @@ -150,13 +155,16 @@ pub trait AtRuleParser { /// so that `impl QualifiedRuleParser<(), ()> for ... {}` can be used /// for example for using `RuleListParser` to parse a rule list with only at-rules /// (such as inside `@font-feature-values`). -pub trait QualifiedRuleParser { +pub trait QualifiedRuleParser<'i> { /// The intermediate representation of a qualified rule prelude. type Prelude; /// The finished representation of a qualified rule. type QualifiedRule; + /// The error type that is included in the ParseError value that can be returned. + type Error: 'i; + /// Parse the prelude of a qualified rule. For style rules, this is as Selector list. /// /// Return the representation of the prelude, @@ -166,9 +174,10 @@ pub trait QualifiedRuleParser { /// /// The given `input` is a "delimited" parser /// that ends where the prelude should end (before the next `{`). - fn parse_prelude(&mut self, input: &mut Parser) -> Result { + fn parse_prelude<'t>(&mut self, input: &mut Parser<'i, 't>) + -> Result> { let _ = input; - Err(()) + Err(ParseError::Basic(BasicParseError::QualifiedRuleInvalid)) } /// Parse the content of a `{ /* ... */ }` block for the body of the qualified rule. @@ -176,11 +185,11 @@ pub trait QualifiedRuleParser { /// Return the finished representation of the qualified rule /// as returned by `RuleListParser::next`, /// or `Err(())` to ignore the entire at-rule as invalid. - fn parse_block(&mut self, prelude: Self::Prelude, input: &mut Parser) - -> Result { + fn parse_block<'t>(&mut self, prelude: Self::Prelude, input: &mut Parser<'i, 't>) + -> Result> { let _ = prelude; let _ = input; - Err(()) + Err(ParseError::Basic(BasicParseError::QualifiedRuleInvalid)) } } @@ -195,8 +204,9 @@ pub struct DeclarationListParser<'i: 't, 't: 'a, 'a, P> { } -impl<'i, 't, 'a, I, P> DeclarationListParser<'i, 't, 'a, P> -where P: DeclarationParser + AtRuleParser { +impl<'i: 't, 't: 'a, 'a, I, P, E: 'i> DeclarationListParser<'i, 't, 'a, P> +where P: DeclarationParser<'i, Declaration = I, Error = E> + + AtRuleParser<'i, AtRule = I, Error = E> { /// Create a new `DeclarationListParser` for the given `input` and `parser`. /// /// Note that all CSS declaration lists can on principle contain at-rules. @@ -221,11 +231,12 @@ where P: DeclarationParser + AtRuleParser { /// `DeclarationListParser` is an iterator that yields `Ok(_)` for a valid declaration or at-rule /// or `Err(())` for an invalid one. 
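As a hedged sketch of the new trait shape (the `ColorsOnly` and `Unsupported` names below are illustrative, not part of the crate): implementations now choose an `Error` type, receive the property name as `Cow<'i, str>`, and return `ParseError<'i, Self::Error>` instead of `()`:

use std::borrow::Cow;
use cssparser::{AtRuleParser, Color, DeclarationParser, ParseError, Parser};

struct ColorsOnly;
enum Unsupported { Property }

impl<'i> DeclarationParser<'i> for ColorsOnly {
    type Declaration = Color;
    type Error = Unsupported;

    fn parse_value<'t>(&mut self, name: Cow<'i, str>, input: &mut Parser<'i, 't>)
                       -> Result<Color, ParseError<'i, Unsupported>> {
        if &*name != "color" {
            // Caller-defined failures travel as `ParseError::Custom(...)`.
            return Err(ParseError::Custom(Unsupported::Property));
        }
        // Built-in failures are `BasicParseError`s; wrap them (or use the
        // provided `From` impl) to lift them into `ParseError`.
        Color::parse(input).map_err(ParseError::Basic)
    }
}

// `DeclarationListParser` also wants an `AtRuleParser` impl with matching
// associated types; the default methods reject every at-rule, so naming the
// associated types is enough here.
impl<'i> AtRuleParser<'i> for ColorsOnly {
    type Prelude = ();
    type AtRule = Color;
    type Error = Unsupported;
}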
-impl<'i, 't, 'a, I, P> Iterator for DeclarationListParser<'i, 't, 'a, P> -where P: DeclarationParser + AtRuleParser { - type Item = Result>; +impl<'i: 't, 't: 'a, 'a, I, P, E: 'i> Iterator for DeclarationListParser<'i, 't, 'a, P> +where P: DeclarationParser<'i, Declaration = I, Error = E> + + AtRuleParser<'i, AtRule = I, Error = E> { + type Item = Result>; - fn next(&mut self) -> Option>> { + fn next(&mut self) -> Option>> { loop { let start_position = self.input.position(); match self.input.next_including_whitespace_and_comments() { @@ -233,20 +244,28 @@ where P: DeclarationParser + AtRuleParser { Ok(Token::Ident(name)) => { return Some({ let parser = &mut self.parser; - self.input.parse_until_after(Delimiter::Semicolon, |input| { + // FIXME: https://github.com/rust-lang/rust/issues/42508 + parse_until_after::<'i, 't, _, _, _>(self.input, Delimiter::Semicolon, |input| { try!(input.expect_colon()); - parser.parse_value(&*name, input) + parser.parse_value(name, input) }) - }.map_err(|()| start_position..self.input.position())) + }.map_err(|e| PreciseParseError { + error: e, + span: start_position..self.input.position() + })) } Ok(Token::AtKeyword(name)) => { return Some(parse_at_rule(start_position, name, self.input, &mut self.parser)) } Ok(_) => { - return Some(self.input.parse_until_after(Delimiter::Semicolon, |_| Err(())) - .map_err(|()| start_position..self.input.position())) + return Some(self.input.parse_until_after(Delimiter::Semicolon, + |_| Err(ParseError::Basic(BasicParseError::ExpectedToken(Token::Semicolon)))) + .map_err(|e| PreciseParseError { + error: e, + span: start_position..self.input.position() + })) } - Err(()) => return None, + Err(_) => return None, } } } @@ -266,8 +285,9 @@ pub struct RuleListParser<'i: 't, 't: 'a, 'a, P> { } -impl<'i: 't, 't: 'a, 'a, R, P> RuleListParser<'i, 't, 'a, P> -where P: QualifiedRuleParser + AtRuleParser { +impl<'i: 't, 't: 'a, 'a, R, P, E: 'i> RuleListParser<'i, 't, 'a, P> +where P: QualifiedRuleParser<'i, QualifiedRule = R, Error = E> + + AtRuleParser<'i, AtRule = R, Error = E> { /// Create a new `RuleListParser` for the given `input` at the top-level of a stylesheet /// and the given `parser`. /// @@ -306,11 +326,12 @@ where P: QualifiedRuleParser + AtRuleParser { /// `RuleListParser` is an iterator that yields `Ok(_)` for a rule or `Err(())` for an invalid one. 
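And a sketch of driving the updated iterators: instead of a bare source range, an invalid rule now yields the `PreciseParseError { error, span }` pair defined later in this patch. The `RejectEverything` parser below is illustrative and relies only on the default, always-rejecting trait methods:

use cssparser::{AtRuleParser, ParseError, Parser, ParserInput, QualifiedRuleParser, RuleListParser};

struct RejectEverything;

impl<'i> AtRuleParser<'i> for RejectEverything {
    type Prelude = ();
    type AtRule = ();
    type Error = ();
}
impl<'i> QualifiedRuleParser<'i> for RejectEverything {
    type Prelude = ();
    type QualifiedRule = ();
    type Error = ();
}

fn report_errors(css: &str) {
    let mut input = ParserInput::new(css);
    let mut input = Parser::new(&mut input);
    for result in RuleListParser::new_for_stylesheet(&mut input, RejectEverything) {
        if let Err(err) = result {
            // `err.span` is a `Range<SourcePosition>` locating the rejected rule;
            // `err.error` says why it was rejected.
            match err.error {
                ParseError::Basic(e) => println!("syntax error: {:?}", e),
                ParseError::Custom(()) => println!("rule rejected by the parser"),
            }
        }
    }
}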
-impl<'i, 't, 'a, R, P> Iterator for RuleListParser<'i, 't, 'a, P> -where P: QualifiedRuleParser + AtRuleParser { - type Item = Result>; +impl<'i: 't, 't: 'a, 'a, R, P, E: 'i> Iterator for RuleListParser<'i, 't, 'a, P> +where P: QualifiedRuleParser<'i, QualifiedRule = R, Error = E> + + AtRuleParser<'i, AtRule = R, Error = E> { + type Item = Result>; - fn next(&mut self) -> Option>> { + fn next(&mut self) -> Option>> { loop { let start_position = self.input.position(); match self.input.next_including_whitespace_and_comments() { @@ -321,7 +342,7 @@ where P: QualifiedRuleParser + AtRuleParser { self.any_rule_so_far = true; if first_stylesheet_rule && name.eq_ignore_ascii_case("charset") { let delimiters = Delimiter::Semicolon | Delimiter::CurlyBracketBlock; - let _ = self.input.parse_until_after(delimiters, |_input| Ok(())); + let _: Result<(), ParseError<()>> = self.input.parse_until_after(delimiters, |_| Ok(())); } else { return Some(parse_at_rule(start_position, name, self.input, &mut self.parser)) } @@ -330,9 +351,12 @@ where P: QualifiedRuleParser + AtRuleParser { self.any_rule_so_far = true; self.input.reset(start_position); return Some(parse_qualified_rule(self.input, &mut self.parser) - .map_err(|()| start_position..self.input.position())) + .map_err(|e| PreciseParseError { + error: e, + span: start_position..self.input.position() + })) } - Err(()) => return None, + Err(_) => return None, } } } @@ -340,99 +364,134 @@ where P: QualifiedRuleParser + AtRuleParser { /// Parse a single declaration, such as an `( /* ... */ )` parenthesis in an `@supports` prelude. -pub fn parse_one_declaration

<P>(input: &mut Parser, parser: &mut P)
-                             -> Result<<P as DeclarationParser>::Declaration,
-                                       Range<SourcePosition>>
-                             where P: DeclarationParser {
+pub fn parse_one_declaration<'i, 't, P, E>(input: &mut Parser<'i, 't>, parser: &mut P)
+                                           -> Result<<P as DeclarationParser<'i>>::Declaration,
+                                                     PreciseParseError<'i, E>>
+                                           where P: DeclarationParser<'i, Error = E> {
     let start_position = input.position();
     input.parse_entirely(|input| {
         let name = try!(input.expect_ident());
         try!(input.expect_colon());
-        parser.parse_value(&*name, input)
-    }).map_err(|()| start_position..input.position())
+        parser.parse_value(name, input)
+    }).map_err(|e| PreciseParseError {
+        error: e,
+        span: start_position..input.position()
+    })
 }

 /// Parse a single rule, such as for CSSOM’s `CSSStyleSheet.insertRule`.
-pub fn parse_one_rule<R, P>(input: &mut Parser, parser: &mut P) -> Result<R, ()>
-where P: QualifiedRuleParser<QualifiedRule = R> + AtRuleParser<AtRule = R> {
+pub fn parse_one_rule<'i, 't, R, P, E>(input: &mut Parser<'i, 't>, parser: &mut P)
+                                       -> Result<R, ParseError<'i, E>>
+where P: QualifiedRuleParser<'i, QualifiedRule = R, Error = E> +
+         AtRuleParser<'i, AtRule = R, Error = E> {
     input.parse_entirely(|input| {
         loop {
             let start_position = input.position();
             match try!(input.next_including_whitespace_and_comments()) {
                 Token::WhiteSpace(_) | Token::Comment(_) => {}
                 Token::AtKeyword(name) => {
-                    return parse_at_rule(start_position, name, input, parser).map_err(|_| ())
+                    return parse_at_rule(start_position, name, input, parser).map_err(|e| e.error)
                 }
                 _ => {
                     input.reset(start_position);
-                    return parse_qualified_rule(input, parser).map_err(|_| ())
+                    return parse_qualified_rule(input, parser)
                 }
             }
         }
     })
 }

+pub struct PreciseParseError<'i, E: 'i> {
+    pub error: ParseError<'i, E>,
+    pub span: Range<SourcePosition>,
+}

-fn parse_at_rule<P>(start_position: SourcePosition, name: Cow<str>,
-                    input: &mut Parser, parser: &mut P)
-                    -> Result<<P as AtRuleParser>::AtRule, Range<SourcePosition>>
-                    where P: AtRuleParser {
+fn parse_at_rule<'i: 't, 't, P, E>(start_position: SourcePosition, name: Cow<'i, str>,
+                                   input: &mut Parser<'i, 't>, parser: &mut P)
+                                   -> Result<<P as AtRuleParser<'i>>::AtRule, PreciseParseError<'i, E>>
+                                   where P: AtRuleParser<'i, Error = E> {
     let delimiters = Delimiter::Semicolon | Delimiter::CurlyBracketBlock;
-    let result = input.parse_until_before(delimiters, |input| {
-        parser.parse_prelude(&*name, input)
+    // FIXME: https://github.com/rust-lang/rust/issues/42508
+    let result = parse_until_before::<'i, 't, _, _, _>(input, delimiters, |input| {
+        parser.parse_prelude(name, input)
     });
     match result {
         Ok(AtRuleType::WithoutBlock(rule)) => {
             match input.next() {
-                Ok(Token::Semicolon) | Err(()) => Ok(rule),
-                Ok(Token::CurlyBracketBlock) => Err(start_position..input.position()),
+                Ok(Token::Semicolon) | Err(_) => Ok(rule),
+                Ok(Token::CurlyBracketBlock) => Err(PreciseParseError {
+                    error: ParseError::Basic(BasicParseError::UnexpectedToken(Token::CurlyBracketBlock)),
+                    span: start_position..input.position(),
+                }),
                 Ok(_) => unreachable!()
             }
         }
         Ok(AtRuleType::WithBlock(prelude)) => {
             match input.next() {
                 Ok(Token::CurlyBracketBlock) => {
-                    input.parse_nested_block(move |input| parser.parse_block(prelude, input))
-                        .map_err(|()| start_position..input.position())
+                    // FIXME: https://github.com/rust-lang/rust/issues/42508
+                    parse_nested_block::<'i, 't, _, _, _>(input, move |input| parser.parse_block(prelude, input))
+                        .map_err(|e| PreciseParseError {
+                            error: e,
+                            span: start_position..input.position(),
+                        })
                 }
-                Ok(Token::Semicolon) | Err(()) => Err(start_position..input.position()),
+                Ok(Token::Semicolon) => Err(PreciseParseError {
+                    error: ParseError::Basic(BasicParseError::UnexpectedToken(Token::Semicolon)),
+                    span: start_position..input.position()
+                }),
+                Err(e) => Err(PreciseParseError {
+                    error: ParseError::Basic(e),
+                    span: start_position..input.position(),
+                }),
                 Ok(_) => unreachable!()
             }
         }
         Ok(AtRuleType::OptionalBlock(prelude)) => {
             match input.next() {
-                Ok(Token::Semicolon) | Err(()) => Ok(parser.rule_without_block(prelude)),
+                Ok(Token::Semicolon) | Err(_) => Ok(parser.rule_without_block(prelude)),
                 Ok(Token::CurlyBracketBlock) => {
-                    input.parse_nested_block(move |input| parser.parse_block(prelude, input))
-                        .map_err(|()| start_position..input.position())
+                    // FIXME: https://github.com/rust-lang/rust/issues/42508
+                    parse_nested_block::<'i, 't, _, _, _>(input, move |input| parser.parse_block(prelude, input))
+                        .map_err(|e| PreciseParseError {
+                            error: e,
+                            span: start_position..input.position(),
+                        })
                 }
                 _ => unreachable!()
             }
         }
-        Err(()) => {
+        Err(_) => {
             let end_position = input.position();
-            match input.next() {
-                Ok(Token::CurlyBracketBlock) | Ok(Token::Semicolon) | Err(()) => {}
+            let error = match input.next() {
+                Ok(Token::CurlyBracketBlock) => BasicParseError::UnexpectedToken(Token::CurlyBracketBlock),
+                Ok(Token::Semicolon) => BasicParseError::UnexpectedToken(Token::Semicolon),
+                Err(e) => e,
                 _ => unreachable!()
-            }
-            Err(start_position..end_position)
+            };
+            Err(PreciseParseError {
+                error: ParseError::Basic(error),
+                span: start_position..end_position,
+            })
         }
     }
 }

-fn parse_qualified_rule<P>(input: &mut Parser, parser: &mut P)
-                           -> Result<<P as QualifiedRuleParser>::QualifiedRule, ()>
-                           where P: QualifiedRuleParser {
-    let prelude = input.parse_until_before(Delimiter::CurlyBracketBlock, |input| {
+fn parse_qualified_rule<'i, 't, P, E>(input: &mut Parser<'i, 't>, parser: &mut P)
+                                      -> Result<<P as QualifiedRuleParser<'i>
>::QualifiedRule, ParseError<'i, E>>
+                                      where P: QualifiedRuleParser<'i, Error = E> {
+    // FIXME: https://github.com/rust-lang/rust/issues/42508
+    let prelude = parse_until_before::<'i, 't, _, _, _>(input, Delimiter::CurlyBracketBlock, |input| {
         parser.parse_prelude(input)
     });
     match try!(input.next()) {
         Token::CurlyBracketBlock => {
             // Do this here so that we consume the `{` even if the prelude is `Err`.
             let prelude = try!(prelude);
-            input.parse_nested_block(move |input| parser.parse_block(prelude, input))
+            // FIXME: https://github.com/rust-lang/rust/issues/42508
+            parse_nested_block::<'i, 't, _, _, _>(input, move |input| parser.parse_block(prelude, input))
         }
         _ => unreachable!()
     }
diff --git a/src/tests.rs b/src/tests.rs
index e10b1df1..90d321b8 100644
--- a/src/tests.rs
+++ b/src/tests.rs
@@ -12,9 +12,9 @@ use rustc_serialize::json::{self, Json, ToJson};
 #[cfg(feature = "bench")]
 use self::test::Bencher;
 
-use super::{Parser, Delimiter, Token, NumericValue, PercentageValue, SourceLocation,
-            DeclarationListParser, DeclarationParser, RuleListParser,
-            AtRuleType, AtRuleParser, QualifiedRuleParser,
+use super::{Parser, Delimiter, Token, NumericValue, PercentageValue, SourceLocation, ParseError,
+            DeclarationListParser, DeclarationParser, RuleListParser, BasicParseError,
+            AtRuleType, AtRuleParser, QualifiedRuleParser, ParserInput,
             parse_one_declaration, parse_one_rule, parse_important,
             stylesheet_encoding, EncodingSupport,
             TokenSerializationType,
@@ -62,7 +62,7 @@ fn normalize(json: &mut Json) {
     }
 }
 
-fn assert_json_eq(results: json::Json, mut expected: json::Json, message: String) {
+fn assert_json_eq(results: json::Json, mut expected: json::Json, message: &str) {
     normalize(&mut expected);
     if !almost_equals(&results, &expected) {
         println!("{}", ::difference::Changeset::new(
@@ -70,7 +70,7 @@ fn assert_json_eq(results: json::Json, mut expected: json::Json, message: String
             &expected.pretty().to_string(),
             "\n",
         ));
-        panic!(message)
+        panic!("{}", message)
     }
 }
 
@@ -97,8 +97,9 @@ fn run_json_tests<F: Fn(&mut Parser) -> Json>(json_data: &str, parse: F) {
     run_raw_json_tests(json_data, |input, expected| {
         match input {
             Json::String(input) => {
-                let result = parse(&mut Parser::new(&input));
-                assert_json_eq(result, expected, input);
+                let mut parse_input = ParserInput::new(&input);
+                let result = parse(&mut Parser::new(&mut parse_input));
+                assert_json_eq(result, expected, &input);
             },
             _ => panic!("Unexpected JSON")
         }
     })
@@ -117,9 +118,10 @@ fn component_value_list() {
 #[test]
 fn one_component_value() {
     run_json_tests(include_str!("css-parsing-tests/one_component_value.json"), |input| {
-        input.parse_entirely(|input| {
+        let result: Result<Json, ParseError<()>> = input.parse_entirely(|input| {
             Ok(one_component_value_to_json(try!(input.next()), input))
-        }).unwrap_or(JArray!["error", "invalid"])
+        });
+        result.unwrap_or(JArray!["error", "invalid"])
     });
 }
 
@@ -213,13 +215,14 @@ fn stylesheet_from_bytes() {
             let encoding = stylesheet_encoding::(
                 &css, protocol_encoding_label, environment_encoding);
             let (css_unicode, used_encoding, _) = encoding.decode(&css);
-            let input = &mut Parser::new(&css_unicode);
+            let mut input = ParserInput::new(&css_unicode);
+            let input = &mut Parser::new(&mut input);
             let rules = RuleListParser::new_for_stylesheet(input, JsonParser)
                 .map(|result| result.unwrap_or(JArray!["error", "invalid"]))
                 .collect::<Vec<_>>();
             JArray![rules, used_encoding.name().to_lowercase()]
         };
-        assert_json_eq(result, expected, Json::Object(map).to_string());
+        assert_json_eq(result, expected, &Json::Object(map).to_string());
     });
 
     fn get_string<'a>(map: &'a json::Object, key: &str) -> Option<&'a str> {
@@ -235,25 +238,38 @@ fn stylesheet_from_bytes() {
 
 #[test]
 fn expect_no_error_token() {
-    assert!(Parser::new("foo 4px ( / { !bar }").expect_no_error_token().is_ok());
-    assert!(Parser::new(")").expect_no_error_token().is_err());
-    assert!(Parser::new("}").expect_no_error_token().is_err());
-    assert!(Parser::new("(a){]").expect_no_error_token().is_err());
-    assert!(Parser::new("'\n'").expect_no_error_token().is_err());
-    assert!(Parser::new("url('\n'").expect_no_error_token().is_err());
-    assert!(Parser::new("url(a b)").expect_no_error_token().is_err());
-    assert!(Parser::new("url(\u{7F})").expect_no_error_token().is_err());
+    let mut input = ParserInput::new("foo 4px ( / { !bar }");
+    assert!(Parser::new(&mut input).expect_no_error_token().is_ok());
+    let mut input = ParserInput::new(")");
+    assert!(Parser::new(&mut input).expect_no_error_token().is_err());
+    let mut input = ParserInput::new("}");
+    assert!(Parser::new(&mut input).expect_no_error_token().is_err());
+    let mut input = ParserInput::new("(a){]");
+    assert!(Parser::new(&mut input).expect_no_error_token().is_err());
+    let mut input = ParserInput::new("'\n'");
+    assert!(Parser::new(&mut input).expect_no_error_token().is_err());
+    let mut input = ParserInput::new("url('\n'");
+    assert!(Parser::new(&mut input).expect_no_error_token().is_err());
+    let mut input = ParserInput::new("url(a b)");
+    assert!(Parser::new(&mut input).expect_no_error_token().is_err());
+    let mut input = ParserInput::new("url(\u{7F}))");
+    assert!(Parser::new(&mut input).expect_no_error_token().is_err());
 }
 
 /// https://github.com/servo/rust-cssparser/issues/71
 #[test]
 fn outer_block_end_consumed() {
-    let mut input = Parser::new("(calc(true))");
+    let mut input = ParserInput::new("(calc(true))");
+    let mut input = Parser::new(&mut input);
     assert!(input.expect_parenthesis_block().is_ok());
-    assert!(input.parse_nested_block(|input| input.expect_function_matching("calc")).is_ok());
+    assert!(input.parse_nested_block(|input| {
+        let result: Result<_, ParseError<()>> = input.expect_function_matching("calc")
+            .map_err(|e| ParseError::Basic(e));
+        result
+    }).is_ok());
     println!("{:?}", input.position());
-    assert_eq!(input.next(), Err(()));
+    assert!(input.next().is_err());
 }
 
 #[test]
@@ -272,30 +288,41 @@ fn unquoted_url_escaping() {
         !\\\"#$%&\\'\\(\\)*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\\\]\
         ^_`abcdefghijklmnopqrstuvwxyz{|}~\\7F é\
         )\
-    ");
-    assert_eq!(Parser::new(&serialized).next(), Ok(token))
+    ");
+    let mut input = ParserInput::new(&serialized);
+    assert_eq!(Parser::new(&mut input).next(), Ok(token))
 }
 
 #[test]
 fn test_expect_url() {
-    fn parse(s: &str) -> Result<Cow<str>, ()> {
+    fn parse<'a>(s: &mut ParserInput<'a>) -> Result<Cow<'a, str>, BasicParseError<'a>> {
         Parser::new(s).expect_url()
     }
-    assert_eq!(parse("url()").unwrap(), "");
-    assert_eq!(parse("url( ").unwrap(), "");
-    assert_eq!(parse("url( abc").unwrap(), "abc");
-    assert_eq!(parse("url( abc \t)").unwrap(), "abc");
-    assert_eq!(parse("url( 'abc' \t)").unwrap(), "abc");
-    assert_eq!(parse("url(abc more stuff)"), Err(()));
+    let mut input = ParserInput::new("url()");
+    assert_eq!(parse(&mut input).unwrap(), "");
+    let mut input = ParserInput::new("url( ");
+    assert_eq!(parse(&mut input).unwrap(), "");
+    let mut input = ParserInput::new("url( abc");
+    assert_eq!(parse(&mut input).unwrap(), "abc");
+    let mut input = ParserInput::new("url( abc \t)");
+    assert_eq!(parse(&mut input).unwrap(), "abc");
+    let mut input = ParserInput::new("url( 'abc' \t)");
+    assert_eq!(parse(&mut input).unwrap(), "abc");
+    let mut input = ParserInput::new("url(abc more stuff)");
+    assert!(parse(&mut input).is_err());
     // The grammar at https://drafts.csswg.org/css-values/#urls plans for `*`
     // at the position of "more stuff", but no such modifier is defined yet.
-    assert_eq!(parse("url('abc' more stuff)"), Err(()));
+    let mut input = ParserInput::new("url('abc' more stuff)");
+    assert!(parse(&mut input).is_err());
 }
 
 fn run_color_tests<F: Fn(Result<Color, ()>) -> Json>(json_data: &str, to_json: F) {
     run_json_tests(json_data, |input| {
-        to_json(input.parse_entirely(Color::parse))
+        let result: Result<_, ParseError<()>> = input.parse_entirely(|i| {
+            Color::parse(i).map_err(|e| ParseError::Basic(e))
+        });
+        to_json(result.map_err(|_| ()))
     });
 }
 
@@ -322,14 +349,17 @@ fn color3_keywords() {
 #[test]
 fn nth() {
     run_json_tests(include_str!("css-parsing-tests/An+B.json"), |input| {
-        input.parse_entirely(parse_nth).ok().to_json()
+        input.parse_entirely(|i| {
+            let result: Result<_, ParseError<()>> = parse_nth(i).map_err(|e| ParseError::Basic(e));
+            result
+        }).ok().to_json()
     });
 }
 
 #[test]
 fn unicode_range() {
     run_json_tests(include_str!("css-parsing-tests/urange.json"), |input| {
-        input.parse_comma_separated(|input| {
+        let result: Result<_, ParseError<()>> = input.parse_comma_separated(|input| {
             let result = UnicodeRange::parse(input).ok().map(|r| (r.start, r.end));
             if input.is_exhausted() {
                 Ok(result)
@@ -337,7 +367,8 @@ fn unicode_range() {
                 while let Ok(_) = input.next() {}
                 Ok(None)
             }
-        }).unwrap().to_json()
+        });
+        result.unwrap().to_json()
     });
 }
 
@@ -376,17 +407,19 @@ fn serializer(preserve_comments: bool) {
                     _ => None
                 };
                 if let Some(closing_token) = closing_token {
-                    input.parse_nested_block(|input| {
+                    let result: Result<_, ParseError<()>> = input.parse_nested_block(|input| {
                         write_to(previous_token, input, string, preserve_comments);
                         Ok(())
-                    }).unwrap();
+                    });
+                    result.unwrap();
                     closing_token.to_css(string).unwrap();
                 }
             }
         }
         let mut serialized = String::new();
         write_to(TokenSerializationType::nothing(), input, &mut serialized, preserve_comments);
-        let parser = &mut Parser::new(&serialized);
+        let mut input = ParserInput::new(&serialized);
+        let parser = &mut Parser::new(&mut input);
         Json::Array(component_values_to_json(parser))
     });
 }
@@ -417,7 +450,8 @@ fn serialize_rgba_two_digit_float_if_roundtrips() {
 
 #[test]
 fn line_numbers() {
-    let mut input = Parser::new("foo bar\nbaz\r\n\n\"a\\\r\nb\"");
+    let mut input = ParserInput::new("foo bar\nbaz\r\n\n\"a\\\r\nb\"");
+    let mut input = Parser::new(&mut input);
     assert_eq!(input.current_source_location(), SourceLocation { line: 1, column: 1 });
     assert_eq!(input.next_including_whitespace(), Ok(Token::Ident(Borrowed("foo"))));
     assert_eq!(input.current_source_location(), SourceLocation { line: 1, column: 4 });
@@ -438,7 +472,7 @@ fn line_numbers() {
     assert_eq!(input.next_including_whitespace(), Ok(Token::QuotedString(Borrowed("ab"))));
     assert_eq!(input.current_source_location(), SourceLocation { line: 5, column: 3 });
 
-    assert_eq!(input.next_including_whitespace(), Err(()));
+    assert!(input.next_including_whitespace().is_err());
 }
 
 #[test]
@@ -470,7 +504,8 @@ fn overflow() {
         -3.402824e+38
     ".replace("{309 zeros}", &repeat('0').take(309).collect::<String>());
-    let mut input = Parser::new(&css);
+    let mut input = ParserInput::new(&css);
+    let mut input = Parser::new(&mut input);
 
     assert_eq!(input.expect_integer(), Ok(2147483646));
     assert_eq!(input.expect_integer(), Ok(2147483647));
@@ -499,11 +534,15 @@ fn overflow() {
 
 #[test]
 fn line_delimited() {
-    let mut input = Parser::new(" { foo ; bar } baz;,");
+    let mut input = ParserInput::new(" { foo ; bar } baz;,");
+    let mut input = Parser::new(&mut input);
     assert_eq!(input.next(), Ok(Token::CurlyBracketBlock));
-    assert_eq!(input.parse_until_after(Delimiter::Semicolon, |_| Ok(42)), Err(()));
+    assert!({
+        let result: Result<_, ParseError<()>> = input.parse_until_after(Delimiter::Semicolon, |_| Ok(42));
+        result
+    }.is_err());
    assert_eq!(input.next(), Ok(Token::Comma));
-    assert_eq!(input.next(), Err(()));
+    assert!(input.next().is_err());
 }
 
 #[test]
@@ -596,7 +635,8 @@ const BACKGROUND_IMAGE: &'static str = include_str!("big-data-url.css");
 
 #[bench]
 fn unquoted_url(b: &mut Bencher) {
     b.iter(|| {
-        let mut input = Parser::new(BACKGROUND_IMAGE);
+        let mut input = ParserInput::new(BACKGROUND_IMAGE);
+        let mut input = Parser::new(&mut input);
         input.look_for_var_functions();
         let result = input.try(|input| input.expect_url());
@@ -614,7 +654,8 @@ fn unquoted_url(b: &mut Bencher) {
 fn numeric(b: &mut Bencher) {
     b.iter(|| {
         for _ in 0..1000000 {
-            let mut input = Parser::new("10px");
+            let mut input = ParserInput::new("10px");
+            let mut input = Parser::new(&mut input);
             let _ = test::black_box(input.next());
         }
     })
@@ -629,14 +670,17 @@ fn no_stack_overflow_multiple_nested_blocks() {
         let dup = input.clone();
         input.push_str(&dup);
     }
-    let mut input = Parser::new(&input);
+    let mut input = ParserInput::new(&input);
+    let mut input = Parser::new(&mut input);
     while let Ok(..) = input.next() { }
 }
 
-impl DeclarationParser for JsonParser {
+impl<'i> DeclarationParser<'i> for JsonParser {
     type Declaration = Json;
+    type Error = ();
 
-    fn parse_value(&mut self, name: &str, input: &mut Parser) -> Result<Json, ()> {
+    fn parse_value<'t>(&mut self, name: Cow<'i, str>, input: &mut Parser<'i, 't>)
+                       -> Result<Json, ParseError<'i, ()>> {
         let mut value = vec![];
         let mut important = false;
         loop {
@@ -671,12 +715,13 @@ impl DeclarationParser for JsonParser {
     }
 }
 
-impl AtRuleParser for JsonParser {
+impl<'i> AtRuleParser<'i> for JsonParser {
     type Prelude = Vec<Json>;
     type AtRule = Json;
+    type Error = ();
 
-    fn parse_prelude(&mut self, name: &str, input: &mut Parser)
-                     -> Result<AtRuleType<Vec<Json>, Json>, ()> {
+    fn parse_prelude<'t>(&mut self, name: Cow<'i, str>, input: &mut Parser<'i, 't>)
+                         -> Result<AtRuleType<Vec<Json>, Json>, ParseError<'i, ()>> {
         Ok(AtRuleType::OptionalBlock(vec![
             "at-rule".to_json(),
             name.to_json(),
@@ -684,7 +729,8 @@ impl AtRuleParser for JsonParser {
         ]))
     }
 
-    fn parse_block(&mut self, mut prelude: Vec<Json>, input: &mut Parser) -> Result<Json, ()> {
+    fn parse_block<'t>(&mut self, mut prelude: Vec<Json>, input: &mut Parser<'i, 't>)
+                       -> Result<Json, ParseError<'i, ()>> {
         prelude.push(Json::Array(component_values_to_json(input)));
         Ok(Json::Array(prelude))
     }
@@ -695,15 +741,17 @@ impl AtRuleParser for JsonParser {
     }
 }
 
-impl QualifiedRuleParser for JsonParser {
+impl<'i> QualifiedRuleParser<'i> for JsonParser {
     type Prelude = Vec<Json>;
     type QualifiedRule = Json;
+    type Error = ();
 
-    fn parse_prelude(&mut self, input: &mut Parser) -> Result<Vec<Json>, ()> {
+    fn parse_prelude<'t>(&mut self, input: &mut Parser<'i, 't>) -> Result<Vec<Json>, ParseError<'i, ()>> {
         Ok(component_values_to_json(input))
     }
 
-    fn parse_block(&mut self, prelude: Vec<Json>, input: &mut Parser) -> Result<Json, ()> {
+    fn parse_block<'t>(&mut self, prelude: Vec<Json>, input: &mut Parser<'i, 't>)
+                       -> Result<Json, ParseError<'i, ()>> {
         Ok(JArray![
             "qualified rule",
             prelude,
@@ -730,7 +778,10 @@ fn one_component_value_to_json(token: Token, input: &mut Parser) -> Json {
     }
 
     fn nested(input: &mut Parser) -> Vec<Json> {
-        input.parse_nested_block(|input| Ok(component_values_to_json(input))).unwrap()
+        let result: Result<_, ParseError<()>> = input.parse_nested_block(|input| {
+            Ok(component_values_to_json(input))
+        });
+        result.unwrap()
     }
 
     match token {
diff --git a/src/unicode_range.rs b/src/unicode_range.rs
index 6a6dc022..64030358 100644
--- a/src/unicode_range.rs
+++ b/src/unicode_range.rs
@@ -4,7 +4,7 @@
 //! https://drafts.csswg.org/css-syntax/#urange
 
-use {Parser, ToCss};
+use {Parser, ToCss, BasicParseError};
 use std::char;
 use std::cmp;
 use std::fmt;
@@ -24,7 +24,7 @@ pub struct UnicodeRange {
 
 impl UnicodeRange {
     /// https://drafts.csswg.org/css-syntax/#urange-syntax
-    pub fn parse(input: &mut Parser) -> Result<Self, ()> {
+    pub fn parse<'i, 't>(input: &mut Parser<'i, 't>) -> Result<Self, BasicParseError<'i>> {
         // <urange> =
         //   u '+' <ident-token> '?'* |
         //   u <dimension-token> '?'* |
@@ -42,22 +42,25 @@ impl UnicodeRange {
         // but oh well…
         let concatenated_tokens = input.slice_from(after_u);
-        let range = parse_concatenated(concatenated_tokens.as_bytes())?;
+        let range = match parse_concatenated(concatenated_tokens.as_bytes()) {
+            Ok(range) => range,
+            Err(()) => return Err(BasicParseError::UnexpectedToken(Token::Ident(concatenated_tokens.into()))),
+        };
         if range.end > char::MAX as u32 || range.start > range.end {
-            Err(())
+            Err(BasicParseError::UnexpectedToken(Token::Ident(concatenated_tokens.into())))
         } else {
             Ok(range)
         }
     }
 }
 
-fn parse_tokens(input: &mut Parser) -> Result<(), ()> {
+fn parse_tokens<'i, 't>(input: &mut Parser<'i, 't>) -> Result<(), BasicParseError<'i>> {
     match input.next_including_whitespace()? {
         Token::Delim('+') => {
             match input.next_including_whitespace()? {
                 Token::Ident(_) => {}
                 Token::Delim('?') => {}
-                _ => return Err(())
+                t => return Err(BasicParseError::UnexpectedToken(t))
             }
             parse_question_marks(input)
         }
@@ -73,7 +76,7 @@ fn parse_tokens(input: &mut Parser) -> Result<(), ()> {
                 _ => input.reset(after_number)
             }
         }
-        _ => return Err(())
+        t => return Err(BasicParseError::UnexpectedToken(t))
     }
     Ok(())
 }
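
Note (not part of the patch): for downstream users, the most visible effect of this change is the new two-step setup, where a ParserInput owns the tokenizer state and Parser::new borrows it mutably, and the fact that next() now fails with BasicParseError instead of (). A minimal migration sketch under those assumptions; the main wrapper and the "10px" input are illustrative only:

    extern crate cssparser;

    use cssparser::{Parser, ParserInput};

    fn main() {
        // ParserInput now owns the input string; Parser borrows it mutably.
        let mut input = ParserInput::new("10px");
        let mut parser = Parser::new(&mut input);

        // next() returns Result<Token, BasicParseError> instead of Result<Token, ()>,
        // so a failure carries information about the unexpected token or end of input.
        match parser.next() {
            Ok(token) => println!("first token: {:?}", token),
            Err(err) => println!("parse error: {:?}", err),
        }
    }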
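The other recurring pattern in the updated tests is error wrapping: closures passed to parse_entirely, parse_nested_block or parse_comma_separated must now return Result<_, ParseError<'i, E>>, so BasicParseError values coming out of helpers such as Color::parse are lifted with ParseError::Basic, exactly as run_color_tests does above. A hedged sketch of that pattern outside the test suite; the parse_color helper and its () error parameter are illustrative, not part of the patch:

    extern crate cssparser;

    use cssparser::{Color, ParseError, Parser, ParserInput};

    // Illustrative helper: () stands in for a caller-defined custom error type.
    fn parse_color(css: &str) -> Result<Color, ()> {
        let mut input = ParserInput::new(css);
        let mut parser = Parser::new(&mut input);
        // The closure must return Result<_, ParseError<'i, ()>>; Color::parse
        // yields a BasicParseError, so it is wrapped with ParseError::Basic.
        let result: Result<Color, ParseError<()>> = parser.parse_entirely(|i| {
            Color::parse(i).map_err(|e| ParseError::Basic(e))
        });
        result.map_err(|_| ())
    }

    fn main() {
        assert!(parse_color("#ff0000").is_ok());
        assert!(parse_color("42px").is_err());
    }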
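Finally, the trait-side changes visible in the JsonParser impls: DeclarationParser, AtRuleParser and QualifiedRuleParser are now parameterized over the input lifetime 'i, gain a type Error associated type, and take declaration names as Cow<'i, str>. A rough sketch of a downstream DeclarationParser impl under those assumptions; LengthParser and the direct parse_value call in main are made up for illustration, and real code would normally drive the impl through DeclarationListParser as the tests do:

    extern crate cssparser;

    use std::borrow::Cow;
    use cssparser::{DeclarationParser, ParseError, Parser, ParserInput};

    // Hypothetical declaration parser that only accepts a plain number as the value.
    struct LengthParser;

    impl<'i> DeclarationParser<'i> for LengthParser {
        type Declaration = f32;
        type Error = ();

        fn parse_value<'t>(&mut self, _name: Cow<'i, str>, input: &mut Parser<'i, 't>)
                           -> Result<f32, ParseError<'i, ()>> {
            // expect_number now fails with BasicParseError; lift it into ParseError.
            input.expect_number().map_err(|e| ParseError::Basic(e))
        }
    }

    fn main() {
        let mut input = ParserInput::new("12.5");
        let mut parser = Parser::new(&mut input);
        let mut declarations = LengthParser;
        // Calling the trait method directly, just to show the new signature.
        let value = declarations.parse_value(Cow::Borrowed("width"), &mut parser);
        assert_eq!(value.ok(), Some(12.5));
    }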