From 9327ebde8949b6cff459cd13b30fd6088d838135 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 11 Apr 2023 09:57:44 +0000
Subject: [PATCH 1/2] Update logos requirement from 0.12.0 to 0.13.0

Updates the requirements on [logos](https://github.com/maciejhirsz/logos) to permit the latest version.
- [Release notes](https://github.com/maciejhirsz/logos/releases)
- [Commits](https://github.com/maciejhirsz/logos/compare/v0.12.0...v0.13)

---
updated-dependencies:
- dependency-name: logos
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
---
 Cargo.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/Cargo.toml b/Cargo.toml
index ae70bbc..65d7fbb 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -14,7 +14,7 @@ path = "tests/lib.rs"
 [dependencies]
 enumn = "0.1.3"
 lazy_static = "1.4.0"
-logos = "0.12.0"
+logos = "0.13.0"
 regex = "1.3"
 thiserror = "1.0"
 serde = { version = "1.0", features = ["derive"] }

From 2a08534ac72fe05db6dff26f9473a31444a38519 Mon Sep 17 00:00:00 2001
From: Luis Moreno
Date: Tue, 18 Apr 2023 21:06:57 -0400
Subject: [PATCH 2/2] Fix compilation

---
 src/expression_parser.rs | 106 +++++++++++++++++++--------------------
 src/lexer/mod.rs         |  92 +++++++++++++++++++--------------
 src/statement/mod.rs     |   4 +-
 src/statement/parser.rs  |  63 +++++++++++++----------
 src/template_parser.rs   |   3 +-
 5 files changed, 147 insertions(+), 121 deletions(-)
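
Note: logos 0.13 removes the #[error] catch-all variant and instead has the
lexer iterator yield Result<Token, Error>, with the error type chosen through
#[logos(error = ...)] and whitespace skipping expressed with the enum-level
#[logos(skip ...)] attribute used below. That is why every match on
lexer.peek() / lexer.next() in this diff gains an Ok(..) layer, and why
ParseError gets From<ParseIntError> / From<ParseFloatError> impls (so the
errors of parse() callbacks can convert into it). A minimal sketch of the new
shape, illustrative only; Tok, LexError, and demo are placeholder names, not
this crate's types:

    use logos::Logos;
    use std::num::ParseIntError;

    // Stand-in for src/lexer/mod.rs's ParseError.
    #[derive(Debug, Clone, PartialEq, Default)]
    struct LexError;

    impl From<ParseIntError> for LexError {
        fn from(_: ParseIntError) -> Self {
            LexError
        }
    }

    #[derive(Logos, Debug, PartialEq)]
    #[logos(error = LexError)]     // custom error type, new in 0.13
    #[logos(skip r"[ \t\n\f]+")]   // replaces the old #[error] + logos::skip variant
    enum Tok {
        #[token("+")]
        Plus,
        // parse() returns Result<i64, ParseIntError>; the error converts
        // into LexError through the From impl above.
        #[regex(r"[0-9]+", |lex| lex.slice().parse())]
        Int(i64),
    }

    fn demo() {
        // The iterator now yields Result<Tok, LexError> instead of Tok.
        for tok in Tok::lexer("1 + 2") {
            match tok {
                Ok(t) => println!("{:?}", t),
                Err(e) => println!("lex error: {:?}", e),
            }
        }
    }
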
diff --git a/src/expression_parser.rs b/src/expression_parser.rs
index dd2649a..b60af93 100644
--- a/src/expression_parser.rs
+++ b/src/expression_parser.rs
@@ -43,7 +43,7 @@ impl ExpressionParser {
     fn parse_logical_or<'a>(lexer: &mut PeekableLexer<'a, Token<'a>>) -> Result<Expression<'a>> {
         let left = ExpressionParser::parse_logical_and(lexer)?;
 
-        if let Some(Token::LogicalOr) = lexer.peek() {
+        if let Some(Ok(Token::LogicalOr)) = lexer.peek() {
             lexer.next();
             let right = ExpressionParser::parse_logical_or(lexer)?;
             return Ok(Expression::BinaryExpression(
@@ -57,7 +57,7 @@ impl ExpressionParser {
     fn parse_logical_and<'a>(lexer: &mut PeekableLexer<'a, Token<'a>>) -> Result<Expression<'a>> {
         let left = ExpressionParser::parse_logical_compare(lexer)?;
 
-        if let Some(Token::LogicalAnd) = lexer.peek() {
+        if let Some(Ok(Token::LogicalAnd)) = lexer.peek() {
             lexer.next();
             let right = ExpressionParser::parse_logical_and(lexer)?;
             return Ok(Expression::BinaryExpression(
@@ -75,12 +75,12 @@ impl ExpressionParser {
         let left = ExpressionParser::parse_string_concat(lexer)?;
 
         let binary_op = match lexer.peek() {
-            Some(Token::Equal) => BinaryOperation::LogicalEq,
-            Some(Token::NotEqual) => BinaryOperation::LogicalNe,
-            Some(Token::Lt) => BinaryOperation::LogicalLt,
-            Some(Token::Gt) => BinaryOperation::LogicalGt,
-            Some(Token::LessEqual) => BinaryOperation::LogicalLe,
-            Some(Token::GreaterEqual) => BinaryOperation::LogicalGe,
+            Some(Ok(Token::Equal)) => BinaryOperation::LogicalEq,
+            Some(Ok(Token::NotEqual)) => BinaryOperation::LogicalNe,
+            Some(Ok(Token::Lt)) => BinaryOperation::LogicalLt,
+            Some(Ok(Token::Gt)) => BinaryOperation::LogicalGt,
+            Some(Ok(Token::LessEqual)) => BinaryOperation::LogicalLe,
+            Some(Ok(Token::GreaterEqual)) => BinaryOperation::LogicalGe,
             _ => return Ok(left),
         };
 
@@ -96,7 +96,7 @@ impl ExpressionParser {
     fn parse_string_concat<'a>(lexer: &mut PeekableLexer<'a, Token<'a>>) -> Result<Expression<'a>> {
         let left = ExpressionParser::parse_math_pow(lexer)?;
 
-        if let Some(Token::Tilde) = lexer.peek() {
+        if let Some(Ok(Token::Tilde)) = lexer.peek() {
             lexer.next();
             let right = ExpressionParser::parse_logical_and(lexer)?;
             return Ok(Expression::BinaryExpression(
@@ -110,7 +110,7 @@ impl ExpressionParser {
     fn parse_math_pow<'a>(lexer: &mut PeekableLexer<'a, Token<'a>>) -> Result<Expression<'a>> {
         let left = ExpressionParser::parse_math_plus_minus(lexer)?;
 
-        if let Some(Token::MulMul) = lexer.peek() {
+        if let Some(Ok(Token::MulMul)) = lexer.peek() {
             lexer.next();
             let right = ExpressionParser::parse_math_pow(lexer)?;
             return Ok(Expression::BinaryExpression(
@@ -127,8 +127,8 @@ impl ExpressionParser {
     ) -> Result<Expression<'a>> {
         let left = ExpressionParser::parse_math_mul_div(lexer)?;
         let binary_op = match lexer.peek() {
-            Some(Token::Plus) => BinaryOperation::Plus,
-            Some(Token::Minus) => BinaryOperation::Minus,
+            Some(Ok(Token::Plus)) => BinaryOperation::Plus,
+            Some(Ok(Token::Minus)) => BinaryOperation::Minus,
             _ => return Ok(left),
         };
         lexer.next();
@@ -143,10 +143,10 @@ impl ExpressionParser {
     fn parse_math_mul_div<'a>(lexer: &mut PeekableLexer<'a, Token<'a>>) -> Result<Expression<'a>> {
         let left = ExpressionParser::parse_unary_plus_min(lexer)?;
         let binary_op = match lexer.peek() {
-            Some(Token::Mul) => BinaryOperation::Mul,
-            Some(Token::Div) => BinaryOperation::Div,
-            Some(Token::DivDiv) => BinaryOperation::DivInteger,
-            Some(Token::Percent) => BinaryOperation::Modulo,
+            Some(Ok(Token::Mul)) => BinaryOperation::Mul,
+            Some(Ok(Token::Div)) => BinaryOperation::Div,
+            Some(Ok(Token::DivDiv)) => BinaryOperation::DivInteger,
+            Some(Ok(Token::Percent)) => BinaryOperation::Modulo,
             _ => return Ok(left),
         };
         lexer.next();
@@ -163,9 +163,9 @@ impl ExpressionParser {
         lexer: &mut PeekableLexer<'a, Token<'a>>,
     ) -> Result<Expression<'a>> {
         let unary_op = match lexer.peek() {
-            Some(Token::Plus) => Some(UnaryOperation::Plus),
-            Some(Token::Minus) => Some(UnaryOperation::Minus),
-            Some(Token::LogicalNot) => Some(UnaryOperation::LogicalNot),
+            Some(Ok(Token::Plus)) => Some(UnaryOperation::Plus),
+            Some(Ok(Token::Minus)) => Some(UnaryOperation::Minus),
+            Some(Ok(Token::LogicalNot)) => Some(UnaryOperation::LogicalNot),
             _ => None,
         };
         if unary_op.is_some() {
@@ -179,7 +179,7 @@ impl ExpressionParser {
             None => sub_expr,
         };
 
-        if let Some(Token::Pipe) = lexer.peek() {
+        if let Some(Ok(Token::Pipe)) = lexer.peek() {
             lexer.next();
             let filter_expression = ExpressionParser::parse_filter_expression(lexer)?;
             Ok(Expression::FilteredExpression(FilteredExpression::new(
@@ -197,8 +197,8 @@ impl ExpressionParser {
         loop {
             match lexer.next() {
                 Some(token) => {
-                    if let Token::Identifier(identifier) = token {
-                        let params = if let Some(Token::LBracket) = lexer.peek() {
+                    if let Ok(Token::Identifier(identifier)) = token {
+                        let params = if let Some(Ok(Token::LBracket)) = lexer.peek() {
                             lexer.next();
                             ExpressionParser::parse_call_params(lexer)?
                         } else {
@@ -216,7 +216,7 @@ impl ExpressionParser {
                     SourceLocationInfo::new(range.start, range.end),
                 )));
             }
-            if let Some(Token::Pipe) = lexer.peek() {
+            if let Some(Ok(Token::Pipe)) = lexer.peek() {
                 lexer.next();
             } else {
                 break;
@@ -236,17 +236,17 @@ impl ExpressionParser {
         lexer: &mut PeekableLexer<'a, Token<'a>>,
     ) -> Result<Option<CallParams<'a>>> {
         let mut params = CallParams::default();
-        if let Some(Token::RBracket) = lexer.peek() {
+        if let Some(Ok(Token::RBracket)) = lexer.peek() {
             lexer.next();
             return Ok(None);
         }
 
         loop {
             let mut params_name: Option<String> = None;
-            if let Some(Token::Identifier(keyword)) = lexer.peek() {
+            if let Some(Ok(Token::Identifier(keyword))) = lexer.peek() {
                 params_name = Some(keyword.to_string());
                 lexer.next();
-                if let Some(Token::Assign) = lexer.peek() {
+                if let Some(Ok(Token::Assign)) = lexer.peek() {
                     lexer.next();
                 }
             }
@@ -256,13 +256,13 @@ impl ExpressionParser {
             } else {
                 params.pos_params.push(value);
             }
-            if let Some(Token::Comma) = lexer.peek() {
+            if let Some(Ok(Token::Comma)) = lexer.peek() {
                 lexer.next();
             } else {
                 break;
             }
         }
-        if let Some(Token::RBracket) = lexer.next() {
+        if let Some(Ok(Token::RBracket)) = lexer.next() {
             Ok(Some(params))
         } else {
             let range = lexer.span();
@@ -279,17 +279,17 @@ impl ExpressionParser {
 
         let value = if let Some(tok) = token {
             match tok {
-                Token::IntegerNum(num) => Expression::Constant(Value::from(num)),
-                Token::True => Expression::Constant(Value::from(true)),
-                Token::False => Expression::Constant(Value::from(false)),
-                Token::FloatNum(num) => Expression::Constant(Value::from(num)),
-                Token::String(string) => Expression::Constant(Value::from(string.to_string())),
-                Token::LBracket => ExpressionParser::parse_braced_expression_or_tuple(lexer)?,
-                Token::Identifier(identifier) => {
+                Ok(Token::IntegerNum(num)) => Expression::Constant(Value::from(num)),
+                Ok(Token::True) => Expression::Constant(Value::from(true)),
+                Ok(Token::False) => Expression::Constant(Value::from(false)),
+                Ok(Token::FloatNum(num)) => Expression::Constant(Value::from(num)),
+                Ok(Token::String(string)) => Expression::Constant(Value::from(string.to_string())),
+                Ok(Token::LBracket) => ExpressionParser::parse_braced_expression_or_tuple(lexer)?,
+                Ok(Token::Identifier(identifier)) => {
                     Expression::ValueRef(ValueRefExpression::new(identifier.to_string()))
                 }
-                Token::LSqBracket => ExpressionParser::parse_tuple(lexer)?,
-                Token::LCrlBracket => ExpressionParser::parse_dict(lexer)?,
+                Ok(Token::LSqBracket) => ExpressionParser::parse_tuple(lexer)?,
+                Ok(Token::LCrlBracket) => ExpressionParser::parse_dict(lexer)?,
                 _ => {
                     let range = lexer.span();
 
@@ -310,10 +310,10 @@ impl ExpressionParser {
         let token = lexer.peek();
 
         let value = match token {
-            Some(Token::LSqBracket) | Some(Token::Point) => {
+            Some(Ok(Token::LSqBracket)) | Some(Ok(Token::Point)) => {
                 ExpressionParser::parse_subscript(lexer, value)?
             }
-            Some(Token::LBracket) => todo!(),
+            Some(Ok(Token::LBracket)) => todo!(),
             _ => value,
         };
 
@@ -326,7 +326,7 @@ impl ExpressionParser {
         let mut is_tuple: bool = false;
         let mut exprs = vec![];
         loop {
-            if let Some(Token::RBracket) = lexer.peek() {
+            if let Some(Ok(Token::RBracket)) = lexer.peek() {
                 lexer.next();
                 break;
             }
@@ -344,7 +344,7 @@ impl ExpressionParser {
                     }
                 }
             }
-            if let Some(Token::Comma) = lexer.peek() {
+            if let Some(Ok(Token::Comma)) = lexer.peek() {
                 lexer.next();
                 is_tuple = true;
             }
@@ -366,10 +366,10 @@ impl ExpressionParser {
         let mut subscript = SubscriptExpression::new(Box::new(expression));
         while let Some(token) = lexer.peek() {
             match token {
-                Token::LSqBracket => {
+                Ok(Token::LSqBracket) => {
                     lexer.next();
                     let expr = ExpressionParser::full_expresion_parser(lexer)?;
-                    if let Some(Token::RSqBracket) = lexer.next() {
+                    if let Some(Ok(Token::RSqBracket)) = lexer.next() {
                         subscript.add_index(Box::new(expr));
                     } else {
                         let range = lexer.span();
@@ -380,10 +380,10 @@ impl ExpressionParser {
                         )));
                     }
                 }
-                Token::Point => {
+                Ok(Token::Point) => {
                     lexer.next();
                     let token = lexer.next();
-                    if let Some(Token::Identifier(identifier)) = token {
+                    if let Some(Ok(Token::Identifier(identifier))) = token {
                         subscript.add_index(Box::new(Expression::Constant(Value::String(
                             identifier.to_string(),
                         ))));
@@ -402,7 +402,7 @@ impl ExpressionParser {
     }
     fn parse_tuple<'a>(lexer: &mut PeekableLexer<'a, Token<'a>>) -> Result<Expression<'a>> {
         let mut tuple = TupleExpression::default();
-        if let Some(Token::RSqBracket) = lexer.peek() {
+        if let Some(Ok(Token::RSqBracket)) = lexer.peek() {
             lexer.next();
             return Ok(Expression::Tuple(tuple));
         }
@@ -410,13 +410,13 @@ impl ExpressionParser {
         loop {
             let expr = ExpressionParser::full_expresion_parser(lexer)?;
             tuple.push(Box::new(expr));
-            if let Some(Token::Comma) = lexer.peek() {
+            if let Some(Ok(Token::Comma)) = lexer.peek() {
                 lexer.next();
             } else {
                 break;
             }
         }
-        if let Some(Token::RSqBracket) = lexer.peek() {
+        if let Some(Ok(Token::RSqBracket)) = lexer.peek() {
             lexer.next();
             Ok(Expression::Tuple(tuple))
         } else {
@@ -430,17 +430,17 @@ impl ExpressionParser {
     }
     fn parse_dict<'a>(lexer: &mut PeekableLexer<'a, Token<'a>>) -> Result<Expression<'a>> {
         let mut dict = DictionaryExpression::default();
-        if let Some(Token::RCrlBracket) = lexer.peek() {
+        if let Some(Ok(Token::RCrlBracket)) = lexer.peek() {
             lexer.next();
             return Ok(Expression::Dict(dict));
         }
         loop {
             let key = lexer.next();
-            if let Some(Token::String(key_string)) = key {
-                if let Some(Token::Colon) = lexer.next() {
+            if let Some(Ok(Token::String(key_string))) = key {
+                if let Some(Ok(Token::Colon)) = lexer.next() {
                     let expr = ExpressionParser::full_expresion_parser(lexer)?;
                     dict.push(key_string.to_string(), Box::new(expr));
-                    if let Some(Token::Comma) = lexer.peek() {
+                    if let Some(Ok(Token::Comma)) = lexer.peek() {
                         lexer.next();
                         continue;
                     } else {
@@ -461,7 +461,7 @@ impl ExpressionParser {
                 )));
             }
         }
-        if let Some(Token::RCrlBracket) = lexer.next() {
+        if let Some(Ok(Token::RCrlBracket)) = lexer.next() {
             Ok(Expression::Dict(dict))
         } else {
             let range = lexer.span();
diff --git a/src/lexer/mod.rs b/src/lexer/mod.rs
index 6b9c3fc..8c45f12 100644
--- a/src/lexer/mod.rs
+++ b/src/lexer/mod.rs
@@ -1,12 +1,30 @@
 use logos::{Lexer, Logos};
-use std::borrow::Cow;
+use std::{
+    borrow::Cow,
+    num::{ParseFloatError, ParseIntError},
+};
 
-#[derive(Logos, Debug, PartialEq)]
-pub enum Token<'a> {
-    #[error]
-    #[regex(r"[ \t\n\f]+", logos::skip)]
-    Unknown,
+#[derive(Debug, PartialEq, Clone, Copy, Default)]
+pub struct ParseError;
+
+impl From<ParseIntError> for ParseError {
+    fn from(_: ParseIntError) -> Self {
+        ParseError
+    }
+}
+impl From<ParseFloatError> for ParseError {
+    fn from(_: ParseFloatError) -> Self {
+        ParseError
+    }
+}
+
+#[derive(Debug, Clone, PartialEq, Logos)]
+#[logos(
+    error = ParseError,
+)]
+#[logos(skip r"[ \t\n\f]+")]
+pub enum Token<'a> {
 
     // One-symbol operators
     #[token("<")]
     Lt,
@@ -175,7 +193,7 @@ pub enum Token<'a> {
 
 pub struct PeekableLexer<'source, T: Logos<'source>> {
     lexer: Lexer<'source, T>,
-    peeked: Option<Option<T>>,
+    peeked: Option<Option<Result<T, T::Error>>>,
 }
 
 impl<'source, T> PeekableLexer<'source, T>
@@ -189,7 +207,7 @@ where
         }
     }
     #[inline]
-    pub fn peek(&mut self) -> Option<&T> {
+    pub fn peek(&mut self) -> Option<&Result<T, T::Error>> {
         let lexer = &mut self.lexer;
         self.peeked.get_or_insert_with(|| lexer.next()).as_ref()
     }
@@ -204,7 +222,7 @@ impl<'source, T> Iterator for PeekableLexer<'source, T>
 where
     T: Logos<'source>,
 {
-    type Item = T;
+    type Item = Result<T, T::Error>;
 
     #[inline]
     fn next(&mut self) -> Option<Self::Item> {
@@ -221,13 +239,13 @@ fn lex_numbers() {
     assert_eq!(
         tokens,
         &[
-            Token::IntegerNum(1),
-            Token::IntegerNum(42),
-            Token::Minus,
-            Token::IntegerNum(100),
-            Token::FloatNum(3.18),
-            Token::Minus,
-            Token::FloatNum(77.77),
+            Ok(Token::IntegerNum(1)),
+            Ok(Token::IntegerNum(42)),
+            Ok(Token::Minus),
+            Ok(Token::IntegerNum(100)),
+            Ok(Token::FloatNum(3.18)),
+            Ok(Token::Minus),
+            Ok(Token::FloatNum(77.77)),
         ]
     );
 }
@@ -238,8 +256,8 @@ fn lex_strings() {
     assert_eq!(
         tokens,
         &[
-            Token::String(std::borrow::Cow::Borrowed("some string")),
-            Token::String(std::borrow::Cow::Borrowed("")),
+            Ok(Token::String(std::borrow::Cow::Borrowed("some string"))),
+            Ok(Token::String(std::borrow::Cow::Borrowed(""))),
         ]
     );
 }
@@ -250,25 +268,25 @@ fn lex_math() {
     assert_eq!(
         tokens,
         &[
-            Token::LBracket,
-            Token::IntegerNum(2),
-            Token::Plus,
-            Token::IntegerNum(3),
-            Token::Mul,
-            Token::LBracket,
-            Token::IntegerNum(5),
-            Token::Minus,
-            Token::IntegerNum(1),
-            Token::RBracket,
-            Token::Plus,
-            Token::IntegerNum(2),
-            Token::MulMul,
-            Token::IntegerNum(3),
-            Token::Div,
-            Token::IntegerNum(16),
-            Token::RBracket,
-            Token::Percent,
-            Token::IntegerNum(5),
+            Ok(Token::LBracket),
+            Ok(Token::IntegerNum(2)),
+            Ok(Token::Plus),
+            Ok(Token::IntegerNum(3)),
+            Ok(Token::Mul),
+            Ok(Token::LBracket),
+            Ok(Token::IntegerNum(5)),
+            Ok(Token::Minus),
+            Ok(Token::IntegerNum(1)),
+            Ok(Token::RBracket),
+            Ok(Token::Plus),
+            Ok(Token::IntegerNum(2)),
+            Ok(Token::MulMul),
+            Ok(Token::IntegerNum(3)),
+            Ok(Token::Div),
+            Ok(Token::IntegerNum(16)),
+            Ok(Token::RBracket),
+            Ok(Token::Percent),
+            Ok(Token::IntegerNum(5)),
         ]
     );
 }
diff --git a/src/statement/mod.rs b/src/statement/mod.rs
index b4995b1..de95255 100644
--- a/src/statement/mod.rs
+++ b/src/statement/mod.rs
@@ -257,7 +257,7 @@ pub struct StatementInfo<'a> {
     mode: StatementInfoType,
     pub current_composition: Arc<ComposedRenderer<'a>>,
     compositions: Vec<Arc<ComposedRenderer<'a>>>,
-    _token: Token<'a>,
+    _token: Option<Token<'a>>,
     renderer: Option<Statement<'a>>,
 }
 
@@ -272,7 +272,7 @@ pub enum StatementInfoType {
 impl<'a> StatementInfo<'a> {
     pub fn new(
         mode: StatementInfoType,
-        _token: Token<'a>,
+        _token: Option<Token<'a>>,
         renderers: Arc<ComposedRenderer<'a>>,
     ) -> Self {
         let current_composition = renderers.clone();
diff --git a/src/statement/parser.rs b/src/statement/parser.rs
index 9d3530d..d7dee73 100644
--- a/src/statement/parser.rs
+++ b/src/statement/parser.rs
@@ -19,18 +19,24 @@ impl StatementParser {
         let tok = lexer.next();
 
         match tok {
-            Some(Token::If) => StatementParser::parse_if(&mut lexer, statementinfo_list),
-            Some(Token::Else) => {
+            Some(Ok(Token::If)) => StatementParser::parse_if(&mut lexer, statementinfo_list),
+            Some(Ok(Token::Else)) => {
                 StatementParser::parse_else(statementinfo_list);
                 Ok(())
             }
-            Some(Token::EndIf) => StatementParser::parse_endif(&mut lexer, statementinfo_list),
-            Some(Token::ElIf) => StatementParser::parse_elif(&mut lexer, statementinfo_list),
-            Some(Token::For) => StatementParser::parse_for(&mut lexer, statementinfo_list),
-            Some(Token::EndFor) => StatementParser::parse_endfor(&mut lexer, statementinfo_list),
-            Some(Token::With) => StatementParser::parse_with(&mut lexer, statementinfo_list),
-            Some(Token::EndWith) => StatementParser::parse_endwith(&mut lexer, statementinfo_list),
-            Some(Token::Include) => StatementParser::parse_include(&mut lexer, statementinfo_list),
+            Some(Ok(Token::EndIf)) => StatementParser::parse_endif(&mut lexer, statementinfo_list),
+            Some(Ok(Token::ElIf)) => StatementParser::parse_elif(&mut lexer, statementinfo_list),
+            Some(Ok(Token::For)) => StatementParser::parse_for(&mut lexer, statementinfo_list),
+            Some(Ok(Token::EndFor)) => {
+                StatementParser::parse_endfor(&mut lexer, statementinfo_list)
+            }
+            Some(Ok(Token::With)) => StatementParser::parse_with(&mut lexer, statementinfo_list),
+            Some(Ok(Token::EndWith)) => {
+                StatementParser::parse_endwith(&mut lexer, statementinfo_list)
+            }
+            Some(Ok(Token::Include)) => {
+                StatementParser::parse_include(&mut lexer, statementinfo_list)
+            }
             Some(_) => {
                 let range = lexer.span();
                 Err(Error::from(ParseError::new(
@@ -48,8 +54,11 @@ impl StatementParser {
         let value = ExpressionParser::full_expresion_parser(lexer)?;
         let composed_renderer = Arc::new(ComposedRenderer::new());
         let renderer = Statement::If(IfStatement::new(Box::new(value)));
-        let mut statement_info =
-            StatementInfo::new(StatementInfoType::IfStatement, Token::If, composed_renderer);
+        let mut statement_info = StatementInfo::new(
+            StatementInfoType::IfStatement,
+            Some(Token::If),
+            composed_renderer,
+        );
         statement_info.renderer = Some(renderer);
 
         statementinfo_list.push(statement_info);
@@ -64,7 +73,7 @@ impl StatementParser {
         let renderer = Statement::Else(ElseStatement::new(Some(Box::new(value))));
         let mut statement_info = StatementInfo::new(
             StatementInfoType::ElseIfStatement,
-            Token::Else,
+            Some(Token::Else),
             composed_renderer,
         );
         statement_info.renderer = Some(renderer);
@@ -77,7 +86,7 @@ impl StatementParser {
         let renderer = Statement::Else(ElseStatement::new(None));
         let mut statement_info = StatementInfo::new(
             StatementInfoType::ElseIfStatement,
-            Token::Else,
+            Some(Token::Else),
             composed_renderer,
         );
         statement_info.renderer = Some(renderer);
@@ -129,7 +138,7 @@ impl StatementParser {
     ) -> Result<()> {
         let mut vars = vec![];
         loop {
-            if let Some(Token::Identifier(identifier)) = lexer.next() {
+            if let Some(Ok(Token::Identifier(identifier))) = lexer.next() {
                 vars.push(identifier.to_string());
             } else {
                 let range = lexer.span();
@@ -137,13 +146,13 @@ impl StatementParser {
                     SourceLocationInfo::new_with_range(range.start, range.end),
                 )));
             }
-            if let Some(Token::Comma) = lexer.peek() {
+            if let Some(Ok(Token::Comma)) = lexer.peek() {
                 lexer.next();
             } else {
                 break;
            }
        }
-        if let Some(Token::In) = lexer.next() {
+        if let Some(Ok(Token::In)) = lexer.next() {
            let expression = ExpressionParser::full_expresion_parser(lexer)?;
            if lexer.next().is_some() {
                let range = lexer.span();
@@ -156,7 +165,7 @@ impl StatementParser {
            let renderer = Statement::For(ForStatement::new(vars, Box::new(expression)));
            let mut statement_info = StatementInfo::new(
                StatementInfoType::ForStatement,
-                Token::For,
+                Some(Token::For),
                composed_renderer,
            );
            statement_info.renderer = Some(renderer);
@@ -204,8 +213,8 @@ impl StatementParser {
         statementinfo_list: &mut StatementInfoList<'a>,
     ) -> Result<()> {
         let mut vars: Vec<(String, Box<Expression>)> = vec![];
-        while let Some(Token::Identifier(identifier)) = lexer.next() {
-            let value = if let Some(Token::Assign) = lexer.peek() {
+        while let Some(Ok(Token::Identifier(identifier))) = lexer.next() {
+            let value = if let Some(Ok(Token::Assign)) = lexer.peek() {
                 lexer.next();
                 ExpressionParser::full_expresion_parser(lexer)?
             } else {
@@ -216,7 +225,7 @@ impl StatementParser {
                 )));
             };
             vars.push((identifier.to_string(), Box::new(value)));
-            if let Some(Token::Comma) = lexer.peek() {
+            if let Some(Ok(Token::Comma)) = lexer.peek() {
                 lexer.next();
             } else {
                 break;
@@ -239,7 +248,7 @@ impl StatementParser {
         let renderer = Statement::With(WithStatement::new(vars));
         let mut statement_info = StatementInfo::new(
             StatementInfoType::WithStatement,
-            Token::With,
+            Some(Token::With),
             composed_renderer,
         );
         statement_info.renderer = Some(renderer);
@@ -288,9 +297,9 @@ impl StatementParser {
         let mut is_ignore_missing = false;
         let mut is_with_context = true;
 
-        if let Some(Token::Ignore) = lexer.peek() {
+        if let Some(Ok(Token::Ignore)) = lexer.peek() {
             lexer.next();
-            if let Some(Token::Missing) = lexer.peek() {
+            if let Some(Ok(Token::Missing)) = lexer.peek() {
                 is_ignore_missing = true;
             } else {
                 let range = lexer.span();
@@ -303,8 +312,8 @@ impl StatementParser {
         }
 
         match lexer.next() {
-            Some(Token::With) => {
-                if let Some(Token::Context) = lexer.peek() {
+            Some(Ok(Token::With)) => {
+                if let Some(Ok(Token::Context)) = lexer.peek() {
                     lexer.next();
                 } else {
                     let range = lexer.span();
@@ -314,9 +323,9 @@ impl StatementParser {
                     )));
                 }
             }
-            Some(Token::Without) => {
+            Some(Ok(Token::Without)) => {
                 is_with_context = false;
-                if let Some(Token::Context) = lexer.peek() {
+                if let Some(Ok(Token::Context)) = lexer.peek() {
                     lexer.next();
                 } else {
                     let range = lexer.span();
diff --git a/src/template_parser.rs b/src/template_parser.rs
index 86ea2f0..7d5f415 100644
--- a/src/template_parser.rs
+++ b/src/template_parser.rs
@@ -1,7 +1,6 @@
 use crate::error::{Error, ParseError, ParseErrorKind, Result};
 use crate::expression_parser::ExpressionParser;
 use crate::keyword::{RegexEnum, ROUGH_TOKENIZER};
-use crate::lexer::Token;
 use crate::renderer::ExpressionRenderer;
 use crate::renderer::{ComposedRenderer, RawTextRenderer};
 use crate::source::{Range, SourceLocation, SourceLocationInfo};
@@ -73,7 +72,7 @@ impl<'a> TemplateParser<'a> {
     }
     fn fine_parsing(&self, renderer: Arc<ComposedRenderer<'a>>) -> Result<()> {
         let mut statements_stack: StatementInfoList<'_> = vec![];
-        let root = StatementInfo::new(StatementInfoType::TemplateRoot, Token::Unknown, renderer);
+        let root = StatementInfo::new(StatementInfoType::TemplateRoot, None, renderer);
         statements_stack.push(root);
         for orig_block in self.text_blocks.read().unwrap().iter() {
             match orig_block.mode {