From 8618bb2e93da37f62355bfbc2a47f4fdaa12674f Mon Sep 17 00:00:00 2001
From: LeanSerra <46695152+LeanSerra@users.noreply.github.com>
Date: Thu, 23 Oct 2025 21:58:12 -0300
Subject: [PATCH 01/25] feat: AST generation for arithmetic expressions

---
 rust-toolchain.toml          |    2 +
 src/compiler/ast.rs          |  164 ++++
 src/compiler/context.rs      |    4 +-
 src/compiler/error.rs        |    2 +
 src/compiler/mod.rs          |    1 +
 src/grammar/rules.rs         | 1596 ++++++++++++++++++----------
 src/grammar/rules.rustemo    |   12 +-
 src/grammar/rules_actions.rs |  152 +++-
 src/grammar/rules_builder.rs |   49 +-
 src/grammar/types.rs         |   10 +
 src/lib.rs                   |    1 +
 src/main.rs                  |    8 +-
 12 files changed, 1246 insertions(+), 755 deletions(-)
 create mode 100644 rust-toolchain.toml
 create mode 100644 src/compiler/ast.rs

diff --git a/rust-toolchain.toml b/rust-toolchain.toml
new file mode 100644
index 0000000..5d56faf
--- /dev/null
+++ b/rust-toolchain.toml
@@ -0,0 +1,2 @@
+[toolchain]
+channel = "nightly"
diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs
new file mode 100644
index 0000000..4082485
--- /dev/null
+++ b/src/compiler/ast.rs
@@ -0,0 +1,164 @@
+use std::{array, cell::Cell, mem, rc::Rc};
+
+#[derive(Clone)]
+pub struct Ast {
+    tree: [Rc<Node>; mem::variant_count::<AstPtr>()],
+    stack_t: Vec<Rc<Node>>,
+    stack_e: Vec<Rc<Node>>,
+}
+
+#[derive(Clone, Copy)]
+pub enum AstPtr {
+    Program = 0,
+    Assignment,
+    Number,
+    Factor,
+    Term,
+    ArithmeticExpression,
+    SimpleExpression,
+}
+
+pub enum AstNodeRef {
+    Ptr(AstPtr),
+    Node(Rc<Node>),
+}
+
+pub struct Node {
+    pub value: NodeValue,
+    parent: Cell<Option<Rc<Node>>>,
+    left_child: Option<Rc<Node>>,
+    right_child: Option<Rc<Node>>,
+}
+
+impl Node {
+    pub fn new_leaf(value: NodeValue) -> Self {
+        Self {
+            value,
+            parent: Cell::new(None),
+            left_child: None,
+            right_child: None,
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+pub enum NodeValue {
+    Action(AstAction),
+    Value(String),
+}
+
+#[derive(Clone, Debug)]
+pub enum AstAction {
+    Plus,
+    Sub,
+    Mult,
+    Div,
+    Assign,
+}
+
+impl Default for Ast {
+    fn default() -> Self {
+        Self {
+            tree: array::from_fn(|_| Rc::new(Node::new_leaf(NodeValue::Value("".to_string())))),
+            stack_e: Vec::new(),
+            stack_t: Vec::new(),
+        }
+    }
+}
+
+impl Ast {
+    pub fn new() -> Self {
+        Self::default()
+    }
+
+    pub fn assign_node_to_ptr(&mut self, source: AstNodeRef, dest_ptr: AstPtr) {
+        let node = match source {
+            AstNodeRef::Node(node) => node,
+            AstNodeRef::Ptr(ptr) => self.tree[ptr as usize].clone(),
+        };
+
+        self.tree[dest_ptr as usize] = node;
+    }
+
+    pub fn create_node(
+        &mut self,
+        action: AstAction,
+        left_child_ptr: AstNodeRef,
+        right_child_ptr: AstNodeRef,
+        dest_ptr: AstPtr,
+    ) -> Rc<Node> {
+        let left_child = match left_child_ptr {
+            AstNodeRef::Ptr(ptr) => self.tree.get(ptr as usize).cloned(),
+            AstNodeRef::Node(node) => Some(node),
+        };
+        let right_child = match right_child_ptr {
+            AstNodeRef::Ptr(ptr) => self.tree.get(ptr as usize).cloned(),
+            AstNodeRef::Node(node) => Some(node),
+        };
+
+        let node = Rc::new(Node {
+            value: NodeValue::Action(action),
+            parent: Cell::new(None),
+            left_child: left_child.clone(),
+            right_child: right_child.clone(),
+        });
+
+        if let Some(left) = left_child {
+            left.parent.replace(Some(node.clone()));
+        }
+        if let Some(right) = right_child {
+            right.parent.replace(Some(node.clone()));
+        }
+
+        self.tree[dest_ptr as usize] = node.clone();
+        node
+    }
+
+    pub fn create_leaf(&mut self, value: String, dest_ptr: AstPtr) -> Rc<Node> {
+        let leaf = Rc::new(Node::new_leaf(NodeValue::Value(value)));
+        self.tree[dest_ptr as usize] = leaf.clone();
+        leaf
+    }
+
+    pub fn traverse_from(&self, from: AstPtr) {
+        Ast::recursive_traverse(&self.tree[from as usize], 0);
+    }
+
+    fn recursive_traverse(node: &Rc<Node>, depth: usize) {
+        if let Some(left_child) = &node.left_child {
+            Ast::recursive_traverse(left_child, depth + 1);
+        }
+
+        println!("DEPTH: {depth}|{:?}", node.value);
+
+        if let Some(right_child) = &node.right_child {
+            Ast::recursive_traverse(right_child, depth + 1);
+        }
+    }
+
+    pub fn push_t_stack(&mut self, node: AstNodeRef) {
+        let node = match node {
+            AstNodeRef::Node(node) => node,
+            AstNodeRef::Ptr(ptr) => self.tree[ptr as usize].clone(),
+        };
+
+        self.stack_t.push(node);
+    }
+
+    pub fn pop_t_stack(&mut self) -> Option<Rc<Node>> {
+        self.stack_t.pop()
+    }
+
+    pub fn push_e_stack(&mut self, node: AstNodeRef) {
+        let node = match node {
+            AstNodeRef::Node(node) => node,
+            AstNodeRef::Ptr(ptr) => self.tree[ptr as usize].clone(),
+        };
+
+        self.stack_e.push(node);
+    }
+
+    pub fn pop_e_stack(&mut self) -> Option<Rc<Node>> {
+        self.stack_e.pop()
+    }
+}
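
[Editor's note, not part of the patch: `tree` is a set of named slots indexed by `AstPtr`, one per nonterminal of interest, so grammar actions can always reach the most recently built subtree for that nonterminal; `stack_t`/`stack_e` look intended to park pending `Term`/`ArithmeticExpression` operands. The nightly pin in rust-toolchain.toml is presumably needed for `mem::variant_count`, which is still unstable. A minimal sketch of how this API might be driven by hand, assuming the module sits at `crate::compiler::ast` and using a hypothetical `build_example` helper:]

```rust
use crate::compiler::ast::{Ast, AstAction, AstNodeRef, AstPtr};

fn build_example() {
    let mut ast = Ast::new();

    // Leaves for `b * 2`; create_leaf also stores each leaf in the named slot.
    let b = ast.create_leaf("b".to_string(), AstPtr::Factor);
    let two = ast.create_leaf("2".to_string(), AstPtr::Number);

    // `b * 2` -> a Mult node written into the Term slot.
    ast.create_node(
        AstAction::Mult,
        AstNodeRef::Node(b),
        AstNodeRef::Node(two),
        AstPtr::Term,
    );

    // `a + (b * 2)` -> a Plus node; the right child is read back from the Term slot.
    let a = ast.create_leaf("a".to_string(), AstPtr::Factor);
    ast.create_node(
        AstAction::Plus,
        AstNodeRef::Node(a),
        AstNodeRef::Ptr(AstPtr::Term),
        AstPtr::ArithmeticExpression,
    );

    // `x = a + b * 2` -> the Assign root, stored in the Assignment slot.
    let x = ast.create_leaf("x".to_string(), AstPtr::Factor);
    ast.create_node(
        AstAction::Assign,
        AstNodeRef::Node(x),
        AstNodeRef::Ptr(AstPtr::ArithmeticExpression),
        AstPtr::Assignment,
    );

    // In-order dump, one line per node prefixed with its depth.
    ast.traverse_from(AstPtr::Assignment);
}
```

[Writing into fixed slots instead of returning subtrees appears to be what lets the generated reduce actions below, including the new empty `DummyAE`/`DummyT` marker productions, cooperate on one tree without threading values through the parser.]
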
diff --git a/src/compiler/context.rs b/src/compiler/context.rs
index 1fec4a1..940dc48 100644
--- a/src/compiler/context.rs
+++ b/src/compiler/context.rs
@@ -1,5 +1,5 @@
 use crate::{
-    compiler::error::CompilerError,
+    compiler::{ast::Ast, error::CompilerError},
     grammar::{
         rules_builder::Symbol,
         types::{DataType, TokenFloatLiteral, TokenIntLiteral, TokenStringLiteral},
@@ -35,6 +35,7 @@ pub struct CompilerContext {
     parser_file: File,
     lexer_file: File,
     symbol_table_file: File,
+    pub ast: Ast,
 }
 
 impl CompilerContext {
@@ -52,6 +53,7 @@ impl CompilerContext {
             parser_file,
             lexer_file,
             symbol_table_file,
+            ast: Ast::new(),
         })
     }
 
diff --git a/src/compiler/error.rs b/src/compiler/error.rs
index 5e9e217..025723f 100644
--- a/src/compiler/error.rs
+++ b/src/compiler/error.rs
@@ -15,6 +15,8 @@ pub enum CompilerError {
     Context(String),
     #[error("IO error: {0}")]
     IO(String),
+    #[error("Compiler internal error: {0}. 
This is a bug.")] + Internal(String), } pub fn log_error_and_exit( diff --git a/src/compiler/mod.rs b/src/compiler/mod.rs index c2c6418..36ac1c6 100644 --- a/src/compiler/mod.rs +++ b/src/compiler/mod.rs @@ -1,2 +1,3 @@ +pub mod ast; pub mod context; pub mod error; diff --git a/src/grammar/rules.rs b/src/grammar/rules.rs index f165d14..fafc183 100644 --- a/src/grammar/rules.rs +++ b/src/grammar/rules.rs @@ -15,7 +15,7 @@ use rustemo::debug::{log, logn}; #[cfg(debug_assertions)] use rustemo::colored::*; pub type Input = str; -const STATE_COUNT: usize = 115usize; +const STATE_COUNT: usize = 117usize; const MAX_RECOGNIZERS: usize = 21usize; #[allow(dead_code)] const TERMINAL_COUNT: usize = 39usize; @@ -129,9 +129,11 @@ pub enum ProdKind { ArithmeticExpressionArithmeticExpressionSumTerm, ArithmeticExpressionArithmeticExpressionSubTerm, ArithmeticExpressionArithmeticExpressionTerm, + DummyAEP1, TermTermMulFactor, TermTermDivFactor, TermTermFactor, + DummyTP1, FactorFactorId, FactorFactorNumber, FactorFactorParen, @@ -249,17 +251,19 @@ impl std::fmt::Debug for ProdKind { ProdKind::NumberNumberNegativeFloat => "Number: TokenSub TokenFloatLiteral", ProdKind::NotStatementNot => "NotStatement: TokenNot BooleanExpression", ProdKind::ArithmeticExpressionArithmeticExpressionSumTerm => { - "ArithmeticExpression: ArithmeticExpression TokenSum Term" + "ArithmeticExpression: ArithmeticExpression DummyAE TokenSum Term" } ProdKind::ArithmeticExpressionArithmeticExpressionSubTerm => { - "ArithmeticExpression: ArithmeticExpression TokenSub Term" + "ArithmeticExpression: ArithmeticExpression DummyAE TokenSub Term" } ProdKind::ArithmeticExpressionArithmeticExpressionTerm => { "ArithmeticExpression: Term" } - ProdKind::TermTermMulFactor => "Term: Term TokenMul Factor", - ProdKind::TermTermDivFactor => "Term: Term TokenDiv Factor", + ProdKind::DummyAEP1 => "DummyAE: ", + ProdKind::TermTermMulFactor => "Term: Term DummyT TokenMul Factor", + ProdKind::TermTermDivFactor => "Term: Term DummyT TokenDiv Factor", ProdKind::TermTermFactor => "Term: Factor", + ProdKind::DummyTP1 => "DummyT: ", ProdKind::FactorFactorId => "Factor: TokenId", ProdKind::FactorFactorNumber => "Factor: Number", ProdKind::FactorFactorParen => { @@ -299,7 +303,9 @@ pub enum NonTermKind { Number, NotStatement, ArithmeticExpression, + DummyAE, Term, + DummyT, Factor, } impl From for NonTermKind { @@ -396,9 +402,11 @@ impl From for NonTermKind { ProdKind::ArithmeticExpressionArithmeticExpressionTerm => { NonTermKind::ArithmeticExpression } + ProdKind::DummyAEP1 => NonTermKind::DummyAE, ProdKind::TermTermMulFactor => NonTermKind::Term, ProdKind::TermTermDivFactor => NonTermKind::Term, ProdKind::TermTermFactor => NonTermKind::Term, + ProdKind::DummyTP1 => NonTermKind::DummyT, ProdKind::FactorFactorId => NonTermKind::Factor, ProdKind::FactorFactorNumber => NonTermKind::Factor, ProdKind::FactorFactorParen => NonTermKind::Factor, @@ -472,58 +480,60 @@ pub enum State { TokenFloatLiteralS60, ArithmeticExpressionS61, TokenParOpenS62, - TokenSumS63, - TokenSubS64, - TokenMulS65, - TokenDivS66, - TokenCBOpenS67, - TokenColonS68, - TokenCommaS69, - TokenCBCloseS70, - VarDeclarationsS71, - BooleanExpressionS72, - TokenParOpenS73, - TokenParCloseS74, - TokenEqualS75, - TokenNotEqualS76, - TokenLessS77, - TokenLessEqualS78, - TokenGreaterS79, - TokenGreaterEqualS80, - BooleanExpressionChainS81, - ComparisonOpS82, + DummyAES63, + DummyTS64, + TokenCBOpenS65, + TokenColonS66, + TokenCommaS67, + TokenCBCloseS68, + VarDeclarationsS69, + BooleanExpressionS70, + 
TokenParOpenS71, + TokenParCloseS72, + TokenEqualS73, + TokenNotEqualS74, + TokenLessS75, + TokenLessEqualS76, + TokenGreaterS77, + TokenGreaterEqualS78, + BooleanExpressionChainS79, + ComparisonOpS80, + TokenParCloseS81, + TokenCBCloseS82, TokenParCloseS83, - TokenCBCloseS84, + TokenParCloseS84, TokenParCloseS85, - TokenParCloseS86, - TokenParCloseS87, - TokenDateS88, - TermS89, - TermS90, - FactorS91, - FactorS92, - BodyS93, - TokenIntS94, - TokenFloatS95, - TokenStringS96, - DataTypeS97, - VarDeclarationS98, - ArithmeticExpressionS99, - TokenCBOpenS100, - TokenAndS101, - TokenOrS102, - ConjunctionS103, - SimpleExpressionS104, - TokenCBOpenS105, - TokenParCloseS106, - TokenCBCloseS107, - TokenParCloseS108, - BodyS109, - BooleanExpressionS110, - BooleanExpressionChainS111, - BodyS112, - TokenCBCloseS113, - TokenCBCloseS114, + TokenDateS86, + TokenSumS87, + TokenSubS88, + TokenMulS89, + TokenDivS90, + BodyS91, + TokenIntS92, + TokenFloatS93, + TokenStringS94, + DataTypeS95, + VarDeclarationS96, + ArithmeticExpressionS97, + TokenCBOpenS98, + TokenAndS99, + TokenOrS100, + ConjunctionS101, + SimpleExpressionS102, + TokenCBOpenS103, + TokenParCloseS104, + TermS105, + TermS106, + FactorS107, + FactorS108, + TokenCBCloseS109, + TokenParCloseS110, + BodyS111, + BooleanExpressionS112, + BooleanExpressionChainS113, + BodyS114, + TokenCBCloseS115, + TokenCBCloseS116, } impl StateT for State { fn default_layout() -> Option { @@ -601,58 +611,60 @@ impl std::fmt::Debug for State { State::TokenFloatLiteralS60 => "60:TokenFloatLiteral", State::ArithmeticExpressionS61 => "61:ArithmeticExpression", State::TokenParOpenS62 => "62:TokenParOpen", - State::TokenSumS63 => "63:TokenSum", - State::TokenSubS64 => "64:TokenSub", - State::TokenMulS65 => "65:TokenMul", - State::TokenDivS66 => "66:TokenDiv", - State::TokenCBOpenS67 => "67:TokenCBOpen", - State::TokenColonS68 => "68:TokenColon", - State::TokenCommaS69 => "69:TokenComma", - State::TokenCBCloseS70 => "70:TokenCBClose", - State::VarDeclarationsS71 => "71:VarDeclarations", - State::BooleanExpressionS72 => "72:BooleanExpression", - State::TokenParOpenS73 => "73:TokenParOpen", - State::TokenParCloseS74 => "74:TokenParClose", - State::TokenEqualS75 => "75:TokenEqual", - State::TokenNotEqualS76 => "76:TokenNotEqual", - State::TokenLessS77 => "77:TokenLess", - State::TokenLessEqualS78 => "78:TokenLessEqual", - State::TokenGreaterS79 => "79:TokenGreater", - State::TokenGreaterEqualS80 => "80:TokenGreaterEqual", - State::BooleanExpressionChainS81 => "81:BooleanExpressionChain", - State::ComparisonOpS82 => "82:ComparisonOp", + State::DummyAES63 => "63:DummyAE", + State::DummyTS64 => "64:DummyT", + State::TokenCBOpenS65 => "65:TokenCBOpen", + State::TokenColonS66 => "66:TokenColon", + State::TokenCommaS67 => "67:TokenComma", + State::TokenCBCloseS68 => "68:TokenCBClose", + State::VarDeclarationsS69 => "69:VarDeclarations", + State::BooleanExpressionS70 => "70:BooleanExpression", + State::TokenParOpenS71 => "71:TokenParOpen", + State::TokenParCloseS72 => "72:TokenParClose", + State::TokenEqualS73 => "73:TokenEqual", + State::TokenNotEqualS74 => "74:TokenNotEqual", + State::TokenLessS75 => "75:TokenLess", + State::TokenLessEqualS76 => "76:TokenLessEqual", + State::TokenGreaterS77 => "77:TokenGreater", + State::TokenGreaterEqualS78 => "78:TokenGreaterEqual", + State::BooleanExpressionChainS79 => "79:BooleanExpressionChain", + State::ComparisonOpS80 => "80:ComparisonOp", + State::TokenParCloseS81 => "81:TokenParClose", + State::TokenCBCloseS82 => "82:TokenCBClose", 
State::TokenParCloseS83 => "83:TokenParClose", - State::TokenCBCloseS84 => "84:TokenCBClose", + State::TokenParCloseS84 => "84:TokenParClose", State::TokenParCloseS85 => "85:TokenParClose", - State::TokenParCloseS86 => "86:TokenParClose", - State::TokenParCloseS87 => "87:TokenParClose", - State::TokenDateS88 => "88:TokenDate", - State::TermS89 => "89:Term", - State::TermS90 => "90:Term", - State::FactorS91 => "91:Factor", - State::FactorS92 => "92:Factor", - State::BodyS93 => "93:Body", - State::TokenIntS94 => "94:TokenInt", - State::TokenFloatS95 => "95:TokenFloat", - State::TokenStringS96 => "96:TokenString", - State::DataTypeS97 => "97:DataType", - State::VarDeclarationS98 => "98:VarDeclaration", - State::ArithmeticExpressionS99 => "99:ArithmeticExpression", - State::TokenCBOpenS100 => "100:TokenCBOpen", - State::TokenAndS101 => "101:TokenAnd", - State::TokenOrS102 => "102:TokenOr", - State::ConjunctionS103 => "103:Conjunction", - State::SimpleExpressionS104 => "104:SimpleExpression", - State::TokenCBOpenS105 => "105:TokenCBOpen", - State::TokenParCloseS106 => "106:TokenParClose", - State::TokenCBCloseS107 => "107:TokenCBClose", - State::TokenParCloseS108 => "108:TokenParClose", - State::BodyS109 => "109:Body", - State::BooleanExpressionS110 => "110:BooleanExpression", - State::BooleanExpressionChainS111 => "111:BooleanExpressionChain", - State::BodyS112 => "112:Body", - State::TokenCBCloseS113 => "113:TokenCBClose", - State::TokenCBCloseS114 => "114:TokenCBClose", + State::TokenDateS86 => "86:TokenDate", + State::TokenSumS87 => "87:TokenSum", + State::TokenSubS88 => "88:TokenSub", + State::TokenMulS89 => "89:TokenMul", + State::TokenDivS90 => "90:TokenDiv", + State::BodyS91 => "91:Body", + State::TokenIntS92 => "92:TokenInt", + State::TokenFloatS93 => "93:TokenFloat", + State::TokenStringS94 => "94:TokenString", + State::DataTypeS95 => "95:DataType", + State::VarDeclarationS96 => "96:VarDeclaration", + State::ArithmeticExpressionS97 => "97:ArithmeticExpression", + State::TokenCBOpenS98 => "98:TokenCBOpen", + State::TokenAndS99 => "99:TokenAnd", + State::TokenOrS100 => "100:TokenOr", + State::ConjunctionS101 => "101:Conjunction", + State::SimpleExpressionS102 => "102:SimpleExpression", + State::TokenCBOpenS103 => "103:TokenCBOpen", + State::TokenParCloseS104 => "104:TokenParClose", + State::TermS105 => "105:Term", + State::TermS106 => "106:Term", + State::FactorS107 => "107:Factor", + State::FactorS108 => "108:Factor", + State::TokenCBCloseS109 => "109:TokenCBClose", + State::TokenParCloseS110 => "110:TokenParClose", + State::BodyS111 => "111:Body", + State::BooleanExpressionS112 => "112:BooleanExpression", + State::BooleanExpressionChainS113 => "113:BooleanExpressionChain", + State::BodyS114 => "114:Body", + State::TokenCBCloseS115 => "115:TokenCBClose", + State::TokenCBCloseS116 => "116:TokenCBClose", }; write!(f, "{name}") } @@ -1182,8 +1194,8 @@ fn action_arithmeticexpression_s39( TK::TokenId => { Vec::from(&[Reduce(PK::SimpleExpressionSimpleExpressionArithmetic, 1usize)]) } - TK::TokenSum => Vec::from(&[Shift(State::TokenSumS63)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS64)]), + TK::TokenSum => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), + TK::TokenSub => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), TK::TokenParClose => { Vec::from(&[Reduce(PK::SimpleExpressionSimpleExpressionArithmetic, 1usize)]) } @@ -1249,13 +1261,13 @@ fn action_term_s40(token_kind: TokenKind) -> Vec> { &[Reduce(PK::ArithmeticExpressionArithmeticExpressionTerm, 1usize)], ) } - TK::TokenMul => 
Vec::from(&[Shift(State::TokenMulS65)]), + TK::TokenMul => Vec::from(&[Reduce(PK::DummyTP1, 0usize)]), TK::TokenSub => { Vec::from( &[Reduce(PK::ArithmeticExpressionArithmeticExpressionTerm, 1usize)], ) } - TK::TokenDiv => Vec::from(&[Shift(State::TokenDivS66)]), + TK::TokenDiv => Vec::from(&[Reduce(PK::DummyTP1, 0usize)]), TK::TokenParClose => { Vec::from( &[Reduce(PK::ArithmeticExpressionArithmeticExpressionTerm, 1usize)], @@ -1362,20 +1374,20 @@ fn action_factor_s41(token_kind: TokenKind) -> Vec> { } fn action_tokenparclose_s42(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS67)]), + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS65)]), _ => vec![], } } fn action_tokenid_s43(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenColon => Vec::from(&[Shift(State::TokenColonS68)]), - TK::TokenComma => Vec::from(&[Shift(State::TokenCommaS69)]), + TK::TokenColon => Vec::from(&[Shift(State::TokenColonS66)]), + TK::TokenComma => Vec::from(&[Shift(State::TokenCommaS67)]), _ => vec![], } } fn action_vardeclarations_s44(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS70)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS68)]), _ => vec![], } } @@ -1428,7 +1440,7 @@ fn action_tokennot_s49(token_kind: TokenKind) -> Vec> { } fn action_tokeniszero_s50(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS73)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS71)]), _ => vec![], } } @@ -1442,7 +1454,7 @@ fn action_functioniszero_s51(token_kind: TokenKind) -> Vec Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS74)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS72)]), _ => vec![], } } @@ -1453,12 +1465,12 @@ fn action_simpleexpression_s53(token_kind: TokenKind) -> Vec Vec::from(&[Shift(State::TokenEqualS75)]), - TK::TokenNotEqual => Vec::from(&[Shift(State::TokenNotEqualS76)]), - TK::TokenLess => Vec::from(&[Shift(State::TokenLessS77)]), - TK::TokenLessEqual => Vec::from(&[Shift(State::TokenLessEqualS78)]), - TK::TokenGreater => Vec::from(&[Shift(State::TokenGreaterS79)]), - TK::TokenGreaterEqual => Vec::from(&[Shift(State::TokenGreaterEqualS80)]), + TK::TokenEqual => Vec::from(&[Shift(State::TokenEqualS73)]), + TK::TokenNotEqual => Vec::from(&[Shift(State::TokenNotEqualS74)]), + TK::TokenLess => Vec::from(&[Shift(State::TokenLessS75)]), + TK::TokenLessEqual => Vec::from(&[Shift(State::TokenLessEqualS76)]), + TK::TokenGreater => Vec::from(&[Shift(State::TokenGreaterS77)]), + TK::TokenGreaterEqual => Vec::from(&[Shift(State::TokenGreaterEqualS78)]), TK::TokenAnd => { Vec::from( &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainEmpty, 0usize)], @@ -1484,25 +1496,25 @@ fn action_notstatement_s54(token_kind: TokenKind) -> Vec } fn action_booleanexpression_s55(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS83)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS81)]), _ => vec![], } } fn action_body_s56(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS84)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS82)]), _ => vec![], } } fn action_tokenid_s57(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS85)]), 
+ TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS83)]), _ => vec![], } } fn action_simpleexpression_s58(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS86)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS84)]), _ => vec![], } } @@ -1566,59 +1578,33 @@ fn action_arithmeticexpression_s61( token_kind: TokenKind, ) -> Vec> { match token_kind { - TK::TokenSum => Vec::from(&[Shift(State::TokenSumS63)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS64)]), - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS87)]), + TK::TokenSum => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), + TK::TokenSub => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS85)]), _ => vec![], } } fn action_tokenparopen_s62(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenDate => Vec::from(&[Shift(State::TokenDateS88)]), - _ => vec![], - } -} -fn action_tokensum_s63(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), - _ => vec![], - } -} -fn action_tokensub_s64(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenDate => Vec::from(&[Shift(State::TokenDateS86)]), _ => vec![], } } -fn action_tokenmul_s65(token_kind: TokenKind) -> Vec> { +fn action_dummyae_s63(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenSum => Vec::from(&[Shift(State::TokenSumS87)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS88)]), _ => vec![], } } -fn action_tokendiv_s66(token_kind: TokenKind) -> Vec> { +fn action_dummyt_s64(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenMul => Vec::from(&[Shift(State::TokenMulS89)]), + TK::TokenDiv => Vec::from(&[Shift(State::TokenDivS90)]), _ => vec![], } } -fn action_tokencbopen_s67(token_kind: TokenKind) -> Vec> { +fn action_tokencbopen_s65(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), @@ -1631,21 +1617,21 @@ fn action_tokencbopen_s67(token_kind: TokenKind) -> Vec> _ => vec![], } } -fn action_tokencolon_s68(token_kind: TokenKind) -> Vec> { +fn 
action_tokencolon_s66(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenInt => Vec::from(&[Shift(State::TokenIntS94)]), - TK::TokenFloat => Vec::from(&[Shift(State::TokenFloatS95)]), - TK::TokenString => Vec::from(&[Shift(State::TokenStringS96)]), + TK::TokenInt => Vec::from(&[Shift(State::TokenIntS92)]), + TK::TokenFloat => Vec::from(&[Shift(State::TokenFloatS93)]), + TK::TokenString => Vec::from(&[Shift(State::TokenStringS94)]), _ => vec![], } } -fn action_tokencomma_s69(token_kind: TokenKind) -> Vec> { +fn action_tokencomma_s67(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Shift(State::TokenIdS43)]), _ => vec![], } } -fn action_tokencbclose_s70(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s68(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), TK::TokenId => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), @@ -1658,7 +1644,7 @@ fn action_tokencbclose_s70(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_vardeclarations_s71(token_kind: TokenKind) -> Vec> { +fn action_vardeclarations_s69(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenCBClose => { Vec::from(&[Reduce(PK::VarDeclarationsVarDeclarationsRecursive, 2usize)]) @@ -1666,13 +1652,13 @@ fn action_vardeclarations_s71(token_kind: TokenKind) -> Vec vec![], } } -fn action_booleanexpression_s72(token_kind: TokenKind) -> Vec> { +fn action_booleanexpression_s70(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => Vec::from(&[Reduce(PK::NotStatementNot, 2usize)]), _ => vec![], } } -fn action_tokenparopen_s73(token_kind: TokenKind) -> Vec> { +fn action_tokenparopen_s71(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), @@ -1682,13 +1668,13 @@ fn action_tokenparopen_s73(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokenparclose_s74(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s72(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS100)]), + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS98)]), _ => vec![], } } -fn action_tokenequal_s75(token_kind: TokenKind) -> Vec> { +fn action_tokenequal_s73(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpEqual, 1usize)]) @@ -1707,7 +1693,7 @@ fn action_tokenequal_s75(token_kind: TokenKind) -> Vec> _ => vec![], } } -fn action_tokennotequal_s76(token_kind: TokenKind) -> Vec> { +fn action_tokennotequal_s74(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpNotEqual, 1usize)]) @@ -1728,7 +1714,7 @@ fn action_tokennotequal_s76(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenless_s77(token_kind: TokenKind) -> Vec> { +fn action_tokenless_s75(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpLess, 1usize)]) @@ -1747,7 +1733,7 @@ fn action_tokenless_s77(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokenlessequal_s78(token_kind: TokenKind) -> Vec> { +fn action_tokenlessequal_s76(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpLessEqual, 1usize)]) @@ -1770,7 +1756,7 @@ fn 
action_tokenlessequal_s78(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokengreater_s79(token_kind: TokenKind) -> Vec> { +fn action_tokengreater_s77(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpGreater, 1usize)]) @@ -1789,7 +1775,7 @@ fn action_tokengreater_s79(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokengreaterequal_s80(token_kind: TokenKind) -> Vec> { +fn action_tokengreaterequal_s78(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpGreaterEqual, 1usize)]) @@ -1812,7 +1798,7 @@ fn action_tokengreaterequal_s80(token_kind: TokenKind) -> Vec vec![], } } -fn action_booleanexpressionchain_s81( +fn action_booleanexpressionchain_s79( token_kind: TokenKind, ) -> Vec> { match token_kind { @@ -1821,12 +1807,12 @@ fn action_booleanexpressionchain_s81( &[Reduce(PK::BooleanExpressionBooleanExpressionSimpleExpression, 2usize)], ) } - TK::TokenAnd => Vec::from(&[Shift(State::TokenAndS101)]), - TK::TokenOr => Vec::from(&[Shift(State::TokenOrS102)]), + TK::TokenAnd => Vec::from(&[Shift(State::TokenAndS99)]), + TK::TokenOr => Vec::from(&[Shift(State::TokenOrS100)]), _ => vec![], } } -fn action_comparisonop_s82(token_kind: TokenKind) -> Vec> { +fn action_comparisonop_s80(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), @@ -1837,13 +1823,13 @@ fn action_comparisonop_s82(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokenparclose_s83(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s81(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS105)]), + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS103)]), _ => vec![], } } -fn action_tokencbclose_s84(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s82(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), @@ -1856,7 +1842,7 @@ fn action_tokencbclose_s84(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokenparclose_s85(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s83(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), @@ -1871,7 +1857,7 @@ fn action_tokenparclose_s85(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenparclose_s86(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s84(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::FunctionWriteFunctionWriteCall, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FunctionWriteFunctionWriteCall, 4usize)]), @@ -1890,7 +1876,7 @@ fn action_tokenparclose_s86(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenparclose_s87(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s85(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), @@ -1916,296 +1902,80 @@ fn action_tokenparclose_s87(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokendate_s88(token_kind: TokenKind) -> Vec> { +fn 
action_tokendate_s86(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS106)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS104)]), _ => vec![], } } -fn action_term_s89(token_kind: TokenKind) -> Vec> { +fn action_tokensum_s87(token_kind: TokenKind) -> Vec> { match token_kind { - TK::STOP => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenId => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenSum => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenMul => Vec::from(&[Shift(State::TokenMulS65)]), - TK::TokenSub => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenDiv => Vec::from(&[Shift(State::TokenDivS66)]), - TK::TokenParClose => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenCBClose => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenWhile => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenEqual => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenNotEqual => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenLess => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenLessEqual => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenGreater => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenGreaterEqual => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenIf => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenElse => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenAnd => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenOr => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenRead => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } - TK::TokenWrite => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 3usize)], - ) - } + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), _ => vec![], } } -fn action_term_s90(token_kind: TokenKind) -> Vec> { +fn action_tokensub_s88(token_kind: TokenKind) -> Vec> { match token_kind { - TK::STOP => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenId => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenSum => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenMul => 
Vec::from(&[Shift(State::TokenMulS65)]), - TK::TokenSub => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenDiv => Vec::from(&[Shift(State::TokenDivS66)]), - TK::TokenParClose => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenCBClose => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenWhile => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenEqual => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenNotEqual => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenLess => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenLessEqual => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenGreater => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenGreaterEqual => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenIf => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenElse => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenAnd => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenOr => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenRead => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } - TK::TokenWrite => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 3usize)], - ) - } + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), _ => vec![], } } -fn action_factor_s91(token_kind: TokenKind) -> Vec> { +fn action_tokenmul_s89(token_kind: TokenKind) -> Vec> { match token_kind { - TK::STOP => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenId => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenSum => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenMul => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenSub => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenDiv => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenParClose => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenCBClose => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenWhile => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenEqual => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenNotEqual => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenLess => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenLessEqual => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenGreater => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::TermTermMulFactor, 
3usize)]), - TK::TokenIf => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenAnd => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenOr => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenRead => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), - TK::TokenWrite => Vec::from(&[Reduce(PK::TermTermMulFactor, 3usize)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), _ => vec![], } } -fn action_factor_s92(token_kind: TokenKind) -> Vec> { +fn action_tokendiv_s90(token_kind: TokenKind) -> Vec> { match token_kind { - TK::STOP => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenId => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenSum => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenMul => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenSub => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenDiv => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenParClose => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenCBClose => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenWhile => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenEqual => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenNotEqual => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenLess => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenLessEqual => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenGreater => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenIf => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenAnd => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenOr => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenRead => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), - TK::TokenWrite => Vec::from(&[Reduce(PK::TermTermDivFactor, 3usize)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), _ => vec![], } } -fn action_body_s93(token_kind: TokenKind) -> Vec> { +fn action_body_s91(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS107)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS109)]), _ => vec![], } } -fn action_tokenint_s94(token_kind: TokenKind) -> Vec> { +fn action_tokenint_s92(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Reduce(PK::DataTypeIntType, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::DataTypeIntType, 1usize)]), _ => vec![], } } -fn action_tokenfloat_s95(token_kind: TokenKind) -> Vec> { +fn action_tokenfloat_s93(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Reduce(PK::DataTypeFloatType, 
1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::DataTypeFloatType, 1usize)]), _ => vec![], } } -fn action_tokenstring_s96(token_kind: TokenKind) -> Vec> { +fn action_tokenstring_s94(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Reduce(PK::DataTypeStringType, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::DataTypeStringType, 1usize)]), _ => vec![], } } -fn action_datatype_s97(token_kind: TokenKind) -> Vec> { +fn action_datatype_s95(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => { Vec::from(&[Reduce(PK::VarDeclarationVarDeclarationSingle, 3usize)]) @@ -2216,7 +1986,7 @@ fn action_datatype_s97(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_vardeclaration_s98(token_kind: TokenKind) -> Vec> { +fn action_vardeclaration_s96(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => { Vec::from(&[Reduce(PK::VarDeclarationVarDeclarationRecursive, 3usize)]) @@ -2227,17 +1997,17 @@ fn action_vardeclaration_s98(token_kind: TokenKind) -> Vec vec![], } } -fn action_arithmeticexpression_s99( +fn action_arithmeticexpression_s97( token_kind: TokenKind, ) -> Vec> { match token_kind { - TK::TokenSum => Vec::from(&[Shift(State::TokenSumS63)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS64)]), - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS108)]), + TK::TokenSum => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), + TK::TokenSub => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS110)]), _ => vec![], } } -fn action_tokencbopen_s100(token_kind: TokenKind) -> Vec> { +fn action_tokencbopen_s98(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), @@ -2250,7 +2020,7 @@ fn action_tokencbopen_s100(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokenand_s101(token_kind: TokenKind) -> Vec> { +fn action_tokenand_s99(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]) @@ -2271,7 +2041,7 @@ fn action_tokenand_s101(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokenor_s102(token_kind: TokenKind) -> Vec> { +fn action_tokenor_s100(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]), TK::TokenFloatLiteral => { @@ -2290,7 +2060,7 @@ fn action_tokenor_s102(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_conjunction_s103(token_kind: TokenKind) -> Vec> { +fn action_conjunction_s101(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), @@ -2305,19 +2075,19 @@ fn action_conjunction_s103(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_simpleexpression_s104(token_kind: TokenKind) -> Vec> { +fn action_simpleexpression_s102(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from( &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainEmpty, 0usize)], ) } - TK::TokenEqual => Vec::from(&[Shift(State::TokenEqualS75)]), - TK::TokenNotEqual => Vec::from(&[Shift(State::TokenNotEqualS76)]), - TK::TokenLess => Vec::from(&[Shift(State::TokenLessS77)]), - TK::TokenLessEqual => Vec::from(&[Shift(State::TokenLessEqualS78)]), - TK::TokenGreater => 
Vec::from(&[Shift(State::TokenGreaterS79)]), - TK::TokenGreaterEqual => Vec::from(&[Shift(State::TokenGreaterEqualS80)]), + TK::TokenEqual => Vec::from(&[Shift(State::TokenEqualS73)]), + TK::TokenNotEqual => Vec::from(&[Shift(State::TokenNotEqualS74)]), + TK::TokenLess => Vec::from(&[Shift(State::TokenLessS75)]), + TK::TokenLessEqual => Vec::from(&[Shift(State::TokenLessEqualS76)]), + TK::TokenGreater => Vec::from(&[Shift(State::TokenGreaterS77)]), + TK::TokenGreaterEqual => Vec::from(&[Shift(State::TokenGreaterEqualS78)]), TK::TokenAnd => { Vec::from( &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainEmpty, 0usize)], @@ -2331,7 +2101,7 @@ fn action_simpleexpression_s104(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokencbopen_s105(token_kind: TokenKind) -> Vec> { +fn action_tokencbopen_s103(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), @@ -2344,7 +2114,7 @@ fn action_tokencbopen_s105(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokenparclose_s106(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s104(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => { Vec::from( @@ -2389,13 +2159,269 @@ fn action_tokenparclose_s106(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokencbclose_s107(token_kind: TokenKind) -> Vec> { +fn action_term_s105(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::STOP => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenId => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenSum => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenMul => Vec::from(&[Reduce(PK::DummyTP1, 0usize)]), + TK::TokenSub => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenDiv => Vec::from(&[Reduce(PK::DummyTP1, 0usize)]), + TK::TokenParClose => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenCBClose => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenWhile => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenEqual => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenNotEqual => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenLess => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenLessEqual => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenGreater => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenGreaterEqual => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenIf => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenElse => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenAnd => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenOr => { + Vec::from( + 
&[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenRead => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + TK::TokenWrite => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], + ) + } + _ => vec![], + } +} +fn action_term_s106(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::STOP => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenId => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenSum => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenMul => Vec::from(&[Reduce(PK::DummyTP1, 0usize)]), + TK::TokenSub => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenDiv => Vec::from(&[Reduce(PK::DummyTP1, 0usize)]), + TK::TokenParClose => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenCBClose => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenWhile => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenEqual => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenNotEqual => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenLess => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenLessEqual => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenGreater => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenGreaterEqual => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenIf => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenElse => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenAnd => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenOr => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenRead => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + TK::TokenWrite => { + Vec::from( + &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], + ) + } + _ => vec![], + } +} +fn action_factor_s107(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::STOP => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenId => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenSum => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenMul => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenSub => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenDiv => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenParClose => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenCBClose => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenWhile => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenEqual => 
Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenNotEqual => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenLess => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenLessEqual => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenGreater => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenIf => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenElse => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenAnd => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenOr => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenRead => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + TK::TokenWrite => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), + _ => vec![], + } +} +fn action_factor_s108(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::STOP => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenId => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenSum => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenMul => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenSub => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenDiv => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenParClose => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenCBClose => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenWhile => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenEqual => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenNotEqual => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenLess => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenLessEqual => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenGreater => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenIf => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenElse => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenAnd => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenOr => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenRead => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + TK::TokenWrite => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), + _ => vec![], + } +} +fn action_tokencbclose_s109(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::ProgramProgramWithMain, 6usize)]), _ => vec![], } } -fn action_tokenparclose_s108(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s110(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from(&[Reduce(PK::FunctionIsZeroFunctionIsZeroCall, 4usize)]) @@ -2403,13 +2429,13 @@ fn action_tokenparclose_s108(token_kind: TokenKind) -> Vec vec![], } } -fn action_body_s109(token_kind: TokenKind) -> Vec> { +fn action_body_s111(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS113)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS115)]), _ => vec![], } } -fn action_booleanexpression_s110(token_kind: TokenKind) -> Vec> { +fn action_booleanexpression_s112(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from( @@ -2424,7 +2450,7 @@ fn action_booleanexpression_s110(token_kind: TokenKind) -> 
Vec vec![], } } -fn action_booleanexpressionchain_s111( +fn action_booleanexpressionchain_s113( token_kind: TokenKind, ) -> Vec> { match token_kind { @@ -2446,13 +2472,13 @@ fn action_booleanexpressionchain_s111( _ => vec![], } } -fn action_body_s112(token_kind: TokenKind) -> Vec> { +fn action_body_s114(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS114)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS116)]), _ => vec![], } } -fn action_tokencbclose_s113(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s115(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::WhileLoopWhile, 7usize)]), TK::TokenId => Vec::from(&[Reduce(PK::WhileLoopWhile, 7usize)]), @@ -2465,7 +2491,7 @@ fn action_tokencbclose_s113(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokencbclose_s114(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s116(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), TK::TokenId => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), @@ -2657,9 +2683,31 @@ fn goto_tokenparopen_s34(nonterm_kind: NonTermKind) -> State { } } } +fn goto_arithmeticexpression_s39(nonterm_kind: NonTermKind) -> State { + match nonterm_kind { + NonTermKind::DummyAE => State::DummyAES63, + _ => { + panic!( + "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", + State::ArithmeticExpressionS39 + ) + } + } +} +fn goto_term_s40(nonterm_kind: NonTermKind) -> State { + match nonterm_kind { + NonTermKind::DummyT => State::DummyTS64, + _ => { + panic!( + "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", + State::TermS40 + ) + } + } +} fn goto_vardeclaration_s45(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::VarDeclarations => State::VarDeclarationsS71, + NonTermKind::VarDeclarations => State::VarDeclarationsS69, NonTermKind::VarDeclaration => State::VarDeclarationS45, _ => { panic!( @@ -2672,7 +2720,7 @@ fn goto_vardeclaration_s45(nonterm_kind: NonTermKind) -> State { fn goto_tokennot_s49(nonterm_kind: NonTermKind) -> State { match nonterm_kind { NonTermKind::FunctionIsZero => State::FunctionIsZeroS51, - NonTermKind::BooleanExpression => State::BooleanExpressionS72, + NonTermKind::BooleanExpression => State::BooleanExpressionS70, NonTermKind::SimpleExpression => State::SimpleExpressionS53, NonTermKind::Number => State::NumberS38, NonTermKind::NotStatement => State::NotStatementS54, @@ -2689,8 +2737,8 @@ fn goto_tokennot_s49(nonterm_kind: NonTermKind) -> State { } fn goto_simpleexpression_s53(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::BooleanExpressionChain => State::BooleanExpressionChainS81, - NonTermKind::ComparisonOp => State::ComparisonOpS82, + NonTermKind::BooleanExpressionChain => State::BooleanExpressionChainS79, + NonTermKind::ComparisonOp => State::ComparisonOpS80, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", @@ -2699,140 +2747,162 @@ fn goto_simpleexpression_s53(nonterm_kind: NonTermKind) -> State { } } } -fn goto_tokensum_s63(nonterm_kind: NonTermKind) -> State { +fn goto_arithmeticexpression_s61(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Number => State::NumberS38, - NonTermKind::Term => State::TermS89, - NonTermKind::Factor => State::FactorS41, + NonTermKind::DummyAE => State::DummyAES63, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for 
GOTO state ({:?}).", - State::TokenSumS63 + State::ArithmeticExpressionS61 ) } } } -fn goto_tokensub_s64(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s65(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Number => State::NumberS38, - NonTermKind::Term => State::TermS90, - NonTermKind::Factor => State::FactorS41, + NonTermKind::Body => State::BodyS91, + NonTermKind::FunctionRead => State::FunctionReadS10, + NonTermKind::FunctionWrite => State::FunctionWriteS11, + NonTermKind::Expressions => State::ExpressionsS12, + NonTermKind::Statement => State::StatementS13, + NonTermKind::Assignment => State::AssignmentS14, + NonTermKind::WhileLoop => State::WhileLoopS15, + NonTermKind::IfStatement => State::IfStatementS16, + NonTermKind::ElseStatement => State::ElseStatementS17, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenSubS64 + State::TokenCBOpenS65 ) } } } -fn goto_tokenmul_s65(nonterm_kind: NonTermKind) -> State { +fn goto_tokencolon_s66(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Number => State::NumberS38, - NonTermKind::Factor => State::FactorS91, + NonTermKind::DataType => State::DataTypeS95, + _ => { + panic!( + "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", + State::TokenColonS66 + ) + } + } +} +fn goto_tokencomma_s67(nonterm_kind: NonTermKind) -> State { + match nonterm_kind { + NonTermKind::VarDeclaration => State::VarDeclarationS96, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenMulS65 + State::TokenCommaS67 ) } } } -fn goto_tokendiv_s66(nonterm_kind: NonTermKind) -> State { +fn goto_tokenparopen_s71(nonterm_kind: NonTermKind) -> State { match nonterm_kind { NonTermKind::Number => State::NumberS38, - NonTermKind::Factor => State::FactorS92, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS97, + NonTermKind::Term => State::TermS40, + NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenDivS66 + State::TokenParOpenS71 ) } } } -fn goto_tokencbopen_s67(nonterm_kind: NonTermKind) -> State { +fn goto_booleanexpressionchain_s79(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS93, - NonTermKind::FunctionRead => State::FunctionReadS10, - NonTermKind::FunctionWrite => State::FunctionWriteS11, - NonTermKind::Expressions => State::ExpressionsS12, - NonTermKind::Statement => State::StatementS13, - NonTermKind::Assignment => State::AssignmentS14, - NonTermKind::WhileLoop => State::WhileLoopS15, - NonTermKind::IfStatement => State::IfStatementS16, - NonTermKind::ElseStatement => State::ElseStatementS17, + NonTermKind::Conjunction => State::ConjunctionS101, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS67 + State::BooleanExpressionChainS79 ) } } } -fn goto_tokencolon_s68(nonterm_kind: NonTermKind) -> State { +fn goto_comparisonop_s80(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DataType => State::DataTypeS97, + NonTermKind::SimpleExpression => State::SimpleExpressionS102, + NonTermKind::Number => State::NumberS38, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, + NonTermKind::Term => State::TermS40, + NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenColonS68 + 
State::ComparisonOpS80 ) } } } -fn goto_tokencomma_s69(nonterm_kind: NonTermKind) -> State { +fn goto_tokensum_s87(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::VarDeclaration => State::VarDeclarationS98, + NonTermKind::Number => State::NumberS38, + NonTermKind::Term => State::TermS105, + NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCommaS69 + State::TokenSumS87 ) } } } -fn goto_tokenparopen_s73(nonterm_kind: NonTermKind) -> State { +fn goto_tokensub_s88(nonterm_kind: NonTermKind) -> State { match nonterm_kind { NonTermKind::Number => State::NumberS38, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS99, - NonTermKind::Term => State::TermS40, + NonTermKind::Term => State::TermS106, NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenParOpenS73 + State::TokenSubS88 ) } } } -fn goto_booleanexpressionchain_s81(nonterm_kind: NonTermKind) -> State { +fn goto_tokenmul_s89(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Conjunction => State::ConjunctionS103, + NonTermKind::Number => State::NumberS38, + NonTermKind::Factor => State::FactorS107, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::BooleanExpressionChainS81 + State::TokenMulS89 ) } } } -fn goto_comparisonop_s82(nonterm_kind: NonTermKind) -> State { +fn goto_tokendiv_s90(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::SimpleExpression => State::SimpleExpressionS104, NonTermKind::Number => State::NumberS38, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::Factor => State::FactorS108, + _ => { + panic!( + "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", + State::TokenDivS90 + ) + } + } +} +fn goto_arithmeticexpression_s97(nonterm_kind: NonTermKind) -> State { + match nonterm_kind { + NonTermKind::DummyAE => State::DummyAES63, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::ComparisonOpS82 + State::ArithmeticExpressionS97 ) } } } -fn goto_tokencbopen_s100(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s98(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS109, + NonTermKind::Body => State::BodyS111, NonTermKind::FunctionRead => State::FunctionReadS10, NonTermKind::FunctionWrite => State::FunctionWriteS11, NonTermKind::Expressions => State::ExpressionsS12, @@ -2844,15 +2914,15 @@ fn goto_tokencbopen_s100(nonterm_kind: NonTermKind) -> State { _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS100 + State::TokenCBOpenS98 ) } } } -fn goto_conjunction_s103(nonterm_kind: NonTermKind) -> State { +fn goto_conjunction_s101(nonterm_kind: NonTermKind) -> State { match nonterm_kind { NonTermKind::FunctionIsZero => State::FunctionIsZeroS51, - NonTermKind::BooleanExpression => State::BooleanExpressionS110, + NonTermKind::BooleanExpression => State::BooleanExpressionS112, NonTermKind::SimpleExpression => State::SimpleExpressionS53, NonTermKind::Number => State::NumberS38, NonTermKind::NotStatement => State::NotStatementS54, @@ -2862,26 +2932,26 @@ fn goto_conjunction_s103(nonterm_kind: NonTermKind) -> State { _ => { panic!( "Invalid terminal kind 
({nonterm_kind:?}) for GOTO state ({:?}).", - State::ConjunctionS103 + State::ConjunctionS101 ) } } } -fn goto_simpleexpression_s104(nonterm_kind: NonTermKind) -> State { +fn goto_simpleexpression_s102(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::BooleanExpressionChain => State::BooleanExpressionChainS111, - NonTermKind::ComparisonOp => State::ComparisonOpS82, + NonTermKind::BooleanExpressionChain => State::BooleanExpressionChainS113, + NonTermKind::ComparisonOp => State::ComparisonOpS80, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::SimpleExpressionS104 + State::SimpleExpressionS102 ) } } } -fn goto_tokencbopen_s105(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s103(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS112, + NonTermKind::Body => State::BodyS114, NonTermKind::FunctionRead => State::FunctionReadS10, NonTermKind::FunctionWrite => State::FunctionWriteS11, NonTermKind::Expressions => State::ExpressionsS12, @@ -2893,7 +2963,29 @@ fn goto_tokencbopen_s105(nonterm_kind: NonTermKind) -> State { _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS105 + State::TokenCBOpenS103 + ) + } + } +} +fn goto_term_s105(nonterm_kind: NonTermKind) -> State { + match nonterm_kind { + NonTermKind::DummyT => State::DummyTS64, + _ => { + panic!( + "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", + State::TermS105 + ) + } + } +} +fn goto_term_s106(nonterm_kind: NonTermKind) -> State { + match nonterm_kind { + NonTermKind::DummyT => State::DummyTS64, + _ => { + panic!( + "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", + State::TermS106 ) } } @@ -2966,58 +3058,60 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti action_tokenfloatliteral_s60, action_arithmeticexpression_s61, action_tokenparopen_s62, - action_tokensum_s63, - action_tokensub_s64, - action_tokenmul_s65, - action_tokendiv_s66, - action_tokencbopen_s67, - action_tokencolon_s68, - action_tokencomma_s69, - action_tokencbclose_s70, - action_vardeclarations_s71, - action_booleanexpression_s72, - action_tokenparopen_s73, - action_tokenparclose_s74, - action_tokenequal_s75, - action_tokennotequal_s76, - action_tokenless_s77, - action_tokenlessequal_s78, - action_tokengreater_s79, - action_tokengreaterequal_s80, - action_booleanexpressionchain_s81, - action_comparisonop_s82, + action_dummyae_s63, + action_dummyt_s64, + action_tokencbopen_s65, + action_tokencolon_s66, + action_tokencomma_s67, + action_tokencbclose_s68, + action_vardeclarations_s69, + action_booleanexpression_s70, + action_tokenparopen_s71, + action_tokenparclose_s72, + action_tokenequal_s73, + action_tokennotequal_s74, + action_tokenless_s75, + action_tokenlessequal_s76, + action_tokengreater_s77, + action_tokengreaterequal_s78, + action_booleanexpressionchain_s79, + action_comparisonop_s80, + action_tokenparclose_s81, + action_tokencbclose_s82, action_tokenparclose_s83, - action_tokencbclose_s84, + action_tokenparclose_s84, action_tokenparclose_s85, - action_tokenparclose_s86, - action_tokenparclose_s87, - action_tokendate_s88, - action_term_s89, - action_term_s90, - action_factor_s91, - action_factor_s92, - action_body_s93, - action_tokenint_s94, - action_tokenfloat_s95, - action_tokenstring_s96, - action_datatype_s97, - action_vardeclaration_s98, - action_arithmeticexpression_s99, - action_tokencbopen_s100, - action_tokenand_s101, - 
action_tokenor_s102, - action_conjunction_s103, - action_simpleexpression_s104, - action_tokencbopen_s105, - action_tokenparclose_s106, - action_tokencbclose_s107, - action_tokenparclose_s108, - action_body_s109, - action_booleanexpression_s110, - action_booleanexpressionchain_s111, - action_body_s112, - action_tokencbclose_s113, - action_tokencbclose_s114, + action_tokendate_s86, + action_tokensum_s87, + action_tokensub_s88, + action_tokenmul_s89, + action_tokendiv_s90, + action_body_s91, + action_tokenint_s92, + action_tokenfloat_s93, + action_tokenstring_s94, + action_datatype_s95, + action_vardeclaration_s96, + action_arithmeticexpression_s97, + action_tokencbopen_s98, + action_tokenand_s99, + action_tokenor_s100, + action_conjunction_s101, + action_simpleexpression_s102, + action_tokencbopen_s103, + action_tokenparclose_s104, + action_term_s105, + action_term_s106, + action_factor_s107, + action_factor_s108, + action_tokencbclose_s109, + action_tokenparclose_s110, + action_body_s111, + action_booleanexpression_s112, + action_booleanexpressionchain_s113, + action_body_s114, + action_tokencbclose_s115, + action_tokencbclose_s116, ], gotos: [ goto_aug_s0, @@ -3059,8 +3153,8 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti goto_invalid, goto_invalid, goto_invalid, - goto_invalid, - goto_invalid, + goto_arithmeticexpression_s39, + goto_term_s40, goto_invalid, goto_invalid, goto_invalid, @@ -3081,51 +3175,53 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti goto_invalid, goto_invalid, goto_invalid, + goto_arithmeticexpression_s61, goto_invalid, goto_invalid, - goto_tokensum_s63, - goto_tokensub_s64, - goto_tokenmul_s65, - goto_tokendiv_s66, - goto_tokencbopen_s67, - goto_tokencolon_s68, - goto_tokencomma_s69, goto_invalid, + goto_tokencbopen_s65, + goto_tokencolon_s66, + goto_tokencomma_s67, goto_invalid, goto_invalid, - goto_tokenparopen_s73, goto_invalid, + goto_tokenparopen_s71, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, - goto_booleanexpressionchain_s81, - goto_comparisonop_s82, goto_invalid, + goto_booleanexpressionchain_s79, + goto_comparisonop_s80, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, + goto_tokensum_s87, + goto_tokensub_s88, + goto_tokenmul_s89, + goto_tokendiv_s90, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, + goto_arithmeticexpression_s97, + goto_tokencbopen_s98, goto_invalid, goto_invalid, + goto_conjunction_s101, + goto_simpleexpression_s102, + goto_tokencbopen_s103, goto_invalid, + goto_term_s105, + goto_term_s106, goto_invalid, - goto_tokencbopen_s100, - goto_invalid, - goto_invalid, - goto_conjunction_s103, - goto_simpleexpression_s104, - goto_tokencbopen_s105, goto_invalid, goto_invalid, goto_invalid, @@ -4587,40 +4683,8 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, ], [ - Some((TK::TokenIntLiteral, false)), - Some((TK::TokenFloatLiteral, false)), - Some((TK::TokenId, false)), - Some((TK::TokenSub, false)), - Some((TK::TokenParOpen, false)), - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - ], - [ - Some((TK::TokenIntLiteral, false)), - Some((TK::TokenFloatLiteral, false)), - Some((TK::TokenId, false)), + Some((TK::TokenSum, false)), Some((TK::TokenSub, false)), - Some((TK::TokenParOpen, false)), - None, - None, - None, - None, - None, - None, None, None, 
None, @@ -4631,22 +4695,8 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - ], - [ - Some((TK::TokenIntLiteral, false)), - Some((TK::TokenFloatLiteral, false)), - Some((TK::TokenId, false)), - Some((TK::TokenSub, false)), - Some((TK::TokenParOpen, false)), - None, - None, - None, - None, - None, - None, - None, - None, - None, + None, + None, None, None, None, @@ -4656,11 +4706,11 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, ], [ - Some((TK::TokenIntLiteral, false)), - Some((TK::TokenFloatLiteral, false)), - Some((TK::TokenId, false)), - Some((TK::TokenSub, false)), - Some((TK::TokenParOpen, false)), + Some((TK::TokenMul, false)), + Some((TK::TokenDiv, false)), + None, + None, + None, None, None, None, @@ -5185,96 +5235,96 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, ], [ - Some((TK::STOP, true)), + Some((TK::TokenIntLiteral, false)), + Some((TK::TokenFloatLiteral, false)), Some((TK::TokenId, false)), - Some((TK::TokenSum, false)), - Some((TK::TokenMul, false)), Some((TK::TokenSub, false)), - Some((TK::TokenDiv, false)), - Some((TK::TokenParClose, false)), - Some((TK::TokenCBClose, false)), - Some((TK::TokenWhile, false)), - Some((TK::TokenEqual, false)), - Some((TK::TokenNotEqual, false)), - Some((TK::TokenLess, false)), - Some((TK::TokenLessEqual, false)), - Some((TK::TokenGreater, false)), - Some((TK::TokenGreaterEqual, false)), - Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), - Some((TK::TokenAnd, false)), - Some((TK::TokenOr, false)), - Some((TK::TokenRead, false)), - Some((TK::TokenWrite, false)), + Some((TK::TokenParOpen, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, ], [ - Some((TK::STOP, true)), + Some((TK::TokenIntLiteral, false)), + Some((TK::TokenFloatLiteral, false)), Some((TK::TokenId, false)), - Some((TK::TokenSum, false)), - Some((TK::TokenMul, false)), Some((TK::TokenSub, false)), - Some((TK::TokenDiv, false)), - Some((TK::TokenParClose, false)), - Some((TK::TokenCBClose, false)), - Some((TK::TokenWhile, false)), - Some((TK::TokenEqual, false)), - Some((TK::TokenNotEqual, false)), - Some((TK::TokenLess, false)), - Some((TK::TokenLessEqual, false)), - Some((TK::TokenGreater, false)), - Some((TK::TokenGreaterEqual, false)), - Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), - Some((TK::TokenAnd, false)), - Some((TK::TokenOr, false)), - Some((TK::TokenRead, false)), - Some((TK::TokenWrite, false)), + Some((TK::TokenParOpen, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, ], [ - Some((TK::STOP, true)), + Some((TK::TokenIntLiteral, false)), + Some((TK::TokenFloatLiteral, false)), Some((TK::TokenId, false)), - Some((TK::TokenSum, false)), - Some((TK::TokenMul, false)), Some((TK::TokenSub, false)), - Some((TK::TokenDiv, false)), - Some((TK::TokenParClose, false)), - Some((TK::TokenCBClose, false)), - Some((TK::TokenWhile, false)), - Some((TK::TokenEqual, false)), - Some((TK::TokenNotEqual, false)), - Some((TK::TokenLess, false)), - Some((TK::TokenLessEqual, false)), - Some((TK::TokenGreater, false)), - Some((TK::TokenGreaterEqual, false)), - Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), - Some((TK::TokenAnd, false)), - Some((TK::TokenOr, false)), - Some((TK::TokenRead, false)), - Some((TK::TokenWrite, false)), + 
Some((TK::TokenParOpen, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, ], [ - Some((TK::STOP, true)), + Some((TK::TokenIntLiteral, false)), + Some((TK::TokenFloatLiteral, false)), Some((TK::TokenId, false)), - Some((TK::TokenSum, false)), - Some((TK::TokenMul, false)), Some((TK::TokenSub, false)), - Some((TK::TokenDiv, false)), - Some((TK::TokenParClose, false)), - Some((TK::TokenCBClose, false)), - Some((TK::TokenWhile, false)), - Some((TK::TokenEqual, false)), - Some((TK::TokenNotEqual, false)), - Some((TK::TokenLess, false)), - Some((TK::TokenLessEqual, false)), - Some((TK::TokenGreater, false)), - Some((TK::TokenGreaterEqual, false)), - Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), - Some((TK::TokenAnd, false)), - Some((TK::TokenOr, false)), - Some((TK::TokenRead, false)), - Some((TK::TokenWrite, false)), + Some((TK::TokenParOpen, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, ], [ Some((TK::TokenCBClose, false)), @@ -5598,6 +5648,98 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, ], + [ + Some((TK::STOP, true)), + Some((TK::TokenId, false)), + Some((TK::TokenSum, false)), + Some((TK::TokenMul, false)), + Some((TK::TokenSub, false)), + Some((TK::TokenDiv, false)), + Some((TK::TokenParClose, false)), + Some((TK::TokenCBClose, false)), + Some((TK::TokenWhile, false)), + Some((TK::TokenEqual, false)), + Some((TK::TokenNotEqual, false)), + Some((TK::TokenLess, false)), + Some((TK::TokenLessEqual, false)), + Some((TK::TokenGreater, false)), + Some((TK::TokenGreaterEqual, false)), + Some((TK::TokenIf, false)), + Some((TK::TokenElse, false)), + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), + Some((TK::TokenRead, false)), + Some((TK::TokenWrite, false)), + ], + [ + Some((TK::STOP, true)), + Some((TK::TokenId, false)), + Some((TK::TokenSum, false)), + Some((TK::TokenMul, false)), + Some((TK::TokenSub, false)), + Some((TK::TokenDiv, false)), + Some((TK::TokenParClose, false)), + Some((TK::TokenCBClose, false)), + Some((TK::TokenWhile, false)), + Some((TK::TokenEqual, false)), + Some((TK::TokenNotEqual, false)), + Some((TK::TokenLess, false)), + Some((TK::TokenLessEqual, false)), + Some((TK::TokenGreater, false)), + Some((TK::TokenGreaterEqual, false)), + Some((TK::TokenIf, false)), + Some((TK::TokenElse, false)), + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), + Some((TK::TokenRead, false)), + Some((TK::TokenWrite, false)), + ], + [ + Some((TK::STOP, true)), + Some((TK::TokenId, false)), + Some((TK::TokenSum, false)), + Some((TK::TokenMul, false)), + Some((TK::TokenSub, false)), + Some((TK::TokenDiv, false)), + Some((TK::TokenParClose, false)), + Some((TK::TokenCBClose, false)), + Some((TK::TokenWhile, false)), + Some((TK::TokenEqual, false)), + Some((TK::TokenNotEqual, false)), + Some((TK::TokenLess, false)), + Some((TK::TokenLessEqual, false)), + Some((TK::TokenGreater, false)), + Some((TK::TokenGreaterEqual, false)), + Some((TK::TokenIf, false)), + Some((TK::TokenElse, false)), + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), + Some((TK::TokenRead, false)), + Some((TK::TokenWrite, false)), + ], + [ + Some((TK::STOP, true)), + Some((TK::TokenId, false)), + Some((TK::TokenSum, false)), + Some((TK::TokenMul, false)), + Some((TK::TokenSub, false)), + Some((TK::TokenDiv, false)), + Some((TK::TokenParClose, false)), + 
Some((TK::TokenCBClose, false)), + Some((TK::TokenWhile, false)), + Some((TK::TokenEqual, false)), + Some((TK::TokenNotEqual, false)), + Some((TK::TokenLess, false)), + Some((TK::TokenLessEqual, false)), + Some((TK::TokenGreater, false)), + Some((TK::TokenGreaterEqual, false)), + Some((TK::TokenIf, false)), + Some((TK::TokenElse, false)), + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), + Some((TK::TokenRead, false)), + Some((TK::TokenWrite, false)), + ], [ Some((TK::STOP, false)), None, diff --git a/src/grammar/rules.rustemo b/src/grammar/rules.rustemo index f23b6b4..cbe34e4 100644 --- a/src/grammar/rules.rustemo +++ b/src/grammar/rules.rustemo @@ -75,14 +75,18 @@ Number: TokenIntLiteral {NumberInt} NotStatement: TokenNot BooleanExpression {Not}; -ArithmeticExpression: ArithmeticExpression TokenSum Term {ArithmeticExpressionSumTerm} -| ArithmeticExpression TokenSub Term {ArithmeticExpressionSubTerm} +ArithmeticExpression: ArithmeticExpression DummyAE TokenSum Term {ArithmeticExpressionSumTerm} +| ArithmeticExpression DummyAE TokenSub Term {ArithmeticExpressionSubTerm} | Term {ArithmeticExpressionTerm}; -Term: Term TokenMul Factor {TermMulFactor} -| Term TokenDiv Factor {TermDivFactor} +DummyAE: EMPTY; + +Term: Term DummyT TokenMul Factor {TermMulFactor} +| Term DummyT TokenDiv Factor {TermDivFactor} | Factor {TermFactor}; +DummyT: EMPTY; + Factor: TokenId {FactorId} | Number {FactorNumber} | TokenParOpen ArithmeticExpression TokenParClose {FactorParen}; diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 44372fb..e2403b8 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -1,6 +1,10 @@ -use crate::compiler::context::CompilerContext; +use crate::compiler::{ + ast::{AstAction, AstNodeRef, AstPtr, Node, NodeValue}, + context::CompilerContext, + error::{CompilerError, log_error_and_exit}, +}; pub use crate::grammar::types::*; -use rustemo::Input; +use rustemo::{Context, Input}; /// Parses the keyword "int" pub fn token_int(_ctx: &Ctx, token: Token, compiler_context: &mut CompilerContext) -> TokenInt { @@ -644,6 +648,13 @@ pub fn assignment_assignment_expression( compiler_context.write_to_parser_file(&format!( " -> {token_id} {token_assign} " )); + let leaf = Node::new_leaf(NodeValue::Value(token_id.clone())); + compiler_context.ast.create_node( + AstAction::Assign, + AstNodeRef::Node(leaf.into()), + AstNodeRef::Ptr(AstPtr::SimpleExpression), + AstPtr::Assignment, + ); Assignment::AssignmentExpression(AssignmentExpression { token_id, token_assign, @@ -884,6 +895,10 @@ pub fn simple_expression_simple_expression_arithmetic( compiler_context: &mut CompilerContext, ) -> SimpleExpression { compiler_context.write_to_parser_file(" -> "); + compiler_context.ast.assign_node_to_ptr( + AstNodeRef::Ptr(AstPtr::ArithmeticExpression), + AstPtr::SimpleExpression, + ); SimpleExpression::SimpleExpressionArithmeticExpression(arithmetic_expression) } @@ -986,6 +1001,9 @@ pub fn number_number_int( ) -> Number { compiler_context.push_to_symbol_table(token_int_literal.into()); compiler_context.write_to_parser_file(&format!(" -> {token_int_literal}")); + compiler_context + .ast + .create_leaf(token_int_literal.to_string(), AstPtr::Number); Number::NumberInt(token_int_literal) } @@ -1046,17 +1064,35 @@ pub fn not_statement_not( } } -/// Parses the rule ` -> TokenSum ` +/// Parses the rule ` -> TokenSum ` pub fn arithmetic_expression_arithmetic_expression_sum_term( - _ctx: &Ctx, + ctx: &Ctx, arithmetic_expression: ArithmeticExpression, token_sum: 
TokenSum, term: Term, compiler_context: &mut CompilerContext, ) -> ArithmeticExpression { compiler_context.write_to_parser_file(&format!( - " -> {token_sum} " + " -> {token_sum} " )); + let Some(node) = compiler_context.ast.pop_e_stack() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "ArithmeticExpression stack was empty when parsing ` -> TokenSum `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + compiler_context.ast.create_node( + AstAction::Plus, + AstNodeRef::Node(node), + AstNodeRef::Ptr(AstPtr::Term), + AstPtr::ArithmeticExpression, + ); ArithmeticExpression::ArithmeticExpressionSumTerm(ArithmeticExpressionSumTerm { arithmetic_expression: Box::new(arithmetic_expression), token_sum, @@ -1064,17 +1100,35 @@ pub fn arithmetic_expression_arithmetic_expression_sum_term( }) } -/// Parses the rule ` -> TokenSub ` +/// Parses the rule ` -> TokenSub ` pub fn arithmetic_expression_arithmetic_expression_sub_term( - _ctx: &Ctx, + ctx: &Ctx, arithmetic_expression: ArithmeticExpression, token_sub: TokenSub, term: Term, compiler_context: &mut CompilerContext, ) -> ArithmeticExpression { compiler_context.write_to_parser_file(&format!( - " -> {token_sub} " + " -> {token_sub} " )); + let Some(node) = compiler_context.ast.pop_e_stack() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "ArithmeticExpression stack was empty when parsing ` -> TokenSub `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + compiler_context.ast.create_node( + AstAction::Sub, + AstNodeRef::Node(node), + AstNodeRef::Ptr(AstPtr::Term), + AstPtr::ArithmeticExpression, + ); ArithmeticExpression::ArithmeticExpressionSubTerm(ArithmeticExpressionSubTerm { arithmetic_expression: Box::new(arithmetic_expression), token_sub, @@ -1082,6 +1136,14 @@ pub fn arithmetic_expression_arithmetic_expression_sub_term( }) } +// Parses the rule ` -> EMPTY` +pub fn dummy_ae_empty(_ctx: &Ctx, compiler_context: &mut CompilerContext) -> DummyAE { + compiler_context + .ast + .push_e_stack(AstNodeRef::Ptr(AstPtr::ArithmeticExpression)); + None +} + /// Parses the rule ` -> ` pub fn arithmetic_expression_arithmetic_expression_term( _ctx: &Ctx, @@ -1089,18 +1151,40 @@ pub fn arithmetic_expression_arithmetic_expression_term( compiler_context: &mut CompilerContext, ) -> ArithmeticExpression { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstNodeRef::Ptr(AstPtr::Term), AstPtr::ArithmeticExpression); ArithmeticExpression::ArithmeticExpressionTerm(term) } -/// Parses the rule ` -> TokenMul ` +/// Parses the rule ` -> TokenMul ` pub fn term_term_mul_factor( - _ctx: &Ctx, + ctx: &Ctx, term: Term, token_mul: TokenMul, factor: Factor, compiler_context: &mut CompilerContext, ) -> Term { - compiler_context.write_to_parser_file(&format!(" -> {token_mul} ")); + compiler_context + .write_to_parser_file(&format!(" -> {token_mul} ")); + let Some(node) = compiler_context.ast.pop_t_stack() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "Term stack was empty when parsing ` -> TokenMul `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + compiler_context.ast.create_node( + AstAction::Mult, + AstNodeRef::Node(node), + AstNodeRef::Ptr(AstPtr::Factor), + AstPtr::Term, + ); Term::TermMulFactor(TermMulFactor { term: Box::new(term), token_mul, @@ -1108,15 +1192,34 @@ pub fn term_term_mul_factor( }) } -/// Parses the rule ` -> TokenDiv ` +/// Parses the rule ` -> TokenDiv ` pub fn term_term_div_factor( - _ctx: &Ctx, + ctx: &Ctx, 
term: Term, token_div: TokenDiv, factor: Factor, compiler_context: &mut CompilerContext, ) -> Term { - compiler_context.write_to_parser_file(&format!(" -> {token_div} ")); + compiler_context + .write_to_parser_file(&format!(" -> {token_div} ")); + let Some(node) = compiler_context.ast.pop_t_stack() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "Term stack was empty when parsing ` -> TokenDiv `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + compiler_context.ast.create_node( + AstAction::Div, + AstNodeRef::Node(node), + AstNodeRef::Ptr(AstPtr::Factor), + AstPtr::Term, + ); Term::TermDivFactor(TermDivFactor { term: Box::new(term), token_div, @@ -1124,6 +1227,14 @@ pub fn term_term_div_factor( }) } +// Parses the rule ` -> EMPTY` +pub fn dummy_t_empty(_ctx: &Ctx, compiler_context: &mut CompilerContext) -> DummyT { + compiler_context + .ast + .push_t_stack(AstNodeRef::Ptr(AstPtr::Term)); + None +} + /// Parses the rule ` -> ` pub fn term_term_factor( _ctx: &Ctx, @@ -1131,6 +1242,9 @@ pub fn term_term_factor( compiler_context: &mut CompilerContext, ) -> Term { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstNodeRef::Ptr(AstPtr::Factor), AstPtr::Term); Term::TermFactor(factor) } @@ -1141,6 +1255,9 @@ pub fn factor_factor_id( compiler_context: &mut CompilerContext, ) -> Factor { compiler_context.write_to_parser_file(&format!(" -> {token_id}")); + compiler_context + .ast + .create_leaf(token_id.clone(), AstPtr::Factor); Factor::FactorId(token_id) } @@ -1151,6 +1268,9 @@ pub fn factor_factor_number( compiler_context: &mut CompilerContext, ) -> Factor { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstNodeRef::Ptr(AstPtr::Number), AstPtr::Factor); Factor::FactorNumber(number) } @@ -1165,6 +1285,10 @@ pub fn factor_factor_paren( compiler_context.write_to_parser_file(&format!( " -> {token_par_open} {token_par_close}" )); + compiler_context.ast.assign_node_to_ptr( + AstNodeRef::Ptr(AstPtr::ArithmeticExpression), + AstPtr::Factor, + ); Factor::FactorParen(FactorParen { token_par_open, arithmetic_expression: Box::new(arithmetic_expression), diff --git a/src/grammar/rules_builder.rs b/src/grammar/rules_builder.rs index 2d47eb8..918e633 100644 --- a/src/grammar/rules_builder.rs +++ b/src/grammar/rules_builder.rs @@ -80,6 +80,8 @@ pub enum NonTerminal { ArithmeticExpression(rules_actions::ArithmeticExpression), Term(rules_actions::Term), Factor(rules_actions::Factor), + DummyAE(rules_actions::DummyAE), + DummyT(rules_actions::DummyT), } impl Builder for Compiler { @@ -1311,11 +1313,17 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo ProdKind::ArithmeticExpressionArithmeticExpressionSumTerm => { let mut i = compiler_context .res_stack - .split_off(stack_len - 3usize) + .split_off(stack_len - 4usize) .into_iter(); - match (i.next().unwrap(), i.next().unwrap(), i.next().unwrap()) { + match ( + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + ) { ( Symbol::NonTerminal(NonTerminal::ArithmeticExpression(p0)), + Symbol::NonTerminal(NonTerminal::DummyAE(_)), Symbol::Terminal(Terminal::TokenSum(p1)), Symbol::NonTerminal(NonTerminal::Term(p2)), ) => NonTerminal::ArithmeticExpression( @@ -1333,11 +1341,17 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo ProdKind::ArithmeticExpressionArithmeticExpressionSubTerm => { let mut i = compiler_context .res_stack - .split_off(stack_len - 
3usize) + .split_off(stack_len - 4usize) .into_iter(); - match (i.next().unwrap(), i.next().unwrap(), i.next().unwrap()) { + match ( + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + ) { ( Symbol::NonTerminal(NonTerminal::ArithmeticExpression(p0)), + Symbol::NonTerminal(NonTerminal::DummyAE(_)), Symbol::Terminal(Terminal::TokenSub(p1)), Symbol::NonTerminal(NonTerminal::Term(p2)), ) => NonTerminal::ArithmeticExpression( @@ -1373,11 +1387,17 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo ProdKind::TermTermMulFactor => { let mut i = compiler_context .res_stack - .split_off(stack_len - 3usize) + .split_off(stack_len - 4usize) .into_iter(); - match (i.next().unwrap(), i.next().unwrap(), i.next().unwrap()) { + match ( + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + ) { ( Symbol::NonTerminal(NonTerminal::Term(p0)), + Symbol::NonTerminal(NonTerminal::DummyT(_)), Symbol::Terminal(Terminal::TokenMul(p1)), Symbol::NonTerminal(NonTerminal::Factor(p2)), ) => NonTerminal::Term(rules_actions::term_term_mul_factor( @@ -1393,11 +1413,17 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo ProdKind::TermTermDivFactor => { let mut i = compiler_context .res_stack - .split_off(stack_len - 3usize) + .split_off(stack_len - 4usize) .into_iter(); - match (i.next().unwrap(), i.next().unwrap(), i.next().unwrap()) { + match ( + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + ) { ( Symbol::NonTerminal(NonTerminal::Term(p0)), + Symbol::NonTerminal(NonTerminal::DummyT(_)), Symbol::Terminal(Terminal::TokenDiv(p1)), Symbol::NonTerminal(NonTerminal::Factor(p2)), ) => NonTerminal::Term(rules_actions::term_term_div_factor( @@ -1466,6 +1492,13 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo _ => panic!("Invalid symbol parse stack data."), } } + ProdKind::DummyAEP1 => NonTerminal::DummyAE(rules_actions::dummy_ae_empty( + context, + &mut compiler_context, + )), + ProdKind::DummyTP1 => { + NonTerminal::DummyT(rules_actions::dummy_t_empty(context, &mut compiler_context)) + } }; compiler_context.res_stack.push(Symbol::NonTerminal(prod)); } diff --git a/src/grammar/types.rs b/src/grammar/types.rs index 3655f45..6d19df9 100644 --- a/src/grammar/types.rs +++ b/src/grammar/types.rs @@ -671,6 +671,16 @@ pub struct FactorParen { pub token_par_close: TokenParClose, } +/// Enum used for all the non terminals used as markers for generating the intermediate representation +#[derive(Debug)] +pub enum Dummy {} + +/// Type declaration for the `` non terminal represented by an `Option` because it should alway be EMPTY +pub type DummyAE = Option; + +/// Type declaration for the `` non terminal represented by an `Option` because it should alway be EMPTY +pub type DummyT = Option; + impl Display for TokenKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let text = match self { diff --git a/src/lib.rs b/src/lib.rs index 7ac7eec..6fc2ffc 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,3 +1,4 @@ +#![feature(variant_count)] pub mod compiler; pub mod grammar; pub mod lexer; diff --git a/src/main.rs b/src/main.rs index 41d529e..1a86752 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,6 +1,6 @@ use clap::Parser as ClapParser; use lm_compiler::{ - compiler::{context::Compiler, error::CompilerError}, + compiler::{ast::AstPtr, context::Compiler, error::CompilerError}, grammar::RulesParser, }; use rustemo::Parser; @@ -26,5 
+26,11 @@ fn main() -> Result<(), CompilerError> { println!("{rules}"); + compiler + .inner + .borrow() + .ast + .traverse_from(AstPtr::Assignment); + Ok(()) } From 10b9f990fac2312588f6efae5a8542efa9526037 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Thu, 23 Oct 2025 22:02:27 -0300 Subject: [PATCH 02/25] fix: remove clone from Ast --- src/compiler/ast.rs | 1 - 1 file changed, 1 deletion(-) diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index 4082485..bc2b7cd 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -1,6 +1,5 @@ use std::{array, cell::Cell, mem, rc::Rc}; -#[derive(Clone)] pub struct Ast { tree: [Rc; mem::variant_count::()], stack_t: Vec>, From dd518f7ad1923914dd6b8291c4050ba8e6f5a9de Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Thu, 23 Oct 2025 22:29:59 -0300 Subject: [PATCH 03/25] ci: set rust toolchain to nightly --- .github/workflows/ci.yaml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 79a529e..1ac820b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -19,9 +19,9 @@ jobs: uses: actions/checkout@v5 - name: Install Rust - uses: dtolnay/rust-toolchain@stable + uses: dtolnay/rust-toolchain@nightly with: - toolchain: 1.89.0 + toolchain: 1.92.0 components: clippy, rustfmt - name: Run cargo check @@ -40,9 +40,9 @@ jobs: uses: actions/checkout@v5 - name: Install Rust - uses: dtolnay/rust-toolchain@stable + uses: dtolnay/rust-toolchain@nightly with: - toolchain: 1.89.0 + toolchain: 1.92.0 - name: Run cargo test run: cargo test From ef7e251c913b28b9dbc88cc1ee211ccbd43a076e Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Thu, 23 Oct 2025 22:30:46 -0300 Subject: [PATCH 04/25] ci: set rust toolchain to nightly 1.91.0 --- .github/workflows/ci.yaml | 4 ++-- .github/workflows/releases.yaml | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 1ac820b..06c6ead 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -21,7 +21,7 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@nightly with: - toolchain: 1.92.0 + toolchain: 1.91.0 components: clippy, rustfmt - name: Run cargo check @@ -42,7 +42,7 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@nightly with: - toolchain: 1.92.0 + toolchain: 1.91.0 - name: Run cargo test run: cargo test diff --git a/.github/workflows/releases.yaml b/.github/workflows/releases.yaml index c0cc9bc..32eb0c6 100644 --- a/.github/workflows/releases.yaml +++ b/.github/workflows/releases.yaml @@ -40,9 +40,9 @@ jobs: uses: actions/checkout@v5 - name: Install Rust - uses: dtolnay/rust-toolchain@stable + uses: dtolnay/rust-toolchain@nightly with: - toolchain: 1.89.0 + toolchain: 1.92.0 - name: Build compiler shell: bash From 0992a35aabfd9d2b8b6f85950394072ae3e0672b Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Thu, 23 Oct 2025 22:38:18 -0300 Subject: [PATCH 05/25] ci: remove toolchain version from install rust step --- .github/workflows/ci.yaml | 3 --- .github/workflows/releases.yaml | 2 -- 2 files changed, 5 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 06c6ead..3005c5b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -21,7 +21,6 @@ jobs: - name: Install Rust uses: 
dtolnay/rust-toolchain@nightly with: - toolchain: 1.91.0 components: clippy, rustfmt - name: Run cargo check @@ -41,8 +40,6 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@nightly - with: - toolchain: 1.91.0 - name: Run cargo test run: cargo test diff --git a/.github/workflows/releases.yaml b/.github/workflows/releases.yaml index 32eb0c6..522b5af 100644 --- a/.github/workflows/releases.yaml +++ b/.github/workflows/releases.yaml @@ -41,8 +41,6 @@ jobs: - name: Install Rust uses: dtolnay/rust-toolchain@nightly - with: - toolchain: 1.92.0 - name: Build compiler shell: bash From f1f46db699ca02d1cd52435510082dd99e95eec4 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Fri, 24 Oct 2025 22:25:57 -0300 Subject: [PATCH 06/25] feat: graph AST with Graphviz --- .gitignore | 1 + run.sh | 2 ++ src/compiler/ast.rs | 68 ++++++++++++++++++++++++++++++++++++----- src/compiler/context.rs | 27 +++++++++++++++- src/main.rs | 5 ++- 5 files changed, 91 insertions(+), 12 deletions(-) diff --git a/.gitignore b/.gitignore index 525e848..1a994ca 100644 --- a/.gitignore +++ b/.gitignore @@ -2,3 +2,4 @@ *.lexer *.parser *.symbol_table +*.dot diff --git a/run.sh b/run.sh index 106c1af..5c42d21 100755 --- a/run.sh +++ b/run.sh @@ -2,5 +2,7 @@ cargo build --release if [[ $? -eq 0 ]] then mv ./target/release/lm-compiler . ./lm-compiler inputs/test.txt + dot -Tpng inputs/test.dot -o out.png + rm lm-compiler fi diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index bc2b7cd..1d9ed3f 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -1,4 +1,12 @@ -use std::{array, cell::Cell, mem, rc::Rc}; +use std::{ + array, + cell::Cell, + fmt::Display, + fs::File, + io::{self, Write}, + mem, + rc::Rc, +}; pub struct Ast { tree: [Rc; mem::variant_count::()], @@ -46,6 +54,15 @@ pub enum NodeValue { Value(String), } +impl Display for NodeValue { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Value(value) => write!(f, "{value}"), + Self::Action(action) => write!(f, "{action}"), + } + } +} + #[derive(Clone, Debug)] pub enum AstAction { Plus, @@ -55,6 +72,18 @@ pub enum AstAction { Assign, } +impl Display for AstAction { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Plus => write!(f, "PLUS"), + Self::Sub => write!(f, "SUB"), + Self::Mult => write!(f, "MUL"), + Self::Div => write!(f, "DIV"), + Self::Assign => write!(f, "ASSIGN"), + } + } +} + impl Default for Ast { fn default() -> Self { Self { @@ -119,20 +148,43 @@ impl Ast { leaf } - pub fn traverse_from(&self, from: AstPtr) { - Ast::recursive_traverse(&self.tree[from as usize], 0); + pub fn graph_ast( + &self, + from: AstPtr, + graph_label: &str, + file: &mut File, + ) -> Result<(), io::Error> { + writeln!(file, "graph \"\"")?; + writeln!(file, "{{")?; + writeln!(file, " fontname=\"Arial\"")?; + writeln!(file, " node [fontname=\"Arial\"]")?; + writeln!(file, " edge [fontname=\"Arial\"]")?; + writeln!(file, " label=\"{}\"", graph_label.trim())?; + writeln!(file)?; + Ast::graph_recursive_traverse(&self.tree[from as usize], 0, file)?; + writeln!(file, "}}") } - fn recursive_traverse(node: &Rc, depth: usize) { + fn graph_recursive_traverse( + node: &Rc, + mut node_count: usize, + file: &mut File, + ) -> Result { + let node_name = format!("n{node_count:0>3}"); + writeln!(file, " {node_name:0>3} ;")?; + writeln!(file, " {node_name:0>3} [label=\"{:}\"] ;", node.value)?; if let Some(left_child) = &node.left_child { - 
Ast::recursive_traverse(left_child, depth + 1); + node_count += 1; + writeln!(file, " {node_name} -- n{node_count:0>3} ;")?; + node_count = Ast::graph_recursive_traverse(left_child, node_count, file)?; } - println!("DEPTH: {depth}|{:?}", node.value); - if let Some(right_child) = &node.right_child { - Ast::recursive_traverse(right_child, depth + 1); + node_count += 1; + writeln!(file, " {node_name} -- n{node_count:0>3} ;")?; + node_count = Ast::graph_recursive_traverse(right_child, node_count, file)?; } + Ok(node_count) } pub fn push_t_stack(&mut self, node: AstNodeRef) { diff --git a/src/compiler/context.rs b/src/compiler/context.rs index 940dc48..dae2860 100644 --- a/src/compiler/context.rs +++ b/src/compiler/context.rs @@ -1,5 +1,8 @@ use crate::{ - compiler::{ast::Ast, error::CompilerError}, + compiler::{ + ast::{Ast, AstPtr}, + error::CompilerError, + }, grammar::{ rules_builder::Symbol, types::{DataType, TokenFloatLiteral, TokenIntLiteral, TokenStringLiteral}, @@ -35,6 +38,7 @@ pub struct CompilerContext { parser_file: File, lexer_file: File, symbol_table_file: File, + graph_file: File, pub ast: Ast, } @@ -44,6 +48,7 @@ impl CompilerContext { let parser_file = CompilerContext::open_parser_file(&path)?; let lexer_file = CompilerContext::open_lexer_file(&path)?; let symbol_table_file = CompilerContext::open_symbol_table_file(&path)?; + let graph_file = CompilerContext::open_graph_file(&path)?; Ok(Self { res_stack: Vec::new(), @@ -53,6 +58,7 @@ impl CompilerContext { parser_file, lexer_file, symbol_table_file, + graph_file, ast: Ast::new(), }) } @@ -90,6 +96,15 @@ impl CompilerContext { .map_err(|e| CompilerError::IO(e.to_string())) } + fn open_graph_file(path: &Path) -> Result { + OpenOptions::new() + .create(true) + .truncate(true) + .write(true) + .open(path.with_extension("dot")) + .map_err(|e| CompilerError::IO(e.to_string())) + } + pub fn path(&self) -> String { self.source_code_path.to_string_lossy().into() } @@ -140,6 +155,16 @@ impl CompilerContext { pub fn symbol_exists(&self, symbol: &SymbolTableElement) -> bool { self.symbol_table.contains(symbol) } + + pub fn create_ast_graph(&mut self, from: AstPtr) -> Result<(), CompilerError> { + self.ast + .graph_ast( + from, + &self.source_code_path.to_string_lossy(), + &mut self.graph_file, + ) + .map_err(|e| CompilerError::IO(e.to_string())) + } } #[derive(Clone, Debug)] diff --git a/src/main.rs b/src/main.rs index 1a86752..708d31a 100644 --- a/src/main.rs +++ b/src/main.rs @@ -28,9 +28,8 @@ fn main() -> Result<(), CompilerError> { compiler .inner - .borrow() - .ast - .traverse_from(AstPtr::Assignment); + .borrow_mut() + .create_ast_graph(AstPtr::Assignment)?; Ok(()) } From 002840ba7e508739935c82951d318f6f9b12c1e5 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Fri, 24 Oct 2025 22:26:42 -0300 Subject: [PATCH 07/25] docs: update README.md with graphviz reference --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 8546203..6545a3f 100644 --- a/README.md +++ b/README.md @@ -7,7 +7,8 @@ Simple compiler written in Rust using: ## Prerequisites -Install [Rust](https://rustup.rs/) +-[Rust](https://rustup.rs/) +-[Graphviz](https://graphviz.org/) (optional for AST visualization) ## Features From 040e25e5938158482405967bc7037e903699d2d5 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Fri, 24 Oct 2025 23:30:30 -0300 Subject: [PATCH 08/25] docs: fix prerequisites list formatting --- README.md | 4 ++-- 
1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 6545a3f..1df06e7 100644 --- a/README.md +++ b/README.md @@ -7,8 +7,8 @@ Simple compiler written in Rust using: ## Prerequisites --[Rust](https://rustup.rs/) --[Graphviz](https://graphviz.org/) (optional for AST visualization) +- [Rust](https://rustup.rs/) +- [Graphviz](https://graphviz.org/) (optional for AST visualization) ## Features From e3769e7cee25bdcf3636e46c027ae075d20dd633 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sat, 25 Oct 2025 23:21:44 -0300 Subject: [PATCH 09/25] fix: remove boolean chains (a b > c > d) { + if(a > b and c > d) { a := 1 } else { diff --git a/inputs/test.txt b/inputs/test.txt index d415a40..9d7569b 100644 --- a/inputs/test.txt +++ b/inputs/test.txt @@ -31,7 +31,7 @@ else { b2 := 20 } -while(c33 == 5.34 and d4 <= 2 < c33 or e5 >= 1) { +while(c33 == 5.34 and d4 <= 2 and not c33 or e5 >= 1) { a1 := a1+1 if(not isZero(a1)) { a1 := -125 diff --git a/src/grammar/rules.rs b/src/grammar/rules.rs index fafc183..3d5511d 100644 --- a/src/grammar/rules.rs +++ b/src/grammar/rules.rs @@ -106,15 +106,14 @@ pub enum ProdKind { BooleanExpressionBooleanExpressionSimpleExpression, BooleanExpressionBooleanExpressionTrue, BooleanExpressionBooleanExpressionFalse, - BooleanExpressionBooleanExpressionSimpleExpressionRecursive, BooleanExpressionBooleanExpressionNotStatement, BooleanExpressionBooleanExpressionIsZero, - BooleanExpressionChainBooleanExpressionChainAux, - BooleanExpressionChainBooleanExpressionChainEmpty, + BooleanExpressionBooleanExpressionVar, SimpleExpressionSimpleExpressionArithmetic, SimpleExpressionSimpleExpressionString, ConjunctionConjunctionAnd, ConjunctionConjunctionOr, + ConjunctionConjunctionBoolean, ComparisonOpComparisonOpEqual, ComparisonOpComparisonOpNotEqual, ComparisonOpComparisonOpLess, @@ -197,16 +196,16 @@ impl std::fmt::Debug for ProdKind { ProdKind::DataTypeFloatType => "DataType: TokenFloat", ProdKind::DataTypeStringType => "DataType: TokenString", ProdKind::WhileLoopWhile => { - "WhileLoop: TokenWhile TokenParOpen BooleanExpression TokenParClose TokenCBOpen Body TokenCBClose" + "WhileLoop: TokenWhile TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose" } ProdKind::IfStatementIfStatement => { - "IfStatement: TokenIf TokenParOpen BooleanExpression TokenParClose TokenCBOpen Body TokenCBClose" + "IfStatement: TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose" } ProdKind::ElseStatementElseStatement => { "ElseStatement: TokenElse TokenCBOpen Body TokenCBClose" } ProdKind::BooleanExpressionBooleanExpressionSimpleExpression => { - "BooleanExpression: SimpleExpression BooleanExpressionChain" + "BooleanExpression: SimpleExpression ComparisonOp SimpleExpression" } ProdKind::BooleanExpressionBooleanExpressionTrue => { "BooleanExpression: TokenTrue" @@ -214,20 +213,14 @@ impl std::fmt::Debug for ProdKind { ProdKind::BooleanExpressionBooleanExpressionFalse => { "BooleanExpression: TokenFalse" } - ProdKind::BooleanExpressionBooleanExpressionSimpleExpressionRecursive => { - "BooleanExpression: SimpleExpression BooleanExpressionChain Conjunction BooleanExpression" - } ProdKind::BooleanExpressionBooleanExpressionNotStatement => { "BooleanExpression: NotStatement" } ProdKind::BooleanExpressionBooleanExpressionIsZero => { "BooleanExpression: FunctionIsZero" } - ProdKind::BooleanExpressionChainBooleanExpressionChainAux => { - "BooleanExpressionChain: ComparisonOp SimpleExpression 
BooleanExpressionChain" - } - ProdKind::BooleanExpressionChainBooleanExpressionChainEmpty => { - "BooleanExpressionChain: " + ProdKind::BooleanExpressionBooleanExpressionVar => { + "BooleanExpression: TokenId" } ProdKind::SimpleExpressionSimpleExpressionArithmetic => { "SimpleExpression: ArithmeticExpression" @@ -235,8 +228,13 @@ impl std::fmt::Debug for ProdKind { ProdKind::SimpleExpressionSimpleExpressionString => { "SimpleExpression: TokenStringLiteral" } - ProdKind::ConjunctionConjunctionAnd => "Conjunction: TokenAnd", - ProdKind::ConjunctionConjunctionOr => "Conjunction: TokenOr", + ProdKind::ConjunctionConjunctionAnd => { + "Conjunction: BooleanExpression TokenAnd Conjunction" + } + ProdKind::ConjunctionConjunctionOr => { + "Conjunction: BooleanExpression TokenOr Conjunction" + } + ProdKind::ConjunctionConjunctionBoolean => "Conjunction: BooleanExpression", ProdKind::ComparisonOpComparisonOpEqual => "ComparisonOp: TokenEqual", ProdKind::ComparisonOpComparisonOpNotEqual => "ComparisonOp: TokenNotEqual", ProdKind::ComparisonOpComparisonOpLess => "ComparisonOp: TokenLess", @@ -296,7 +294,6 @@ pub enum NonTermKind { IfStatement, ElseStatement, BooleanExpression, - BooleanExpressionChain, SimpleExpression, Conjunction, ComparisonOp, @@ -359,20 +356,14 @@ impl From for NonTermKind { ProdKind::BooleanExpressionBooleanExpressionFalse => { NonTermKind::BooleanExpression } - ProdKind::BooleanExpressionBooleanExpressionSimpleExpressionRecursive => { - NonTermKind::BooleanExpression - } ProdKind::BooleanExpressionBooleanExpressionNotStatement => { NonTermKind::BooleanExpression } ProdKind::BooleanExpressionBooleanExpressionIsZero => { NonTermKind::BooleanExpression } - ProdKind::BooleanExpressionChainBooleanExpressionChainAux => { - NonTermKind::BooleanExpressionChain - } - ProdKind::BooleanExpressionChainBooleanExpressionChainEmpty => { - NonTermKind::BooleanExpressionChain + ProdKind::BooleanExpressionBooleanExpressionVar => { + NonTermKind::BooleanExpression } ProdKind::SimpleExpressionSimpleExpressionArithmetic => { NonTermKind::SimpleExpression @@ -382,6 +373,7 @@ impl From for NonTermKind { } ProdKind::ConjunctionConjunctionAnd => NonTermKind::Conjunction, ProdKind::ConjunctionConjunctionOr => NonTermKind::Conjunction, + ProdKind::ConjunctionConjunctionBoolean => NonTermKind::Conjunction, ProdKind::ComparisonOpComparisonOpEqual => NonTermKind::ComparisonOp, ProdKind::ComparisonOpComparisonOpNotEqual => NonTermKind::ComparisonOp, ProdKind::ComparisonOpComparisonOpLess => NonTermKind::ComparisonOp, @@ -464,73 +456,73 @@ pub enum State { VarDeclarationsS44, VarDeclarationS45, ExpressionsS46, - TokenTrueS47, - TokenFalseS48, - TokenNotS49, - TokenIsZeroS50, - FunctionIsZeroS51, - BooleanExpressionS52, - SimpleExpressionS53, - NotStatementS54, - BooleanExpressionS55, - BodyS56, - TokenIdS57, - SimpleExpressionS58, - TokenIntLiteralS59, - TokenFloatLiteralS60, - ArithmeticExpressionS61, - TokenParOpenS62, - DummyAES63, - DummyTS64, - TokenCBOpenS65, - TokenColonS66, - TokenCommaS67, - TokenCBCloseS68, - VarDeclarationsS69, - BooleanExpressionS70, - TokenParOpenS71, - TokenParCloseS72, - TokenEqualS73, - TokenNotEqualS74, - TokenLessS75, - TokenLessEqualS76, - TokenGreaterS77, - TokenGreaterEqualS78, - BooleanExpressionChainS79, - ComparisonOpS80, - TokenParCloseS81, - TokenCBCloseS82, + TokenIdS47, + TokenTrueS48, + TokenFalseS49, + TokenNotS50, + TokenIsZeroS51, + FunctionIsZeroS52, + BooleanExpressionS53, + SimpleExpressionS54, + ConjunctionS55, + NotStatementS56, + ConjunctionS57, + BodyS58, 
+ TokenIdS59, + SimpleExpressionS60, + TokenIntLiteralS61, + TokenFloatLiteralS62, + ArithmeticExpressionS63, + TokenParOpenS64, + DummyAES65, + DummyTS66, + TokenCBOpenS67, + TokenColonS68, + TokenCommaS69, + TokenCBCloseS70, + VarDeclarationsS71, + BooleanExpressionS72, + TokenParOpenS73, + TokenAndS74, + TokenOrS75, + TokenEqualS76, + TokenNotEqualS77, + TokenLessS78, + TokenLessEqualS79, + TokenGreaterS80, + TokenGreaterEqualS81, + ComparisonOpS82, TokenParCloseS83, TokenParCloseS84, - TokenParCloseS85, - TokenDateS86, - TokenSumS87, - TokenSubS88, - TokenMulS89, - TokenDivS90, - BodyS91, - TokenIntS92, - TokenFloatS93, - TokenStringS94, - DataTypeS95, - VarDeclarationS96, - ArithmeticExpressionS97, - TokenCBOpenS98, - TokenAndS99, - TokenOrS100, + TokenCBCloseS85, + TokenParCloseS86, + TokenParCloseS87, + TokenParCloseS88, + TokenDateS89, + TokenSumS90, + TokenSubS91, + TokenMulS92, + TokenDivS93, + BodyS94, + TokenIntS95, + TokenFloatS96, + TokenStringS97, + DataTypeS98, + VarDeclarationS99, + ArithmeticExpressionS100, ConjunctionS101, - SimpleExpressionS102, - TokenCBOpenS103, - TokenParCloseS104, - TermS105, - TermS106, - FactorS107, - FactorS108, - TokenCBCloseS109, - TokenParCloseS110, - BodyS111, - BooleanExpressionS112, - BooleanExpressionChainS113, + ConjunctionS102, + SimpleExpressionS103, + TokenCBOpenS104, + TokenCBOpenS105, + TokenParCloseS106, + TermS107, + TermS108, + FactorS109, + FactorS110, + TokenCBCloseS111, + TokenParCloseS112, + BodyS113, BodyS114, TokenCBCloseS115, TokenCBCloseS116, @@ -595,73 +587,73 @@ impl std::fmt::Debug for State { State::VarDeclarationsS44 => "44:VarDeclarations", State::VarDeclarationS45 => "45:VarDeclaration", State::ExpressionsS46 => "46:Expressions", - State::TokenTrueS47 => "47:TokenTrue", - State::TokenFalseS48 => "48:TokenFalse", - State::TokenNotS49 => "49:TokenNot", - State::TokenIsZeroS50 => "50:TokenIsZero", - State::FunctionIsZeroS51 => "51:FunctionIsZero", - State::BooleanExpressionS52 => "52:BooleanExpression", - State::SimpleExpressionS53 => "53:SimpleExpression", - State::NotStatementS54 => "54:NotStatement", - State::BooleanExpressionS55 => "55:BooleanExpression", - State::BodyS56 => "56:Body", - State::TokenIdS57 => "57:TokenId", - State::SimpleExpressionS58 => "58:SimpleExpression", - State::TokenIntLiteralS59 => "59:TokenIntLiteral", - State::TokenFloatLiteralS60 => "60:TokenFloatLiteral", - State::ArithmeticExpressionS61 => "61:ArithmeticExpression", - State::TokenParOpenS62 => "62:TokenParOpen", - State::DummyAES63 => "63:DummyAE", - State::DummyTS64 => "64:DummyT", - State::TokenCBOpenS65 => "65:TokenCBOpen", - State::TokenColonS66 => "66:TokenColon", - State::TokenCommaS67 => "67:TokenComma", - State::TokenCBCloseS68 => "68:TokenCBClose", - State::VarDeclarationsS69 => "69:VarDeclarations", - State::BooleanExpressionS70 => "70:BooleanExpression", - State::TokenParOpenS71 => "71:TokenParOpen", - State::TokenParCloseS72 => "72:TokenParClose", - State::TokenEqualS73 => "73:TokenEqual", - State::TokenNotEqualS74 => "74:TokenNotEqual", - State::TokenLessS75 => "75:TokenLess", - State::TokenLessEqualS76 => "76:TokenLessEqual", - State::TokenGreaterS77 => "77:TokenGreater", - State::TokenGreaterEqualS78 => "78:TokenGreaterEqual", - State::BooleanExpressionChainS79 => "79:BooleanExpressionChain", - State::ComparisonOpS80 => "80:ComparisonOp", - State::TokenParCloseS81 => "81:TokenParClose", - State::TokenCBCloseS82 => "82:TokenCBClose", + State::TokenIdS47 => "47:TokenId", + State::TokenTrueS48 => "48:TokenTrue", + 
State::TokenFalseS49 => "49:TokenFalse", + State::TokenNotS50 => "50:TokenNot", + State::TokenIsZeroS51 => "51:TokenIsZero", + State::FunctionIsZeroS52 => "52:FunctionIsZero", + State::BooleanExpressionS53 => "53:BooleanExpression", + State::SimpleExpressionS54 => "54:SimpleExpression", + State::ConjunctionS55 => "55:Conjunction", + State::NotStatementS56 => "56:NotStatement", + State::ConjunctionS57 => "57:Conjunction", + State::BodyS58 => "58:Body", + State::TokenIdS59 => "59:TokenId", + State::SimpleExpressionS60 => "60:SimpleExpression", + State::TokenIntLiteralS61 => "61:TokenIntLiteral", + State::TokenFloatLiteralS62 => "62:TokenFloatLiteral", + State::ArithmeticExpressionS63 => "63:ArithmeticExpression", + State::TokenParOpenS64 => "64:TokenParOpen", + State::DummyAES65 => "65:DummyAE", + State::DummyTS66 => "66:DummyT", + State::TokenCBOpenS67 => "67:TokenCBOpen", + State::TokenColonS68 => "68:TokenColon", + State::TokenCommaS69 => "69:TokenComma", + State::TokenCBCloseS70 => "70:TokenCBClose", + State::VarDeclarationsS71 => "71:VarDeclarations", + State::BooleanExpressionS72 => "72:BooleanExpression", + State::TokenParOpenS73 => "73:TokenParOpen", + State::TokenAndS74 => "74:TokenAnd", + State::TokenOrS75 => "75:TokenOr", + State::TokenEqualS76 => "76:TokenEqual", + State::TokenNotEqualS77 => "77:TokenNotEqual", + State::TokenLessS78 => "78:TokenLess", + State::TokenLessEqualS79 => "79:TokenLessEqual", + State::TokenGreaterS80 => "80:TokenGreater", + State::TokenGreaterEqualS81 => "81:TokenGreaterEqual", + State::ComparisonOpS82 => "82:ComparisonOp", State::TokenParCloseS83 => "83:TokenParClose", State::TokenParCloseS84 => "84:TokenParClose", - State::TokenParCloseS85 => "85:TokenParClose", - State::TokenDateS86 => "86:TokenDate", - State::TokenSumS87 => "87:TokenSum", - State::TokenSubS88 => "88:TokenSub", - State::TokenMulS89 => "89:TokenMul", - State::TokenDivS90 => "90:TokenDiv", - State::BodyS91 => "91:Body", - State::TokenIntS92 => "92:TokenInt", - State::TokenFloatS93 => "93:TokenFloat", - State::TokenStringS94 => "94:TokenString", - State::DataTypeS95 => "95:DataType", - State::VarDeclarationS96 => "96:VarDeclaration", - State::ArithmeticExpressionS97 => "97:ArithmeticExpression", - State::TokenCBOpenS98 => "98:TokenCBOpen", - State::TokenAndS99 => "99:TokenAnd", - State::TokenOrS100 => "100:TokenOr", + State::TokenCBCloseS85 => "85:TokenCBClose", + State::TokenParCloseS86 => "86:TokenParClose", + State::TokenParCloseS87 => "87:TokenParClose", + State::TokenParCloseS88 => "88:TokenParClose", + State::TokenDateS89 => "89:TokenDate", + State::TokenSumS90 => "90:TokenSum", + State::TokenSubS91 => "91:TokenSub", + State::TokenMulS92 => "92:TokenMul", + State::TokenDivS93 => "93:TokenDiv", + State::BodyS94 => "94:Body", + State::TokenIntS95 => "95:TokenInt", + State::TokenFloatS96 => "96:TokenFloat", + State::TokenStringS97 => "97:TokenString", + State::DataTypeS98 => "98:DataType", + State::VarDeclarationS99 => "99:VarDeclaration", + State::ArithmeticExpressionS100 => "100:ArithmeticExpression", State::ConjunctionS101 => "101:Conjunction", - State::SimpleExpressionS102 => "102:SimpleExpression", - State::TokenCBOpenS103 => "103:TokenCBOpen", - State::TokenParCloseS104 => "104:TokenParClose", - State::TermS105 => "105:Term", - State::TermS106 => "106:Term", - State::FactorS107 => "107:Factor", - State::FactorS108 => "108:Factor", - State::TokenCBCloseS109 => "109:TokenCBClose", - State::TokenParCloseS110 => "110:TokenParClose", - State::BodyS111 => "111:Body", - 
State::BooleanExpressionS112 => "112:BooleanExpression", - State::BooleanExpressionChainS113 => "113:BooleanExpressionChain", + State::ConjunctionS102 => "102:Conjunction", + State::SimpleExpressionS103 => "103:SimpleExpression", + State::TokenCBOpenS104 => "104:TokenCBOpen", + State::TokenCBOpenS105 => "105:TokenCBOpen", + State::TokenParCloseS106 => "106:TokenParClose", + State::TermS107 => "107:Term", + State::TermS108 => "108:Term", + State::FactorS109 => "109:Factor", + State::FactorS110 => "110:Factor", + State::TokenCBCloseS111 => "111:TokenCBClose", + State::TokenParCloseS112 => "112:TokenParClose", + State::BodyS113 => "113:Body", State::BodyS114 => "114:Body", State::TokenCBCloseS115 => "115:TokenCBClose", State::TokenCBCloseS116 => "116:TokenCBClose", @@ -897,13 +889,13 @@ fn action_tokenparopen_s22(token_kind: TokenKind) -> Vec TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS47)]), TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), - TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS47)]), - TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS48)]), - TK::TokenNot => Vec::from(&[Shift(State::TokenNotS49)]), - TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS50)]), + TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS48)]), + TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS49)]), + TK::TokenNot => Vec::from(&[Shift(State::TokenNotS50)]), + TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS51)]), _ => vec![], } } @@ -912,13 +904,13 @@ fn action_tokenparopen_s23(token_kind: TokenKind) -> Vec TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS47)]), TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), - TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS47)]), - TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS48)]), - TK::TokenNot => Vec::from(&[Shift(State::TokenNotS49)]), - TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS50)]), + TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS48)]), + TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS49)]), + TK::TokenNot => Vec::from(&[Shift(State::TokenNotS50)]), + TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS51)]), _ => vec![], } } @@ -937,7 +929,7 @@ fn action_tokencbopen_s24(token_kind: TokenKind) -> Vec> } fn action_tokenparopen_s25(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenId => Vec::from(&[Shift(State::TokenIdS57)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS59)]), _ => vec![], } } @@ -1103,8 +1095,8 @@ fn action_tokenid_s32(token_kind: TokenKind) -> Vec> { } fn action_tokensub_s33(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS59)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS60)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS61)]), + 
TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS62)]), _ => vec![], } } @@ -1120,7 +1112,7 @@ fn action_tokenparopen_s34(token_kind: TokenKind) -> Vec } fn action_tokenconvdate_s35(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS62)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS64)]), _ => vec![], } } @@ -1374,20 +1366,20 @@ fn action_factor_s41(token_kind: TokenKind) -> Vec> { } fn action_tokenparclose_s42(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS65)]), + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS67)]), _ => vec![], } } fn action_tokenid_s43(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenColon => Vec::from(&[Shift(State::TokenColonS66)]), - TK::TokenComma => Vec::from(&[Shift(State::TokenCommaS67)]), + TK::TokenColon => Vec::from(&[Shift(State::TokenColonS68)]), + TK::TokenComma => Vec::from(&[Shift(State::TokenCommaS69)]), _ => vec![], } } fn action_vardeclarations_s44(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS68)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS70)]), _ => vec![], } } @@ -1407,86 +1399,133 @@ fn action_expressions_s46(token_kind: TokenKind) -> Vec> _ => vec![], } } -fn action_tokentrue_s47(token_kind: TokenKind) -> Vec> { +fn action_tokenid_s47(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::TokenSum => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), + TK::TokenMul => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), + TK::TokenSub => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), + TK::TokenDiv => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), + TK::TokenParClose => { + Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionVar, 1usize)]) + } + TK::TokenEqual => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), + TK::TokenNotEqual => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), + TK::TokenLess => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), + TK::TokenLessEqual => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), + TK::TokenGreater => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), + TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), + TK::TokenAnd => { + Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionVar, 1usize)]) + } + TK::TokenOr => { + Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionVar, 1usize)]) + } + _ => vec![], + } +} +fn action_tokentrue_s48(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionTrue, 1usize)]) } + TK::TokenAnd => { + Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionTrue, 1usize)]) + } + TK::TokenOr => { + Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionTrue, 1usize)]) + } _ => vec![], } } -fn action_tokenfalse_s48(token_kind: TokenKind) -> Vec> { +fn action_tokenfalse_s49(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionFalse, 1usize)]) } + TK::TokenAnd => { + Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionFalse, 1usize)]) + } + TK::TokenOr => { + Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionFalse, 1usize)]) + } _ => vec![], } } -fn action_tokennot_s49(token_kind: TokenKind) -> Vec> { +fn action_tokennot_s50(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => 
Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS47)]), TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), - TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS47)]), - TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS48)]), - TK::TokenNot => Vec::from(&[Shift(State::TokenNotS49)]), - TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS50)]), + TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS48)]), + TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS49)]), + TK::TokenNot => Vec::from(&[Shift(State::TokenNotS50)]), + TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS51)]), _ => vec![], } } -fn action_tokeniszero_s50(token_kind: TokenKind) -> Vec> { +fn action_tokeniszero_s51(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS71)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS73)]), _ => vec![], } } -fn action_functioniszero_s51(token_kind: TokenKind) -> Vec> { +fn action_functioniszero_s52(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionIsZero, 1usize)]) } + TK::TokenAnd => { + Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionIsZero, 1usize)]) + } + TK::TokenOr => { + Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionIsZero, 1usize)]) + } + _ => vec![], + } +} +fn action_booleanexpression_s53(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::TokenParClose => { + Vec::from(&[Reduce(PK::ConjunctionConjunctionBoolean, 1usize)]) + } + TK::TokenAnd => Vec::from(&[Shift(State::TokenAndS74)]), + TK::TokenOr => Vec::from(&[Shift(State::TokenOrS75)]), _ => vec![], } } -fn action_booleanexpression_s52(token_kind: TokenKind) -> Vec> { +fn action_simpleexpression_s54(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS72)]), + TK::TokenEqual => Vec::from(&[Shift(State::TokenEqualS76)]), + TK::TokenNotEqual => Vec::from(&[Shift(State::TokenNotEqualS77)]), + TK::TokenLess => Vec::from(&[Shift(State::TokenLessS78)]), + TK::TokenLessEqual => Vec::from(&[Shift(State::TokenLessEqualS79)]), + TK::TokenGreater => Vec::from(&[Shift(State::TokenGreaterS80)]), + TK::TokenGreaterEqual => Vec::from(&[Shift(State::TokenGreaterEqualS81)]), _ => vec![], } } -fn action_simpleexpression_s53(token_kind: TokenKind) -> Vec> { +fn action_conjunction_s55(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS83)]), + _ => vec![], + } +} +fn action_notstatement_s56(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from( - &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainEmpty, 0usize)], + &[Reduce(PK::BooleanExpressionBooleanExpressionNotStatement, 1usize)], ) } - TK::TokenEqual => Vec::from(&[Shift(State::TokenEqualS73)]), - TK::TokenNotEqual => Vec::from(&[Shift(State::TokenNotEqualS74)]), - TK::TokenLess => Vec::from(&[Shift(State::TokenLessS75)]), - TK::TokenLessEqual => Vec::from(&[Shift(State::TokenLessEqualS76)]), - TK::TokenGreater => Vec::from(&[Shift(State::TokenGreaterS77)]), - TK::TokenGreaterEqual => 
Vec::from(&[Shift(State::TokenGreaterEqualS78)]), TK::TokenAnd => { Vec::from( - &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainEmpty, 0usize)], + &[Reduce(PK::BooleanExpressionBooleanExpressionNotStatement, 1usize)], ) } TK::TokenOr => { - Vec::from( - &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainEmpty, 0usize)], - ) - } - _ => vec![], - } -} -fn action_notstatement_s54(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::TokenParClose => { Vec::from( &[Reduce(PK::BooleanExpressionBooleanExpressionNotStatement, 1usize)], ) @@ -1494,31 +1533,31 @@ fn action_notstatement_s54(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_booleanexpression_s55(token_kind: TokenKind) -> Vec> { +fn action_conjunction_s57(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS81)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS84)]), _ => vec![], } } -fn action_body_s56(token_kind: TokenKind) -> Vec> { +fn action_body_s58(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS82)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS85)]), _ => vec![], } } -fn action_tokenid_s57(token_kind: TokenKind) -> Vec> { +fn action_tokenid_s59(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS83)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS86)]), _ => vec![], } } -fn action_simpleexpression_s58(token_kind: TokenKind) -> Vec> { +fn action_simpleexpression_s60(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS84)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS87)]), _ => vec![], } } -fn action_tokenintliteral_s59(token_kind: TokenKind) -> Vec> { +fn action_tokenintliteral_s61(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::NumberNumberNegativeInt, 2usize)]), TK::TokenId => Vec::from(&[Reduce(PK::NumberNumberNegativeInt, 2usize)]), @@ -1546,7 +1585,7 @@ fn action_tokenintliteral_s59(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenfloatliteral_s60(token_kind: TokenKind) -> Vec> { +fn action_tokenfloatliteral_s62(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::NumberNumberNegativeFloat, 2usize)]), TK::TokenId => Vec::from(&[Reduce(PK::NumberNumberNegativeFloat, 2usize)]), @@ -1574,37 +1613,37 @@ fn action_tokenfloatliteral_s60(token_kind: TokenKind) -> Vec vec![], } } -fn action_arithmeticexpression_s61( +fn action_arithmeticexpression_s63( token_kind: TokenKind, ) -> Vec> { match token_kind { TK::TokenSum => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), TK::TokenSub => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS85)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS88)]), _ => vec![], } } -fn action_tokenparopen_s62(token_kind: TokenKind) -> Vec> { +fn action_tokenparopen_s64(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenDate => Vec::from(&[Shift(State::TokenDateS86)]), + TK::TokenDate => Vec::from(&[Shift(State::TokenDateS89)]), _ => vec![], } } -fn action_dummyae_s63(token_kind: TokenKind) -> Vec> { +fn action_dummyae_s65(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenSum => Vec::from(&[Shift(State::TokenSumS87)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS88)]), + 
TK::TokenSum => Vec::from(&[Shift(State::TokenSumS90)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS91)]), _ => vec![], } } -fn action_dummyt_s64(token_kind: TokenKind) -> Vec> { +fn action_dummyt_s66(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenMul => Vec::from(&[Shift(State::TokenMulS89)]), - TK::TokenDiv => Vec::from(&[Shift(State::TokenDivS90)]), + TK::TokenMul => Vec::from(&[Shift(State::TokenMulS92)]), + TK::TokenDiv => Vec::from(&[Shift(State::TokenDivS93)]), _ => vec![], } } -fn action_tokencbopen_s65(token_kind: TokenKind) -> Vec> { +fn action_tokencbopen_s67(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), @@ -1617,21 +1656,21 @@ fn action_tokencbopen_s65(token_kind: TokenKind) -> Vec> _ => vec![], } } -fn action_tokencolon_s66(token_kind: TokenKind) -> Vec> { +fn action_tokencolon_s68(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenInt => Vec::from(&[Shift(State::TokenIntS92)]), - TK::TokenFloat => Vec::from(&[Shift(State::TokenFloatS93)]), - TK::TokenString => Vec::from(&[Shift(State::TokenStringS94)]), + TK::TokenInt => Vec::from(&[Shift(State::TokenIntS95)]), + TK::TokenFloat => Vec::from(&[Shift(State::TokenFloatS96)]), + TK::TokenString => Vec::from(&[Shift(State::TokenStringS97)]), _ => vec![], } } -fn action_tokencomma_s67(token_kind: TokenKind) -> Vec> { +fn action_tokencomma_s69(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Shift(State::TokenIdS43)]), _ => vec![], } } -fn action_tokencbclose_s68(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s70(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), TK::TokenId => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), @@ -1644,7 +1683,7 @@ fn action_tokencbclose_s68(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_vardeclarations_s69(token_kind: TokenKind) -> Vec> { +fn action_vardeclarations_s71(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenCBClose => { Vec::from(&[Reduce(PK::VarDeclarationsVarDeclarationsRecursive, 2usize)]) @@ -1652,13 +1691,15 @@ fn action_vardeclarations_s69(token_kind: TokenKind) -> Vec vec![], } } -fn action_booleanexpression_s70(token_kind: TokenKind) -> Vec> { +fn action_booleanexpression_s72(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => Vec::from(&[Reduce(PK::NotStatementNot, 2usize)]), + TK::TokenAnd => Vec::from(&[Reduce(PK::NotStatementNot, 2usize)]), + TK::TokenOr => Vec::from(&[Reduce(PK::NotStatementNot, 2usize)]), _ => vec![], } } -fn action_tokenparopen_s71(token_kind: TokenKind) -> Vec> { +fn action_tokenparopen_s73(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), @@ -1668,13 +1709,37 @@ fn action_tokenparopen_s71(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokenparclose_s72(token_kind: TokenKind) -> Vec> { +fn action_tokenand_s74(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS98)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), + TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), + TK::TokenId => 
Vec::from(&[Shift(State::TokenIdS47)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS48)]), + TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS49)]), + TK::TokenNot => Vec::from(&[Shift(State::TokenNotS50)]), + TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS51)]), + _ => vec![], + } +} +fn action_tokenor_s75(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), + TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS47)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS48)]), + TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS49)]), + TK::TokenNot => Vec::from(&[Shift(State::TokenNotS50)]), + TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS51)]), _ => vec![], } } -fn action_tokenequal_s73(token_kind: TokenKind) -> Vec> { +fn action_tokenequal_s76(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpEqual, 1usize)]) @@ -1693,7 +1758,7 @@ fn action_tokenequal_s73(token_kind: TokenKind) -> Vec> _ => vec![], } } -fn action_tokennotequal_s74(token_kind: TokenKind) -> Vec> { +fn action_tokennotequal_s77(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpNotEqual, 1usize)]) @@ -1714,7 +1779,7 @@ fn action_tokennotequal_s74(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenless_s75(token_kind: TokenKind) -> Vec> { +fn action_tokenless_s78(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpLess, 1usize)]) @@ -1733,7 +1798,7 @@ fn action_tokenless_s75(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokenlessequal_s76(token_kind: TokenKind) -> Vec> { +fn action_tokenlessequal_s79(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpLessEqual, 1usize)]) @@ -1756,7 +1821,7 @@ fn action_tokenlessequal_s76(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokengreater_s77(token_kind: TokenKind) -> Vec> { +fn action_tokengreater_s80(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpGreater, 1usize)]) @@ -1775,7 +1840,7 @@ fn action_tokengreater_s77(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokengreaterequal_s78(token_kind: TokenKind) -> Vec> { +fn action_tokengreaterequal_s81(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpGreaterEqual, 1usize)]) @@ -1798,21 +1863,7 @@ fn action_tokengreaterequal_s78(token_kind: TokenKind) -> Vec vec![], } } -fn action_booleanexpressionchain_s79( - token_kind: TokenKind, -) -> Vec> { - match token_kind { - TK::TokenParClose => { - Vec::from( - &[Reduce(PK::BooleanExpressionBooleanExpressionSimpleExpression, 2usize)], - ) - } - TK::TokenAnd => Vec::from(&[Shift(State::TokenAndS99)]), - TK::TokenOr => Vec::from(&[Shift(State::TokenOrS100)]), - _ => vec![], - } -} -fn 
action_comparisonop_s80(token_kind: TokenKind) -> Vec> { +fn action_comparisonop_s82(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), @@ -1823,13 +1874,19 @@ fn action_comparisonop_s80(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokenparclose_s81(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s83(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS104)]), + _ => vec![], + } +} +fn action_tokenparclose_s84(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS103)]), + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS105)]), _ => vec![], } } -fn action_tokencbclose_s82(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s85(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), @@ -1842,7 +1899,7 @@ fn action_tokencbclose_s82(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokenparclose_s83(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s86(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), @@ -1857,7 +1914,7 @@ fn action_tokenparclose_s83(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenparclose_s84(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s87(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::FunctionWriteFunctionWriteCall, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FunctionWriteFunctionWriteCall, 4usize)]), @@ -1876,7 +1933,7 @@ fn action_tokenparclose_s84(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenparclose_s85(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s88(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), @@ -1902,13 +1959,13 @@ fn action_tokenparclose_s85(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokendate_s86(token_kind: TokenKind) -> Vec> { +fn action_tokendate_s89(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS104)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS106)]), _ => vec![], } } -fn action_tokensum_s87(token_kind: TokenKind) -> Vec> { +fn action_tokensum_s90(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), @@ -1918,7 +1975,7 @@ fn action_tokensum_s87(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokensub_s88(token_kind: TokenKind) -> Vec> { +fn action_tokensub_s91(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), @@ -1928,7 +1985,7 @@ fn action_tokensub_s88(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokenmul_s89(token_kind: TokenKind) -> Vec> { +fn action_tokenmul_s92(token_kind: TokenKind) -> Vec> { match 
token_kind { TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), @@ -1938,7 +1995,7 @@ fn action_tokenmul_s89(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokendiv_s90(token_kind: TokenKind) -> Vec> { +fn action_tokendiv_s93(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), @@ -1948,34 +2005,34 @@ fn action_tokendiv_s90(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_body_s91(token_kind: TokenKind) -> Vec> { +fn action_body_s94(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS109)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS111)]), _ => vec![], } } -fn action_tokenint_s92(token_kind: TokenKind) -> Vec> { +fn action_tokenint_s95(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Reduce(PK::DataTypeIntType, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::DataTypeIntType, 1usize)]), _ => vec![], } } -fn action_tokenfloat_s93(token_kind: TokenKind) -> Vec> { +fn action_tokenfloat_s96(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Reduce(PK::DataTypeFloatType, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::DataTypeFloatType, 1usize)]), _ => vec![], } } -fn action_tokenstring_s94(token_kind: TokenKind) -> Vec> { +fn action_tokenstring_s97(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Reduce(PK::DataTypeStringType, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::DataTypeStringType, 1usize)]), _ => vec![], } } -fn action_datatype_s95(token_kind: TokenKind) -> Vec> { +fn action_datatype_s98(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => { Vec::from(&[Reduce(PK::VarDeclarationVarDeclarationSingle, 3usize)]) @@ -1986,7 +2043,7 @@ fn action_datatype_s95(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_vardeclaration_s96(token_kind: TokenKind) -> Vec> { +fn action_vardeclaration_s99(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => { Vec::from(&[Reduce(PK::VarDeclarationVarDeclarationRecursive, 3usize)]) @@ -1997,111 +2054,62 @@ fn action_vardeclaration_s96(token_kind: TokenKind) -> Vec vec![], } } -fn action_arithmeticexpression_s97( +fn action_arithmeticexpression_s100( token_kind: TokenKind, ) -> Vec> { match token_kind { TK::TokenSum => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), TK::TokenSub => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS110)]), - _ => vec![], - } -} -fn action_tokencbopen_s98(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), - TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), - TK::TokenInit => Vec::from(&[Shift(State::TokenInitS2)]), - TK::TokenWhile => Vec::from(&[Shift(State::TokenWhileS3)]), - TK::TokenIf => Vec::from(&[Shift(State::TokenIfS4)]), - TK::TokenElse => Vec::from(&[Shift(State::TokenElseS5)]), - TK::TokenRead => Vec::from(&[Shift(State::TokenReadS6)]), - TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS7)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS112)]), _ => vec![], } } -fn action_tokenand_s99(token_kind: TokenKind) -> Vec> { +fn action_conjunction_s101(token_kind: TokenKind) -> Vec> { 
match token_kind { - TK::TokenIntLiteral => { - Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]) - } - TK::TokenFloatLiteral => { - Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]) - } - TK::TokenStringLiteral => { - Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]) - } - TK::TokenId => Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]), - TK::TokenSub => Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]), - TK::TokenParOpen => Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]), - TK::TokenTrue => Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]), - TK::TokenFalse => Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]), - TK::TokenNot => Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]), - TK::TokenIsZero => Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 1usize)]), + TK::TokenParClose => Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 3usize)]), _ => vec![], } } -fn action_tokenor_s100(token_kind: TokenKind) -> Vec> { +fn action_conjunction_s102(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]), - TK::TokenFloatLiteral => { - Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]) - } - TK::TokenStringLiteral => { - Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]) - } - TK::TokenId => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]), - TK::TokenSub => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]), - TK::TokenParOpen => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]), - TK::TokenTrue => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]), - TK::TokenFalse => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]), - TK::TokenNot => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]), - TK::TokenIsZero => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 1usize)]), + TK::TokenParClose => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 3usize)]), _ => vec![], } } -fn action_conjunction_s101(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), - TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS47)]), - TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS48)]), - TK::TokenNot => Vec::from(&[Shift(State::TokenNotS49)]), - TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS50)]), - _ => vec![], - } -} -fn action_simpleexpression_s102(token_kind: TokenKind) -> Vec> { +fn action_simpleexpression_s103(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from( - &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainEmpty, 0usize)], + &[Reduce(PK::BooleanExpressionBooleanExpressionSimpleExpression, 3usize)], ) } - TK::TokenEqual => Vec::from(&[Shift(State::TokenEqualS73)]), - TK::TokenNotEqual => Vec::from(&[Shift(State::TokenNotEqualS74)]), - TK::TokenLess => Vec::from(&[Shift(State::TokenLessS75)]), - TK::TokenLessEqual => Vec::from(&[Shift(State::TokenLessEqualS76)]), - TK::TokenGreater => Vec::from(&[Shift(State::TokenGreaterS77)]), - TK::TokenGreaterEqual => Vec::from(&[Shift(State::TokenGreaterEqualS78)]), 
TK::TokenAnd => { Vec::from( - &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainEmpty, 0usize)], + &[Reduce(PK::BooleanExpressionBooleanExpressionSimpleExpression, 3usize)], ) } TK::TokenOr => { Vec::from( - &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainEmpty, 0usize)], + &[Reduce(PK::BooleanExpressionBooleanExpressionSimpleExpression, 3usize)], ) } _ => vec![], } } -fn action_tokencbopen_s103(token_kind: TokenKind) -> Vec> { +fn action_tokencbopen_s104(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), + TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), + TK::TokenInit => Vec::from(&[Shift(State::TokenInitS2)]), + TK::TokenWhile => Vec::from(&[Shift(State::TokenWhileS3)]), + TK::TokenIf => Vec::from(&[Shift(State::TokenIfS4)]), + TK::TokenElse => Vec::from(&[Shift(State::TokenElseS5)]), + TK::TokenRead => Vec::from(&[Shift(State::TokenReadS6)]), + TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS7)]), + _ => vec![], + } +} +fn action_tokencbopen_s105(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), @@ -2114,7 +2122,7 @@ fn action_tokencbopen_s103(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokenparclose_s104(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s106(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => { Vec::from( @@ -2159,7 +2167,7 @@ fn action_tokenparclose_s104(token_kind: TokenKind) -> Vec vec![], } } -fn action_term_s105(token_kind: TokenKind) -> Vec> { +fn action_term_s107(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => { Vec::from( @@ -2261,7 +2269,7 @@ fn action_term_s105(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_term_s106(token_kind: TokenKind) -> Vec> { +fn action_term_s108(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => { Vec::from( @@ -2363,7 +2371,7 @@ fn action_term_s106(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_factor_s107(token_kind: TokenKind) -> Vec> { +fn action_factor_s109(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), @@ -2389,7 +2397,7 @@ fn action_factor_s107(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_factor_s108(token_kind: TokenKind) -> Vec> { +fn action_factor_s110(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), @@ -2415,60 +2423,27 @@ fn action_factor_s108(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokencbclose_s109(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s111(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::ProgramProgramWithMain, 6usize)]), _ => vec![], } } -fn action_tokenparclose_s110(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s112(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from(&[Reduce(PK::FunctionIsZeroFunctionIsZeroCall, 4usize)]) } - _ => vec![], - } -} -fn action_body_s111(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS115)]), - _ => vec![], - } -} -fn action_booleanexpression_s112(token_kind: TokenKind) -> Vec> { - match token_kind { - 
TK::TokenParClose => { - Vec::from( - &[ - Reduce( - PK::BooleanExpressionBooleanExpressionSimpleExpressionRecursive, - 4usize, - ), - ], - ) + TK::TokenAnd => { + Vec::from(&[Reduce(PK::FunctionIsZeroFunctionIsZeroCall, 4usize)]) } + TK::TokenOr => Vec::from(&[Reduce(PK::FunctionIsZeroFunctionIsZeroCall, 4usize)]), _ => vec![], } } -fn action_booleanexpressionchain_s113( - token_kind: TokenKind, -) -> Vec> { +fn action_body_s113(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => { - Vec::from( - &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainAux, 3usize)], - ) - } - TK::TokenAnd => { - Vec::from( - &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainAux, 3usize)], - ) - } - TK::TokenOr => { - Vec::from( - &[Reduce(PK::BooleanExpressionChainBooleanExpressionChainAux, 3usize)], - ) - } + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS115)]), _ => vec![], } } @@ -2601,11 +2576,12 @@ fn goto_initbody_s21(nonterm_kind: NonTermKind) -> State { } fn goto_tokenparopen_s22(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionIsZero => State::FunctionIsZeroS51, - NonTermKind::BooleanExpression => State::BooleanExpressionS52, - NonTermKind::SimpleExpression => State::SimpleExpressionS53, + NonTermKind::FunctionIsZero => State::FunctionIsZeroS52, + NonTermKind::BooleanExpression => State::BooleanExpressionS53, + NonTermKind::SimpleExpression => State::SimpleExpressionS54, + NonTermKind::Conjunction => State::ConjunctionS55, NonTermKind::Number => State::NumberS38, - NonTermKind::NotStatement => State::NotStatementS54, + NonTermKind::NotStatement => State::NotStatementS56, NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, NonTermKind::Term => State::TermS40, NonTermKind::Factor => State::FactorS41, @@ -2619,11 +2595,12 @@ fn goto_tokenparopen_s22(nonterm_kind: NonTermKind) -> State { } fn goto_tokenparopen_s23(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionIsZero => State::FunctionIsZeroS51, - NonTermKind::BooleanExpression => State::BooleanExpressionS55, - NonTermKind::SimpleExpression => State::SimpleExpressionS53, + NonTermKind::FunctionIsZero => State::FunctionIsZeroS52, + NonTermKind::BooleanExpression => State::BooleanExpressionS53, + NonTermKind::SimpleExpression => State::SimpleExpressionS54, + NonTermKind::Conjunction => State::ConjunctionS57, NonTermKind::Number => State::NumberS38, - NonTermKind::NotStatement => State::NotStatementS54, + NonTermKind::NotStatement => State::NotStatementS56, NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, NonTermKind::Term => State::TermS40, NonTermKind::Factor => State::FactorS41, @@ -2637,7 +2614,7 @@ fn goto_tokenparopen_s23(nonterm_kind: NonTermKind) -> State { } fn goto_tokencbopen_s24(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS56, + NonTermKind::Body => State::BodyS58, NonTermKind::FunctionRead => State::FunctionReadS10, NonTermKind::FunctionWrite => State::FunctionWriteS11, NonTermKind::Expressions => State::ExpressionsS12, @@ -2656,7 +2633,7 @@ fn goto_tokencbopen_s24(nonterm_kind: NonTermKind) -> State { } fn goto_tokenparopen_s26(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::SimpleExpression => State::SimpleExpressionS58, + NonTermKind::SimpleExpression => State::SimpleExpressionS60, NonTermKind::Number => State::NumberS38, NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, NonTermKind::Term => 
State::TermS40, @@ -2672,7 +2649,7 @@ fn goto_tokenparopen_s26(nonterm_kind: NonTermKind) -> State { fn goto_tokenparopen_s34(nonterm_kind: NonTermKind) -> State { match nonterm_kind { NonTermKind::Number => State::NumberS38, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS61, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS63, NonTermKind::Term => State::TermS40, NonTermKind::Factor => State::FactorS41, _ => { @@ -2685,7 +2662,7 @@ fn goto_tokenparopen_s34(nonterm_kind: NonTermKind) -> State { } fn goto_arithmeticexpression_s39(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyAE => State::DummyAES63, + NonTermKind::DummyAE => State::DummyAES65, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", @@ -2696,7 +2673,7 @@ fn goto_arithmeticexpression_s39(nonterm_kind: NonTermKind) -> State { } fn goto_term_s40(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyT => State::DummyTS64, + NonTermKind::DummyT => State::DummyTS66, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", @@ -2707,7 +2684,7 @@ fn goto_term_s40(nonterm_kind: NonTermKind) -> State { } fn goto_vardeclaration_s45(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::VarDeclarations => State::VarDeclarationsS69, + NonTermKind::VarDeclarations => State::VarDeclarationsS71, NonTermKind::VarDeclaration => State::VarDeclarationS45, _ => { panic!( @@ -2717,50 +2694,49 @@ fn goto_vardeclaration_s45(nonterm_kind: NonTermKind) -> State { } } } -fn goto_tokennot_s49(nonterm_kind: NonTermKind) -> State { +fn goto_tokennot_s50(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionIsZero => State::FunctionIsZeroS51, - NonTermKind::BooleanExpression => State::BooleanExpressionS70, - NonTermKind::SimpleExpression => State::SimpleExpressionS53, + NonTermKind::FunctionIsZero => State::FunctionIsZeroS52, + NonTermKind::BooleanExpression => State::BooleanExpressionS72, + NonTermKind::SimpleExpression => State::SimpleExpressionS54, NonTermKind::Number => State::NumberS38, - NonTermKind::NotStatement => State::NotStatementS54, + NonTermKind::NotStatement => State::NotStatementS56, NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, NonTermKind::Term => State::TermS40, NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenNotS49 + State::TokenNotS50 ) } } } -fn goto_simpleexpression_s53(nonterm_kind: NonTermKind) -> State { +fn goto_simpleexpression_s54(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::BooleanExpressionChain => State::BooleanExpressionChainS79, - NonTermKind::ComparisonOp => State::ComparisonOpS80, + NonTermKind::ComparisonOp => State::ComparisonOpS82, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::SimpleExpressionS53 + State::SimpleExpressionS54 ) } } } -fn goto_arithmeticexpression_s61(nonterm_kind: NonTermKind) -> State { +fn goto_arithmeticexpression_s63(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyAE => State::DummyAES63, + NonTermKind::DummyAE => State::DummyAES65, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::ArithmeticExpressionS61 + State::ArithmeticExpressionS63 ) } } } -fn goto_tokencbopen_s65(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s67(nonterm_kind: 
NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS91, + NonTermKind::Body => State::BodyS94, NonTermKind::FunctionRead => State::FunctionReadS10, NonTermKind::FunctionWrite => State::FunctionWriteS11, NonTermKind::Expressions => State::ExpressionsS12, @@ -2772,184 +2748,181 @@ fn goto_tokencbopen_s65(nonterm_kind: NonTermKind) -> State { _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS65 + State::TokenCBOpenS67 ) } } } -fn goto_tokencolon_s66(nonterm_kind: NonTermKind) -> State { +fn goto_tokencolon_s68(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DataType => State::DataTypeS95, + NonTermKind::DataType => State::DataTypeS98, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenColonS66 + State::TokenColonS68 ) } } } -fn goto_tokencomma_s67(nonterm_kind: NonTermKind) -> State { +fn goto_tokencomma_s69(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::VarDeclaration => State::VarDeclarationS96, + NonTermKind::VarDeclaration => State::VarDeclarationS99, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCommaS67 + State::TokenCommaS69 ) } } } -fn goto_tokenparopen_s71(nonterm_kind: NonTermKind) -> State { +fn goto_tokenparopen_s73(nonterm_kind: NonTermKind) -> State { match nonterm_kind { NonTermKind::Number => State::NumberS38, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS97, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS100, NonTermKind::Term => State::TermS40, NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenParOpenS71 + State::TokenParOpenS73 ) } } } -fn goto_booleanexpressionchain_s79(nonterm_kind: NonTermKind) -> State { +fn goto_tokenand_s74(nonterm_kind: NonTermKind) -> State { match nonterm_kind { + NonTermKind::FunctionIsZero => State::FunctionIsZeroS52, + NonTermKind::BooleanExpression => State::BooleanExpressionS53, + NonTermKind::SimpleExpression => State::SimpleExpressionS54, NonTermKind::Conjunction => State::ConjunctionS101, + NonTermKind::Number => State::NumberS38, + NonTermKind::NotStatement => State::NotStatementS56, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, + NonTermKind::Term => State::TermS40, + NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::BooleanExpressionChainS79 + State::TokenAndS74 ) } } } -fn goto_comparisonop_s80(nonterm_kind: NonTermKind) -> State { +fn goto_tokenor_s75(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::SimpleExpression => State::SimpleExpressionS102, + NonTermKind::FunctionIsZero => State::FunctionIsZeroS52, + NonTermKind::BooleanExpression => State::BooleanExpressionS53, + NonTermKind::SimpleExpression => State::SimpleExpressionS54, + NonTermKind::Conjunction => State::ConjunctionS102, NonTermKind::Number => State::NumberS38, + NonTermKind::NotStatement => State::NotStatementS56, NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, NonTermKind::Term => State::TermS40, NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::ComparisonOpS80 + State::TokenOrS75 ) } } } -fn goto_tokensum_s87(nonterm_kind: NonTermKind) -> State { +fn goto_comparisonop_s82(nonterm_kind: 
NonTermKind) -> State { match nonterm_kind { + NonTermKind::SimpleExpression => State::SimpleExpressionS103, NonTermKind::Number => State::NumberS38, - NonTermKind::Term => State::TermS105, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, + NonTermKind::Term => State::TermS40, NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenSumS87 + State::ComparisonOpS82 ) } } } -fn goto_tokensub_s88(nonterm_kind: NonTermKind) -> State { +fn goto_tokensum_s90(nonterm_kind: NonTermKind) -> State { match nonterm_kind { NonTermKind::Number => State::NumberS38, - NonTermKind::Term => State::TermS106, + NonTermKind::Term => State::TermS107, NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenSubS88 + State::TokenSumS90 ) } } } -fn goto_tokenmul_s89(nonterm_kind: NonTermKind) -> State { +fn goto_tokensub_s91(nonterm_kind: NonTermKind) -> State { match nonterm_kind { NonTermKind::Number => State::NumberS38, - NonTermKind::Factor => State::FactorS107, + NonTermKind::Term => State::TermS108, + NonTermKind::Factor => State::FactorS41, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenMulS89 + State::TokenSubS91 ) } } } -fn goto_tokendiv_s90(nonterm_kind: NonTermKind) -> State { +fn goto_tokenmul_s92(nonterm_kind: NonTermKind) -> State { match nonterm_kind { NonTermKind::Number => State::NumberS38, - NonTermKind::Factor => State::FactorS108, + NonTermKind::Factor => State::FactorS109, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenDivS90 + State::TokenMulS92 ) } } } -fn goto_arithmeticexpression_s97(nonterm_kind: NonTermKind) -> State { +fn goto_tokendiv_s93(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyAE => State::DummyAES63, + NonTermKind::Number => State::NumberS38, + NonTermKind::Factor => State::FactorS110, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::ArithmeticExpressionS97 + State::TokenDivS93 ) } } } -fn goto_tokencbopen_s98(nonterm_kind: NonTermKind) -> State { +fn goto_arithmeticexpression_s100(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS111, - NonTermKind::FunctionRead => State::FunctionReadS10, - NonTermKind::FunctionWrite => State::FunctionWriteS11, - NonTermKind::Expressions => State::ExpressionsS12, - NonTermKind::Statement => State::StatementS13, - NonTermKind::Assignment => State::AssignmentS14, - NonTermKind::WhileLoop => State::WhileLoopS15, - NonTermKind::IfStatement => State::IfStatementS16, - NonTermKind::ElseStatement => State::ElseStatementS17, + NonTermKind::DummyAE => State::DummyAES65, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS98 + State::ArithmeticExpressionS100 ) } } } -fn goto_conjunction_s101(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s104(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionIsZero => State::FunctionIsZeroS51, - NonTermKind::BooleanExpression => State::BooleanExpressionS112, - NonTermKind::SimpleExpression => State::SimpleExpressionS53, - NonTermKind::Number => State::NumberS38, - NonTermKind::NotStatement => State::NotStatementS54, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, - NonTermKind::Term => State::TermS40, - 
NonTermKind::Factor => State::FactorS41, - _ => { - panic!( - "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::ConjunctionS101 - ) - } - } -} -fn goto_simpleexpression_s102(nonterm_kind: NonTermKind) -> State { - match nonterm_kind { - NonTermKind::BooleanExpressionChain => State::BooleanExpressionChainS113, - NonTermKind::ComparisonOp => State::ComparisonOpS80, + NonTermKind::Body => State::BodyS113, + NonTermKind::FunctionRead => State::FunctionReadS10, + NonTermKind::FunctionWrite => State::FunctionWriteS11, + NonTermKind::Expressions => State::ExpressionsS12, + NonTermKind::Statement => State::StatementS13, + NonTermKind::Assignment => State::AssignmentS14, + NonTermKind::WhileLoop => State::WhileLoopS15, + NonTermKind::IfStatement => State::IfStatementS16, + NonTermKind::ElseStatement => State::ElseStatementS17, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::SimpleExpressionS102 + State::TokenCBOpenS104 ) } } } -fn goto_tokencbopen_s103(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s105(nonterm_kind: NonTermKind) -> State { match nonterm_kind { NonTermKind::Body => State::BodyS114, NonTermKind::FunctionRead => State::FunctionReadS10, @@ -2963,29 +2936,29 @@ fn goto_tokencbopen_s103(nonterm_kind: NonTermKind) -> State { _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS103 + State::TokenCBOpenS105 ) } } } -fn goto_term_s105(nonterm_kind: NonTermKind) -> State { +fn goto_term_s107(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyT => State::DummyTS64, + NonTermKind::DummyT => State::DummyTS66, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TermS105 + State::TermS107 ) } } } -fn goto_term_s106(nonterm_kind: NonTermKind) -> State { +fn goto_term_s108(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyT => State::DummyTS64, + NonTermKind::DummyT => State::DummyTS66, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TermS106 + State::TermS108 ) } } @@ -3042,73 +3015,73 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti action_vardeclarations_s44, action_vardeclaration_s45, action_expressions_s46, - action_tokentrue_s47, - action_tokenfalse_s48, - action_tokennot_s49, - action_tokeniszero_s50, - action_functioniszero_s51, - action_booleanexpression_s52, - action_simpleexpression_s53, - action_notstatement_s54, - action_booleanexpression_s55, - action_body_s56, - action_tokenid_s57, - action_simpleexpression_s58, - action_tokenintliteral_s59, - action_tokenfloatliteral_s60, - action_arithmeticexpression_s61, - action_tokenparopen_s62, - action_dummyae_s63, - action_dummyt_s64, - action_tokencbopen_s65, - action_tokencolon_s66, - action_tokencomma_s67, - action_tokencbclose_s68, - action_vardeclarations_s69, - action_booleanexpression_s70, - action_tokenparopen_s71, - action_tokenparclose_s72, - action_tokenequal_s73, - action_tokennotequal_s74, - action_tokenless_s75, - action_tokenlessequal_s76, - action_tokengreater_s77, - action_tokengreaterequal_s78, - action_booleanexpressionchain_s79, - action_comparisonop_s80, - action_tokenparclose_s81, - action_tokencbclose_s82, + action_tokenid_s47, + action_tokentrue_s48, + action_tokenfalse_s49, + action_tokennot_s50, + action_tokeniszero_s51, + action_functioniszero_s52, + action_booleanexpression_s53, + action_simpleexpression_s54, + 
action_conjunction_s55, + action_notstatement_s56, + action_conjunction_s57, + action_body_s58, + action_tokenid_s59, + action_simpleexpression_s60, + action_tokenintliteral_s61, + action_tokenfloatliteral_s62, + action_arithmeticexpression_s63, + action_tokenparopen_s64, + action_dummyae_s65, + action_dummyt_s66, + action_tokencbopen_s67, + action_tokencolon_s68, + action_tokencomma_s69, + action_tokencbclose_s70, + action_vardeclarations_s71, + action_booleanexpression_s72, + action_tokenparopen_s73, + action_tokenand_s74, + action_tokenor_s75, + action_tokenequal_s76, + action_tokennotequal_s77, + action_tokenless_s78, + action_tokenlessequal_s79, + action_tokengreater_s80, + action_tokengreaterequal_s81, + action_comparisonop_s82, action_tokenparclose_s83, action_tokenparclose_s84, - action_tokenparclose_s85, - action_tokendate_s86, - action_tokensum_s87, - action_tokensub_s88, - action_tokenmul_s89, - action_tokendiv_s90, - action_body_s91, - action_tokenint_s92, - action_tokenfloat_s93, - action_tokenstring_s94, - action_datatype_s95, - action_vardeclaration_s96, - action_arithmeticexpression_s97, - action_tokencbopen_s98, - action_tokenand_s99, - action_tokenor_s100, + action_tokencbclose_s85, + action_tokenparclose_s86, + action_tokenparclose_s87, + action_tokenparclose_s88, + action_tokendate_s89, + action_tokensum_s90, + action_tokensub_s91, + action_tokenmul_s92, + action_tokendiv_s93, + action_body_s94, + action_tokenint_s95, + action_tokenfloat_s96, + action_tokenstring_s97, + action_datatype_s98, + action_vardeclaration_s99, + action_arithmeticexpression_s100, action_conjunction_s101, - action_simpleexpression_s102, - action_tokencbopen_s103, - action_tokenparclose_s104, - action_term_s105, - action_term_s106, - action_factor_s107, - action_factor_s108, - action_tokencbclose_s109, - action_tokenparclose_s110, - action_body_s111, - action_booleanexpression_s112, - action_booleanexpressionchain_s113, + action_conjunction_s102, + action_simpleexpression_s103, + action_tokencbopen_s104, + action_tokencbopen_s105, + action_tokenparclose_s106, + action_term_s107, + action_term_s108, + action_factor_s109, + action_factor_s110, + action_tokencbclose_s111, + action_tokenparclose_s112, + action_body_s113, action_body_s114, action_tokencbclose_s115, action_tokencbclose_s116, @@ -3163,11 +3136,12 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti goto_invalid, goto_invalid, goto_invalid, - goto_tokennot_s49, + goto_invalid, + goto_tokennot_s50, goto_invalid, goto_invalid, goto_invalid, - goto_simpleexpression_s53, + goto_simpleexpression_s54, goto_invalid, goto_invalid, goto_invalid, @@ -3175,54 +3149,53 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti goto_invalid, goto_invalid, goto_invalid, - goto_arithmeticexpression_s61, goto_invalid, + goto_arithmeticexpression_s63, goto_invalid, goto_invalid, - goto_tokencbopen_s65, - goto_tokencolon_s66, - goto_tokencomma_s67, goto_invalid, + goto_tokencbopen_s67, + goto_tokencolon_s68, + goto_tokencomma_s69, goto_invalid, goto_invalid, - goto_tokenparopen_s71, goto_invalid, + goto_tokenparopen_s73, + goto_tokenand_s74, + goto_tokenor_s75, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, - goto_booleanexpressionchain_s79, - goto_comparisonop_s80, + goto_comparisonop_s82, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, - goto_tokensum_s87, - goto_tokensub_s88, - goto_tokenmul_s89, - goto_tokendiv_s90, 
goto_invalid, + goto_tokensum_s90, + goto_tokensub_s91, + goto_tokenmul_s92, + goto_tokendiv_s93, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, - goto_arithmeticexpression_s97, - goto_tokencbopen_s98, goto_invalid, + goto_arithmeticexpression_s100, goto_invalid, - goto_conjunction_s101, - goto_simpleexpression_s102, - goto_tokencbopen_s103, goto_invalid, - goto_term_s105, - goto_term_s106, goto_invalid, + goto_tokencbopen_s104, + goto_tokencbopen_s105, goto_invalid, + goto_term_s107, + goto_term_s108, goto_invalid, goto_invalid, goto_invalid, @@ -4315,7 +4288,19 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, ], [ + Some((TK::TokenSum, false)), + Some((TK::TokenMul, false)), + Some((TK::TokenSub, false)), + Some((TK::TokenDiv, false)), Some((TK::TokenParClose, false)), + Some((TK::TokenEqual, false)), + Some((TK::TokenNotEqual, false)), + Some((TK::TokenLess, false)), + Some((TK::TokenLessEqual, false)), + Some((TK::TokenGreater, false)), + Some((TK::TokenGreaterEqual, false)), + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), None, None, None, @@ -4324,6 +4309,17 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, + ], + [ + Some((TK::TokenParClose, false)), + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), + None, + None, + None, + None, + None, + None, None, None, None, @@ -4339,8 +4335,8 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti ], [ Some((TK::TokenParClose, false)), - None, - None, + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), None, None, None, @@ -4408,8 +4404,8 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti ], [ Some((TK::TokenParClose, false)), - None, - None, + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), None, None, None, @@ -4431,8 +4427,8 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti ], [ Some((TK::TokenParClose, false)), - None, - None, + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), None, None, None, @@ -4453,15 +4449,15 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, ], [ - Some((TK::TokenParClose, false)), Some((TK::TokenEqual, false)), Some((TK::TokenNotEqual, false)), Some((TK::TokenLess, false)), Some((TK::TokenLessEqual, false)), Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), - Some((TK::TokenAnd, false)), - Some((TK::TokenOr, false)), + None, + None, + None, None, None, None, @@ -4498,6 +4494,29 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, ], + [ + Some((TK::TokenParClose, false)), + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + ], [ Some((TK::TokenParClose, false)), None, @@ -4845,8 +4864,8 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti ], [ Some((TK::TokenParClose, false)), - None, - None, + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), None, None, None, @@ -4890,7 +4909,16 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, ], [ - Some((TK::TokenCBOpen, false)), + Some((TK::TokenIntLiteral, false)), + Some((TK::TokenFloatLiteral, false)), + Some((TK::TokenStringLiteral, false)), + Some((TK::TokenId, false)), + 
Some((TK::TokenSub, false)), + Some((TK::TokenParOpen, false)), + Some((TK::TokenTrue, false)), + Some((TK::TokenFalse, false)), + Some((TK::TokenNot, false)), + Some((TK::TokenIsZero, false)), None, None, None, @@ -4902,6 +4930,20 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, + ], + [ + Some((TK::TokenIntLiteral, false)), + Some((TK::TokenFloatLiteral, false)), + Some((TK::TokenStringLiteral, false)), + Some((TK::TokenId, false)), + Some((TK::TokenSub, false)), + Some((TK::TokenParOpen, false)), + Some((TK::TokenTrue, false)), + Some((TK::TokenFalse, false)), + Some((TK::TokenNot, false)), + Some((TK::TokenIsZero, false)), + None, + None, None, None, None, @@ -5051,9 +5093,12 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, ], [ - Some((TK::TokenParClose, false)), - Some((TK::TokenAnd, false)), - Some((TK::TokenOr, false)), + Some((TK::TokenIntLiteral, false)), + Some((TK::TokenFloatLiteral, false)), + Some((TK::TokenStringLiteral, false)), + Some((TK::TokenId, false)), + Some((TK::TokenSub, false)), + Some((TK::TokenParOpen, false)), None, None, None, @@ -5069,17 +5114,14 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, + ], + [ + Some((TK::TokenCBOpen, false)), + None, + None, None, None, None, - ], - [ - Some((TK::TokenIntLiteral, false)), - Some((TK::TokenFloatLiteral, false)), - Some((TK::TokenStringLiteral, false)), - Some((TK::TokenId, false)), - Some((TK::TokenSub, false)), - Some((TK::TokenParOpen, false)), None, None, None, @@ -5488,14 +5530,7 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, ], [ - Some((TK::TokenId, false)), - Some((TK::TokenCBClose, false)), - Some((TK::TokenInit, false)), - Some((TK::TokenWhile, false)), - Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), - Some((TK::TokenRead, false)), - Some((TK::TokenWrite, false)), + Some((TK::TokenParClose, false)), None, None, None, @@ -5509,18 +5544,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - ], - [ - Some((TK::TokenIntLiteral, false)), - Some((TK::TokenFloatLiteral, false)), - Some((TK::TokenStringLiteral, false)), - Some((TK::TokenId, false)), - Some((TK::TokenSub, false)), - Some((TK::TokenParOpen, false)), - Some((TK::TokenTrue, false)), - Some((TK::TokenFalse, false)), - Some((TK::TokenNot, false)), - Some((TK::TokenIsZero, false)), None, None, None, @@ -5528,22 +5551,18 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, + ], + [ + Some((TK::TokenParClose, false)), + None, + None, + None, + None, + None, None, None, None, None, - ], - [ - Some((TK::TokenIntLiteral, false)), - Some((TK::TokenFloatLiteral, false)), - Some((TK::TokenStringLiteral, false)), - Some((TK::TokenId, false)), - Some((TK::TokenSub, false)), - Some((TK::TokenParOpen, false)), - Some((TK::TokenTrue, false)), - Some((TK::TokenFalse, false)), - Some((TK::TokenNot, false)), - Some((TK::TokenIsZero, false)), None, None, None, @@ -5557,16 +5576,16 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, ], [ - Some((TK::TokenIntLiteral, false)), - Some((TK::TokenFloatLiteral, false)), - Some((TK::TokenStringLiteral, false)), - Some((TK::TokenId, false)), - Some((TK::TokenSub, false)), - Some((TK::TokenParOpen, false)), - Some((TK::TokenTrue, false)), - Some((TK::TokenFalse, false)), - Some((TK::TokenNot, 
false)), - Some((TK::TokenIsZero, false)), + Some((TK::TokenParClose, false)), + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), + None, + None, + None, + None, + None, + None, + None, None, None, None, @@ -5580,15 +5599,15 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, ], [ - Some((TK::TokenParClose, false)), - Some((TK::TokenEqual, false)), - Some((TK::TokenNotEqual, false)), - Some((TK::TokenLess, false)), - Some((TK::TokenLessEqual, false)), - Some((TK::TokenGreater, false)), - Some((TK::TokenGreaterEqual, false)), - Some((TK::TokenAnd, false)), - Some((TK::TokenOr, false)), + Some((TK::TokenId, false)), + Some((TK::TokenCBClose, false)), + Some((TK::TokenInit, false)), + Some((TK::TokenWhile, false)), + Some((TK::TokenIf, false)), + Some((TK::TokenElse, false)), + Some((TK::TokenRead, false)), + Some((TK::TokenWrite, false)), + None, None, None, None, @@ -5765,8 +5784,8 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti ], [ Some((TK::TokenParClose, false)), - None, - None, + Some((TK::TokenAnd, false)), + Some((TK::TokenOr, false)), None, None, None, @@ -5809,52 +5828,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, ], - [ - Some((TK::TokenParClose, false)), - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - ], - [ - Some((TK::TokenParClose, false)), - Some((TK::TokenAnd, false)), - Some((TK::TokenOr, false)), - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - ], [ Some((TK::TokenCBClose, false)), None, diff --git a/src/grammar/rules.rustemo b/src/grammar/rules.rustemo index cbe34e4..2d286b5 100644 --- a/src/grammar/rules.rustemo +++ b/src/grammar/rules.rustemo @@ -39,27 +39,25 @@ DataType: TokenInt {IntType} | TokenFloat {FloatType} | TokenString {StringType}; -WhileLoop: TokenWhile TokenParOpen BooleanExpression TokenParClose TokenCBOpen Body TokenCBClose {While}; +WhileLoop: TokenWhile TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose {While}; -IfStatement: TokenIf TokenParOpen BooleanExpression TokenParClose TokenCBOpen Body TokenCBClose {IfStatement}; +IfStatement: TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose {IfStatement}; ElseStatement: TokenElse TokenCBOpen Body TokenCBClose {ElseStatement}; -BooleanExpression: SimpleExpression BooleanExpressionChain {BooleanExpressionSimpleExpression} +BooleanExpression: SimpleExpression ComparisonOp SimpleExpression {BooleanExpressionSimpleExpression} | TokenTrue {BooleanExpressionTrue} | TokenFalse {BooleanExpressionFalse} - | SimpleExpression BooleanExpressionChain Conjunction BooleanExpression {BooleanExpressionSimpleExpressionRecursive} | NotStatement {BooleanExpressionNotStatement} - | FunctionIsZero {BooleanExpressionIsZero}; - -BooleanExpressionChain: ComparisonOp SimpleExpression BooleanExpressionChain {BooleanExpressionChainAux} - | EMPTY {BooleanExpressionChainEmpty}; + | FunctionIsZero {BooleanExpressionIsZero} + | TokenId {BooleanExpressionVar}; SimpleExpression: ArithmeticExpression {SimpleExpressionArithmetic} | TokenStringLiteral {SimpleExpressionString}; -Conjunction: TokenAnd {ConjunctionAnd} - | TokenOr {ConjunctionOr}; +Conjunction: BooleanExpression TokenAnd Conjunction{ConjunctionAnd} + | BooleanExpression TokenOr Conjunction 
{ConjunctionOr} + | BooleanExpression {ConjunctionBoolean}; ComparisonOp: TokenEqual {ComparisonOpEqual} | TokenNotEqual {ComparisonOpNotEqual} diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index e2403b8..872479c 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -710,13 +710,13 @@ pub fn data_type_string_type( DataType::StringType(token_string) } -/// Parses the rule ` -> TokenWhile TokenParOpen TokenParClose TokenCBOpen TokenCBClose` +/// Parses the rule ` -> TokenWhile TokenParOpen TokenParClose TokenCBOpen TokenCBClose` #[expect(clippy::too_many_arguments)] pub fn while_loop_while( _ctx: &Ctx, token_while: TokenWhile, token_par_open: TokenParOpen, - boolean_expression: BooleanExpression, + conjunction: Conjunction, token_par_close: TokenParClose, token_cbopen: TokenCBOpen, body: Body, @@ -724,12 +724,12 @@ pub fn while_loop_while( compiler_context: &mut CompilerContext, ) -> WhileLoop { compiler_context.write_to_parser_file(&format!( - " -> {token_while} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" + " -> {token_while} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" )); WhileLoop { token_while, token_par_open, - boolean_expression, + conjunction, token_par_close, token_cbopen, body: Box::new(body), @@ -737,13 +737,13 @@ pub fn while_loop_while( } } -/// Parses the rule `: TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose` +/// Parses the rule `: TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose` #[expect(clippy::too_many_arguments)] pub fn if_statement_if_statement( _ctx: &Ctx, token_if: TokenIf, token_par_open: TokenParOpen, - boolean_expression: BooleanExpression, + conjunction: Conjunction, token_par_close: TokenParClose, token_cbopen: TokenCBOpen, body: Body, @@ -751,12 +751,12 @@ pub fn if_statement_if_statement( compiler_context: &mut CompilerContext, ) -> IfStatement { compiler_context.write_to_parser_file(&format!( - " -> {token_if} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" + " -> {token_if} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" )); IfStatement { token_if, token_par_open, - boolean_expression, + conjunction, token_par_close, token_cbopen, body: Box::new(body), @@ -784,18 +784,21 @@ pub fn else_statement_else_statement( } } -/// Parses the rule ` -> ` +/// Parses the rule ` -> ` pub fn boolean_expression_boolean_expression_simple_expression( _ctx: &Ctx, simple_expression: SimpleExpression, - boolean_expression_chain: BooleanExpressionChain, + comparison_op: ComparisonOp, + simple_expression_2: SimpleExpression, compiler_context: &mut CompilerContext, ) -> BooleanExpression { - compiler_context - .write_to_parser_file(" -> "); + compiler_context.write_to_parser_file( + " -> ", + ); BooleanExpression::BooleanExpressionSimpleExpression(BooleanExpressionSimpleExpression { simple_expression, - boolean_expression_chain, + comparison_op, + simple_expression_2, }) } @@ -819,26 +822,14 @@ pub fn boolean_expression_boolean_expression_false( BooleanExpression::BooleanExpressionFalse(token_false) } -/// Parses the rule ` -> ` -pub fn boolean_expression_boolean_expression_simple_expression_recursive( +/// Parses the rule ` -> TokenId +pub fn boolean_expression_boolean_expression_token_id( _ctx: &Ctx, - simple_expression: SimpleExpression, - boolean_expression_chain: BooleanExpressionChain, - conjunction: Conjunction, - boolean_expression: BooleanExpression, + token_id: TokenId, compiler_context: &mut CompilerContext, ) -> 
BooleanExpression { - compiler_context.write_to_parser_file( - " -> ", - ); - BooleanExpression::BooleanExpressionSimpleExpressionRecursive( - BooleanExpressionSimpleExpressionRecursive { - simple_expression, - boolean_expression_chain, - conjunction, - boolean_expression: Box::new(boolean_expression), - }, - ) + compiler_context.write_to_parser_file(&format!(" -> {token_id}")); + BooleanExpression::BooleanExpressionTokenId(token_id) } /// Parses the rule ` -> ` @@ -861,33 +852,6 @@ pub fn boolean_expression_boolean_expression_is_zero( BooleanExpression::BooleanExpressionIsZero(function_is_zero) } -/// Parses the rule ` -> ComparisonOp ` -pub fn boolean_expression_chain_boolean_expression_chain_aux( - _ctx: &Ctx, - comparison_op: ComparisonOp, - simple_expression: SimpleExpression, - boolean_expression_chain: BooleanExpressionChain, - compiler_context: &mut CompilerContext, -) -> BooleanExpressionChain { - compiler_context.write_to_parser_file( - " -> ", - ); - Some(BooleanExpressionChainNoO { - comparison_op, - simple_expression, - boolean_expression_chain: Box::new(boolean_expression_chain), - }) -} - -/// Parses the rule ` -> EMPTY` -pub fn boolean_expression_chain_boolean_expression_chain_empty( - _ctx: &Ctx, - compiler_context: &mut CompilerContext, -) -> BooleanExpressionChain { - compiler_context.write_to_parser_file(" -> EMPTY"); - None -} - /// Parses the rule ` -> ` pub fn simple_expression_simple_expression_arithmetic( _ctx: &Ctx, @@ -913,24 +877,50 @@ pub fn simple_expression_simple_expression_string( SimpleExpression::SimpleExpressionString(token_string_literal) } -/// Parses the rule ` -> "and"` +/// Parses the rule ` -> "and" ` pub fn conjunction_conjunction_and( _ctx: &Ctx, + boolean_expression: BooleanExpression, token_and: TokenAnd, + conjunction: Conjunction, compiler_context: &mut CompilerContext, ) -> Conjunction { - compiler_context.write_to_parser_file(&format!(" -> {token_and}")); - Conjunction::ConjunctionAnd(token_and) + compiler_context.write_to_parser_file(&format!( + " -> {token_and} " + )); + Conjunction::ConjunctionAnd(ConjunctionAnd { + boolean_expression, + token_and, + conjunction: Box::new(conjunction), + }) } -/// Parses the rule ` -> "or"` +/// Parses the rule ` -> "or" ` pub fn conjunction_conjunction_or( _ctx: &Ctx, + boolean_expression: BooleanExpression, token_or: TokenOr, + conjunction: Conjunction, + compiler_context: &mut CompilerContext, +) -> Conjunction { + compiler_context.write_to_parser_file(&format!( + " -> {token_or} " + )); + Conjunction::ConjunctionOr(ConjunctionOr { + boolean_expression, + token_or, + conjunction: Box::new(conjunction), + }) +} + +/// Parses the rule ` -> ` +pub fn conjunction_conjunction_boolean_expression( + _ctx: &Ctx, + boolean_expression: BooleanExpression, compiler_context: &mut CompilerContext, ) -> Conjunction { - compiler_context.write_to_parser_file(&format!(" -> {token_or}")); - Conjunction::ConjunctionOr(token_or) + compiler_context.write_to_parser_file(" -> "); + Conjunction::ConjunctionBooleanExpression(boolean_expression) } /// Parses the rule ` -> "=="` diff --git a/src/grammar/rules_builder.rs b/src/grammar/rules_builder.rs index 918e633..92c97ec 100644 --- a/src/grammar/rules_builder.rs +++ b/src/grammar/rules_builder.rs @@ -71,7 +71,6 @@ pub enum NonTerminal { IfStatement(rules_actions::IfStatement), ElseStatement(rules_actions::ElseStatement), BooleanExpression(rules_actions::BooleanExpression), - BooleanExpressionChain(rules_actions::BooleanExpressionChain), 
SimpleExpression(rules_actions::SimpleExpression), Conjunction(rules_actions::Conjunction), ComparisonOp(rules_actions::ComparisonOp), @@ -838,7 +837,7 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo ( Symbol::Terminal(Terminal::TokenWhile(p0)), Symbol::Terminal(Terminal::TokenParOpen(p1)), - Symbol::NonTerminal(NonTerminal::BooleanExpression(p2)), + Symbol::NonTerminal(NonTerminal::Conjunction(p2)), Symbol::Terminal(Terminal::TokenParClose(p3)), Symbol::Terminal(Terminal::TokenCBOpen(p4)), Symbol::NonTerminal(NonTerminal::Body(p5)), @@ -874,7 +873,7 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo ( Symbol::Terminal(Terminal::TokenIf(p0)), Symbol::Terminal(Terminal::TokenParOpen(p1)), - Symbol::NonTerminal(NonTerminal::BooleanExpression(p2)), + Symbol::NonTerminal(NonTerminal::Conjunction(p2)), Symbol::Terminal(Terminal::TokenParClose(p3)), Symbol::Terminal(Terminal::TokenCBOpen(p4)), Symbol::NonTerminal(NonTerminal::Body(p5)), @@ -923,17 +922,19 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo ProdKind::BooleanExpressionBooleanExpressionSimpleExpression => { let mut i = compiler_context .res_stack - .split_off(stack_len - 2usize) + .split_off(stack_len - 3usize) .into_iter(); - match (i.next().unwrap(), i.next().unwrap()) { + match (i.next().unwrap(), i.next().unwrap(), i.next().unwrap()) { ( Symbol::NonTerminal(NonTerminal::SimpleExpression(p0)), - Symbol::NonTerminal(NonTerminal::BooleanExpressionChain(p1)), + Symbol::NonTerminal(NonTerminal::ComparisonOp(p1)), + Symbol::NonTerminal(NonTerminal::SimpleExpression(p2)), ) => NonTerminal::BooleanExpression( rules_actions::boolean_expression_boolean_expression_simple_expression( context, p0, p1, + p2, &mut compiler_context, ), ), @@ -972,36 +973,81 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo _ => panic!("Invalid symbol parse stack data."), } } - ProdKind::BooleanExpressionBooleanExpressionSimpleExpressionRecursive => { + ProdKind::BooleanExpressionBooleanExpressionVar => { let mut i = compiler_context .res_stack - .split_off(stack_len - 4usize) + .split_off(stack_len - 1_usize) .into_iter(); - match ( - i.next().unwrap(), - i.next().unwrap(), - i.next().unwrap(), - i.next().unwrap(), - ) { - ( - Symbol::NonTerminal(NonTerminal::SimpleExpression(p0)), - Symbol::NonTerminal(NonTerminal::BooleanExpressionChain(p1)), - Symbol::NonTerminal(NonTerminal::Conjunction(p2)), - Symbol::NonTerminal(NonTerminal::BooleanExpression(p3)), - ) => { - NonTerminal::BooleanExpression( - rules_actions::boolean_expression_boolean_expression_simple_expression_recursive( + match i.next().unwrap() { + Symbol::Terminal(Terminal::TokenId(p0)) => NonTerminal::BooleanExpression( + rules_actions::boolean_expression_boolean_expression_token_id( + context, + p0, + &mut compiler_context, + ), + ), + _ => panic!("Invalid symbol parse stack data."), + } + } + ProdKind::ConjunctionConjunctionBoolean => { + let mut i = compiler_context + .res_stack + .split_off(stack_len - 1_usize) + .into_iter(); + match i.next().unwrap() { + Symbol::NonTerminal(NonTerminal::BooleanExpression(p0)) => { + NonTerminal::Conjunction( + rules_actions::conjunction_conjunction_boolean_expression( context, p0, - p1, - p2, - p3,&mut compiler_context + &mut compiler_context, ), ) } _ => panic!("Invalid symbol parse stack data."), } } + ProdKind::ConjunctionConjunctionAnd => { + let mut i = compiler_context + .res_stack + .split_off(stack_len - 3usize) + 
.into_iter(); + match (i.next().unwrap(), i.next().unwrap(), i.next().unwrap()) { + ( + Symbol::NonTerminal(NonTerminal::BooleanExpression(p0)), + Symbol::Terminal(Terminal::TokenAnd(p1)), + Symbol::NonTerminal(NonTerminal::Conjunction(p2)), + ) => NonTerminal::Conjunction(rules_actions::conjunction_conjunction_and( + context, + p0, + p1, + p2, + &mut compiler_context, + )), + + _ => panic!("Invalid symbol parse stack data."), + } + } + ProdKind::ConjunctionConjunctionOr => { + let mut i = compiler_context + .res_stack + .split_off(stack_len - 3usize) + .into_iter(); + match (i.next().unwrap(), i.next().unwrap(), i.next().unwrap()) { + ( + Symbol::NonTerminal(NonTerminal::BooleanExpression(p0)), + Symbol::Terminal(Terminal::TokenOr(p1)), + Symbol::NonTerminal(NonTerminal::Conjunction(p2)), + ) => NonTerminal::Conjunction(rules_actions::conjunction_conjunction_or( + context, + p0, + p1, + p2, + &mut compiler_context, + )), + _ => panic!("Invalid symbol parse stack data."), + } + } ProdKind::BooleanExpressionBooleanExpressionNotStatement => { let mut i = compiler_context .res_stack @@ -1038,36 +1084,6 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo _ => panic!("Invalid symbol parse stack data."), } } - ProdKind::BooleanExpressionChainBooleanExpressionChainAux => { - let mut i = compiler_context - .res_stack - .split_off(stack_len - 3usize) - .into_iter(); - match (i.next().unwrap(), i.next().unwrap(), i.next().unwrap()) { - ( - Symbol::NonTerminal(NonTerminal::ComparisonOp(p0)), - Symbol::NonTerminal(NonTerminal::SimpleExpression(p1)), - Symbol::NonTerminal(NonTerminal::BooleanExpressionChain(p2)), - ) => NonTerminal::BooleanExpressionChain( - rules_actions::boolean_expression_chain_boolean_expression_chain_aux( - context, - p0, - p1, - p2, - &mut compiler_context, - ), - ), - _ => panic!("Invalid symbol parse stack data."), - } - } - ProdKind::BooleanExpressionChainBooleanExpressionChainEmpty => { - NonTerminal::BooleanExpressionChain( - rules_actions::boolean_expression_chain_boolean_expression_chain_empty( - context, - &mut compiler_context, - ), - ) - } ProdKind::SimpleExpressionSimpleExpressionArithmetic => { let mut i = compiler_context .res_stack @@ -1104,38 +1120,7 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo _ => panic!("Invalid symbol parse stack data."), } } - ProdKind::ConjunctionConjunctionAnd => { - let mut i = compiler_context - .res_stack - .split_off(stack_len - 1usize) - .into_iter(); - match i.next().unwrap() { - Symbol::Terminal(Terminal::TokenAnd(p0)) => { - NonTerminal::Conjunction(rules_actions::conjunction_conjunction_and( - context, - p0, - &mut compiler_context, - )) - } - _ => panic!("Invalid symbol parse stack data."), - } - } - ProdKind::ConjunctionConjunctionOr => { - let mut i = compiler_context - .res_stack - .split_off(stack_len - 1usize) - .into_iter(); - match i.next().unwrap() { - Symbol::Terminal(Terminal::TokenOr(p0)) => { - NonTerminal::Conjunction(rules_actions::conjunction_conjunction_or( - context, - p0, - &mut compiler_context, - )) - } - _ => panic!("Invalid symbol parse stack data."), - } - } + ProdKind::ComparisonOpComparisonOpEqual => { let mut i = compiler_context .res_stack diff --git a/src/grammar/types.rs b/src/grammar/types.rs index 6d19df9..64b82bd 100644 --- a/src/grammar/types.rs +++ b/src/grammar/types.rs @@ -448,12 +448,12 @@ impl Display for DataType { /// Struct representation of the rule /// -/// ` -> TokenWhile TokenParOpen TokenParClose TokenCBOpen 
TokenCBClose` +/// ` -> TokenWhile TokenParOpen TokenParClose TokenCBOpen TokenCBClose` #[derive(Debug, Clone)] pub struct WhileLoop { pub token_while: TokenWhile, pub token_par_open: TokenParOpen, - pub boolean_expression: BooleanExpression, + pub conjunction: Conjunction, pub token_par_close: TokenParClose, pub token_cbopen: TokenCBOpen, pub body: Box, @@ -462,12 +462,12 @@ pub struct WhileLoop { /// Struct representation of the rule /// -/// ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose` +/// ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose` #[derive(Debug, Clone)] pub struct IfStatement { pub token_if: TokenIf, pub token_par_open: TokenParOpen, - pub boolean_expression: BooleanExpression, + pub conjunction: Conjunction, pub token_par_close: TokenParClose, pub token_cbopen: TokenCBOpen, pub body: Box, @@ -488,27 +488,28 @@ pub struct ElseStatement { /// Enum representing all the possible rules for the `` non terminal #[derive(Debug, Clone)] pub enum BooleanExpression { - /// ` -> ` + /// ` -> ` BooleanExpressionSimpleExpression(BooleanExpressionSimpleExpression), /// ` -> "true"` BooleanExpressionTrue(TokenTrue), /// ` -> "false"` BooleanExpressionFalse(TokenFalse), - /// ` -> ` - BooleanExpressionSimpleExpressionRecursive(BooleanExpressionSimpleExpressionRecursive), /// ` -> ` BooleanExpressionNotStatement(NotStatement), /// ` -> ` BooleanExpressionIsZero(FunctionIsZero), + /// ` -> TokenId` + BooleanExpressionTokenId(TokenId), } /// Struct representation of the rule /// -/// ` -> ` +/// ` -> ` #[derive(Debug, Clone)] pub struct BooleanExpressionSimpleExpression { pub simple_expression: SimpleExpression, - pub boolean_expression_chain: BooleanExpressionChain, + pub comparison_op: ComparisonOp, + pub simple_expression_2: SimpleExpression, } /// Struct representation of the rule @@ -548,9 +549,25 @@ pub enum SimpleExpression { #[derive(Debug, Clone)] pub enum Conjunction { /// ` -> "and"` - ConjunctionAnd(TokenAnd), + ConjunctionAnd(ConjunctionAnd), /// ` -> "or"` - ConjunctionOr(TokenOr), + ConjunctionOr(ConjunctionOr), + /// ` -> ` + ConjunctionBooleanExpression(BooleanExpression), +} + +#[derive(Debug, Clone)] +pub struct ConjunctionAnd { + pub boolean_expression: BooleanExpression, + pub token_and: TokenAnd, + pub conjunction: Box, +} + +#[derive(Debug, Clone)] +pub struct ConjunctionOr { + pub boolean_expression: BooleanExpression, + pub token_or: TokenOr, + pub conjunction: Box, } /// Enum representing all the possible rules for the `` non terminal From fc203fdd4b458bfa844e8730cb45c212c20cee8f Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sun, 26 Oct 2025 12:20:43 -0300 Subject: [PATCH 10/25] feat: AST generation for booleanStatements and If --- src/compiler/ast.rs | 99 +++++++++++-- src/grammar/rules_actions.rs | 263 ++++++++++++++++++++++++++++++----- src/main.rs | 5 +- 3 files changed, 321 insertions(+), 46 deletions(-) diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index 1d9ed3f..fbb9bf7 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -1,7 +1,8 @@ +use crate::grammar::types::ComparisonOp; use std::{ array, cell::Cell, - fmt::Display, + fmt::{Debug, Display}, fs::File, io::{self, Write}, mem, @@ -10,8 +11,20 @@ use std::{ pub struct Ast { tree: [Rc; mem::variant_count::()], - stack_t: Vec>, - stack_e: Vec>, + pub stack_t: Vec>, + pub stack_e: Vec>, + pub comparision_op_stack: Vec, + pub comparision_expressions_stack: Vec>, + pub boolean_expression_stack: Vec>, +} + +impl Debug 
for Ast { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + writeln!(f, "{:?}", self.stack_t)?; + writeln!(f, "{:?}", self.stack_e)?; + writeln!(f, "{:?}", self.comparision_op_stack)?; + writeln!(f, "{:?}", self.comparision_expressions_stack) + } } #[derive(Clone, Copy)] @@ -22,7 +35,15 @@ pub enum AstPtr { Factor, Term, ArithmeticExpression, + BooleanExpression, + Conjunction, SimpleExpression, + Body, + Statement, + Expressions, + If, + Not, + IsZero, } pub enum AstNodeRef { @@ -30,6 +51,18 @@ pub enum AstNodeRef { Node(Rc), } +impl From for AstNodeRef { + fn from(value: AstPtr) -> Self { + Self::Ptr(value) + } +} + +impl From> for AstNodeRef { + fn from(value: Rc) -> Self { + Self::Node(value) + } +} + pub struct Node { pub value: NodeValue, parent: Cell>>, @@ -37,6 +70,12 @@ pub struct Node { right_child: Option>, } +impl Debug for Node { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.value) + } +} + impl Node { pub fn new_leaf(value: NodeValue) -> Self { Self { @@ -70,16 +109,53 @@ pub enum AstAction { Mult, Div, Assign, + If, + And, + Or, + Not, + IsZero, + GT, + GTE, + EQ, + NE, + LT, + LTE, + Noop, } impl Display for AstAction { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Self::Plus => write!(f, "PLUS"), - Self::Sub => write!(f, "SUB"), - Self::Mult => write!(f, "MUL"), - Self::Div => write!(f, "DIV"), - Self::Assign => write!(f, "ASSIGN"), + Self::Plus => write!(f, "+"), + Self::Sub => write!(f, "-"), + Self::Mult => write!(f, "*"), + Self::Div => write!(f, "/"), + Self::Assign => write!(f, ":="), + Self::GT => write!(f, ">"), + Self::GTE => write!(f, ">="), + Self::EQ => write!(f, "=="), + Self::NE => write!(f, "!="), + Self::LT => write!(f, "<"), + Self::LTE => write!(f, "<="), + Self::If => write!(f, "IF"), + Self::And => write!(f, "AND"), + Self::Or => write!(f, "OR"), + Self::Not => write!(f, "NOT"), + Self::IsZero => write!(f, "ISZERO"), + Self::Noop => write!(f, "NOOP"), + } + } +} + +impl From for AstAction { + fn from(value: ComparisonOp) -> Self { + match value { + ComparisonOp::ComparisonOpEqual(_) => Self::EQ, + ComparisonOp::ComparisonOpNotEqual(_) => Self::NE, + ComparisonOp::ComparisonOpLess(_) => Self::LT, + ComparisonOp::ComparisonOpLessEqual(_) => Self::LTE, + ComparisonOp::ComparisonOpGreater(_) => Self::GT, + ComparisonOp::ComparisonOpGreaterEqual(_) => Self::GTE, } } } @@ -90,6 +166,9 @@ impl Default for Ast { tree: array::from_fn(|_| Rc::new(Node::new_leaf(NodeValue::Value("".to_string())))), stack_e: Vec::new(), stack_t: Vec::new(), + comparision_op_stack: Vec::new(), + comparision_expressions_stack: Vec::new(), + boolean_expression_stack: Vec::new(), } } } @@ -212,4 +291,8 @@ impl Ast { pub fn pop_e_stack(&mut self) -> Option> { self.stack_e.pop() } + + pub fn get_node_from_ptr(&self, from: AstPtr) -> Rc { + self.tree[from as usize].clone() + } } diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 872479c..f23b2c6 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -1,10 +1,11 @@ use crate::compiler::{ - ast::{AstAction, AstNodeRef, AstPtr, Node, NodeValue}, + ast::{AstAction, AstPtr, Node, NodeValue}, context::CompilerContext, error::{CompilerError, log_error_and_exit}, }; pub use crate::grammar::types::*; use rustemo::{Context, Input}; +use std::rc::Rc; /// Parses the keyword "int" pub fn token_int(_ctx: &Ctx, token: Token, compiler_context: &mut CompilerContext) -> TokenInt { @@ -383,6 +384,9 
@@ pub fn body_body_expressions( compiler_context: &mut CompilerContext, ) -> Body { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::Expressions.into(), AstPtr::Body); Some(BodyNoO::BodyExpressions(expressions)) } @@ -560,6 +564,9 @@ pub fn expressions_expression_single( compiler_context: &mut CompilerContext, ) -> Expressions { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::Statement.into(), AstPtr::Expressions); Expressions::ExpressionSingle(statement) } @@ -584,6 +591,9 @@ pub fn statement_statement_assignment( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::Assignment.into(), AstPtr::Statement); Statement::StatementAssignment(assignment) } @@ -594,6 +604,9 @@ pub fn statement_statement_if_statement( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::If.into(), AstPtr::Statement); Statement::StatementIfStatement(if_statement) } @@ -651,8 +664,8 @@ pub fn assignment_assignment_expression( let leaf = Node::new_leaf(NodeValue::Value(token_id.clone())); compiler_context.ast.create_node( AstAction::Assign, - AstNodeRef::Node(leaf.into()), - AstNodeRef::Ptr(AstPtr::SimpleExpression), + Rc::new(leaf).into(), + AstPtr::SimpleExpression.into(), AstPtr::Assignment, ); Assignment::AssignmentExpression(AssignmentExpression { @@ -753,6 +766,12 @@ pub fn if_statement_if_statement( compiler_context.write_to_parser_file(&format!( " -> {token_if} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" )); + compiler_context.ast.create_node( + AstAction::If, + AstPtr::Conjunction.into(), + AstPtr::Body.into(), + AstPtr::If, + ); IfStatement { token_if, token_par_open, @@ -786,7 +805,7 @@ pub fn else_statement_else_statement( /// Parses the rule ` -> ` pub fn boolean_expression_boolean_expression_simple_expression( - _ctx: &Ctx, + ctx: &Ctx, simple_expression: SimpleExpression, comparison_op: ComparisonOp, simple_expression_2: SimpleExpression, @@ -795,6 +814,37 @@ pub fn boolean_expression_boolean_expression_simple_expression( compiler_context.write_to_parser_file( " -> ", ); + let Some(left_child) = compiler_context.ast.comparision_expressions_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "ComparisonExpressions stack was empty when parsing ` -> `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + let Some(operator) = compiler_context.ast.comparision_op_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "ComparisonOperator stack was empty when parsing ` -> `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + let node = compiler_context.ast.create_node( + operator.into(), + left_child.into(), + AstPtr::SimpleExpression.into(), + AstPtr::BooleanExpression, + ); + compiler_context.ast.boolean_expression_stack.push(node); BooleanExpression::BooleanExpressionSimpleExpression(BooleanExpressionSimpleExpression { simple_expression, comparison_op, @@ -809,6 +859,10 @@ pub fn boolean_expression_boolean_expression_true( compiler_context: &mut CompilerContext, ) -> BooleanExpression { compiler_context.write_to_parser_file(&format!(" -> {token_true}")); + let node = compiler_context + .ast + .create_leaf(token_true.clone(), AstPtr::BooleanExpression); + 
compiler_context.ast.boolean_expression_stack.push(node); BooleanExpression::BooleanExpressionTrue(token_true) } @@ -819,6 +873,10 @@ pub fn boolean_expression_boolean_expression_false( compiler_context: &mut CompilerContext, ) -> BooleanExpression { compiler_context.write_to_parser_file(&format!(" -> {token_false}")); + let node = compiler_context + .ast + .create_leaf(token_false.clone(), AstPtr::BooleanExpression); + compiler_context.ast.boolean_expression_stack.push(node); BooleanExpression::BooleanExpressionFalse(token_false) } @@ -829,6 +887,10 @@ pub fn boolean_expression_boolean_expression_token_id( compiler_context: &mut CompilerContext, ) -> BooleanExpression { compiler_context.write_to_parser_file(&format!(" -> {token_id}")); + let node = compiler_context + .ast + .create_leaf(token_id.clone(), AstPtr::BooleanExpression); + compiler_context.ast.boolean_expression_stack.push(node); BooleanExpression::BooleanExpressionTokenId(token_id) } @@ -839,6 +901,8 @@ pub fn boolean_expression_boolean_expression_not_statement( compiler_context: &mut CompilerContext, ) -> BooleanExpression { compiler_context.write_to_parser_file(" -> "); + let node = compiler_context.ast.get_node_from_ptr(AstPtr::Not); + compiler_context.ast.boolean_expression_stack.push(node); BooleanExpression::BooleanExpressionNotStatement(not_statement) } @@ -849,6 +913,8 @@ pub fn boolean_expression_boolean_expression_is_zero( compiler_context: &mut CompilerContext, ) -> BooleanExpression { compiler_context.write_to_parser_file(" -> "); + let node = compiler_context.ast.get_node_from_ptr(AstPtr::IsZero); + compiler_context.ast.boolean_expression_stack.push(node); BooleanExpression::BooleanExpressionIsZero(function_is_zero) } @@ -860,7 +926,7 @@ pub fn simple_expression_simple_expression_arithmetic( ) -> SimpleExpression { compiler_context.write_to_parser_file(" -> "); compiler_context.ast.assign_node_to_ptr( - AstNodeRef::Ptr(AstPtr::ArithmeticExpression), + AstPtr::ArithmeticExpression.into(), AstPtr::SimpleExpression, ); SimpleExpression::SimpleExpressionArithmeticExpression(arithmetic_expression) @@ -879,7 +945,7 @@ pub fn simple_expression_simple_expression_string( /// Parses the rule ` -> "and" ` pub fn conjunction_conjunction_and( - _ctx: &Ctx, + ctx: &Ctx, boolean_expression: BooleanExpression, token_and: TokenAnd, conjunction: Conjunction, @@ -888,6 +954,24 @@ pub fn conjunction_conjunction_and( compiler_context.write_to_parser_file(&format!( " -> {token_and} " )); + let Some(boolean_expression_node) = compiler_context.ast.boolean_expression_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "BooleanExpression stack was empty when parsing ` -> \"and\" `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + compiler_context.ast.create_node( + AstAction::And, + boolean_expression_node.into(), + AstPtr::Conjunction.into(), + AstPtr::Conjunction, + ); Conjunction::ConjunctionAnd(ConjunctionAnd { boolean_expression, token_and, @@ -897,7 +981,7 @@ pub fn conjunction_conjunction_and( /// Parses the rule ` -> "or" ` pub fn conjunction_conjunction_or( - _ctx: &Ctx, + ctx: &Ctx, boolean_expression: BooleanExpression, token_or: TokenOr, conjunction: Conjunction, @@ -906,6 +990,24 @@ pub fn conjunction_conjunction_or( compiler_context.write_to_parser_file(&format!( " -> {token_or} " )); + let Some(boolean_expression_node) = compiler_context.ast.boolean_expression_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "BooleanExpression stack was empty 
when parsing ` -> \"or\" `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + compiler_context.ast.create_node( + AstAction::Or, + boolean_expression_node.into(), + AstPtr::Conjunction.into(), + AstPtr::Conjunction, + ); Conjunction::ConjunctionOr(ConjunctionOr { boolean_expression, token_or, @@ -915,11 +1017,27 @@ pub fn conjunction_conjunction_or( /// Parses the rule ` -> ` pub fn conjunction_conjunction_boolean_expression( - _ctx: &Ctx, + ctx: &Ctx, boolean_expression: BooleanExpression, compiler_context: &mut CompilerContext, ) -> Conjunction { compiler_context.write_to_parser_file(" -> "); + let Some(boolean_expression_node) = compiler_context.ast.boolean_expression_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "BooleanExpression stack was empty when parsing ` -> `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + + compiler_context + .ast + .assign_node_to_ptr(boolean_expression_node.into(), AstPtr::Conjunction); Conjunction::ConjunctionBooleanExpression(boolean_expression) } @@ -930,7 +1048,17 @@ pub fn comparison_op_comparison_op_equal( compiler_context: &mut CompilerContext, ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_equal}")); - ComparisonOp::ComparisonOpEqual(token_equal) + let result = ComparisonOp::ComparisonOpEqual(token_equal); + compiler_context + .ast + .comparision_op_stack + .push(result.clone()); + compiler_context.ast.comparision_expressions_stack.push( + compiler_context + .ast + .get_node_from_ptr(AstPtr::SimpleExpression), + ); + result } /// Parses the rule ` -> "!="` @@ -940,7 +1068,17 @@ pub fn comparison_op_comparison_op_not_equal( compiler_context: &mut CompilerContext, ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_not_equal}")); - ComparisonOp::ComparisonOpNotEqual(token_not_equal) + let result = ComparisonOp::ComparisonOpNotEqual(token_not_equal); + compiler_context + .ast + .comparision_op_stack + .push(result.clone()); + compiler_context.ast.comparision_expressions_stack.push( + compiler_context + .ast + .get_node_from_ptr(AstPtr::SimpleExpression), + ); + result } /// Parses the rule ` -> "<"` @@ -950,7 +1088,17 @@ pub fn comparison_op_comparison_op_less( compiler_context: &mut CompilerContext, ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_less}")); - ComparisonOp::ComparisonOpLess(token_less) + let result = ComparisonOp::ComparisonOpLess(token_less); + compiler_context + .ast + .comparision_op_stack + .push(result.clone()); + compiler_context.ast.comparision_expressions_stack.push( + compiler_context + .ast + .get_node_from_ptr(AstPtr::SimpleExpression), + ); + result } /// Parses the rule ` -> ">="` @@ -960,7 +1108,17 @@ pub fn comparison_op_comparison_op_less_equal( compiler_context: &mut CompilerContext, ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_less_equal}")); - ComparisonOp::ComparisonOpLessEqual(token_less_equal) + let result = ComparisonOp::ComparisonOpLessEqual(token_less_equal); + compiler_context + .ast + .comparision_op_stack + .push(result.clone()); + compiler_context.ast.comparision_expressions_stack.push( + compiler_context + .ast + .get_node_from_ptr(AstPtr::SimpleExpression), + ); + result } /// Parses the rule ` -> ">"` @@ -970,7 +1128,17 @@ pub fn comparison_op_comparison_op_greater( compiler_context: &mut CompilerContext, ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_greater}")); - 
ComparisonOp::ComparisonOpGreater(token_greater) + let result = ComparisonOp::ComparisonOpGreater(token_greater); + compiler_context + .ast + .comparision_op_stack + .push(result.clone()); + compiler_context.ast.comparision_expressions_stack.push( + compiler_context + .ast + .get_node_from_ptr(AstPtr::SimpleExpression), + ); + result } /// Parses the rule ` -> ">="` @@ -980,7 +1148,17 @@ pub fn comparison_op_comparison_op_greater_equal( compiler_context: &mut CompilerContext, ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_greater_equal}")); - ComparisonOp::ComparisonOpGreaterEqual(token_greater_equal) + let result = ComparisonOp::ComparisonOpGreaterEqual(token_greater_equal); + compiler_context + .ast + .comparision_op_stack + .push(result.clone()); + compiler_context.ast.comparision_expressions_stack.push( + compiler_context + .ast + .get_node_from_ptr(AstPtr::SimpleExpression), + ); + result } /// Parses the rule ` -> TokenIntLiteral` @@ -1040,7 +1218,7 @@ pub fn number_number_negative_float( /// Parses the rule ` -> TokenNot ` pub fn not_statement_not( - _ctx: &Ctx, + ctx: &Ctx, token_not: TokenNot, boolean_expression: BooleanExpression, compiler_context: &mut CompilerContext, @@ -1048,6 +1226,26 @@ pub fn not_statement_not( compiler_context.write_to_parser_file(&format!( " -> {token_not} " )); + let Some(boolean_expression_node) = compiler_context.ast.boolean_expression_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "BooleanExpression stack was empty when parsing ` -> TokenNot `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + + let dummy = Node::new_leaf(NodeValue::Action(AstAction::Noop)); + compiler_context.ast.create_node( + AstAction::Not, + boolean_expression_node.into(), + Rc::new(dummy).into(), + AstPtr::Not, + ); NotStatement { token_not, boolean_expression: Box::new(boolean_expression), @@ -1079,8 +1277,8 @@ pub fn arithmetic_expression_arithmetic_expression_sum_term( }; compiler_context.ast.create_node( AstAction::Plus, - AstNodeRef::Node(node), - AstNodeRef::Ptr(AstPtr::Term), + node.into(), + AstPtr::Term.into(), AstPtr::ArithmeticExpression, ); ArithmeticExpression::ArithmeticExpressionSumTerm(ArithmeticExpressionSumTerm { @@ -1115,8 +1313,8 @@ pub fn arithmetic_expression_arithmetic_expression_sub_term( }; compiler_context.ast.create_node( AstAction::Sub, - AstNodeRef::Node(node), - AstNodeRef::Ptr(AstPtr::Term), + node.into(), + AstPtr::Term.into(), AstPtr::ArithmeticExpression, ); ArithmeticExpression::ArithmeticExpressionSubTerm(ArithmeticExpressionSubTerm { @@ -1130,7 +1328,7 @@ pub fn arithmetic_expression_arithmetic_expression_sub_term( pub fn dummy_ae_empty(_ctx: &Ctx, compiler_context: &mut CompilerContext) -> DummyAE { compiler_context .ast - .push_e_stack(AstNodeRef::Ptr(AstPtr::ArithmeticExpression)); + .push_e_stack(AstPtr::ArithmeticExpression.into()); None } @@ -1143,7 +1341,7 @@ pub fn arithmetic_expression_arithmetic_expression_term( compiler_context.write_to_parser_file(" -> "); compiler_context .ast - .assign_node_to_ptr(AstNodeRef::Ptr(AstPtr::Term), AstPtr::ArithmeticExpression); + .assign_node_to_ptr(AstPtr::Term.into(), AstPtr::ArithmeticExpression); ArithmeticExpression::ArithmeticExpressionTerm(term) } @@ -1171,8 +1369,8 @@ pub fn term_term_mul_factor( }; compiler_context.ast.create_node( AstAction::Mult, - AstNodeRef::Node(node), - AstNodeRef::Ptr(AstPtr::Factor), + node.into(), + AstPtr::Factor.into(), AstPtr::Term, ); Term::TermMulFactor(TermMulFactor { @@ -1206,8 
+1404,8 @@ pub fn term_term_div_factor( }; compiler_context.ast.create_node( AstAction::Div, - AstNodeRef::Node(node), - AstNodeRef::Ptr(AstPtr::Factor), + node.into(), + AstPtr::Factor.into(), AstPtr::Term, ); Term::TermDivFactor(TermDivFactor { @@ -1219,9 +1417,7 @@ pub fn term_term_div_factor( // Parses the rule ` -> EMPTY` pub fn dummy_t_empty(_ctx: &Ctx, compiler_context: &mut CompilerContext) -> DummyT { - compiler_context - .ast - .push_t_stack(AstNodeRef::Ptr(AstPtr::Term)); + compiler_context.ast.push_t_stack(AstPtr::Term.into()); None } @@ -1234,7 +1430,7 @@ pub fn term_term_factor( compiler_context.write_to_parser_file(" -> "); compiler_context .ast - .assign_node_to_ptr(AstNodeRef::Ptr(AstPtr::Factor), AstPtr::Term); + .assign_node_to_ptr(AstPtr::Factor.into(), AstPtr::Term); Term::TermFactor(factor) } @@ -1260,7 +1456,7 @@ pub fn factor_factor_number( compiler_context.write_to_parser_file(" -> "); compiler_context .ast - .assign_node_to_ptr(AstNodeRef::Ptr(AstPtr::Number), AstPtr::Factor); + .assign_node_to_ptr(AstPtr::Number.into(), AstPtr::Factor); Factor::FactorNumber(number) } @@ -1275,10 +1471,9 @@ pub fn factor_factor_paren( compiler_context.write_to_parser_file(&format!( " -> {token_par_open} {token_par_close}" )); - compiler_context.ast.assign_node_to_ptr( - AstNodeRef::Ptr(AstPtr::ArithmeticExpression), - AstPtr::Factor, - ); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::ArithmeticExpression.into(), AstPtr::Factor); Factor::FactorParen(FactorParen { token_par_open, arithmetic_expression: Box::new(arithmetic_expression), diff --git a/src/main.rs b/src/main.rs index 708d31a..543cae3 100644 --- a/src/main.rs +++ b/src/main.rs @@ -26,10 +26,7 @@ fn main() -> Result<(), CompilerError> { println!("{rules}"); - compiler - .inner - .borrow_mut() - .create_ast_graph(AstPtr::Assignment)?; + compiler.inner.borrow_mut().create_ast_graph(AstPtr::Body)?; Ok(()) } From 6b42e9ceab5d48d700a9a8c1c348ddba7d49042c Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sun, 26 Oct 2025 13:06:30 -0300 Subject: [PATCH 11/25] fix: else statements could be used before having an if before it --- src/grammar/rules.rs | 2185 ++++++++++++++++------------------ src/grammar/rules.rustemo | 4 +- src/grammar/rules_actions.rs | 41 +- src/grammar/rules_builder.rs | 58 +- src/grammar/types.rs | 28 +- 5 files changed, 1102 insertions(+), 1214 deletions(-) diff --git a/src/grammar/rules.rs b/src/grammar/rules.rs index 3d5511d..96212a9 100644 --- a/src/grammar/rules.rs +++ b/src/grammar/rules.rs @@ -16,7 +16,7 @@ use rustemo::debug::{log, logn}; use rustemo::colored::*; pub type Input = str; const STATE_COUNT: usize = 117usize; -const MAX_RECOGNIZERS: usize = 21usize; +const MAX_RECOGNIZERS: usize = 20usize; #[allow(dead_code)] const TERMINAL_COUNT: usize = 39usize; #[allow(clippy::upper_case_acronyms)] @@ -91,7 +91,6 @@ pub enum ProdKind { ExpressionsExpressionRecursive, StatementStatementAssignment, StatementStatementIfStatement, - StatementStatementElseStatement, StatementStatementWhile, StatementStatementWrite, StatementStatementRead, @@ -102,6 +101,7 @@ pub enum ProdKind { DataTypeStringType, WhileLoopWhile, IfStatementIfStatement, + IfStatementIfElseStatement, ElseStatementElseStatement, BooleanExpressionBooleanExpressionSimpleExpression, BooleanExpressionBooleanExpressionTrue, @@ -182,7 +182,6 @@ impl std::fmt::Debug for ProdKind { } ProdKind::StatementStatementAssignment => "Statement: Assignment", ProdKind::StatementStatementIfStatement => 
"Statement: IfStatement", - ProdKind::StatementStatementElseStatement => "Statement: ElseStatement", ProdKind::StatementStatementWhile => "Statement: WhileLoop", ProdKind::StatementStatementWrite => "Statement: FunctionWrite", ProdKind::StatementStatementRead => "Statement: FunctionRead", @@ -201,6 +200,9 @@ impl std::fmt::Debug for ProdKind { ProdKind::IfStatementIfStatement => { "IfStatement: TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose" } + ProdKind::IfStatementIfElseStatement => { + "IfStatement: TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose ElseStatement" + } ProdKind::ElseStatementElseStatement => { "ElseStatement: TokenElse TokenCBOpen Body TokenCBClose" } @@ -335,7 +337,6 @@ impl From for NonTermKind { ProdKind::ExpressionsExpressionRecursive => NonTermKind::Expressions, ProdKind::StatementStatementAssignment => NonTermKind::Statement, ProdKind::StatementStatementIfStatement => NonTermKind::Statement, - ProdKind::StatementStatementElseStatement => NonTermKind::Statement, ProdKind::StatementStatementWhile => NonTermKind::Statement, ProdKind::StatementStatementWrite => NonTermKind::Statement, ProdKind::StatementStatementRead => NonTermKind::Statement, @@ -346,6 +347,7 @@ impl From for NonTermKind { ProdKind::DataTypeStringType => NonTermKind::DataType, ProdKind::WhileLoopWhile => NonTermKind::WhileLoop, ProdKind::IfStatementIfStatement => NonTermKind::IfStatement, + ProdKind::IfStatementIfElseStatement => NonTermKind::IfStatement, ProdKind::ElseStatementElseStatement => NonTermKind::ElseStatement, ProdKind::BooleanExpressionBooleanExpressionSimpleExpression => { NonTermKind::BooleanExpression @@ -414,117 +416,117 @@ pub enum State { TokenInitS2, TokenWhileS3, TokenIfS4, - TokenElseS5, - TokenReadS6, - TokenWriteS7, - ProgramS8, - BodyS9, - FunctionReadS10, - FunctionWriteS11, - ExpressionsS12, - StatementS13, - AssignmentS14, - WhileLoopS15, - IfStatementS16, - ElseStatementS17, - TokenAssignS18, - TokenParOpenS19, - TokenCBOpenS20, - InitBodyS21, + TokenReadS5, + TokenWriteS6, + ProgramS7, + BodyS8, + FunctionReadS9, + FunctionWriteS10, + ExpressionsS11, + StatementS12, + AssignmentS13, + WhileLoopS14, + IfStatementS15, + TokenAssignS16, + TokenParOpenS17, + TokenCBOpenS18, + InitBodyS19, + TokenParOpenS20, + TokenParOpenS21, TokenParOpenS22, TokenParOpenS23, - TokenCBOpenS24, - TokenParOpenS25, - TokenParOpenS26, - TokenIdS27, - ExpressionsS28, - TokenIntLiteralS29, - TokenFloatLiteralS30, - TokenStringLiteralS31, - TokenIdS32, - TokenSubS33, - TokenParOpenS34, - TokenConvDateS35, - FunctionConvDateS36, - SimpleExpressionS37, - NumberS38, - ArithmeticExpressionS39, - TermS40, - FactorS41, - TokenParCloseS42, - TokenIdS43, - VarDeclarationsS44, - VarDeclarationS45, - ExpressionsS46, - TokenIdS47, - TokenTrueS48, - TokenFalseS49, - TokenNotS50, - TokenIsZeroS51, - FunctionIsZeroS52, - BooleanExpressionS53, - SimpleExpressionS54, - ConjunctionS55, - NotStatementS56, - ConjunctionS57, - BodyS58, - TokenIdS59, - SimpleExpressionS60, - TokenIntLiteralS61, - TokenFloatLiteralS62, - ArithmeticExpressionS63, - TokenParOpenS64, - DummyAES65, - DummyTS66, - TokenCBOpenS67, - TokenColonS68, - TokenCommaS69, - TokenCBCloseS70, - VarDeclarationsS71, - BooleanExpressionS72, - TokenParOpenS73, - TokenAndS74, - TokenOrS75, - TokenEqualS76, - TokenNotEqualS77, - TokenLessS78, - TokenLessEqualS79, - TokenGreaterS80, - TokenGreaterEqualS81, - ComparisonOpS82, + TokenIdS24, + ExpressionsS25, + TokenIntLiteralS26, + TokenFloatLiteralS27, + 
TokenStringLiteralS28, + TokenIdS29, + TokenSubS30, + TokenParOpenS31, + TokenConvDateS32, + FunctionConvDateS33, + SimpleExpressionS34, + NumberS35, + ArithmeticExpressionS36, + TermS37, + FactorS38, + TokenParCloseS39, + TokenIdS40, + VarDeclarationsS41, + VarDeclarationS42, + ExpressionsS43, + TokenIdS44, + TokenTrueS45, + TokenFalseS46, + TokenNotS47, + TokenIsZeroS48, + FunctionIsZeroS49, + BooleanExpressionS50, + SimpleExpressionS51, + ConjunctionS52, + NotStatementS53, + ConjunctionS54, + TokenIdS55, + SimpleExpressionS56, + TokenIntLiteralS57, + TokenFloatLiteralS58, + ArithmeticExpressionS59, + TokenParOpenS60, + DummyAES61, + DummyTS62, + TokenCBOpenS63, + TokenColonS64, + TokenCommaS65, + TokenCBCloseS66, + VarDeclarationsS67, + BooleanExpressionS68, + TokenParOpenS69, + TokenAndS70, + TokenOrS71, + TokenEqualS72, + TokenNotEqualS73, + TokenLessS74, + TokenLessEqualS75, + TokenGreaterS76, + TokenGreaterEqualS77, + ComparisonOpS78, + TokenParCloseS79, + TokenParCloseS80, + TokenParCloseS81, + TokenParCloseS82, TokenParCloseS83, - TokenParCloseS84, - TokenCBCloseS85, - TokenParCloseS86, - TokenParCloseS87, - TokenParCloseS88, - TokenDateS89, - TokenSumS90, - TokenSubS91, - TokenMulS92, - TokenDivS93, - BodyS94, - TokenIntS95, - TokenFloatS96, - TokenStringS97, - DataTypeS98, - VarDeclarationS99, - ArithmeticExpressionS100, - ConjunctionS101, - ConjunctionS102, - SimpleExpressionS103, - TokenCBOpenS104, - TokenCBOpenS105, - TokenParCloseS106, - TermS107, - TermS108, - FactorS109, - FactorS110, + TokenDateS84, + TokenSumS85, + TokenSubS86, + TokenMulS87, + TokenDivS88, + BodyS89, + TokenIntS90, + TokenFloatS91, + TokenStringS92, + DataTypeS93, + VarDeclarationS94, + ArithmeticExpressionS95, + ConjunctionS96, + ConjunctionS97, + SimpleExpressionS98, + TokenCBOpenS99, + TokenCBOpenS100, + TokenParCloseS101, + TermS102, + TermS103, + FactorS104, + FactorS105, + TokenCBCloseS106, + TokenParCloseS107, + BodyS108, + BodyS109, + TokenCBCloseS110, TokenCBCloseS111, - TokenParCloseS112, - BodyS113, - BodyS114, - TokenCBCloseS115, + TokenElseS112, + ElseStatementS113, + TokenCBOpenS114, + BodyS115, TokenCBCloseS116, } impl StateT for State { @@ -545,117 +547,117 @@ impl std::fmt::Debug for State { State::TokenInitS2 => "2:TokenInit", State::TokenWhileS3 => "3:TokenWhile", State::TokenIfS4 => "4:TokenIf", - State::TokenElseS5 => "5:TokenElse", - State::TokenReadS6 => "6:TokenRead", - State::TokenWriteS7 => "7:TokenWrite", - State::ProgramS8 => "8:Program", - State::BodyS9 => "9:Body", - State::FunctionReadS10 => "10:FunctionRead", - State::FunctionWriteS11 => "11:FunctionWrite", - State::ExpressionsS12 => "12:Expressions", - State::StatementS13 => "13:Statement", - State::AssignmentS14 => "14:Assignment", - State::WhileLoopS15 => "15:WhileLoop", - State::IfStatementS16 => "16:IfStatement", - State::ElseStatementS17 => "17:ElseStatement", - State::TokenAssignS18 => "18:TokenAssign", - State::TokenParOpenS19 => "19:TokenParOpen", - State::TokenCBOpenS20 => "20:TokenCBOpen", - State::InitBodyS21 => "21:InitBody", + State::TokenReadS5 => "5:TokenRead", + State::TokenWriteS6 => "6:TokenWrite", + State::ProgramS7 => "7:Program", + State::BodyS8 => "8:Body", + State::FunctionReadS9 => "9:FunctionRead", + State::FunctionWriteS10 => "10:FunctionWrite", + State::ExpressionsS11 => "11:Expressions", + State::StatementS12 => "12:Statement", + State::AssignmentS13 => "13:Assignment", + State::WhileLoopS14 => "14:WhileLoop", + State::IfStatementS15 => "15:IfStatement", + State::TokenAssignS16 => 
"16:TokenAssign", + State::TokenParOpenS17 => "17:TokenParOpen", + State::TokenCBOpenS18 => "18:TokenCBOpen", + State::InitBodyS19 => "19:InitBody", + State::TokenParOpenS20 => "20:TokenParOpen", + State::TokenParOpenS21 => "21:TokenParOpen", State::TokenParOpenS22 => "22:TokenParOpen", State::TokenParOpenS23 => "23:TokenParOpen", - State::TokenCBOpenS24 => "24:TokenCBOpen", - State::TokenParOpenS25 => "25:TokenParOpen", - State::TokenParOpenS26 => "26:TokenParOpen", - State::TokenIdS27 => "27:TokenId", - State::ExpressionsS28 => "28:Expressions", - State::TokenIntLiteralS29 => "29:TokenIntLiteral", - State::TokenFloatLiteralS30 => "30:TokenFloatLiteral", - State::TokenStringLiteralS31 => "31:TokenStringLiteral", - State::TokenIdS32 => "32:TokenId", - State::TokenSubS33 => "33:TokenSub", - State::TokenParOpenS34 => "34:TokenParOpen", - State::TokenConvDateS35 => "35:TokenConvDate", - State::FunctionConvDateS36 => "36:FunctionConvDate", - State::SimpleExpressionS37 => "37:SimpleExpression", - State::NumberS38 => "38:Number", - State::ArithmeticExpressionS39 => "39:ArithmeticExpression", - State::TermS40 => "40:Term", - State::FactorS41 => "41:Factor", - State::TokenParCloseS42 => "42:TokenParClose", - State::TokenIdS43 => "43:TokenId", - State::VarDeclarationsS44 => "44:VarDeclarations", - State::VarDeclarationS45 => "45:VarDeclaration", - State::ExpressionsS46 => "46:Expressions", - State::TokenIdS47 => "47:TokenId", - State::TokenTrueS48 => "48:TokenTrue", - State::TokenFalseS49 => "49:TokenFalse", - State::TokenNotS50 => "50:TokenNot", - State::TokenIsZeroS51 => "51:TokenIsZero", - State::FunctionIsZeroS52 => "52:FunctionIsZero", - State::BooleanExpressionS53 => "53:BooleanExpression", - State::SimpleExpressionS54 => "54:SimpleExpression", - State::ConjunctionS55 => "55:Conjunction", - State::NotStatementS56 => "56:NotStatement", - State::ConjunctionS57 => "57:Conjunction", - State::BodyS58 => "58:Body", - State::TokenIdS59 => "59:TokenId", - State::SimpleExpressionS60 => "60:SimpleExpression", - State::TokenIntLiteralS61 => "61:TokenIntLiteral", - State::TokenFloatLiteralS62 => "62:TokenFloatLiteral", - State::ArithmeticExpressionS63 => "63:ArithmeticExpression", - State::TokenParOpenS64 => "64:TokenParOpen", - State::DummyAES65 => "65:DummyAE", - State::DummyTS66 => "66:DummyT", - State::TokenCBOpenS67 => "67:TokenCBOpen", - State::TokenColonS68 => "68:TokenColon", - State::TokenCommaS69 => "69:TokenComma", - State::TokenCBCloseS70 => "70:TokenCBClose", - State::VarDeclarationsS71 => "71:VarDeclarations", - State::BooleanExpressionS72 => "72:BooleanExpression", - State::TokenParOpenS73 => "73:TokenParOpen", - State::TokenAndS74 => "74:TokenAnd", - State::TokenOrS75 => "75:TokenOr", - State::TokenEqualS76 => "76:TokenEqual", - State::TokenNotEqualS77 => "77:TokenNotEqual", - State::TokenLessS78 => "78:TokenLess", - State::TokenLessEqualS79 => "79:TokenLessEqual", - State::TokenGreaterS80 => "80:TokenGreater", - State::TokenGreaterEqualS81 => "81:TokenGreaterEqual", - State::ComparisonOpS82 => "82:ComparisonOp", + State::TokenIdS24 => "24:TokenId", + State::ExpressionsS25 => "25:Expressions", + State::TokenIntLiteralS26 => "26:TokenIntLiteral", + State::TokenFloatLiteralS27 => "27:TokenFloatLiteral", + State::TokenStringLiteralS28 => "28:TokenStringLiteral", + State::TokenIdS29 => "29:TokenId", + State::TokenSubS30 => "30:TokenSub", + State::TokenParOpenS31 => "31:TokenParOpen", + State::TokenConvDateS32 => "32:TokenConvDate", + State::FunctionConvDateS33 => "33:FunctionConvDate", + 
State::SimpleExpressionS34 => "34:SimpleExpression", + State::NumberS35 => "35:Number", + State::ArithmeticExpressionS36 => "36:ArithmeticExpression", + State::TermS37 => "37:Term", + State::FactorS38 => "38:Factor", + State::TokenParCloseS39 => "39:TokenParClose", + State::TokenIdS40 => "40:TokenId", + State::VarDeclarationsS41 => "41:VarDeclarations", + State::VarDeclarationS42 => "42:VarDeclaration", + State::ExpressionsS43 => "43:Expressions", + State::TokenIdS44 => "44:TokenId", + State::TokenTrueS45 => "45:TokenTrue", + State::TokenFalseS46 => "46:TokenFalse", + State::TokenNotS47 => "47:TokenNot", + State::TokenIsZeroS48 => "48:TokenIsZero", + State::FunctionIsZeroS49 => "49:FunctionIsZero", + State::BooleanExpressionS50 => "50:BooleanExpression", + State::SimpleExpressionS51 => "51:SimpleExpression", + State::ConjunctionS52 => "52:Conjunction", + State::NotStatementS53 => "53:NotStatement", + State::ConjunctionS54 => "54:Conjunction", + State::TokenIdS55 => "55:TokenId", + State::SimpleExpressionS56 => "56:SimpleExpression", + State::TokenIntLiteralS57 => "57:TokenIntLiteral", + State::TokenFloatLiteralS58 => "58:TokenFloatLiteral", + State::ArithmeticExpressionS59 => "59:ArithmeticExpression", + State::TokenParOpenS60 => "60:TokenParOpen", + State::DummyAES61 => "61:DummyAE", + State::DummyTS62 => "62:DummyT", + State::TokenCBOpenS63 => "63:TokenCBOpen", + State::TokenColonS64 => "64:TokenColon", + State::TokenCommaS65 => "65:TokenComma", + State::TokenCBCloseS66 => "66:TokenCBClose", + State::VarDeclarationsS67 => "67:VarDeclarations", + State::BooleanExpressionS68 => "68:BooleanExpression", + State::TokenParOpenS69 => "69:TokenParOpen", + State::TokenAndS70 => "70:TokenAnd", + State::TokenOrS71 => "71:TokenOr", + State::TokenEqualS72 => "72:TokenEqual", + State::TokenNotEqualS73 => "73:TokenNotEqual", + State::TokenLessS74 => "74:TokenLess", + State::TokenLessEqualS75 => "75:TokenLessEqual", + State::TokenGreaterS76 => "76:TokenGreater", + State::TokenGreaterEqualS77 => "77:TokenGreaterEqual", + State::ComparisonOpS78 => "78:ComparisonOp", + State::TokenParCloseS79 => "79:TokenParClose", + State::TokenParCloseS80 => "80:TokenParClose", + State::TokenParCloseS81 => "81:TokenParClose", + State::TokenParCloseS82 => "82:TokenParClose", State::TokenParCloseS83 => "83:TokenParClose", - State::TokenParCloseS84 => "84:TokenParClose", - State::TokenCBCloseS85 => "85:TokenCBClose", - State::TokenParCloseS86 => "86:TokenParClose", - State::TokenParCloseS87 => "87:TokenParClose", - State::TokenParCloseS88 => "88:TokenParClose", - State::TokenDateS89 => "89:TokenDate", - State::TokenSumS90 => "90:TokenSum", - State::TokenSubS91 => "91:TokenSub", - State::TokenMulS92 => "92:TokenMul", - State::TokenDivS93 => "93:TokenDiv", - State::BodyS94 => "94:Body", - State::TokenIntS95 => "95:TokenInt", - State::TokenFloatS96 => "96:TokenFloat", - State::TokenStringS97 => "97:TokenString", - State::DataTypeS98 => "98:DataType", - State::VarDeclarationS99 => "99:VarDeclaration", - State::ArithmeticExpressionS100 => "100:ArithmeticExpression", - State::ConjunctionS101 => "101:Conjunction", - State::ConjunctionS102 => "102:Conjunction", - State::SimpleExpressionS103 => "103:SimpleExpression", - State::TokenCBOpenS104 => "104:TokenCBOpen", - State::TokenCBOpenS105 => "105:TokenCBOpen", - State::TokenParCloseS106 => "106:TokenParClose", - State::TermS107 => "107:Term", - State::TermS108 => "108:Term", - State::FactorS109 => "109:Factor", - State::FactorS110 => "110:Factor", + State::TokenDateS84 => 
"84:TokenDate", + State::TokenSumS85 => "85:TokenSum", + State::TokenSubS86 => "86:TokenSub", + State::TokenMulS87 => "87:TokenMul", + State::TokenDivS88 => "88:TokenDiv", + State::BodyS89 => "89:Body", + State::TokenIntS90 => "90:TokenInt", + State::TokenFloatS91 => "91:TokenFloat", + State::TokenStringS92 => "92:TokenString", + State::DataTypeS93 => "93:DataType", + State::VarDeclarationS94 => "94:VarDeclaration", + State::ArithmeticExpressionS95 => "95:ArithmeticExpression", + State::ConjunctionS96 => "96:Conjunction", + State::ConjunctionS97 => "97:Conjunction", + State::SimpleExpressionS98 => "98:SimpleExpression", + State::TokenCBOpenS99 => "99:TokenCBOpen", + State::TokenCBOpenS100 => "100:TokenCBOpen", + State::TokenParCloseS101 => "101:TokenParClose", + State::TermS102 => "102:Term", + State::TermS103 => "103:Term", + State::FactorS104 => "104:Factor", + State::FactorS105 => "105:Factor", + State::TokenCBCloseS106 => "106:TokenCBClose", + State::TokenParCloseS107 => "107:TokenParClose", + State::BodyS108 => "108:Body", + State::BodyS109 => "109:Body", + State::TokenCBCloseS110 => "110:TokenCBClose", State::TokenCBCloseS111 => "111:TokenCBClose", - State::TokenParCloseS112 => "112:TokenParClose", - State::BodyS113 => "113:Body", - State::BodyS114 => "114:Body", - State::TokenCBCloseS115 => "115:TokenCBClose", + State::TokenElseS112 => "112:TokenElse", + State::ElseStatementS113 => "113:ElseStatement", + State::TokenCBOpenS114 => "114:TokenCBOpen", + State::BodyS115 => "115:Body", State::TokenCBCloseS116 => "116:TokenCBClose", }; write!(f, "{name}") @@ -674,114 +676,104 @@ fn action_aug_s0(token_kind: TokenKind) -> Vec> { TK::TokenInit => Vec::from(&[Shift(State::TokenInitS2)]), TK::TokenWhile => Vec::from(&[Shift(State::TokenWhileS3)]), TK::TokenIf => Vec::from(&[Shift(State::TokenIfS4)]), - TK::TokenElse => Vec::from(&[Shift(State::TokenElseS5)]), - TK::TokenRead => Vec::from(&[Shift(State::TokenReadS6)]), - TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS7)]), + TK::TokenRead => Vec::from(&[Shift(State::TokenReadS5)]), + TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS6)]), _ => vec![], } } fn action_tokenid_s1(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenAssign => Vec::from(&[Shift(State::TokenAssignS18)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS19)]), + TK::TokenAssign => Vec::from(&[Shift(State::TokenAssignS16)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS17)]), _ => vec![], } } fn action_tokeninit_s2(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS20)]), + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS18)]), _ => vec![], } } fn action_tokenwhile_s3(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS22)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS20)]), _ => vec![], } } fn action_tokenif_s4(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS23)]), - _ => vec![], - } -} -fn action_tokenelse_s5(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS24)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS21)]), _ => vec![], } } -fn action_tokenread_s6(token_kind: TokenKind) -> Vec> { +fn action_tokenread_s5(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS25)]), + 
TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS22)]), _ => vec![], } } -fn action_tokenwrite_s7(token_kind: TokenKind) -> Vec> { +fn action_tokenwrite_s6(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS26)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS23)]), _ => vec![], } } -fn action_program_s8(token_kind: TokenKind) -> Vec> { +fn action_program_s7(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Accept]), _ => vec![], } } -fn action_body_s9(token_kind: TokenKind) -> Vec> { +fn action_body_s8(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::ProgramProgramOnlyBody, 1usize)]), _ => vec![], } } -fn action_functionread_s10(token_kind: TokenKind) -> Vec> { +fn action_functionread_s9(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::StatementStatementRead, 1usize)]), TK::TokenId => Vec::from(&[Reduce(PK::StatementStatementRead, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::StatementStatementRead, 1usize)]), TK::TokenWhile => Vec::from(&[Reduce(PK::StatementStatementRead, 1usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::StatementStatementRead, 1usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::StatementStatementRead, 1usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::StatementStatementRead, 1usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::StatementStatementRead, 1usize)]), _ => vec![], } } -fn action_functionwrite_s11(token_kind: TokenKind) -> Vec> { +fn action_functionwrite_s10(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::StatementStatementWrite, 1usize)]), TK::TokenId => Vec::from(&[Reduce(PK::StatementStatementWrite, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::StatementStatementWrite, 1usize)]), TK::TokenWhile => Vec::from(&[Reduce(PK::StatementStatementWrite, 1usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::StatementStatementWrite, 1usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::StatementStatementWrite, 1usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::StatementStatementWrite, 1usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::StatementStatementWrite, 1usize)]), _ => vec![], } } -fn action_expressions_s12(token_kind: TokenKind) -> Vec> { +fn action_expressions_s11(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::BodyBodyExpressions, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyExpressions, 1usize)]), _ => vec![], } } -fn action_statement_s13(token_kind: TokenKind) -> Vec> { +fn action_statement_s12(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::ExpressionsExpressionSingle, 1usize)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS24)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::ExpressionsExpressionSingle, 1usize)]), TK::TokenWhile => Vec::from(&[Shift(State::TokenWhileS3)]), TK::TokenIf => Vec::from(&[Shift(State::TokenIfS4)]), - TK::TokenElse => Vec::from(&[Shift(State::TokenElseS5)]), - TK::TokenRead => Vec::from(&[Shift(State::TokenReadS6)]), - TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS7)]), + TK::TokenRead => Vec::from(&[Shift(State::TokenReadS5)]), + TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS6)]), _ => vec![], } } -fn action_assignment_s14(token_kind: TokenKind) -> Vec> { +fn action_assignment_s13(token_kind: TokenKind) -> Vec> { match token_kind 
{ TK::STOP => Vec::from(&[Reduce(PK::StatementStatementAssignment, 1usize)]), TK::TokenId => Vec::from(&[Reduce(PK::StatementStatementAssignment, 1usize)]), @@ -790,26 +782,24 @@ fn action_assignment_s14(token_kind: TokenKind) -> Vec> } TK::TokenWhile => Vec::from(&[Reduce(PK::StatementStatementAssignment, 1usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::StatementStatementAssignment, 1usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::StatementStatementAssignment, 1usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::StatementStatementAssignment, 1usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::StatementStatementAssignment, 1usize)]), _ => vec![], } } -fn action_whileloop_s15(token_kind: TokenKind) -> Vec> { +fn action_whileloop_s14(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::StatementStatementWhile, 1usize)]), TK::TokenId => Vec::from(&[Reduce(PK::StatementStatementWhile, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::StatementStatementWhile, 1usize)]), TK::TokenWhile => Vec::from(&[Reduce(PK::StatementStatementWhile, 1usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::StatementStatementWhile, 1usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::StatementStatementWhile, 1usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::StatementStatementWhile, 1usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::StatementStatementWhile, 1usize)]), _ => vec![], } } -fn action_ifstatement_s16(token_kind: TokenKind) -> Vec> { +fn action_ifstatement_s15(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::StatementStatementIfStatement, 1usize)]), TK::TokenId => Vec::from(&[Reduce(PK::StatementStatementIfStatement, 1usize)]), @@ -818,139 +808,101 @@ fn action_ifstatement_s16(token_kind: TokenKind) -> Vec> } TK::TokenWhile => Vec::from(&[Reduce(PK::StatementStatementIfStatement, 1usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::StatementStatementIfStatement, 1usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::StatementStatementIfStatement, 1usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::StatementStatementIfStatement, 1usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::StatementStatementIfStatement, 1usize)]), _ => vec![], } } -fn action_elsestatement_s17(token_kind: TokenKind) -> Vec> { +fn action_tokenassign_s16(token_kind: TokenKind) -> Vec> { match token_kind { - TK::STOP => Vec::from(&[Reduce(PK::StatementStatementElseStatement, 1usize)]), - TK::TokenId => Vec::from(&[Reduce(PK::StatementStatementElseStatement, 1usize)]), - TK::TokenCBClose => { - Vec::from(&[Reduce(PK::StatementStatementElseStatement, 1usize)]) - } - TK::TokenWhile => { - Vec::from(&[Reduce(PK::StatementStatementElseStatement, 1usize)]) - } - TK::TokenIf => Vec::from(&[Reduce(PK::StatementStatementElseStatement, 1usize)]), - TK::TokenElse => { - Vec::from(&[Reduce(PK::StatementStatementElseStatement, 1usize)]) - } - TK::TokenRead => { - Vec::from(&[Reduce(PK::StatementStatementElseStatement, 1usize)]) - } - TK::TokenWrite => { - Vec::from(&[Reduce(PK::StatementStatementElseStatement, 1usize)]) - } - _ => vec![], - } -} -fn action_tokenassign_s18(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen 
=> Vec::from(&[Shift(State::TokenParOpenS34)]), - TK::TokenConvDate => Vec::from(&[Shift(State::TokenConvDateS35)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS28)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS29)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), + TK::TokenConvDate => Vec::from(&[Shift(State::TokenConvDateS32)]), _ => vec![], } } -fn action_tokenparopen_s19(token_kind: TokenKind) -> Vec> { +fn action_tokenparopen_s17(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS42)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS39)]), _ => vec![], } } -fn action_tokencbopen_s20(token_kind: TokenKind) -> Vec> { +fn action_tokencbopen_s18(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenId => Vec::from(&[Shift(State::TokenIdS43)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS40)]), _ => vec![], } } -fn action_initbody_s21(token_kind: TokenKind) -> Vec> { +fn action_initbody_s19(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::BodyBodyInit, 2usize)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS24)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyInit, 2usize)]), TK::TokenWhile => Vec::from(&[Shift(State::TokenWhileS3)]), TK::TokenIf => Vec::from(&[Shift(State::TokenIfS4)]), - TK::TokenElse => Vec::from(&[Shift(State::TokenElseS5)]), - TK::TokenRead => Vec::from(&[Shift(State::TokenReadS6)]), - TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS7)]), + TK::TokenRead => Vec::from(&[Shift(State::TokenReadS5)]), + TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS6)]), _ => vec![], } } -fn action_tokenparopen_s22(token_kind: TokenKind) -> Vec> { +fn action_tokenparopen_s20(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS47)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), - TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS48)]), - TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS49)]), - TK::TokenNot => Vec::from(&[Shift(State::TokenNotS50)]), - TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS51)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS28)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS44)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), + TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS45)]), + TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS46)]), + TK::TokenNot => Vec::from(&[Shift(State::TokenNotS47)]), + TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS48)]), _ => vec![], } } -fn action_tokenparopen_s23(token_kind: TokenKind) -> Vec> { +fn 
action_tokenparopen_s21(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS47)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), - TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS48)]), - TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS49)]), - TK::TokenNot => Vec::from(&[Shift(State::TokenNotS50)]), - TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS51)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS28)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS44)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), + TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS45)]), + TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS46)]), + TK::TokenNot => Vec::from(&[Shift(State::TokenNotS47)]), + TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS48)]), _ => vec![], } } -fn action_tokencbopen_s24(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), - TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), - TK::TokenInit => Vec::from(&[Shift(State::TokenInitS2)]), - TK::TokenWhile => Vec::from(&[Shift(State::TokenWhileS3)]), - TK::TokenIf => Vec::from(&[Shift(State::TokenIfS4)]), - TK::TokenElse => Vec::from(&[Shift(State::TokenElseS5)]), - TK::TokenRead => Vec::from(&[Shift(State::TokenReadS6)]), - TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS7)]), - _ => vec![], - } -} -fn action_tokenparopen_s25(token_kind: TokenKind) -> Vec> { +fn action_tokenparopen_s22(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenId => Vec::from(&[Shift(State::TokenIdS59)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS55)]), _ => vec![], } } -fn action_tokenparopen_s26(token_kind: TokenKind) -> Vec> { +fn action_tokenparopen_s23(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS28)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS29)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), _ => vec![], } } -fn action_tokenid_s27(token_kind: TokenKind) -> Vec> { +fn action_tokenid_s24(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenAssign => Vec::from(&[Shift(State::TokenAssignS18)]), + TK::TokenAssign => Vec::from(&[Shift(State::TokenAssignS16)]), _ => vec![], } } -fn 
action_expressions_s28(token_kind: TokenKind) -> Vec> { +fn action_expressions_s25(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::ExpressionsExpressionRecursive, 2usize)]), TK::TokenCBClose => { @@ -959,7 +911,7 @@ fn action_expressions_s28(token_kind: TokenKind) -> Vec> _ => vec![], } } -fn action_tokenintliteral_s29(token_kind: TokenKind) -> Vec> { +fn action_tokenintliteral_s26(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::NumberNumberInt, 1usize)]), TK::TokenId => Vec::from(&[Reduce(PK::NumberNumberInt, 1usize)]), @@ -977,7 +929,6 @@ fn action_tokenintliteral_s29(token_kind: TokenKind) -> Vec Vec::from(&[Reduce(PK::NumberNumberInt, 1usize)]), TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::NumberNumberInt, 1usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::NumberNumberInt, 1usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::NumberNumberInt, 1usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::NumberNumberInt, 1usize)]), TK::TokenOr => Vec::from(&[Reduce(PK::NumberNumberInt, 1usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::NumberNumberInt, 1usize)]), @@ -985,7 +936,7 @@ fn action_tokenintliteral_s29(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenfloatliteral_s30(token_kind: TokenKind) -> Vec> { +fn action_tokenfloatliteral_s27(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::NumberNumberFloat, 1usize)]), TK::TokenId => Vec::from(&[Reduce(PK::NumberNumberFloat, 1usize)]), @@ -1003,7 +954,6 @@ fn action_tokenfloatliteral_s30(token_kind: TokenKind) -> Vec Vec::from(&[Reduce(PK::NumberNumberFloat, 1usize)]), TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::NumberNumberFloat, 1usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::NumberNumberFloat, 1usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::NumberNumberFloat, 1usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::NumberNumberFloat, 1usize)]), TK::TokenOr => Vec::from(&[Reduce(PK::NumberNumberFloat, 1usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::NumberNumberFloat, 1usize)]), @@ -1011,7 +961,7 @@ fn action_tokenfloatliteral_s30(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenstringliteral_s31(token_kind: TokenKind) -> Vec> { +fn action_tokenstringliteral_s28(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => { Vec::from(&[Reduce(PK::SimpleExpressionSimpleExpressionString, 1usize)]) @@ -1049,9 +999,6 @@ fn action_tokenstringliteral_s31(token_kind: TokenKind) -> Vec { Vec::from(&[Reduce(PK::SimpleExpressionSimpleExpressionString, 1usize)]) } - TK::TokenElse => { - Vec::from(&[Reduce(PK::SimpleExpressionSimpleExpressionString, 1usize)]) - } TK::TokenAnd => { Vec::from(&[Reduce(PK::SimpleExpressionSimpleExpressionString, 1usize)]) } @@ -1067,7 +1014,7 @@ fn action_tokenstringliteral_s31(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenid_s32(token_kind: TokenKind) -> Vec> { +fn action_tokenid_s29(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), @@ -1085,7 +1032,6 @@ fn action_tokenid_s32(token_kind: TokenKind) -> Vec> { TK::TokenGreater => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), 
TK::TokenOr => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), @@ -1093,30 +1039,30 @@ fn action_tokenid_s32(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokensub_s33(token_kind: TokenKind) -> Vec> { +fn action_tokensub_s30(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS61)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS62)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS57)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS58)]), _ => vec![], } } -fn action_tokenparopen_s34(token_kind: TokenKind) -> Vec> { +fn action_tokenparopen_s31(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS29)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), _ => vec![], } } -fn action_tokenconvdate_s35(token_kind: TokenKind) -> Vec> { +fn action_tokenconvdate_s32(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS64)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS60)]), _ => vec![], } } -fn action_functionconvdate_s36(token_kind: TokenKind) -> Vec> { +fn action_functionconvdate_s33(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::AssignmentAssignmentConvDate, 3usize)]), TK::TokenId => Vec::from(&[Reduce(PK::AssignmentAssignmentConvDate, 3usize)]), @@ -1125,13 +1071,12 @@ fn action_functionconvdate_s36(token_kind: TokenKind) -> Vec Vec::from(&[Reduce(PK::AssignmentAssignmentConvDate, 3usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::AssignmentAssignmentConvDate, 3usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::AssignmentAssignmentConvDate, 3usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::AssignmentAssignmentConvDate, 3usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::AssignmentAssignmentConvDate, 3usize)]), _ => vec![], } } -fn action_simpleexpression_s37(token_kind: TokenKind) -> Vec> { +fn action_simpleexpression_s34(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::AssignmentAssignmentExpression, 3usize)]), TK::TokenId => Vec::from(&[Reduce(PK::AssignmentAssignmentExpression, 3usize)]), @@ -1142,7 +1087,6 @@ fn action_simpleexpression_s37(token_kind: TokenKind) -> Vec Vec::from(&[Reduce(PK::AssignmentAssignmentExpression, 3usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::AssignmentAssignmentExpression, 3usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::AssignmentAssignmentExpression, 3usize)]), TK::TokenWrite => { Vec::from(&[Reduce(PK::AssignmentAssignmentExpression, 3usize)]) @@ -1150,7 +1094,7 @@ fn action_simpleexpression_s37(token_kind: TokenKind) -> Vec vec![], } } -fn action_number_s38(token_kind: TokenKind) -> Vec> { +fn action_number_s35(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => 
Vec::from(&[Reduce(PK::FactorFactorNumber, 1usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FactorFactorNumber, 1usize)]), @@ -1168,7 +1112,6 @@ fn action_number_s38(token_kind: TokenKind) -> Vec> { TK::TokenGreater => Vec::from(&[Reduce(PK::FactorFactorNumber, 1usize)]), TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::FactorFactorNumber, 1usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::FactorFactorNumber, 1usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::FactorFactorNumber, 1usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::FactorFactorNumber, 1usize)]), TK::TokenOr => Vec::from(&[Reduce(PK::FactorFactorNumber, 1usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::FactorFactorNumber, 1usize)]), @@ -1176,7 +1119,7 @@ fn action_number_s38(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_arithmeticexpression_s39( +fn action_arithmeticexpression_s36( token_kind: TokenKind, ) -> Vec> { match token_kind { @@ -1218,9 +1161,6 @@ fn action_arithmeticexpression_s39( TK::TokenIf => { Vec::from(&[Reduce(PK::SimpleExpressionSimpleExpressionArithmetic, 1usize)]) } - TK::TokenElse => { - Vec::from(&[Reduce(PK::SimpleExpressionSimpleExpressionArithmetic, 1usize)]) - } TK::TokenAnd => { Vec::from(&[Reduce(PK::SimpleExpressionSimpleExpressionArithmetic, 1usize)]) } @@ -1236,7 +1176,7 @@ fn action_arithmeticexpression_s39( _ => vec![], } } -fn action_term_s40(token_kind: TokenKind) -> Vec> { +fn action_term_s37(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => { Vec::from( @@ -1310,11 +1250,6 @@ fn action_term_s40(token_kind: TokenKind) -> Vec> { &[Reduce(PK::ArithmeticExpressionArithmeticExpressionTerm, 1usize)], ) } - TK::TokenElse => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionTerm, 1usize)], - ) - } TK::TokenAnd => { Vec::from( &[Reduce(PK::ArithmeticExpressionArithmeticExpressionTerm, 1usize)], @@ -1338,7 +1273,7 @@ fn action_term_s40(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_factor_s41(token_kind: TokenKind) -> Vec> { +fn action_factor_s38(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::TermTermFactor, 1usize)]), TK::TokenId => Vec::from(&[Reduce(PK::TermTermFactor, 1usize)]), @@ -1356,7 +1291,6 @@ fn action_factor_s41(token_kind: TokenKind) -> Vec> { TK::TokenGreater => Vec::from(&[Reduce(PK::TermTermFactor, 1usize)]), TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::TermTermFactor, 1usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::TermTermFactor, 1usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::TermTermFactor, 1usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::TermTermFactor, 1usize)]), TK::TokenOr => Vec::from(&[Reduce(PK::TermTermFactor, 1usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::TermTermFactor, 1usize)]), @@ -1364,42 +1298,42 @@ fn action_factor_s41(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokenparclose_s42(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s39(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS67)]), + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS63)]), _ => vec![], } } -fn action_tokenid_s43(token_kind: TokenKind) -> Vec> { +fn action_tokenid_s40(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenColon => Vec::from(&[Shift(State::TokenColonS68)]), - TK::TokenComma => Vec::from(&[Shift(State::TokenCommaS69)]), + TK::TokenColon => Vec::from(&[Shift(State::TokenColonS64)]), + TK::TokenComma => Vec::from(&[Shift(State::TokenCommaS65)]), _ => vec![], } } -fn 
action_vardeclarations_s44(token_kind: TokenKind) -> Vec> { +fn action_vardeclarations_s41(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS70)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS66)]), _ => vec![], } } -fn action_vardeclaration_s45(token_kind: TokenKind) -> Vec> { +fn action_vardeclaration_s42(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenId => Vec::from(&[Shift(State::TokenIdS43)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS40)]), TK::TokenCBClose => { Vec::from(&[Reduce(PK::VarDeclarationsVarDeclarationsSingle, 1usize)]) } _ => vec![], } } -fn action_expressions_s46(token_kind: TokenKind) -> Vec> { +fn action_expressions_s43(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::BodyBodyInitExpressions, 3usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyInitExpressions, 3usize)]), _ => vec![], } } -fn action_tokenid_s47(token_kind: TokenKind) -> Vec> { +fn action_tokenid_s44(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenSum => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), TK::TokenMul => Vec::from(&[Reduce(PK::FactorFactorId, 1usize)]), @@ -1423,7 +1357,7 @@ fn action_tokenid_s47(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokentrue_s48(token_kind: TokenKind) -> Vec> { +fn action_tokentrue_s45(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionTrue, 1usize)]) @@ -1437,7 +1371,7 @@ fn action_tokentrue_s48(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokenfalse_s49(token_kind: TokenKind) -> Vec> { +fn action_tokenfalse_s46(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionFalse, 1usize)]) @@ -1451,28 +1385,28 @@ fn action_tokenfalse_s49(token_kind: TokenKind) -> Vec> _ => vec![], } } -fn action_tokennot_s50(token_kind: TokenKind) -> Vec> { +fn action_tokennot_s47(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS47)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), - TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS48)]), - TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS49)]), - TK::TokenNot => Vec::from(&[Shift(State::TokenNotS50)]), - TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS51)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS28)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS44)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), + TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS45)]), + TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS46)]), + TK::TokenNot => Vec::from(&[Shift(State::TokenNotS47)]), + TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS48)]), _ => vec![], } } -fn action_tokeniszero_s51(token_kind: TokenKind) -> Vec> { +fn action_tokeniszero_s48(token_kind: 
TokenKind) -> Vec> { match token_kind { - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS73)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS69)]), _ => vec![], } } -fn action_functioniszero_s52(token_kind: TokenKind) -> Vec> { +fn action_functioniszero_s49(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from(&[Reduce(PK::BooleanExpressionBooleanExpressionIsZero, 1usize)]) @@ -1486,34 +1420,34 @@ fn action_functioniszero_s52(token_kind: TokenKind) -> Vec vec![], } } -fn action_booleanexpression_s53(token_kind: TokenKind) -> Vec> { +fn action_booleanexpression_s50(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from(&[Reduce(PK::ConjunctionConjunctionBoolean, 1usize)]) } - TK::TokenAnd => Vec::from(&[Shift(State::TokenAndS74)]), - TK::TokenOr => Vec::from(&[Shift(State::TokenOrS75)]), + TK::TokenAnd => Vec::from(&[Shift(State::TokenAndS70)]), + TK::TokenOr => Vec::from(&[Shift(State::TokenOrS71)]), _ => vec![], } } -fn action_simpleexpression_s54(token_kind: TokenKind) -> Vec> { +fn action_simpleexpression_s51(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenEqual => Vec::from(&[Shift(State::TokenEqualS76)]), - TK::TokenNotEqual => Vec::from(&[Shift(State::TokenNotEqualS77)]), - TK::TokenLess => Vec::from(&[Shift(State::TokenLessS78)]), - TK::TokenLessEqual => Vec::from(&[Shift(State::TokenLessEqualS79)]), - TK::TokenGreater => Vec::from(&[Shift(State::TokenGreaterS80)]), - TK::TokenGreaterEqual => Vec::from(&[Shift(State::TokenGreaterEqualS81)]), + TK::TokenEqual => Vec::from(&[Shift(State::TokenEqualS72)]), + TK::TokenNotEqual => Vec::from(&[Shift(State::TokenNotEqualS73)]), + TK::TokenLess => Vec::from(&[Shift(State::TokenLessS74)]), + TK::TokenLessEqual => Vec::from(&[Shift(State::TokenLessEqualS75)]), + TK::TokenGreater => Vec::from(&[Shift(State::TokenGreaterS76)]), + TK::TokenGreaterEqual => Vec::from(&[Shift(State::TokenGreaterEqualS77)]), _ => vec![], } } -fn action_conjunction_s55(token_kind: TokenKind) -> Vec> { +fn action_conjunction_s52(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS83)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS79)]), _ => vec![], } } -fn action_notstatement_s56(token_kind: TokenKind) -> Vec> { +fn action_notstatement_s53(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from( @@ -1533,31 +1467,25 @@ fn action_notstatement_s56(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_conjunction_s57(token_kind: TokenKind) -> Vec> { +fn action_conjunction_s54(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS84)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS80)]), _ => vec![], } } -fn action_body_s58(token_kind: TokenKind) -> Vec> { +fn action_tokenid_s55(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS85)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS81)]), _ => vec![], } } -fn action_tokenid_s59(token_kind: TokenKind) -> Vec> { +fn action_simpleexpression_s56(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS86)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS82)]), _ => vec![], } } -fn action_simpleexpression_s60(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::TokenParClose => 
Vec::from(&[Shift(State::TokenParCloseS87)]), - _ => vec![], - } -} -fn action_tokenintliteral_s61(token_kind: TokenKind) -> Vec> { +fn action_tokenintliteral_s57(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::NumberNumberNegativeInt, 2usize)]), TK::TokenId => Vec::from(&[Reduce(PK::NumberNumberNegativeInt, 2usize)]), @@ -1577,7 +1505,6 @@ fn action_tokenintliteral_s61(token_kind: TokenKind) -> Vec Vec::from(&[Reduce(PK::NumberNumberNegativeInt, 2usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::NumberNumberNegativeInt, 2usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::NumberNumberNegativeInt, 2usize)]), TK::TokenOr => Vec::from(&[Reduce(PK::NumberNumberNegativeInt, 2usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::NumberNumberNegativeInt, 2usize)]), @@ -1585,7 +1512,7 @@ fn action_tokenintliteral_s61(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenfloatliteral_s62(token_kind: TokenKind) -> Vec> { +fn action_tokenfloatliteral_s58(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::NumberNumberNegativeFloat, 2usize)]), TK::TokenId => Vec::from(&[Reduce(PK::NumberNumberNegativeFloat, 2usize)]), @@ -1605,7 +1532,6 @@ fn action_tokenfloatliteral_s62(token_kind: TokenKind) -> Vec Vec::from(&[Reduce(PK::NumberNumberNegativeFloat, 2usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::NumberNumberNegativeFloat, 2usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::NumberNumberNegativeFloat, 2usize)]), TK::TokenOr => Vec::from(&[Reduce(PK::NumberNumberNegativeFloat, 2usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::NumberNumberNegativeFloat, 2usize)]), @@ -1613,77 +1539,75 @@ fn action_tokenfloatliteral_s62(token_kind: TokenKind) -> Vec vec![], } } -fn action_arithmeticexpression_s63( +fn action_arithmeticexpression_s59( token_kind: TokenKind, ) -> Vec> { match token_kind { TK::TokenSum => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), TK::TokenSub => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS88)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS83)]), _ => vec![], } } -fn action_tokenparopen_s64(token_kind: TokenKind) -> Vec> { +fn action_tokenparopen_s60(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenDate => Vec::from(&[Shift(State::TokenDateS89)]), + TK::TokenDate => Vec::from(&[Shift(State::TokenDateS84)]), _ => vec![], } } -fn action_dummyae_s65(token_kind: TokenKind) -> Vec> { +fn action_dummyae_s61(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenSum => Vec::from(&[Shift(State::TokenSumS90)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS91)]), + TK::TokenSum => Vec::from(&[Shift(State::TokenSumS85)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS86)]), _ => vec![], } } -fn action_dummyt_s66(token_kind: TokenKind) -> Vec> { +fn action_dummyt_s62(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenMul => Vec::from(&[Shift(State::TokenMulS92)]), - TK::TokenDiv => Vec::from(&[Shift(State::TokenDivS93)]), + TK::TokenMul => Vec::from(&[Shift(State::TokenMulS87)]), + TK::TokenDiv => Vec::from(&[Shift(State::TokenDivS88)]), _ => vec![], } } -fn action_tokencbopen_s67(token_kind: TokenKind) -> Vec> { +fn action_tokencbopen_s63(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS24)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), TK::TokenInit => 
Vec::from(&[Shift(State::TokenInitS2)]), TK::TokenWhile => Vec::from(&[Shift(State::TokenWhileS3)]), TK::TokenIf => Vec::from(&[Shift(State::TokenIfS4)]), - TK::TokenElse => Vec::from(&[Shift(State::TokenElseS5)]), - TK::TokenRead => Vec::from(&[Shift(State::TokenReadS6)]), - TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS7)]), + TK::TokenRead => Vec::from(&[Shift(State::TokenReadS5)]), + TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS6)]), _ => vec![], } } -fn action_tokencolon_s68(token_kind: TokenKind) -> Vec> { +fn action_tokencolon_s64(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenInt => Vec::from(&[Shift(State::TokenIntS95)]), - TK::TokenFloat => Vec::from(&[Shift(State::TokenFloatS96)]), - TK::TokenString => Vec::from(&[Shift(State::TokenStringS97)]), + TK::TokenInt => Vec::from(&[Shift(State::TokenIntS90)]), + TK::TokenFloat => Vec::from(&[Shift(State::TokenFloatS91)]), + TK::TokenString => Vec::from(&[Shift(State::TokenStringS92)]), _ => vec![], } } -fn action_tokencomma_s69(token_kind: TokenKind) -> Vec> { +fn action_tokencomma_s65(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenId => Vec::from(&[Shift(State::TokenIdS43)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS40)]), _ => vec![], } } -fn action_tokencbclose_s70(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s66(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), TK::TokenId => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), TK::TokenWhile => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::InitBodyInitBody, 3usize)]), _ => vec![], } } -fn action_vardeclarations_s71(token_kind: TokenKind) -> Vec> { +fn action_vardeclarations_s67(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenCBClose => { Vec::from(&[Reduce(PK::VarDeclarationsVarDeclarationsRecursive, 2usize)]) @@ -1691,7 +1615,7 @@ fn action_vardeclarations_s71(token_kind: TokenKind) -> Vec vec![], } } -fn action_booleanexpression_s72(token_kind: TokenKind) -> Vec> { +fn action_booleanexpression_s68(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => Vec::from(&[Reduce(PK::NotStatementNot, 2usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::NotStatementNot, 2usize)]), @@ -1699,47 +1623,47 @@ fn action_booleanexpression_s72(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenparopen_s73(token_kind: TokenKind) -> Vec> { +fn action_tokenparopen_s69(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS29)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), _ => vec![], } } -fn 
action_tokenand_s74(token_kind: TokenKind) -> Vec> { +fn action_tokenand_s70(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS47)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), - TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS48)]), - TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS49)]), - TK::TokenNot => Vec::from(&[Shift(State::TokenNotS50)]), - TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS51)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS28)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS44)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), + TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS45)]), + TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS46)]), + TK::TokenNot => Vec::from(&[Shift(State::TokenNotS47)]), + TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS48)]), _ => vec![], } } -fn action_tokenor_s75(token_kind: TokenKind) -> Vec> { +fn action_tokenor_s71(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS47)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), - TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS48)]), - TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS49)]), - TK::TokenNot => Vec::from(&[Shift(State::TokenNotS50)]), - TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS51)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS28)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS44)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), + TK::TokenTrue => Vec::from(&[Shift(State::TokenTrueS45)]), + TK::TokenFalse => Vec::from(&[Shift(State::TokenFalseS46)]), + TK::TokenNot => Vec::from(&[Shift(State::TokenNotS47)]), + TK::TokenIsZero => Vec::from(&[Shift(State::TokenIsZeroS48)]), _ => vec![], } } -fn action_tokenequal_s76(token_kind: TokenKind) -> Vec> { +fn action_tokenequal_s72(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpEqual, 1usize)]) @@ -1758,7 +1682,7 @@ fn action_tokenequal_s76(token_kind: TokenKind) -> Vec> _ => vec![], } } -fn action_tokennotequal_s77(token_kind: TokenKind) -> Vec> { +fn action_tokennotequal_s73(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpNotEqual, 1usize)]) @@ -1779,7 +1703,7 @@ fn action_tokennotequal_s77(token_kind: 
TokenKind) -> Vec vec![], } } -fn action_tokenless_s78(token_kind: TokenKind) -> Vec> { +fn action_tokenless_s74(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpLess, 1usize)]) @@ -1798,7 +1722,7 @@ fn action_tokenless_s78(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokenlessequal_s79(token_kind: TokenKind) -> Vec> { +fn action_tokenlessequal_s75(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpLessEqual, 1usize)]) @@ -1821,7 +1745,7 @@ fn action_tokenlessequal_s79(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokengreater_s80(token_kind: TokenKind) -> Vec> { +fn action_tokengreater_s76(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpGreater, 1usize)]) @@ -1840,7 +1764,7 @@ fn action_tokengreater_s80(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_tokengreaterequal_s81(token_kind: TokenKind) -> Vec> { +fn action_tokengreaterequal_s77(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenIntLiteral => { Vec::from(&[Reduce(PK::ComparisonOpComparisonOpGreaterEqual, 1usize)]) @@ -1863,43 +1787,30 @@ fn action_tokengreaterequal_s81(token_kind: TokenKind) -> Vec vec![], } } -fn action_comparisonop_s82(token_kind: TokenKind) -> Vec> { +fn action_comparisonop_s78(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS31)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenStringLiteral => Vec::from(&[Shift(State::TokenStringLiteralS28)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS29)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), _ => vec![], } } -fn action_tokenparclose_s83(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s79(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS104)]), + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS99)]), _ => vec![], } } -fn action_tokenparclose_s84(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s80(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS105)]), + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS100)]), _ => vec![], } } -fn action_tokencbclose_s85(token_kind: TokenKind) -> Vec> { - match token_kind { - TK::STOP => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), - TK::TokenId => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), - TK::TokenCBClose => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), - TK::TokenWhile => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), - TK::TokenIf => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), - TK::TokenRead => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 
4usize)]), - TK::TokenWrite => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), - _ => vec![], - } -} -fn action_tokenparclose_s86(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s81(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), @@ -1908,13 +1819,12 @@ fn action_tokenparclose_s86(token_kind: TokenKind) -> Vec Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::FunctionReadFunctionReadCall, 4usize)]), _ => vec![], } } -fn action_tokenparclose_s87(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s82(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::FunctionWriteFunctionWriteCall, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FunctionWriteFunctionWriteCall, 4usize)]), @@ -1925,7 +1835,6 @@ fn action_tokenparclose_s87(token_kind: TokenKind) -> Vec Vec::from(&[Reduce(PK::FunctionWriteFunctionWriteCall, 4usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::FunctionWriteFunctionWriteCall, 4usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::FunctionWriteFunctionWriteCall, 4usize)]), TK::TokenWrite => { Vec::from(&[Reduce(PK::FunctionWriteFunctionWriteCall, 4usize)]) @@ -1933,7 +1842,7 @@ fn action_tokenparclose_s87(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokenparclose_s88(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s83(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), TK::TokenId => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), @@ -1951,7 +1860,6 @@ fn action_tokenparclose_s88(token_kind: TokenKind) -> Vec Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), TK::TokenOr => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::FactorFactorParen, 3usize)]), @@ -1959,80 +1867,80 @@ fn action_tokenparclose_s88(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokendate_s89(token_kind: TokenKind) -> Vec> { +fn action_tokendate_s84(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS106)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS101)]), _ => vec![], } } -fn action_tokensum_s90(token_kind: TokenKind) -> Vec> { +fn action_tokensum_s85(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenId => 
Vec::from(&[Shift(State::TokenIdS29)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), _ => vec![], } } -fn action_tokensub_s91(token_kind: TokenKind) -> Vec> { +fn action_tokensub_s86(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS29)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), _ => vec![], } } -fn action_tokenmul_s92(token_kind: TokenKind) -> Vec> { +fn action_tokenmul_s87(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS29)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), _ => vec![], } } -fn action_tokendiv_s93(token_kind: TokenKind) -> Vec> { +fn action_tokendiv_s88(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS29)]), - TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS30)]), - TK::TokenId => Vec::from(&[Shift(State::TokenIdS32)]), - TK::TokenSub => Vec::from(&[Shift(State::TokenSubS33)]), - TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS34)]), + TK::TokenIntLiteral => Vec::from(&[Shift(State::TokenIntLiteralS26)]), + TK::TokenFloatLiteral => Vec::from(&[Shift(State::TokenFloatLiteralS27)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS29)]), + TK::TokenSub => Vec::from(&[Shift(State::TokenSubS30)]), + TK::TokenParOpen => Vec::from(&[Shift(State::TokenParOpenS31)]), _ => vec![], } } -fn action_body_s94(token_kind: TokenKind) -> Vec> { +fn action_body_s89(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS111)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS106)]), _ => vec![], } } -fn action_tokenint_s95(token_kind: TokenKind) -> Vec> { +fn action_tokenint_s90(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Reduce(PK::DataTypeIntType, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::DataTypeIntType, 1usize)]), _ => vec![], } } -fn action_tokenfloat_s96(token_kind: TokenKind) -> Vec> { +fn action_tokenfloat_s91(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Reduce(PK::DataTypeFloatType, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::DataTypeFloatType, 1usize)]), _ => vec![], } } -fn action_tokenstring_s97(token_kind: TokenKind) -> Vec> { +fn 
action_tokenstring_s92(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Reduce(PK::DataTypeStringType, 1usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::DataTypeStringType, 1usize)]), _ => vec![], } } -fn action_datatype_s98(token_kind: TokenKind) -> Vec> { +fn action_datatype_s93(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => { Vec::from(&[Reduce(PK::VarDeclarationVarDeclarationSingle, 3usize)]) @@ -2043,7 +1951,7 @@ fn action_datatype_s98(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_vardeclaration_s99(token_kind: TokenKind) -> Vec> { +fn action_vardeclaration_s94(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => { Vec::from(&[Reduce(PK::VarDeclarationVarDeclarationRecursive, 3usize)]) @@ -2054,29 +1962,29 @@ fn action_vardeclaration_s99(token_kind: TokenKind) -> Vec vec![], } } -fn action_arithmeticexpression_s100( +fn action_arithmeticexpression_s95( token_kind: TokenKind, ) -> Vec> { match token_kind { TK::TokenSum => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), TK::TokenSub => Vec::from(&[Reduce(PK::DummyAEP1, 0usize)]), - TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS112)]), + TK::TokenParClose => Vec::from(&[Shift(State::TokenParCloseS107)]), _ => vec![], } } -fn action_conjunction_s101(token_kind: TokenKind) -> Vec> { +fn action_conjunction_s96(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => Vec::from(&[Reduce(PK::ConjunctionConjunctionAnd, 3usize)]), _ => vec![], } } -fn action_conjunction_s102(token_kind: TokenKind) -> Vec> { +fn action_conjunction_s97(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => Vec::from(&[Reduce(PK::ConjunctionConjunctionOr, 3usize)]), _ => vec![], } } -fn action_simpleexpression_s103(token_kind: TokenKind) -> Vec> { +fn action_simpleexpression_s98(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from( @@ -2096,33 +2004,31 @@ fn action_simpleexpression_s103(token_kind: TokenKind) -> Vec vec![], } } -fn action_tokencbopen_s104(token_kind: TokenKind) -> Vec> { +fn action_tokencbopen_s99(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS24)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), TK::TokenInit => Vec::from(&[Shift(State::TokenInitS2)]), TK::TokenWhile => Vec::from(&[Shift(State::TokenWhileS3)]), TK::TokenIf => Vec::from(&[Shift(State::TokenIfS4)]), - TK::TokenElse => Vec::from(&[Shift(State::TokenElseS5)]), - TK::TokenRead => Vec::from(&[Shift(State::TokenReadS6)]), - TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS7)]), + TK::TokenRead => Vec::from(&[Shift(State::TokenReadS5)]), + TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS6)]), _ => vec![], } } -fn action_tokencbopen_s105(token_kind: TokenKind) -> Vec> { +fn action_tokencbopen_s100(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenId => Vec::from(&[Shift(State::TokenIdS27)]), + TK::TokenId => Vec::from(&[Shift(State::TokenIdS24)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), TK::TokenInit => Vec::from(&[Shift(State::TokenInitS2)]), TK::TokenWhile => Vec::from(&[Shift(State::TokenWhileS3)]), TK::TokenIf => Vec::from(&[Shift(State::TokenIfS4)]), - TK::TokenElse => Vec::from(&[Shift(State::TokenElseS5)]), - TK::TokenRead => Vec::from(&[Shift(State::TokenReadS6)]), - TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS7)]), + TK::TokenRead 
=> Vec::from(&[Shift(State::TokenReadS5)]), + TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS6)]), _ => vec![], } } -fn action_tokenparclose_s106(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s101(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => { Vec::from( @@ -2149,11 +2055,6 @@ fn action_tokenparclose_s106(token_kind: TokenKind) -> Vec { - Vec::from( - &[Reduce(PK::FunctionConvDateFunctionConvDateVariableCall, 4usize)], - ) - } TK::TokenRead => { Vec::from( &[Reduce(PK::FunctionConvDateFunctionConvDateVariableCall, 4usize)], @@ -2167,7 +2068,7 @@ fn action_tokenparclose_s106(token_kind: TokenKind) -> Vec vec![], } } -fn action_term_s107(token_kind: TokenKind) -> Vec> { +fn action_term_s102(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => { Vec::from( @@ -2241,11 +2142,6 @@ fn action_term_s107(token_kind: TokenKind) -> Vec> { &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], ) } - TK::TokenElse => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], - ) - } TK::TokenAnd => { Vec::from( &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSumTerm, 4usize)], @@ -2269,7 +2165,7 @@ fn action_term_s107(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_term_s108(token_kind: TokenKind) -> Vec> { +fn action_term_s103(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => { Vec::from( @@ -2343,11 +2239,6 @@ fn action_term_s108(token_kind: TokenKind) -> Vec> { &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], ) } - TK::TokenElse => { - Vec::from( - &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], - ) - } TK::TokenAnd => { Vec::from( &[Reduce(PK::ArithmeticExpressionArithmeticExpressionSubTerm, 4usize)], @@ -2371,7 +2262,7 @@ fn action_term_s108(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_factor_s109(token_kind: TokenKind) -> Vec> { +fn action_factor_s104(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), @@ -2389,7 +2280,6 @@ fn action_factor_s109(token_kind: TokenKind) -> Vec> { TK::TokenGreater => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), TK::TokenOr => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::TermTermMulFactor, 4usize)]), @@ -2397,7 +2287,7 @@ fn action_factor_s109(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_factor_s110(token_kind: TokenKind) -> Vec> { +fn action_factor_s105(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), @@ -2415,7 +2305,6 @@ fn action_factor_s110(token_kind: TokenKind) -> Vec> { TK::TokenGreater => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), TK::TokenGreaterEqual => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), TK::TokenAnd => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), TK::TokenOr => 
Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::TermTermDivFactor, 4usize)]), @@ -2423,13 +2312,13 @@ fn action_factor_s110(token_kind: TokenKind) -> Vec> { _ => vec![], } } -fn action_tokencbclose_s111(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s106(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::ProgramProgramWithMain, 6usize)]), _ => vec![], } } -fn action_tokenparclose_s112(token_kind: TokenKind) -> Vec> { +fn action_tokenparclose_s107(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenParClose => { Vec::from(&[Reduce(PK::FunctionIsZeroFunctionIsZeroCall, 4usize)]) @@ -2441,56 +2330,102 @@ fn action_tokenparclose_s112(token_kind: TokenKind) -> Vec vec![], } } -fn action_body_s113(token_kind: TokenKind) -> Vec> { +fn action_body_s108(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS115)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS110)]), _ => vec![], } } -fn action_body_s114(token_kind: TokenKind) -> Vec> { +fn action_body_s109(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS116)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS111)]), _ => vec![], } } -fn action_tokencbclose_s115(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s110(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::WhileLoopWhile, 7usize)]), TK::TokenId => Vec::from(&[Reduce(PK::WhileLoopWhile, 7usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::WhileLoopWhile, 7usize)]), TK::TokenWhile => Vec::from(&[Reduce(PK::WhileLoopWhile, 7usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::WhileLoopWhile, 7usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::WhileLoopWhile, 7usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::WhileLoopWhile, 7usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::WhileLoopWhile, 7usize)]), _ => vec![], } } -fn action_tokencbclose_s116(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s111(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), TK::TokenId => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), TK::TokenWhile => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), - TK::TokenElse => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), + TK::TokenElse => Vec::from(&[Shift(State::TokenElseS112)]), TK::TokenRead => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), _ => vec![], } } +fn action_tokenelse_s112(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS114)]), + _ => vec![], + } +} +fn action_elsestatement_s113(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::STOP => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), + TK::TokenId => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), + TK::TokenCBClose => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), + TK::TokenWhile => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), + TK::TokenIf => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), + TK::TokenRead => 
Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), + TK::TokenWrite => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), + _ => vec![], + } +} +fn action_tokencbopen_s114(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::TokenId => Vec::from(&[Shift(State::TokenIdS24)]), + TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), + TK::TokenInit => Vec::from(&[Shift(State::TokenInitS2)]), + TK::TokenWhile => Vec::from(&[Shift(State::TokenWhileS3)]), + TK::TokenIf => Vec::from(&[Shift(State::TokenIfS4)]), + TK::TokenRead => Vec::from(&[Shift(State::TokenReadS5)]), + TK::TokenWrite => Vec::from(&[Shift(State::TokenWriteS6)]), + _ => vec![], + } +} +fn action_body_s115(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS116)]), + _ => vec![], + } +} +fn action_tokencbclose_s116(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::STOP => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), + TK::TokenId => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), + TK::TokenCBClose => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), + TK::TokenWhile => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), + TK::TokenIf => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), + TK::TokenRead => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), + TK::TokenWrite => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), + _ => vec![], + } +} fn goto_aug_s0(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Program => State::ProgramS8, - NonTermKind::Body => State::BodyS9, - NonTermKind::FunctionRead => State::FunctionReadS10, - NonTermKind::FunctionWrite => State::FunctionWriteS11, - NonTermKind::Expressions => State::ExpressionsS12, - NonTermKind::Statement => State::StatementS13, - NonTermKind::Assignment => State::AssignmentS14, - NonTermKind::WhileLoop => State::WhileLoopS15, - NonTermKind::IfStatement => State::IfStatementS16, - NonTermKind::ElseStatement => State::ElseStatementS17, + NonTermKind::Program => State::ProgramS7, + NonTermKind::Body => State::BodyS8, + NonTermKind::FunctionRead => State::FunctionReadS9, + NonTermKind::FunctionWrite => State::FunctionWriteS10, + NonTermKind::Expressions => State::ExpressionsS11, + NonTermKind::Statement => State::StatementS12, + NonTermKind::Assignment => State::AssignmentS13, + NonTermKind::WhileLoop => State::WhileLoopS14, + NonTermKind::IfStatement => State::IfStatementS15, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", @@ -2501,7 +2436,7 @@ fn goto_aug_s0(nonterm_kind: NonTermKind) -> State { } fn goto_tokeninit_s2(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::InitBody => State::InitBodyS21, + NonTermKind::InitBody => State::InitBodyS19, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", @@ -2510,100 +2445,113 @@ fn goto_tokeninit_s2(nonterm_kind: NonTermKind) -> State { } } } -fn goto_statement_s13(nonterm_kind: NonTermKind) -> State { +fn goto_statement_s12(nonterm_kind: NonTermKind) -> State { + match nonterm_kind { + NonTermKind::FunctionRead => State::FunctionReadS9, + NonTermKind::FunctionWrite => State::FunctionWriteS10, + NonTermKind::Expressions => State::ExpressionsS25, + NonTermKind::Statement => State::StatementS12, + NonTermKind::Assignment => State::AssignmentS13, + NonTermKind::WhileLoop => State::WhileLoopS14, + NonTermKind::IfStatement => 
State::IfStatementS15, + _ => { + panic!( + "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", + State::StatementS12 + ) + } + } +} +fn goto_tokenassign_s16(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionRead => State::FunctionReadS10, - NonTermKind::FunctionWrite => State::FunctionWriteS11, - NonTermKind::Expressions => State::ExpressionsS28, - NonTermKind::Statement => State::StatementS13, - NonTermKind::Assignment => State::AssignmentS14, - NonTermKind::WhileLoop => State::WhileLoopS15, - NonTermKind::IfStatement => State::IfStatementS16, - NonTermKind::ElseStatement => State::ElseStatementS17, + NonTermKind::FunctionConvDate => State::FunctionConvDateS33, + NonTermKind::SimpleExpression => State::SimpleExpressionS34, + NonTermKind::Number => State::NumberS35, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS36, + NonTermKind::Term => State::TermS37, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::StatementS13 + State::TokenAssignS16 ) } } } -fn goto_tokenassign_s18(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s18(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionConvDate => State::FunctionConvDateS36, - NonTermKind::SimpleExpression => State::SimpleExpressionS37, - NonTermKind::Number => State::NumberS38, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::VarDeclarations => State::VarDeclarationsS41, + NonTermKind::VarDeclaration => State::VarDeclarationS42, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenAssignS18 + State::TokenCBOpenS18 ) } } } -fn goto_tokencbopen_s20(nonterm_kind: NonTermKind) -> State { +fn goto_initbody_s19(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::VarDeclarations => State::VarDeclarationsS44, - NonTermKind::VarDeclaration => State::VarDeclarationS45, + NonTermKind::FunctionRead => State::FunctionReadS9, + NonTermKind::FunctionWrite => State::FunctionWriteS10, + NonTermKind::Expressions => State::ExpressionsS43, + NonTermKind::Statement => State::StatementS12, + NonTermKind::Assignment => State::AssignmentS13, + NonTermKind::WhileLoop => State::WhileLoopS14, + NonTermKind::IfStatement => State::IfStatementS15, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS20 + State::InitBodyS19 ) } } } -fn goto_initbody_s21(nonterm_kind: NonTermKind) -> State { +fn goto_tokenparopen_s20(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionRead => State::FunctionReadS10, - NonTermKind::FunctionWrite => State::FunctionWriteS11, - NonTermKind::Expressions => State::ExpressionsS46, - NonTermKind::Statement => State::StatementS13, - NonTermKind::Assignment => State::AssignmentS14, - NonTermKind::WhileLoop => State::WhileLoopS15, - NonTermKind::IfStatement => State::IfStatementS16, - NonTermKind::ElseStatement => State::ElseStatementS17, + NonTermKind::FunctionIsZero => State::FunctionIsZeroS49, + NonTermKind::BooleanExpression => State::BooleanExpressionS50, + NonTermKind::SimpleExpression => State::SimpleExpressionS51, + NonTermKind::Conjunction => State::ConjunctionS52, + NonTermKind::Number => State::NumberS35, + NonTermKind::NotStatement => State::NotStatementS53, + NonTermKind::ArithmeticExpression => 
State::ArithmeticExpressionS36, + NonTermKind::Term => State::TermS37, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::InitBodyS21 + State::TokenParOpenS20 ) } } } -fn goto_tokenparopen_s22(nonterm_kind: NonTermKind) -> State { +fn goto_tokenparopen_s21(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionIsZero => State::FunctionIsZeroS52, - NonTermKind::BooleanExpression => State::BooleanExpressionS53, - NonTermKind::SimpleExpression => State::SimpleExpressionS54, - NonTermKind::Conjunction => State::ConjunctionS55, - NonTermKind::Number => State::NumberS38, - NonTermKind::NotStatement => State::NotStatementS56, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::FunctionIsZero => State::FunctionIsZeroS49, + NonTermKind::BooleanExpression => State::BooleanExpressionS50, + NonTermKind::SimpleExpression => State::SimpleExpressionS51, + NonTermKind::Conjunction => State::ConjunctionS54, + NonTermKind::Number => State::NumberS35, + NonTermKind::NotStatement => State::NotStatementS53, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS36, + NonTermKind::Term => State::TermS37, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenParOpenS22 + State::TokenParOpenS21 ) } } } fn goto_tokenparopen_s23(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionIsZero => State::FunctionIsZeroS52, - NonTermKind::BooleanExpression => State::BooleanExpressionS53, - NonTermKind::SimpleExpression => State::SimpleExpressionS54, - NonTermKind::Conjunction => State::ConjunctionS57, - NonTermKind::Number => State::NumberS38, - NonTermKind::NotStatement => State::NotStatementS56, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::SimpleExpression => State::SimpleExpressionS56, + NonTermKind::Number => State::NumberS35, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS36, + NonTermKind::Term => State::TermS37, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", @@ -2612,353 +2560,345 @@ fn goto_tokenparopen_s23(nonterm_kind: NonTermKind) -> State { } } } -fn goto_tokencbopen_s24(nonterm_kind: NonTermKind) -> State { +fn goto_tokenparopen_s31(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS58, - NonTermKind::FunctionRead => State::FunctionReadS10, - NonTermKind::FunctionWrite => State::FunctionWriteS11, - NonTermKind::Expressions => State::ExpressionsS12, - NonTermKind::Statement => State::StatementS13, - NonTermKind::Assignment => State::AssignmentS14, - NonTermKind::WhileLoop => State::WhileLoopS15, - NonTermKind::IfStatement => State::IfStatementS16, - NonTermKind::ElseStatement => State::ElseStatementS17, + NonTermKind::Number => State::NumberS35, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS59, + NonTermKind::Term => State::TermS37, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS24 + State::TokenParOpenS31 ) } } } -fn goto_tokenparopen_s26(nonterm_kind: NonTermKind) -> State { +fn 
goto_arithmeticexpression_s36(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::SimpleExpression => State::SimpleExpressionS60, - NonTermKind::Number => State::NumberS38, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::DummyAE => State::DummyAES61, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenParOpenS26 + State::ArithmeticExpressionS36 ) } } } -fn goto_tokenparopen_s34(nonterm_kind: NonTermKind) -> State { +fn goto_term_s37(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Number => State::NumberS38, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS63, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::DummyT => State::DummyTS62, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenParOpenS34 + State::TermS37 ) } } } -fn goto_arithmeticexpression_s39(nonterm_kind: NonTermKind) -> State { +fn goto_vardeclaration_s42(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyAE => State::DummyAES65, + NonTermKind::VarDeclarations => State::VarDeclarationsS67, + NonTermKind::VarDeclaration => State::VarDeclarationS42, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::ArithmeticExpressionS39 + State::VarDeclarationS42 ) } } } -fn goto_term_s40(nonterm_kind: NonTermKind) -> State { +fn goto_tokennot_s47(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyT => State::DummyTS66, + NonTermKind::FunctionIsZero => State::FunctionIsZeroS49, + NonTermKind::BooleanExpression => State::BooleanExpressionS68, + NonTermKind::SimpleExpression => State::SimpleExpressionS51, + NonTermKind::Number => State::NumberS35, + NonTermKind::NotStatement => State::NotStatementS53, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS36, + NonTermKind::Term => State::TermS37, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TermS40 + State::TokenNotS47 ) } } } -fn goto_vardeclaration_s45(nonterm_kind: NonTermKind) -> State { +fn goto_simpleexpression_s51(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::VarDeclarations => State::VarDeclarationsS71, - NonTermKind::VarDeclaration => State::VarDeclarationS45, + NonTermKind::ComparisonOp => State::ComparisonOpS78, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::VarDeclarationS45 + State::SimpleExpressionS51 ) } } } -fn goto_tokennot_s50(nonterm_kind: NonTermKind) -> State { +fn goto_arithmeticexpression_s59(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionIsZero => State::FunctionIsZeroS52, - NonTermKind::BooleanExpression => State::BooleanExpressionS72, - NonTermKind::SimpleExpression => State::SimpleExpressionS54, - NonTermKind::Number => State::NumberS38, - NonTermKind::NotStatement => State::NotStatementS56, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::DummyAE => State::DummyAES61, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenNotS50 + State::ArithmeticExpressionS59 ) } } } -fn 
goto_simpleexpression_s54(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s63(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::ComparisonOp => State::ComparisonOpS82, + NonTermKind::Body => State::BodyS89, + NonTermKind::FunctionRead => State::FunctionReadS9, + NonTermKind::FunctionWrite => State::FunctionWriteS10, + NonTermKind::Expressions => State::ExpressionsS11, + NonTermKind::Statement => State::StatementS12, + NonTermKind::Assignment => State::AssignmentS13, + NonTermKind::WhileLoop => State::WhileLoopS14, + NonTermKind::IfStatement => State::IfStatementS15, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::SimpleExpressionS54 + State::TokenCBOpenS63 ) } } } -fn goto_arithmeticexpression_s63(nonterm_kind: NonTermKind) -> State { +fn goto_tokencolon_s64(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyAE => State::DummyAES65, + NonTermKind::DataType => State::DataTypeS93, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::ArithmeticExpressionS63 + State::TokenColonS64 ) } } } -fn goto_tokencbopen_s67(nonterm_kind: NonTermKind) -> State { +fn goto_tokencomma_s65(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS94, - NonTermKind::FunctionRead => State::FunctionReadS10, - NonTermKind::FunctionWrite => State::FunctionWriteS11, - NonTermKind::Expressions => State::ExpressionsS12, - NonTermKind::Statement => State::StatementS13, - NonTermKind::Assignment => State::AssignmentS14, - NonTermKind::WhileLoop => State::WhileLoopS15, - NonTermKind::IfStatement => State::IfStatementS16, - NonTermKind::ElseStatement => State::ElseStatementS17, + NonTermKind::VarDeclaration => State::VarDeclarationS94, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS67 + State::TokenCommaS65 ) } } } -fn goto_tokencolon_s68(nonterm_kind: NonTermKind) -> State { +fn goto_tokenparopen_s69(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DataType => State::DataTypeS98, + NonTermKind::Number => State::NumberS35, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS95, + NonTermKind::Term => State::TermS37, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenColonS68 + State::TokenParOpenS69 ) } } } -fn goto_tokencomma_s69(nonterm_kind: NonTermKind) -> State { +fn goto_tokenand_s70(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::VarDeclaration => State::VarDeclarationS99, + NonTermKind::FunctionIsZero => State::FunctionIsZeroS49, + NonTermKind::BooleanExpression => State::BooleanExpressionS50, + NonTermKind::SimpleExpression => State::SimpleExpressionS51, + NonTermKind::Conjunction => State::ConjunctionS96, + NonTermKind::Number => State::NumberS35, + NonTermKind::NotStatement => State::NotStatementS53, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS36, + NonTermKind::Term => State::TermS37, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCommaS69 + State::TokenAndS70 ) } } } -fn goto_tokenparopen_s73(nonterm_kind: NonTermKind) -> State { +fn goto_tokenor_s71(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Number => State::NumberS38, - NonTermKind::ArithmeticExpression => 
State::ArithmeticExpressionS100, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::FunctionIsZero => State::FunctionIsZeroS49, + NonTermKind::BooleanExpression => State::BooleanExpressionS50, + NonTermKind::SimpleExpression => State::SimpleExpressionS51, + NonTermKind::Conjunction => State::ConjunctionS97, + NonTermKind::Number => State::NumberS35, + NonTermKind::NotStatement => State::NotStatementS53, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS36, + NonTermKind::Term => State::TermS37, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenParOpenS73 + State::TokenOrS71 ) } } } -fn goto_tokenand_s74(nonterm_kind: NonTermKind) -> State { +fn goto_comparisonop_s78(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionIsZero => State::FunctionIsZeroS52, - NonTermKind::BooleanExpression => State::BooleanExpressionS53, - NonTermKind::SimpleExpression => State::SimpleExpressionS54, - NonTermKind::Conjunction => State::ConjunctionS101, - NonTermKind::Number => State::NumberS38, - NonTermKind::NotStatement => State::NotStatementS56, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::SimpleExpression => State::SimpleExpressionS98, + NonTermKind::Number => State::NumberS35, + NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS36, + NonTermKind::Term => State::TermS37, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenAndS74 + State::ComparisonOpS78 ) } } } -fn goto_tokenor_s75(nonterm_kind: NonTermKind) -> State { +fn goto_tokensum_s85(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::FunctionIsZero => State::FunctionIsZeroS52, - NonTermKind::BooleanExpression => State::BooleanExpressionS53, - NonTermKind::SimpleExpression => State::SimpleExpressionS54, - NonTermKind::Conjunction => State::ConjunctionS102, - NonTermKind::Number => State::NumberS38, - NonTermKind::NotStatement => State::NotStatementS56, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::Number => State::NumberS35, + NonTermKind::Term => State::TermS102, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenOrS75 + State::TokenSumS85 ) } } } -fn goto_comparisonop_s82(nonterm_kind: NonTermKind) -> State { +fn goto_tokensub_s86(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::SimpleExpression => State::SimpleExpressionS103, - NonTermKind::Number => State::NumberS38, - NonTermKind::ArithmeticExpression => State::ArithmeticExpressionS39, - NonTermKind::Term => State::TermS40, - NonTermKind::Factor => State::FactorS41, + NonTermKind::Number => State::NumberS35, + NonTermKind::Term => State::TermS103, + NonTermKind::Factor => State::FactorS38, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::ComparisonOpS82 + State::TokenSubS86 ) } } } -fn goto_tokensum_s90(nonterm_kind: NonTermKind) -> State { +fn goto_tokenmul_s87(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Number => State::NumberS38, - NonTermKind::Term => State::TermS107, - 
NonTermKind::Factor => State::FactorS41, + NonTermKind::Number => State::NumberS35, + NonTermKind::Factor => State::FactorS104, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenSumS90 + State::TokenMulS87 ) } } } -fn goto_tokensub_s91(nonterm_kind: NonTermKind) -> State { +fn goto_tokendiv_s88(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Number => State::NumberS38, - NonTermKind::Term => State::TermS108, - NonTermKind::Factor => State::FactorS41, + NonTermKind::Number => State::NumberS35, + NonTermKind::Factor => State::FactorS105, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenSubS91 + State::TokenDivS88 ) } } } -fn goto_tokenmul_s92(nonterm_kind: NonTermKind) -> State { +fn goto_arithmeticexpression_s95(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Number => State::NumberS38, - NonTermKind::Factor => State::FactorS109, + NonTermKind::DummyAE => State::DummyAES61, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenMulS92 + State::ArithmeticExpressionS95 ) } } } -fn goto_tokendiv_s93(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s99(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Number => State::NumberS38, - NonTermKind::Factor => State::FactorS110, + NonTermKind::Body => State::BodyS108, + NonTermKind::FunctionRead => State::FunctionReadS9, + NonTermKind::FunctionWrite => State::FunctionWriteS10, + NonTermKind::Expressions => State::ExpressionsS11, + NonTermKind::Statement => State::StatementS12, + NonTermKind::Assignment => State::AssignmentS13, + NonTermKind::WhileLoop => State::WhileLoopS14, + NonTermKind::IfStatement => State::IfStatementS15, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenDivS93 + State::TokenCBOpenS99 ) } } } -fn goto_arithmeticexpression_s100(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s100(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyAE => State::DummyAES65, + NonTermKind::Body => State::BodyS109, + NonTermKind::FunctionRead => State::FunctionReadS9, + NonTermKind::FunctionWrite => State::FunctionWriteS10, + NonTermKind::Expressions => State::ExpressionsS11, + NonTermKind::Statement => State::StatementS12, + NonTermKind::Assignment => State::AssignmentS13, + NonTermKind::WhileLoop => State::WhileLoopS14, + NonTermKind::IfStatement => State::IfStatementS15, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::ArithmeticExpressionS100 + State::TokenCBOpenS100 ) } } } -fn goto_tokencbopen_s104(nonterm_kind: NonTermKind) -> State { +fn goto_term_s102(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS113, - NonTermKind::FunctionRead => State::FunctionReadS10, - NonTermKind::FunctionWrite => State::FunctionWriteS11, - NonTermKind::Expressions => State::ExpressionsS12, - NonTermKind::Statement => State::StatementS13, - NonTermKind::Assignment => State::AssignmentS14, - NonTermKind::WhileLoop => State::WhileLoopS15, - NonTermKind::IfStatement => State::IfStatementS16, - NonTermKind::ElseStatement => State::ElseStatementS17, + NonTermKind::DummyT => State::DummyTS62, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS104 + State::TermS102 ) } } } -fn goto_tokencbopen_s105(nonterm_kind: NonTermKind) -> State { +fn 
goto_term_s103(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS114, - NonTermKind::FunctionRead => State::FunctionReadS10, - NonTermKind::FunctionWrite => State::FunctionWriteS11, - NonTermKind::Expressions => State::ExpressionsS12, - NonTermKind::Statement => State::StatementS13, - NonTermKind::Assignment => State::AssignmentS14, - NonTermKind::WhileLoop => State::WhileLoopS15, - NonTermKind::IfStatement => State::IfStatementS16, - NonTermKind::ElseStatement => State::ElseStatementS17, + NonTermKind::DummyT => State::DummyTS62, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS105 + State::TermS103 ) } } } -fn goto_term_s107(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbclose_s111(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyT => State::DummyTS66, + NonTermKind::ElseStatement => State::ElseStatementS113, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TermS107 + State::TokenCBCloseS111 ) } } } -fn goto_term_s108(nonterm_kind: NonTermKind) -> State { +fn goto_tokencbopen_s114(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::DummyT => State::DummyTS66, + NonTermKind::Body => State::BodyS115, + NonTermKind::FunctionRead => State::FunctionReadS9, + NonTermKind::FunctionWrite => State::FunctionWriteS10, + NonTermKind::Expressions => State::ExpressionsS11, + NonTermKind::Statement => State::StatementS12, + NonTermKind::Assignment => State::AssignmentS13, + NonTermKind::WhileLoop => State::WhileLoopS14, + NonTermKind::IfStatement => State::IfStatementS15, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TermS108 + State::TokenCBOpenS114 ) } } @@ -2973,117 +2913,117 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti action_tokeninit_s2, action_tokenwhile_s3, action_tokenif_s4, - action_tokenelse_s5, - action_tokenread_s6, - action_tokenwrite_s7, - action_program_s8, - action_body_s9, - action_functionread_s10, - action_functionwrite_s11, - action_expressions_s12, - action_statement_s13, - action_assignment_s14, - action_whileloop_s15, - action_ifstatement_s16, - action_elsestatement_s17, - action_tokenassign_s18, - action_tokenparopen_s19, - action_tokencbopen_s20, - action_initbody_s21, + action_tokenread_s5, + action_tokenwrite_s6, + action_program_s7, + action_body_s8, + action_functionread_s9, + action_functionwrite_s10, + action_expressions_s11, + action_statement_s12, + action_assignment_s13, + action_whileloop_s14, + action_ifstatement_s15, + action_tokenassign_s16, + action_tokenparopen_s17, + action_tokencbopen_s18, + action_initbody_s19, + action_tokenparopen_s20, + action_tokenparopen_s21, action_tokenparopen_s22, action_tokenparopen_s23, - action_tokencbopen_s24, - action_tokenparopen_s25, - action_tokenparopen_s26, - action_tokenid_s27, - action_expressions_s28, - action_tokenintliteral_s29, - action_tokenfloatliteral_s30, - action_tokenstringliteral_s31, - action_tokenid_s32, - action_tokensub_s33, - action_tokenparopen_s34, - action_tokenconvdate_s35, - action_functionconvdate_s36, - action_simpleexpression_s37, - action_number_s38, - action_arithmeticexpression_s39, - action_term_s40, - action_factor_s41, - action_tokenparclose_s42, - action_tokenid_s43, - action_vardeclarations_s44, - action_vardeclaration_s45, - action_expressions_s46, - action_tokenid_s47, - action_tokentrue_s48, - 
action_tokenfalse_s49, - action_tokennot_s50, - action_tokeniszero_s51, - action_functioniszero_s52, - action_booleanexpression_s53, - action_simpleexpression_s54, - action_conjunction_s55, - action_notstatement_s56, - action_conjunction_s57, - action_body_s58, - action_tokenid_s59, - action_simpleexpression_s60, - action_tokenintliteral_s61, - action_tokenfloatliteral_s62, - action_arithmeticexpression_s63, - action_tokenparopen_s64, - action_dummyae_s65, - action_dummyt_s66, - action_tokencbopen_s67, - action_tokencolon_s68, - action_tokencomma_s69, - action_tokencbclose_s70, - action_vardeclarations_s71, - action_booleanexpression_s72, - action_tokenparopen_s73, - action_tokenand_s74, - action_tokenor_s75, - action_tokenequal_s76, - action_tokennotequal_s77, - action_tokenless_s78, - action_tokenlessequal_s79, - action_tokengreater_s80, - action_tokengreaterequal_s81, - action_comparisonop_s82, + action_tokenid_s24, + action_expressions_s25, + action_tokenintliteral_s26, + action_tokenfloatliteral_s27, + action_tokenstringliteral_s28, + action_tokenid_s29, + action_tokensub_s30, + action_tokenparopen_s31, + action_tokenconvdate_s32, + action_functionconvdate_s33, + action_simpleexpression_s34, + action_number_s35, + action_arithmeticexpression_s36, + action_term_s37, + action_factor_s38, + action_tokenparclose_s39, + action_tokenid_s40, + action_vardeclarations_s41, + action_vardeclaration_s42, + action_expressions_s43, + action_tokenid_s44, + action_tokentrue_s45, + action_tokenfalse_s46, + action_tokennot_s47, + action_tokeniszero_s48, + action_functioniszero_s49, + action_booleanexpression_s50, + action_simpleexpression_s51, + action_conjunction_s52, + action_notstatement_s53, + action_conjunction_s54, + action_tokenid_s55, + action_simpleexpression_s56, + action_tokenintliteral_s57, + action_tokenfloatliteral_s58, + action_arithmeticexpression_s59, + action_tokenparopen_s60, + action_dummyae_s61, + action_dummyt_s62, + action_tokencbopen_s63, + action_tokencolon_s64, + action_tokencomma_s65, + action_tokencbclose_s66, + action_vardeclarations_s67, + action_booleanexpression_s68, + action_tokenparopen_s69, + action_tokenand_s70, + action_tokenor_s71, + action_tokenequal_s72, + action_tokennotequal_s73, + action_tokenless_s74, + action_tokenlessequal_s75, + action_tokengreater_s76, + action_tokengreaterequal_s77, + action_comparisonop_s78, + action_tokenparclose_s79, + action_tokenparclose_s80, + action_tokenparclose_s81, + action_tokenparclose_s82, action_tokenparclose_s83, - action_tokenparclose_s84, - action_tokencbclose_s85, - action_tokenparclose_s86, - action_tokenparclose_s87, - action_tokenparclose_s88, - action_tokendate_s89, - action_tokensum_s90, - action_tokensub_s91, - action_tokenmul_s92, - action_tokendiv_s93, - action_body_s94, - action_tokenint_s95, - action_tokenfloat_s96, - action_tokenstring_s97, - action_datatype_s98, - action_vardeclaration_s99, - action_arithmeticexpression_s100, - action_conjunction_s101, - action_conjunction_s102, - action_simpleexpression_s103, - action_tokencbopen_s104, - action_tokencbopen_s105, - action_tokenparclose_s106, - action_term_s107, - action_term_s108, - action_factor_s109, - action_factor_s110, + action_tokendate_s84, + action_tokensum_s85, + action_tokensub_s86, + action_tokenmul_s87, + action_tokendiv_s88, + action_body_s89, + action_tokenint_s90, + action_tokenfloat_s91, + action_tokenstring_s92, + action_datatype_s93, + action_vardeclaration_s94, + action_arithmeticexpression_s95, + action_conjunction_s96, + 
action_conjunction_s97, + action_simpleexpression_s98, + action_tokencbopen_s99, + action_tokencbopen_s100, + action_tokenparclose_s101, + action_term_s102, + action_term_s103, + action_factor_s104, + action_factor_s105, + action_tokencbclose_s106, + action_tokenparclose_s107, + action_body_s108, + action_body_s109, + action_tokencbclose_s110, action_tokencbclose_s111, - action_tokenparclose_s112, - action_body_s113, - action_body_s114, - action_tokencbclose_s115, + action_tokenelse_s112, + action_elsestatement_s113, + action_tokencbopen_s114, + action_body_s115, action_tokencbclose_s116, ], gotos: [ @@ -3099,109 +3039,109 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti goto_invalid, goto_invalid, goto_invalid, - goto_invalid, - goto_statement_s13, + goto_statement_s12, goto_invalid, goto_invalid, goto_invalid, + goto_tokenassign_s16, goto_invalid, - goto_tokenassign_s18, + goto_tokencbopen_s18, + goto_initbody_s19, + goto_tokenparopen_s20, + goto_tokenparopen_s21, goto_invalid, - goto_tokencbopen_s20, - goto_initbody_s21, - goto_tokenparopen_s22, goto_tokenparopen_s23, - goto_tokencbopen_s24, goto_invalid, - goto_tokenparopen_s26, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, + goto_tokenparopen_s31, goto_invalid, - goto_tokenparopen_s34, goto_invalid, goto_invalid, goto_invalid, + goto_arithmeticexpression_s36, + goto_term_s37, goto_invalid, - goto_arithmeticexpression_s39, - goto_term_s40, goto_invalid, goto_invalid, goto_invalid, + goto_vardeclaration_s42, goto_invalid, - goto_vardeclaration_s45, goto_invalid, goto_invalid, goto_invalid, + goto_tokennot_s47, goto_invalid, - goto_tokennot_s50, goto_invalid, goto_invalid, + goto_simpleexpression_s51, goto_invalid, - goto_simpleexpression_s54, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, goto_invalid, + goto_arithmeticexpression_s59, goto_invalid, goto_invalid, - goto_arithmeticexpression_s63, goto_invalid, + goto_tokencbopen_s63, + goto_tokencolon_s64, + goto_tokencomma_s65, goto_invalid, goto_invalid, - goto_tokencbopen_s67, - goto_tokencolon_s68, - goto_tokencomma_s69, goto_invalid, + goto_tokenparopen_s69, + goto_tokenand_s70, + goto_tokenor_s71, goto_invalid, goto_invalid, - goto_tokenparopen_s73, - goto_tokenand_s74, - goto_tokenor_s75, goto_invalid, goto_invalid, goto_invalid, goto_invalid, + goto_comparisonop_s78, goto_invalid, goto_invalid, - goto_comparisonop_s82, goto_invalid, goto_invalid, goto_invalid, goto_invalid, + goto_tokensum_s85, + goto_tokensub_s86, + goto_tokenmul_s87, + goto_tokendiv_s88, goto_invalid, goto_invalid, goto_invalid, - goto_tokensum_s90, - goto_tokensub_s91, - goto_tokenmul_s92, - goto_tokendiv_s93, goto_invalid, goto_invalid, goto_invalid, + goto_arithmeticexpression_s95, goto_invalid, goto_invalid, goto_invalid, - goto_arithmeticexpression_s100, + goto_tokencbopen_s99, + goto_tokencbopen_s100, goto_invalid, + goto_term_s102, + goto_term_s103, goto_invalid, goto_invalid, - goto_tokencbopen_s104, - goto_tokencbopen_s105, goto_invalid, - goto_term_s107, - goto_term_s108, goto_invalid, goto_invalid, goto_invalid, goto_invalid, + goto_tokencbclose_s111, goto_invalid, goto_invalid, + goto_tokencbopen_s114, goto_invalid, goto_invalid, ], @@ -3212,7 +3152,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenInit, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, 
false)), None, @@ -3250,7 +3189,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenCBOpen, false)), @@ -3273,7 +3211,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParOpen, false)), @@ -3296,7 +3233,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParOpen, false)), @@ -3319,30 +3255,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, - ], - [ - Some((TK::TokenCBOpen, false)), - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, ], [ Some((TK::TokenParOpen, false)), @@ -3365,7 +3277,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParOpen, false)), @@ -3388,7 +3299,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, false)), @@ -3411,7 +3321,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, false)), @@ -3434,7 +3343,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, true)), @@ -3442,7 +3350,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -3465,7 +3372,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -3503,7 +3409,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, true)), @@ -3511,7 +3416,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -3534,7 +3438,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -3557,7 +3460,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -3580,30 +3482,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), - Some((TK::TokenRead, false)), - Some((TK::TokenWrite, false)), - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, 
- ], - [ - Some((TK::STOP, true)), - Some((TK::TokenId, false)), - Some((TK::TokenCBClose, false)), - Some((TK::TokenWhile, false)), - Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -3641,7 +3519,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -3664,7 +3541,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenId, false)), @@ -3687,7 +3563,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, true)), @@ -3695,7 +3570,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -3733,7 +3607,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -3756,30 +3629,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, - ], - [ - Some((TK::TokenId, false)), - Some((TK::TokenCBClose, false)), - Some((TK::TokenInit, false)), - Some((TK::TokenWhile, false)), - Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), - Some((TK::TokenRead, false)), - Some((TK::TokenWrite, false)), - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, ], [ Some((TK::TokenId, false)), @@ -3802,7 +3651,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -3825,7 +3673,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenAssign, false)), @@ -3848,7 +3695,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, true)), @@ -3871,7 +3717,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, true)), @@ -3890,7 +3735,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -3913,7 +3757,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -3932,7 +3775,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -3959,7 +3801,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, 
false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -3986,7 +3827,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -4009,7 +3849,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParOpen, false)), @@ -4032,7 +3871,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, true)), @@ -4040,7 +3878,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -4063,7 +3900,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -4097,7 +3933,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -4118,7 +3953,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -4143,7 +3977,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -4166,7 +3999,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -4193,7 +4025,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenColon, false)), @@ -4216,7 +4047,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenCBClose, false)), @@ -4239,7 +4069,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenId, false)), @@ -4262,7 +4091,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, true)), @@ -4285,7 +4113,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenSum, false)), @@ -4308,7 +4135,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -4331,7 +4157,6 @@ pub(crate) static PARSER_DEFINITION: 
RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -4354,7 +4179,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -4377,7 +4201,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParOpen, false)), @@ -4400,7 +4223,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -4423,7 +4245,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -4446,7 +4267,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenEqual, false)), @@ -4469,7 +4289,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -4492,7 +4311,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -4515,7 +4333,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -4538,30 +4355,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, - ], - [ - Some((TK::TokenCBClose, false)), - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, ], [ Some((TK::TokenParClose, false)), @@ -4584,7 +4377,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -4607,7 +4399,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, true)), @@ -4626,7 +4417,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -4649,7 +4439,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -4676,7 +4465,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenDate, false)), @@ -4699,7 +4487,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenSum, false)), @@ -4722,7 +4509,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenMul, false)), @@ -4745,7 +4531,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenId, false)), @@ -4753,7 +4538,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti 
Some((TK::TokenInit, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -4791,7 +4575,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenId, false)), @@ -4814,7 +4597,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::STOP, true)), @@ -4822,7 +4604,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -4860,7 +4641,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -4883,7 +4663,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -4906,7 +4685,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -4929,7 +4707,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -4952,7 +4729,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -4975,7 +4751,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -4998,7 +4773,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -5021,7 +4795,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -5044,7 +4817,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -5067,7 +4839,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -5090,7 +4861,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -5113,7 +4883,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenCBOpen, false)), @@ -5136,7 +4905,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenCBOpen, false)), @@ -5159,30 +4927,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, - ], - [ - Some((TK::STOP, true)), - Some((TK::TokenId, false)), - Some((TK::TokenCBClose, false)), - Some((TK::TokenWhile, false)), - Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), - Some((TK::TokenRead, false)), - Some((TK::TokenWrite, false)), - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, - None, ], [ Some((TK::STOP, true)), @@ -5190,7 +4934,6 @@ 
pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -5213,7 +4956,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -5247,7 +4989,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -5274,7 +5015,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -5297,7 +5037,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -5320,7 +5059,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -5343,7 +5081,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenIntLiteral, false)), @@ -5366,7 +5103,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenCBClose, false)), @@ -5389,7 +5125,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenId, false)), @@ -5412,7 +5147,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenId, false)), @@ -5435,7 +5169,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenId, false)), @@ -5458,7 +5191,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenId, false)), @@ -5481,7 +5213,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenId, false)), @@ -5504,7 +5235,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenSum, false)), @@ -5527,7 +5257,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -5550,7 +5279,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -5573,7 +5301,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -5596,7 +5323,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenId, false)), @@ -5604,7 +5330,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenInit, false)), Some((TK::TokenWhile, false)), 
Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -5627,7 +5352,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenInit, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -5650,7 +5374,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -5684,7 +5407,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -5707,7 +5429,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -5730,7 +5451,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -5753,7 +5473,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti Some((TK::TokenGreater, false)), Some((TK::TokenGreaterEqual, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenAnd, false)), Some((TK::TokenOr, false)), Some((TK::TokenRead, false)), @@ -5780,7 +5499,6 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, - None, ], [ Some((TK::TokenParClose, false)), @@ -5803,6 +5521,27 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, + ], + [ + Some((TK::TokenCBClose, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, None, ], [ @@ -5826,10 +5565,39 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, + ], + [ + Some((TK::STOP, true)), + Some((TK::TokenId, false)), + Some((TK::TokenCBClose, false)), + Some((TK::TokenWhile, false)), + Some((TK::TokenIf, false)), + Some((TK::TokenRead, false)), + Some((TK::TokenWrite, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, None, ], [ + Some((TK::STOP, true)), + Some((TK::TokenId, false)), Some((TK::TokenCBClose, false)), + Some((TK::TokenWhile, false)), + Some((TK::TokenIf, false)), + Some((TK::TokenElse, false)), + Some((TK::TokenRead, false)), + Some((TK::TokenWrite, false)), + None, None, None, None, @@ -5841,6 +5609,19 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, None, + ], + [ + Some((TK::TokenCBOpen, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, None, None, None, @@ -5857,7 +5638,6 @@ pub(crate) static PARSER_DEFINITION: 
RulesParserDefinition = RulesParserDefiniti Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, @@ -5874,13 +5654,56 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, ], + [ + Some((TK::TokenId, false)), + Some((TK::TokenCBClose, false)), + Some((TK::TokenInit, false)), + Some((TK::TokenWhile, false)), + Some((TK::TokenIf, false)), + Some((TK::TokenRead, false)), + Some((TK::TokenWrite, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + ], + [ + Some((TK::TokenCBClose, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + ], [ Some((TK::STOP, true)), Some((TK::TokenId, false)), Some((TK::TokenCBClose, false)), Some((TK::TokenWhile, false)), Some((TK::TokenIf, false)), - Some((TK::TokenElse, false)), Some((TK::TokenRead, false)), Some((TK::TokenWrite, false)), None, diff --git a/src/grammar/rules.rustemo b/src/grammar/rules.rustemo index 2d286b5..94cd247 100644 --- a/src/grammar/rules.rustemo +++ b/src/grammar/rules.rustemo @@ -27,7 +27,6 @@ Expressions: Statement {ExpressionSingle} Statement: Assignment {StatementAssignment} | IfStatement {StatementIfStatement} - | ElseStatement {StatementElseStatement} | WhileLoop {StatementWhile} | FunctionWrite {StatementWrite} | FunctionRead {StatementRead}; @@ -41,7 +40,8 @@ DataType: TokenInt {IntType} WhileLoop: TokenWhile TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose {While}; -IfStatement: TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose {IfStatement}; +IfStatement: TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose {IfStatement} + | TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose ElseStatement {IfElseStatement}; ElseStatement: TokenElse TokenCBOpen Body TokenCBClose {ElseStatement}; diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index f23b2c6..75571d1 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -610,16 +610,6 @@ pub fn statement_statement_if_statement( Statement::StatementIfStatement(if_statement) } -/// Parses the rule ` -> ` -pub fn statement_statement_else_statement( - _ctx: &Ctx, - else_statement: ElseStatement, - compiler_context: &mut CompilerContext, -) -> Statement { - compiler_context.write_to_parser_file(" -> "); - Statement::StatementElseStatement(else_statement) -} - /// Parses the rule ` -> ` pub fn statement_statement_while( _ctx: &Ctx, @@ -772,7 +762,7 @@ pub fn if_statement_if_statement( AstPtr::Body.into(), AstPtr::If, ); - IfStatement { + IfStatement::IfStatementIfStatement(IfStatementIfStatement { token_if, token_par_open, conjunction, @@ -780,7 +770,34 @@ pub fn if_statement_if_statement( token_cbopen, body: Box::new(body), token_cbclose, - } + }) +} + +/// Parses the rule `: TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose ` +#[expect(clippy::too_many_arguments)] +pub fn if_statement_if_statement_else_statement( + _ctx: &Ctx, + token_if: TokenIf, + token_par_open: TokenParOpen, + conjunction: Conjunction, + token_par_close: TokenParClose, + token_cbopen: TokenCBOpen, + body: Body, + token_cbclose: TokenCBClose, + else_statement: ElseStatement, + compiler_context: &mut CompilerContext, +) -> 
IfStatement { + compiler_context.write_to_parser_file(": TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose "); + IfStatement::IfStatementElseStatement(IfStatementElseStatement { + token_if, + token_par_open, + conjunction, + token_par_close, + token_cbopen, + body: Box::new(body), + token_cbclose, + else_statement: Box::new(else_statement), + }) } /// Parses the rule `: TokenElse TokenCBOpen TokenCBClose` diff --git a/src/grammar/rules_builder.rs b/src/grammar/rules_builder.rs index 92c97ec..287b3ef 100644 --- a/src/grammar/rules_builder.rs +++ b/src/grammar/rules_builder.rs @@ -684,22 +684,6 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo _ => panic!("Invalid symbol parse stack data."), } } - ProdKind::StatementStatementElseStatement => { - let mut i = compiler_context - .res_stack - .split_off(stack_len - 1usize) - .into_iter(); - match i.next().unwrap() { - Symbol::NonTerminal(NonTerminal::ElseStatement(p0)) => { - NonTerminal::Statement(rules_actions::statement_statement_else_statement( - context, - p0, - &mut compiler_context, - )) - } - _ => panic!("Invalid symbol parse stack data."), - } - } ProdKind::StatementStatementWhile => { let mut i = compiler_context .res_stack @@ -892,6 +876,48 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo _ => panic!("Invalid symbol parse stack data."), } } + ProdKind::IfStatementIfElseStatement => { + let mut i = compiler_context + .res_stack + .split_off(stack_len - 8usize) + .into_iter(); + match ( + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + i.next().unwrap(), + ) { + ( + Symbol::Terminal(Terminal::TokenIf(p0)), + Symbol::Terminal(Terminal::TokenParOpen(p1)), + Symbol::NonTerminal(NonTerminal::Conjunction(p2)), + Symbol::Terminal(Terminal::TokenParClose(p3)), + Symbol::Terminal(Terminal::TokenCBOpen(p4)), + Symbol::NonTerminal(NonTerminal::Body(p5)), + Symbol::Terminal(Terminal::TokenCBClose(p6)), + Symbol::NonTerminal(NonTerminal::ElseStatement(p7)), + ) => NonTerminal::IfStatement( + rules_actions::if_statement_if_statement_else_statement( + context, + p0, + p1, + p2, + p3, + p4, + p5, + p6, + p7, + &mut compiler_context, + ), + ), + _ => panic!("Invalid symbol parse stack data."), + } + } + ProdKind::ElseStatementElseStatement => { let mut i = compiler_context .res_stack diff --git a/src/grammar/types.rs b/src/grammar/types.rs index 64b82bd..2abe853 100644 --- a/src/grammar/types.rs +++ b/src/grammar/types.rs @@ -385,8 +385,6 @@ pub enum Statement { StatementAssignment(Assignment), /// ` -> ` StatementIfStatement(IfStatement), - /// ` -> ` - StatementElseStatement(ElseStatement), /// ` -> ` StatementWhile(WhileLoop), /// ` -> ` @@ -460,11 +458,34 @@ pub struct WhileLoop { pub token_cbclose: TokenCBClose, } +/// Enum representing all the possible rules for the `` non terminal +#[derive(Debug, Clone)] +pub enum IfStatement { + /// ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose` + IfStatementIfStatement(IfStatementIfStatement), + /// ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose ` + IfStatementElseStatement(IfStatementElseStatement), +} + /// Struct representation of the rule /// /// ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose` #[derive(Debug, Clone)] -pub struct IfStatement { +pub struct IfStatementIfStatement { + pub token_if: TokenIf, + pub token_par_open: TokenParOpen, + pub conjunction: Conjunction, + pub 
token_par_close: TokenParClose, + pub token_cbopen: TokenCBOpen, + pub body: Box, + pub token_cbclose: TokenCBClose, +} + +/// Struct representation of the rule +/// +/// ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose ` +#[derive(Debug, Clone)] +pub struct IfStatementElseStatement { pub token_if: TokenIf, pub token_par_open: TokenParOpen, pub conjunction: Conjunction, @@ -472,6 +493,7 @@ pub struct IfStatement { pub token_cbopen: TokenCBOpen, pub body: Box, pub token_cbclose: TokenCBClose, + pub else_statement: Box, } /// Struct representation of the rule From f549d6335bcb5373f3fe67ef032d25a6eab3d879 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sun, 26 Oct 2025 20:07:30 -0300 Subject: [PATCH 12/25] feat: AST generation for else statements and nested ifs --- src/compiler/ast.rs | 6 ++ src/grammar/rules.rs | 121 ++++++++++++++++++++++++----------- src/grammar/rules.rustemo | 4 +- src/grammar/rules_actions.rs | 118 ++++++++++++++++++++++++++++++---- src/grammar/rules_builder.rs | 10 ++- src/grammar/types.rs | 3 + 6 files changed, 209 insertions(+), 53 deletions(-) diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index fbb9bf7..21f7bc2 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -16,6 +16,8 @@ pub struct Ast { pub comparision_op_stack: Vec, pub comparision_expressions_stack: Vec>, pub boolean_expression_stack: Vec>, + pub if_body_stack: Vec>, + pub conjunction_stack: Vec>, } impl Debug for Ast { @@ -110,6 +112,7 @@ pub enum AstAction { Div, Assign, If, + Else, And, Or, Not, @@ -138,6 +141,7 @@ impl Display for AstAction { Self::LT => write!(f, "<"), Self::LTE => write!(f, "<="), Self::If => write!(f, "IF"), + Self::Else => write!(f, "ELSE"), Self::And => write!(f, "AND"), Self::Or => write!(f, "OR"), Self::Not => write!(f, "NOT"), @@ -169,6 +173,8 @@ impl Default for Ast { comparision_op_stack: Vec::new(), comparision_expressions_stack: Vec::new(), boolean_expression_stack: Vec::new(), + if_body_stack: Vec::new(), + conjunction_stack: Vec::new(), } } } diff --git a/src/grammar/rules.rs b/src/grammar/rules.rs index 96212a9..a33b36c 100644 --- a/src/grammar/rules.rs +++ b/src/grammar/rules.rs @@ -15,7 +15,7 @@ use rustemo::debug::{log, logn}; #[cfg(debug_assertions)] use rustemo::colored::*; pub type Input = str; -const STATE_COUNT: usize = 117usize; +const STATE_COUNT: usize = 118usize; const MAX_RECOGNIZERS: usize = 20usize; #[allow(dead_code)] const TERMINAL_COUNT: usize = 39usize; @@ -102,6 +102,7 @@ pub enum ProdKind { WhileLoopWhile, IfStatementIfStatement, IfStatementIfElseStatement, + DummyElseP1, ElseStatementElseStatement, BooleanExpressionBooleanExpressionSimpleExpression, BooleanExpressionBooleanExpressionTrue, @@ -201,8 +202,9 @@ impl std::fmt::Debug for ProdKind { "IfStatement: TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose" } ProdKind::IfStatementIfElseStatement => { - "IfStatement: TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose ElseStatement" + "IfStatement: TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose DummyElse ElseStatement" } + ProdKind::DummyElseP1 => "DummyElse: ", ProdKind::ElseStatementElseStatement => { "ElseStatement: TokenElse TokenCBOpen Body TokenCBClose" } @@ -294,6 +296,7 @@ pub enum NonTermKind { DataType, WhileLoop, IfStatement, + DummyElse, ElseStatement, BooleanExpression, SimpleExpression, @@ -348,6 +351,7 @@ impl From for NonTermKind { ProdKind::WhileLoopWhile => 
NonTermKind::WhileLoop, ProdKind::IfStatementIfStatement => NonTermKind::IfStatement, ProdKind::IfStatementIfElseStatement => NonTermKind::IfStatement, + ProdKind::DummyElseP1 => NonTermKind::DummyElse, ProdKind::ElseStatementElseStatement => NonTermKind::ElseStatement, ProdKind::BooleanExpressionBooleanExpressionSimpleExpression => { NonTermKind::BooleanExpression @@ -523,11 +527,12 @@ pub enum State { BodyS109, TokenCBCloseS110, TokenCBCloseS111, - TokenElseS112, - ElseStatementS113, - TokenCBOpenS114, - BodyS115, - TokenCBCloseS116, + DummyElseS112, + TokenElseS113, + ElseStatementS114, + TokenCBOpenS115, + BodyS116, + TokenCBCloseS117, } impl StateT for State { fn default_layout() -> Option { @@ -654,11 +659,12 @@ impl std::fmt::Debug for State { State::BodyS109 => "109:Body", State::TokenCBCloseS110 => "110:TokenCBClose", State::TokenCBCloseS111 => "111:TokenCBClose", - State::TokenElseS112 => "112:TokenElse", - State::ElseStatementS113 => "113:ElseStatement", - State::TokenCBOpenS114 => "114:TokenCBOpen", - State::BodyS115 => "115:Body", - State::TokenCBCloseS116 => "116:TokenCBClose", + State::DummyElseS112 => "112:DummyElse", + State::TokenElseS113 => "113:TokenElse", + State::ElseStatementS114 => "114:ElseStatement", + State::TokenCBOpenS115 => "115:TokenCBOpen", + State::BodyS116 => "116:Body", + State::TokenCBCloseS117 => "117:TokenCBClose", }; write!(f, "{name}") } @@ -2361,31 +2367,37 @@ fn action_tokencbclose_s111(token_kind: TokenKind) -> Vec Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), TK::TokenWhile => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), TK::TokenIf => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), - TK::TokenElse => Vec::from(&[Shift(State::TokenElseS112)]), + TK::TokenElse => Vec::from(&[Reduce(PK::DummyElseP1, 0usize)]), TK::TokenRead => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), TK::TokenWrite => Vec::from(&[Reduce(PK::IfStatementIfStatement, 7usize)]), _ => vec![], } } -fn action_tokenelse_s112(token_kind: TokenKind) -> Vec> { +fn action_dummyelse_s112(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS114)]), + TK::TokenElse => Vec::from(&[Shift(State::TokenElseS113)]), _ => vec![], } } -fn action_elsestatement_s113(token_kind: TokenKind) -> Vec> { +fn action_tokenelse_s113(token_kind: TokenKind) -> Vec> { match token_kind { - TK::STOP => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), - TK::TokenId => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), - TK::TokenCBClose => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), - TK::TokenWhile => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), - TK::TokenIf => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), - TK::TokenRead => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), - TK::TokenWrite => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 8usize)]), + TK::TokenCBOpen => Vec::from(&[Shift(State::TokenCBOpenS115)]), _ => vec![], } } -fn action_tokencbopen_s114(token_kind: TokenKind) -> Vec> { +fn action_elsestatement_s114(token_kind: TokenKind) -> Vec> { + match token_kind { + TK::STOP => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 9usize)]), + TK::TokenId => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 9usize)]), + TK::TokenCBClose => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 9usize)]), + TK::TokenWhile => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 9usize)]), + TK::TokenIf => 
Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 9usize)]), + TK::TokenRead => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 9usize)]), + TK::TokenWrite => Vec::from(&[Reduce(PK::IfStatementIfElseStatement, 9usize)]), + _ => vec![], + } +} +fn action_tokencbopen_s115(token_kind: TokenKind) -> Vec> { match token_kind { TK::TokenId => Vec::from(&[Shift(State::TokenIdS24)]), TK::TokenCBClose => Vec::from(&[Reduce(PK::BodyBodyEmpty, 0usize)]), @@ -2397,13 +2409,13 @@ fn action_tokencbopen_s114(token_kind: TokenKind) -> Vec _ => vec![], } } -fn action_body_s115(token_kind: TokenKind) -> Vec> { +fn action_body_s116(token_kind: TokenKind) -> Vec> { match token_kind { - TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS116)]), + TK::TokenCBClose => Vec::from(&[Shift(State::TokenCBCloseS117)]), _ => vec![], } } -fn action_tokencbclose_s116(token_kind: TokenKind) -> Vec> { +fn action_tokencbclose_s117(token_kind: TokenKind) -> Vec> { match token_kind { TK::STOP => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), TK::TokenId => Vec::from(&[Reduce(PK::ElseStatementElseStatement, 4usize)]), @@ -2876,7 +2888,7 @@ fn goto_term_s103(nonterm_kind: NonTermKind) -> State { } fn goto_tokencbclose_s111(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::ElseStatement => State::ElseStatementS113, + NonTermKind::DummyElse => State::DummyElseS112, _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", @@ -2885,9 +2897,20 @@ fn goto_tokencbclose_s111(nonterm_kind: NonTermKind) -> State { } } } -fn goto_tokencbopen_s114(nonterm_kind: NonTermKind) -> State { +fn goto_dummyelse_s112(nonterm_kind: NonTermKind) -> State { match nonterm_kind { - NonTermKind::Body => State::BodyS115, + NonTermKind::ElseStatement => State::ElseStatementS114, + _ => { + panic!( + "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", + State::DummyElseS112 + ) + } + } +} +fn goto_tokencbopen_s115(nonterm_kind: NonTermKind) -> State { + match nonterm_kind { + NonTermKind::Body => State::BodyS116, NonTermKind::FunctionRead => State::FunctionReadS9, NonTermKind::FunctionWrite => State::FunctionWriteS10, NonTermKind::Expressions => State::ExpressionsS11, @@ -2898,7 +2921,7 @@ fn goto_tokencbopen_s114(nonterm_kind: NonTermKind) -> State { _ => { panic!( "Invalid terminal kind ({nonterm_kind:?}) for GOTO state ({:?}).", - State::TokenCBOpenS114 + State::TokenCBOpenS115 ) } } @@ -3020,11 +3043,12 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti action_body_s109, action_tokencbclose_s110, action_tokencbclose_s111, - action_tokenelse_s112, - action_elsestatement_s113, - action_tokencbopen_s114, - action_body_s115, - action_tokencbclose_s116, + action_dummyelse_s112, + action_tokenelse_s113, + action_elsestatement_s114, + action_tokencbopen_s115, + action_body_s116, + action_tokencbclose_s117, ], gotos: [ goto_aug_s0, @@ -3139,9 +3163,10 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti goto_invalid, goto_invalid, goto_tokencbclose_s111, + goto_dummyelse_s112, goto_invalid, goto_invalid, - goto_tokencbopen_s114, + goto_tokencbopen_s115, goto_invalid, goto_invalid, ], @@ -5610,6 +5635,28 @@ pub(crate) static PARSER_DEFINITION: RulesParserDefinition = RulesParserDefiniti None, None, ], + [ + Some((TK::TokenElse, false)), + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + None, + ], [ 
Some((TK::TokenCBOpen, false)), None, diff --git a/src/grammar/rules.rustemo b/src/grammar/rules.rustemo index 94cd247..e513817 100644 --- a/src/grammar/rules.rustemo +++ b/src/grammar/rules.rustemo @@ -41,7 +41,9 @@ DataType: TokenInt {IntType} WhileLoop: TokenWhile TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose {While}; IfStatement: TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose {IfStatement} - | TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose ElseStatement {IfElseStatement}; + | TokenIf TokenParOpen Conjunction TokenParClose TokenCBOpen Body TokenCBClose DummyElse ElseStatement {IfElseStatement}; + +DummyElse: EMPTY; ElseStatement: TokenElse TokenCBOpen Body TokenCBClose {ElseStatement}; diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 75571d1..f26958d 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -740,10 +740,10 @@ pub fn while_loop_while( } } -/// Parses the rule `: TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose` +/// Parses the rule ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose` #[expect(clippy::too_many_arguments)] pub fn if_statement_if_statement( - _ctx: &Ctx, + ctx: &Ctx, token_if: TokenIf, token_par_open: TokenParOpen, conjunction: Conjunction, @@ -756,9 +756,21 @@ pub fn if_statement_if_statement( compiler_context.write_to_parser_file(&format!( " -> {token_if} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" )); + let Some(conjunction_node) = compiler_context.ast.conjunction_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "Conjunction stack was empty when parsing ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose`" + .into(), + ), + 0, + true, + compiler_context, + ) + }; compiler_context.ast.create_node( AstAction::If, - AstPtr::Conjunction.into(), + conjunction_node.into(), AstPtr::Body.into(), AstPtr::If, ); @@ -773,10 +785,10 @@ pub fn if_statement_if_statement( }) } -/// Parses the rule `: TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose ` +/// Parses the rule ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose ` #[expect(clippy::too_many_arguments)] pub fn if_statement_if_statement_else_statement( - _ctx: &Ctx, + ctx: &Ctx, token_if: TokenIf, token_par_open: TokenParOpen, conjunction: Conjunction, @@ -787,7 +799,45 @@ pub fn if_statement_if_statement_else_statement( else_statement: ElseStatement, compiler_context: &mut CompilerContext, ) -> IfStatement { - compiler_context.write_to_parser_file(": TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose "); + compiler_context.write_to_parser_file(&format!( + " -> {token_if} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose} " + )); + let Some(if_true_body) = compiler_context.ast.if_body_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "IfBody stack was empty when parsing ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + let else_node = compiler_context.ast.create_node( + AstAction::Else, + if_true_body.into(), + AstPtr::Body.into(), + AstPtr::If, + ); + let Some(conjunction_node) = compiler_context.ast.conjunction_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "Conjunction stack was empty when parsing ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose `" + .into(), + ), + 0, + 
true, + compiler_context, + ) + }; + compiler_context.ast.create_node( + AstAction::If, + conjunction_node.into(), + else_node.into(), + AstPtr::If, + ); IfStatement::IfStatementElseStatement(IfStatementElseStatement { token_if, token_par_open, @@ -800,7 +850,15 @@ pub fn if_statement_if_statement_else_statement( }) } -/// Parses the rule `: TokenElse TokenCBOpen TokenCBClose` +/// Parses the rule ` -> EMPTY` +pub fn dummy_else_empty(_ctx: &Ctx, compiler_context: &mut CompilerContext) -> DummyElse { + compiler_context.write_to_parser_file(" -> EMPTY"); + let body_node = compiler_context.ast.get_node_from_ptr(AstPtr::Body); + compiler_context.ast.if_body_stack.push(body_node); + None +} + +/// Parses the rule ` -> TokenElse TokenCBOpen TokenCBClose` pub fn else_statement_else_statement( _ctx: &Ctx, token_else: TokenElse, @@ -983,12 +1041,30 @@ pub fn conjunction_conjunction_and( compiler_context, ) }; - compiler_context.ast.create_node( + + let Some(conjunction_node) = compiler_context.ast.conjunction_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "Conjunction stack was empty when parsing ` -> \"and\" `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + let conjunction_node = compiler_context.ast.create_node( AstAction::And, boolean_expression_node.into(), - AstPtr::Conjunction.into(), + conjunction_node.into(), AstPtr::Conjunction, ); + compiler_context + .ast + .conjunction_stack + .push(conjunction_node); + Conjunction::ConjunctionAnd(ConjunctionAnd { boolean_expression, token_and, @@ -1019,12 +1095,28 @@ pub fn conjunction_conjunction_or( compiler_context, ) }; - compiler_context.ast.create_node( + let Some(conjunction_node) = compiler_context.ast.conjunction_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "Conjunction stack was empty when parsing ` -> \"or\" `" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + let conjunction_node = compiler_context.ast.create_node( AstAction::Or, boolean_expression_node.into(), - AstPtr::Conjunction.into(), + conjunction_node.into(), AstPtr::Conjunction, ); + compiler_context + .ast + .conjunction_stack + .push(conjunction_node); Conjunction::ConjunctionOr(ConjunctionOr { boolean_expression, token_or, @@ -1051,10 +1143,10 @@ pub fn conjunction_conjunction_boolean_expression( compiler_context, ) }; - compiler_context .ast - .assign_node_to_ptr(boolean_expression_node.into(), AstPtr::Conjunction); + .conjunction_stack + .push(boolean_expression_node); Conjunction::ConjunctionBooleanExpression(boolean_expression) } diff --git a/src/grammar/rules_builder.rs b/src/grammar/rules_builder.rs index 287b3ef..ff75f29 100644 --- a/src/grammar/rules_builder.rs +++ b/src/grammar/rules_builder.rs @@ -81,6 +81,7 @@ pub enum NonTerminal { Factor(rules_actions::Factor), DummyAE(rules_actions::DummyAE), DummyT(rules_actions::DummyT), + DummyElse(rules_actions::DummyElse), } impl Builder for Compiler { @@ -879,7 +880,7 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo ProdKind::IfStatementIfElseStatement => { let mut i = compiler_context .res_stack - .split_off(stack_len - 8usize) + .split_off(stack_len - 9usize) .into_iter(); match ( i.next().unwrap(), @@ -890,6 +891,7 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo i.next().unwrap(), i.next().unwrap(), i.next().unwrap(), + i.next().unwrap(), ) { ( Symbol::Terminal(Terminal::TokenIf(p0)), @@ -899,6 +901,7 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, 
State, ProdKind, TokenKind> fo Symbol::Terminal(Terminal::TokenCBOpen(p4)), Symbol::NonTerminal(NonTerminal::Body(p5)), Symbol::Terminal(Terminal::TokenCBClose(p6)), + Symbol::NonTerminal(NonTerminal::DummyElse(_)), Symbol::NonTerminal(NonTerminal::ElseStatement(p7)), ) => NonTerminal::IfStatement( rules_actions::if_statement_if_statement_else_statement( @@ -917,7 +920,10 @@ impl<'i> LRBuilder<'i, Input, Context<'i, Input>, State, ProdKind, TokenKind> fo _ => panic!("Invalid symbol parse stack data."), } } - + ProdKind::DummyElseP1 => NonTerminal::DummyElse(rules_actions::dummy_else_empty( + context, + &mut compiler_context, + )), ProdKind::ElseStatementElseStatement => { let mut i = compiler_context .res_stack diff --git a/src/grammar/types.rs b/src/grammar/types.rs index 2abe853..2d38957 100644 --- a/src/grammar/types.rs +++ b/src/grammar/types.rs @@ -720,6 +720,9 @@ pub type DummyAE = Option; /// Type declaration for the `` non terminal represented by an `Option` because it should alway be EMPTY pub type DummyT = Option; +/// Type declaration for the `` non terminal represented by an `Option` because it should always be EMPTY +pub type DummyElse = Option; + impl Display for TokenKind { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { let text = match self { From e597648e2e96e55c36b665754722978c704d1eaf Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sun, 26 Oct 2025 22:31:11 -0300 Subject: [PATCH 13/25] fix: reset body pointer on empty body --- src/grammar/rules_actions.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index f26958d..a74b26d 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -393,6 +393,10 @@ pub fn body_body_expressions( /// Parses the rule ` -> EMPTY` pub fn body_body_empty(_ctx: &Ctx, compiler_context: &mut CompilerContext) -> Body { compiler_context.write_to_parser_file(" -> EMPTY"); + let leaf = Node::new_leaf(NodeValue::Action(AstAction::Noop)); + compiler_context + .ast + .assign_node_to_ptr(Rc::new(leaf).into(), AstPtr::Body); None } From 16cf44d6ebec455114ef108dd68995c35d657191 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sun, 26 Oct 2025 22:54:15 -0300 Subject: [PATCH 14/25] feat: AST generation for while statements --- src/compiler/ast.rs | 3 +++ src/grammar/rules_actions.rs | 21 +++++++++++++++++++++ 2 files changed, 24 insertions(+) diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index 21f7bc2..43a2344 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -46,6 +46,7 @@ pub enum AstPtr { If, Not, IsZero, + While, } pub enum AstNodeRef { @@ -123,6 +124,7 @@ pub enum AstAction { NE, LT, LTE, + While, Noop, } @@ -146,6 +148,7 @@ impl Display for AstAction { Self::Or => write!(f, "OR"), Self::Not => write!(f, "NOT"), Self::IsZero => write!(f, "ISZERO"), + Self::While => write!(f, "WHILE"), Self::Noop => write!(f, "NOOP"), } } diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index a74b26d..7419f30 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -621,6 +621,9 @@ pub fn statement_statement_while( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::While.into(), AstPtr::Statement); Statement::StatementWhile(while_loop) } @@ -733,6 +736,24 @@ pub fn while_loop_while( 
compiler_context.write_to_parser_file(&format!( " -> {token_while} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" )); + let Some(conjunction_node) = compiler_context.ast.conjunction_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "Conjunction stack was empty when parsing ` -> TokenWhile TokenParOpen TokenParClose TokenCBOpen TokenCBClose`" + .into(), + ), + 0, + true, + compiler_context, + ) + }; + compiler_context.ast.create_node( + AstAction::While, + conjunction_node.into(), + AstPtr::Body.into(), + AstPtr::While, + ); WhileLoop { token_while, token_par_open, From 6a181085f2046a062a75bb80ee0740cedb7ddc50 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sun, 26 Oct 2025 22:55:18 -0300 Subject: [PATCH 15/25] fix: var name in while_loop_while --- src/grammar/rules_actions.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 7419f30..ea4b41f 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -723,7 +723,7 @@ pub fn data_type_string_type( /// Parses the rule ` -> TokenWhile TokenParOpen TokenParClose TokenCBOpen TokenCBClose` #[expect(clippy::too_many_arguments)] pub fn while_loop_while( - _ctx: &Ctx, + ctx: &Ctx, token_while: TokenWhile, token_par_open: TokenParOpen, conjunction: Conjunction, From aabb950ceecdd87da0e3c6c0f7f9e9479f783f92 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sun, 26 Oct 2025 23:08:46 -0300 Subject: [PATCH 16/25] feat: AST generation for write io function --- src/compiler/ast.rs | 3 +++ src/grammar/rules_actions.rs | 13 +++++++++++++ 2 files changed, 16 insertions(+) diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index 43a2344..f6e7e15 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -47,6 +47,7 @@ pub enum AstPtr { Not, IsZero, While, + Write, } pub enum AstNodeRef { @@ -125,6 +126,7 @@ pub enum AstAction { LT, LTE, While, + Write, Noop, } @@ -149,6 +151,7 @@ impl Display for AstAction { Self::Not => write!(f, "NOT"), Self::IsZero => write!(f, "ISZERO"), Self::While => write!(f, "WHILE"), + Self::Write => write!(f, "WRITE"), Self::Noop => write!(f, "NOOP"), } } diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index ea4b41f..6d47c63 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -450,6 +450,13 @@ pub fn function_write_function_write_call( compiler_context.write_to_parser_file(&format!( " -> {token_write} {token_par_open} {token_par_close}" )); + let leaf = Node::new_leaf(NodeValue::Action(AstAction::Noop)); + compiler_context.ast.create_node( + AstAction::Write, + AstPtr::SimpleExpression.into(), + Rc::new(leaf).into(), + AstPtr::Write, + ); FunctionWrite { token_write, token_par_open, @@ -634,6 +641,9 @@ pub fn statement_statement_write( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::Write.into(), AstPtr::Statement); Statement::StatementWrite(function_write) } @@ -1040,6 +1050,9 @@ pub fn simple_expression_simple_expression_string( ) -> SimpleExpression { compiler_context.push_to_symbol_table(token_string_literal.clone().into()); compiler_context.write_to_parser_file(&format!(" -> {token_string_literal}")); + compiler_context + .ast + .create_leaf(token_string_literal.clone(), AstPtr::SimpleExpression); 
SimpleExpression::SimpleExpressionString(token_string_literal) } From 7036d797f8718d909aa2dfee585144396d72e421 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sun, 26 Oct 2025 23:12:46 -0300 Subject: [PATCH 17/25] feat: AST generation for read io function --- src/compiler/ast.rs | 3 +++ src/grammar/rules_actions.rs | 11 +++++++++++ 2 files changed, 14 insertions(+) diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index f6e7e15..54ae462 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -47,6 +47,7 @@ pub enum AstPtr { Not, IsZero, While, + Read, Write, } @@ -126,6 +127,7 @@ pub enum AstAction { LT, LTE, While, + Read, Write, Noop, } @@ -151,6 +153,7 @@ impl Display for AstAction { Self::Not => write!(f, "NOT"), Self::IsZero => write!(f, "ISZERO"), Self::While => write!(f, "WHILE"), + Self::Read => write!(f, "READ"), Self::Write => write!(f, "WRITE"), Self::Noop => write!(f, "NOOP"), } diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 6d47c63..ed6f79c 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -430,6 +430,14 @@ pub fn function_read_function_read_call( compiler_context.write_to_parser_file(&format!( " -> {token_read} {token_par_open} {token_id} {token_par_close}" )); + let right_child = Node::new_leaf(NodeValue::Action(AstAction::Noop)); + let left_child = Node::new_leaf(NodeValue::Value(token_id.clone())); + compiler_context.ast.create_node( + AstAction::Read, + Rc::new(left_child).into(), + Rc::new(right_child).into(), + AstPtr::Read, + ); FunctionRead { token_read, token_par_open, @@ -654,6 +662,9 @@ pub fn statement_statement_read( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::Read.into(), AstPtr::Statement); Statement::StatementRead(function_read) } From 7a1021dd6386d230c80c0f5744934cfb7e9d7e77 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sun, 26 Oct 2025 23:35:32 -0300 Subject: [PATCH 18/25] feat: AST support multiple statements in body --- src/compiler/ast.rs | 4 ++++ src/grammar/rules_actions.rs | 39 ++++++++++++++++++++++++++++++------ src/main.rs | 5 ++++- 3 files changed, 41 insertions(+), 7 deletions(-) diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index 54ae462..24bed60 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -18,6 +18,7 @@ pub struct Ast { pub boolean_expression_stack: Vec>, pub if_body_stack: Vec>, pub conjunction_stack: Vec>, + pub statement_stack: Vec>, } impl Debug for Ast { @@ -129,6 +130,7 @@ pub enum AstAction { While, Read, Write, + S, Noop, } @@ -155,6 +157,7 @@ impl Display for AstAction { Self::While => write!(f, "WHILE"), Self::Read => write!(f, "READ"), Self::Write => write!(f, "WRITE"), + Self::S => write!(f, "S"), Self::Noop => write!(f, "NOOP"), } } @@ -184,6 +187,7 @@ impl Default for Ast { boolean_expression_stack: Vec::new(), if_body_stack: Vec::new(), conjunction_stack: Vec::new(), + statement_stack: Vec::new(), } } } diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index ed6f79c..b058de0 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -326,6 +326,9 @@ pub fn program_program_with_main( compiler_context.write_to_parser_file(&format!( " -> {token_id} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" )); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::Body.into(), 
AstPtr::Program); Program::ProgramWithMain(ProgramWithMain { token_id, token_par_open, @@ -343,6 +346,9 @@ pub fn program_program_only_body( compiler_context: &mut CompilerContext, ) -> Program { compiler_context.write_to_parser_file(" -> "); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::Body.into(), AstPtr::Program); Program::ProgramOnlyBody(body) } @@ -356,6 +362,9 @@ pub fn body_body_init_expressions( ) -> Body { compiler_context .write_to_parser_file(&format!(" -> {token_init} ")); + compiler_context + .ast + .assign_node_to_ptr(AstPtr::Expressions.into(), AstPtr::Body); Some(BodyNoO::BodyInitExpressions(BodyInitExpressions { token_init, init_body, @@ -583,9 +592,10 @@ pub fn expressions_expression_single( compiler_context: &mut CompilerContext, ) -> Expressions { compiler_context.write_to_parser_file(" -> "); + let statement_node = compiler_context.ast.statement_stack.pop().unwrap(); compiler_context .ast - .assign_node_to_ptr(AstPtr::Statement.into(), AstPtr::Expressions); + .assign_node_to_ptr(statement_node.into(), AstPtr::Expressions); Expressions::ExpressionSingle(statement) } @@ -597,6 +607,13 @@ pub fn expressions_expression_recursive( compiler_context: &mut CompilerContext, ) -> Expressions { compiler_context.write_to_parser_file(" -> "); + let statement_node = compiler_context.ast.statement_stack.pop().unwrap(); + compiler_context.ast.create_node( + AstAction::S, + AstPtr::Expressions.into(), + statement_node.into(), + AstPtr::Expressions, + ); Expressions::ExpressionRecursive(ExpressionRecursive { statement, expressions: Box::new(expressions), @@ -610,9 +627,11 @@ pub fn statement_statement_assignment( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); + let assignment_node = compiler_context.ast.get_node_from_ptr(AstPtr::Assignment); compiler_context .ast - .assign_node_to_ptr(AstPtr::Assignment.into(), AstPtr::Statement); + .assign_node_to_ptr(assignment_node.clone().into(), AstPtr::Statement); + compiler_context.ast.statement_stack.push(assignment_node); Statement::StatementAssignment(assignment) } @@ -623,9 +642,11 @@ pub fn statement_statement_if_statement( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); + let if_node = compiler_context.ast.get_node_from_ptr(AstPtr::If); compiler_context .ast - .assign_node_to_ptr(AstPtr::If.into(), AstPtr::Statement); + .assign_node_to_ptr(if_node.clone().into(), AstPtr::Statement); + compiler_context.ast.statement_stack.push(if_node); Statement::StatementIfStatement(if_statement) } @@ -636,9 +657,11 @@ pub fn statement_statement_while( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); + let while_node = compiler_context.ast.get_node_from_ptr(AstPtr::While); compiler_context .ast - .assign_node_to_ptr(AstPtr::While.into(), AstPtr::Statement); + .assign_node_to_ptr(while_node.clone().into(), AstPtr::Statement); + compiler_context.ast.statement_stack.push(while_node); Statement::StatementWhile(while_loop) } @@ -649,9 +672,11 @@ pub fn statement_statement_write( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); + let write_node = compiler_context.ast.get_node_from_ptr(AstPtr::Write); compiler_context .ast - .assign_node_to_ptr(AstPtr::Write.into(), AstPtr::Statement); + .assign_node_to_ptr(write_node.clone().into(), AstPtr::Statement); + compiler_context.ast.statement_stack.push(write_node); 
Statement::StatementWrite(function_write) } @@ -662,9 +687,11 @@ pub fn statement_statement_read( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); + let read_node = compiler_context.ast.get_node_from_ptr(AstPtr::Read); compiler_context .ast - .assign_node_to_ptr(AstPtr::Read.into(), AstPtr::Statement); + .assign_node_to_ptr(read_node.clone().into(), AstPtr::Statement); + compiler_context.ast.statement_stack.push(read_node); Statement::StatementRead(function_read) } diff --git a/src/main.rs b/src/main.rs index 543cae3..4416b82 100644 --- a/src/main.rs +++ b/src/main.rs @@ -26,7 +26,10 @@ fn main() -> Result<(), CompilerError> { println!("{rules}"); - compiler.inner.borrow_mut().create_ast_graph(AstPtr::Body)?; + compiler + .inner + .borrow_mut() + .create_ast_graph(AstPtr::Program)?; Ok(()) } From 0fed07189560ae91df98b3dfd385dd2659989d03 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Sun, 26 Oct 2025 23:41:17 -0300 Subject: [PATCH 19/25] fix: handle errors for empty statement stack --- src/grammar/rules_actions.rs | 28 ++++++++++++++++++++++++---- 1 file changed, 24 insertions(+), 4 deletions(-) diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index b058de0..9e1eb1d 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -587,12 +587,22 @@ pub fn var_declaration_var_declaration_recursive( /// Parses the rule ` -> ` pub fn expressions_expression_single( - _ctx: &Ctx, + ctx: &Ctx, statement: Statement, compiler_context: &mut CompilerContext, ) -> Expressions { compiler_context.write_to_parser_file(" -> "); - let statement_node = compiler_context.ast.statement_stack.pop().unwrap(); + let Some(statement_node) = compiler_context.ast.statement_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "Statement stack was empty when parsing ` -> `".into(), + ), + 0, + true, + compiler_context, + ) + }; compiler_context .ast .assign_node_to_ptr(statement_node.into(), AstPtr::Expressions); @@ -601,13 +611,23 @@ pub fn expressions_expression_single( /// Parses the rule ` -> ` pub fn expressions_expression_recursive( - _ctx: &Ctx, + ctx: &Ctx, statement: Statement, expressions: Expressions, compiler_context: &mut CompilerContext, ) -> Expressions { compiler_context.write_to_parser_file(" -> "); - let statement_node = compiler_context.ast.statement_stack.pop().unwrap(); + let Some(statement_node) = compiler_context.ast.statement_stack.pop() else { + log_error_and_exit( + ctx.range(), + CompilerError::Internal( + "Statement stack was empty when parsing ` -> `".into(), + ), + 0, + true, + compiler_context, + ) + }; compiler_context.ast.create_node( AstAction::S, AstPtr::Expressions.into(), From 70af0d454d7eaeb0802ef049daa4272365d50d99 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Mon, 27 Oct 2025 21:51:59 -0300 Subject: [PATCH 20/25] feat: AST support for convDate --- src/compiler/ast.rs | 1 + src/grammar/rules_actions.rs | 47 ++++++++++++++++++++++++++++++++++++ 2 files changed, 48 insertions(+) diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index 24bed60..3e34385 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -50,6 +50,7 @@ pub enum AstPtr { While, Read, Write, + ConvDate, } pub enum AstNodeRef { diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 9e1eb1d..85b231f 100644 --- a/src/grammar/rules_actions.rs +++ 
b/src/grammar/rules_actions.rs @@ -514,6 +514,46 @@ pub fn function_conv_date_function_conv_date_variable_call( compiler_context.write_to_parser_file(&format!( " -> {token_conv_date} {token_par_open} {token_date} {token_par_close}" )); + let thousand_leaf = Rc::new(Node::new_leaf(NodeValue::Value("1000".into()))); + let hundread_leaf = Rc::new(Node::new_leaf(NodeValue::Value("100".into()))); + let one_leaf = Rc::new(Node::new_leaf(NodeValue::Value("1".into()))); + + let year_leaf = Rc::new(Node::new_leaf(NodeValue::Value(token_date.year.clone()))); + let month_leaf = Rc::new(Node::new_leaf(NodeValue::Value(token_date.month.clone()))); + let day_leaf = Rc::new(Node::new_leaf(NodeValue::Value(token_date.day.clone()))); + + let year_node = compiler_context.ast.create_node( + AstAction::Mult, + year_leaf.into(), + thousand_leaf.into(), + AstPtr::ConvDate, + ); + let month_node = compiler_context.ast.create_node( + AstAction::Mult, + month_leaf.into(), + hundread_leaf.into(), + AstPtr::ConvDate, + ); + let day_node = compiler_context.ast.create_node( + AstAction::Mult, + day_leaf.into(), + one_leaf.into(), + AstPtr::ConvDate, + ); + + let sum_year_month_node = compiler_context.ast.create_node( + AstAction::Plus, + year_node.into(), + month_node.into(), + AstPtr::ConvDate, + ); + compiler_context.ast.create_node( + AstAction::Plus, + sum_year_month_node.into(), + day_node.into(), + AstPtr::ConvDate, + ); + FunctionConvDate { token_conv_date, token_par_open, @@ -751,6 +791,13 @@ pub fn assignment_assignment_conv_date( compiler_context.write_to_parser_file(&format!( " -> {token_id} {token_assign} " )); + let leaf = Rc::new(Node::new_leaf(NodeValue::Value(token_id.clone()))); + compiler_context.ast.create_node( + AstAction::Assign, + leaf.into(), + AstPtr::ConvDate.into(), + AstPtr::Assignment, + ); Assignment::AssignmentConvDate(ConvDate { token_id, token_assign, From 78c44188a0d2dad79b8c647c460d63256c0357ad Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Mon, 27 Oct 2025 22:11:46 -0300 Subject: [PATCH 21/25] feat: AST support for isZero --- src/grammar/rules_actions.rs | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 85b231f..24186e1 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -494,6 +494,13 @@ pub fn function_is_zero_function_is_zero_call( compiler_context.write_to_parser_file(&format!( " -> {token_is_zero} {token_par_open} {token_par_close}" )); + let zero_leaf = Rc::new(Node::new_leaf(NodeValue::Value("0".into()))); + compiler_context.ast.create_node( + AstAction::EQ, + AstPtr::ArithmeticExpression.into(), + zero_leaf.into(), + AstPtr::IsZero, + ); FunctionIsZero { token_is_zero, token_par_open, From 550851814cc53d4b0571ea76b085d28ac2a93b94 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Mon, 27 Oct 2025 22:45:41 -0300 Subject: [PATCH 22/25] feat: AST generation for Number --- src/compiler/ast.rs | 2 ++ src/grammar/rules_actions.rs | 24 ++++++++++++++++++++++++ 2 files changed, 26 insertions(+) diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index 3e34385..a4958c5 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -132,6 +132,7 @@ pub enum AstAction { Read, Write, S, + Negative, Noop, } @@ -159,6 +160,7 @@ impl Display for AstAction { Self::Read => write!(f, "READ"), Self::Write => write!(f, "WRITE"), Self::S => write!(f, "S"), + Self::Negative => write!(f, "NEG"), Self::Noop => 
write!(f, "NOOP"), } } diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 24186e1..470593c 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -1442,6 +1442,9 @@ pub fn number_number_float( ) -> Number { compiler_context.push_to_symbol_table(token_float_literal.clone().into()); compiler_context.write_to_parser_file(&format!(" -> {}", token_float_literal.original)); + compiler_context + .ast + .create_leaf(token_float_literal.original.clone(), AstPtr::Number); Number::NumberFloat(token_float_literal) } @@ -1455,6 +1458,16 @@ pub fn number_number_negative_int( let value: i64 = format!("{token_sub}{token_int_literal}").parse().unwrap(); compiler_context.push_to_symbol_table(value.into()); compiler_context.write_to_parser_file(&format!(" -> {token_sub} {token_int_literal}")); + let leaf = Rc::new(Node::new_leaf(NodeValue::Value( + token_int_literal.to_string(), + ))); + let noop = Rc::new(Node::new_leaf(NodeValue::Action(AstAction::Noop))); + compiler_context.ast.create_node( + AstAction::Negative, + leaf.into(), + noop.into(), + AstPtr::Number, + ); Number::NumberInt(value) } @@ -1472,6 +1485,17 @@ pub fn number_number_negative_float( " -> {token_sub} {}", token_float_literal.original )); + let leaf = Rc::new(Node::new_leaf(NodeValue::Value( + token_float_literal.original.clone(), + ))); + let noop = Rc::new(Node::new_leaf(NodeValue::Action(AstAction::Noop))); + compiler_context.ast.create_node( + AstAction::Negative, + leaf.into(), + noop.into(), + AstPtr::Number, + ); + Number::NumberFloat(token_float_literal) } From efb80e30686a12a2a2450e04cc8b806c5c7a0091 Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Mon, 27 Oct 2025 23:28:46 -0300 Subject: [PATCH 23/25] refactor: cleanup AST code --- src/compiler/ast.rs | 38 +-- src/grammar/rules_actions.rs | 480 ++++++++++++++++++++--------------- 2 files changed, 280 insertions(+), 238 deletions(-) diff --git a/src/compiler/ast.rs b/src/compiler/ast.rs index a4958c5..95beb94 100644 --- a/src/compiler/ast.rs +++ b/src/compiler/ast.rs @@ -11,8 +11,8 @@ use std::{ pub struct Ast { tree: [Rc; mem::variant_count::()], - pub stack_t: Vec>, - pub stack_e: Vec>, + pub term_stack: Vec>, + pub expression_stack: Vec>, pub comparision_op_stack: Vec, pub comparision_expressions_stack: Vec>, pub boolean_expression_stack: Vec>, @@ -23,8 +23,8 @@ pub struct Ast { impl Debug for Ast { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - writeln!(f, "{:?}", self.stack_t)?; - writeln!(f, "{:?}", self.stack_e)?; + writeln!(f, "{:?}", self.term_stack)?; + writeln!(f, "{:?}", self.expression_stack)?; writeln!(f, "{:?}", self.comparision_op_stack)?; writeln!(f, "{:?}", self.comparision_expressions_stack) } @@ -183,8 +183,8 @@ impl Default for Ast { fn default() -> Self { Self { tree: array::from_fn(|_| Rc::new(Node::new_leaf(NodeValue::Value("".to_string())))), - stack_e: Vec::new(), - stack_t: Vec::new(), + expression_stack: Vec::new(), + term_stack: Vec::new(), comparision_op_stack: Vec::new(), comparision_expressions_stack: Vec::new(), boolean_expression_stack: Vec::new(), @@ -288,32 +288,6 @@ impl Ast { Ok(node_count) } - pub fn push_t_stack(&mut self, node: AstNodeRef) { - let node = match node { - AstNodeRef::Node(node) => node, - AstNodeRef::Ptr(ptr) => self.tree[ptr as usize].clone(), - }; - - self.stack_t.push(node); - } - - pub fn pop_t_stack(&mut self) -> Option> { - self.stack_t.pop() - } - - pub fn push_e_stack(&mut self, node: AstNodeRef) { - 
let node = match node { - AstNodeRef::Node(node) => node, - AstNodeRef::Ptr(ptr) => self.tree[ptr as usize].clone(), - }; - - self.stack_e.push(node); - } - - pub fn pop_e_stack(&mut self) -> Option> { - self.stack_e.pop() - } - pub fn get_node_from_ptr(&self, from: AstPtr) -> Rc { self.tree[from as usize].clone() } diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 470593c..045ff39 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -326,9 +326,10 @@ pub fn program_program_with_main( compiler_context.write_to_parser_file(&format!( " -> {token_id} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" )); - compiler_context - .ast - .assign_node_to_ptr(AstPtr::Body.into(), AstPtr::Program); + + let ast = &mut compiler_context.ast; + ast.assign_node_to_ptr(AstPtr::Body.into(), AstPtr::Program); + Program::ProgramWithMain(ProgramWithMain { token_id, token_par_open, @@ -346,9 +347,10 @@ pub fn program_program_only_body( compiler_context: &mut CompilerContext, ) -> Program { compiler_context.write_to_parser_file(" -> "); - compiler_context - .ast - .assign_node_to_ptr(AstPtr::Body.into(), AstPtr::Program); + + let ast = &mut compiler_context.ast; + ast.assign_node_to_ptr(AstPtr::Body.into(), AstPtr::Program); + Program::ProgramOnlyBody(body) } @@ -362,9 +364,10 @@ pub fn body_body_init_expressions( ) -> Body { compiler_context .write_to_parser_file(&format!(" -> {token_init} ")); - compiler_context - .ast - .assign_node_to_ptr(AstPtr::Expressions.into(), AstPtr::Body); + + let ast = &mut compiler_context.ast; + ast.assign_node_to_ptr(AstPtr::Expressions.into(), AstPtr::Body); + Some(BodyNoO::BodyInitExpressions(BodyInitExpressions { token_init, init_body, @@ -393,19 +396,21 @@ pub fn body_body_expressions( compiler_context: &mut CompilerContext, ) -> Body { compiler_context.write_to_parser_file(" -> "); - compiler_context - .ast - .assign_node_to_ptr(AstPtr::Expressions.into(), AstPtr::Body); + + let ast = &mut compiler_context.ast; + ast.assign_node_to_ptr(AstPtr::Expressions.into(), AstPtr::Body); + Some(BodyNoO::BodyExpressions(expressions)) } /// Parses the rule ` -> EMPTY` pub fn body_body_empty(_ctx: &Ctx, compiler_context: &mut CompilerContext) -> Body { compiler_context.write_to_parser_file(" -> EMPTY"); - let leaf = Node::new_leaf(NodeValue::Action(AstAction::Noop)); - compiler_context - .ast - .assign_node_to_ptr(Rc::new(leaf).into(), AstPtr::Body); + + let ast = &mut compiler_context.ast; + let leaf = Rc::new(Node::new_leaf(NodeValue::Action(AstAction::Noop))); + ast.assign_node_to_ptr(leaf.into(), AstPtr::Body); + None } @@ -439,14 +444,17 @@ pub fn function_read_function_read_call( compiler_context.write_to_parser_file(&format!( " -> {token_read} {token_par_open} {token_id} {token_par_close}" )); - let right_child = Node::new_leaf(NodeValue::Action(AstAction::Noop)); - let left_child = Node::new_leaf(NodeValue::Value(token_id.clone())); - compiler_context.ast.create_node( + + let ast = &mut compiler_context.ast; + let right_child = Rc::new(Node::new_leaf(NodeValue::Action(AstAction::Noop))); + let left_child = Rc::new(Node::new_leaf(NodeValue::Value(token_id.clone()))); + ast.create_node( AstAction::Read, - Rc::new(left_child).into(), - Rc::new(right_child).into(), + left_child.into(), + right_child.into(), AstPtr::Read, ); + FunctionRead { token_read, token_par_open, @@ -467,13 +475,16 @@ pub fn function_write_function_write_call( compiler_context.write_to_parser_file(&format!( " -> {token_write} {token_par_open} 
{token_par_close}" )); - let leaf = Node::new_leaf(NodeValue::Action(AstAction::Noop)); - compiler_context.ast.create_node( + + let ast = &mut compiler_context.ast; + let leaf = Rc::new(Node::new_leaf(NodeValue::Action(AstAction::Noop))); + ast.create_node( AstAction::Write, AstPtr::SimpleExpression.into(), - Rc::new(leaf).into(), + leaf.into(), AstPtr::Write, ); + FunctionWrite { token_write, token_par_open, @@ -521,6 +532,9 @@ pub fn function_conv_date_function_conv_date_variable_call( compiler_context.write_to_parser_file(&format!( " -> {token_conv_date} {token_par_open} {token_date} {token_par_close}" )); + + let ast = &mut compiler_context.ast; + let thousand_leaf = Rc::new(Node::new_leaf(NodeValue::Value("1000".into()))); let hundread_leaf = Rc::new(Node::new_leaf(NodeValue::Value("100".into()))); let one_leaf = Rc::new(Node::new_leaf(NodeValue::Value("1".into()))); @@ -529,32 +543,32 @@ pub fn function_conv_date_function_conv_date_variable_call( let month_leaf = Rc::new(Node::new_leaf(NodeValue::Value(token_date.month.clone()))); let day_leaf = Rc::new(Node::new_leaf(NodeValue::Value(token_date.day.clone()))); - let year_node = compiler_context.ast.create_node( + let year_node = ast.create_node( AstAction::Mult, year_leaf.into(), thousand_leaf.into(), AstPtr::ConvDate, ); - let month_node = compiler_context.ast.create_node( + let month_node = ast.create_node( AstAction::Mult, month_leaf.into(), hundread_leaf.into(), AstPtr::ConvDate, ); - let day_node = compiler_context.ast.create_node( + let day_node = ast.create_node( AstAction::Mult, day_leaf.into(), one_leaf.into(), AstPtr::ConvDate, ); - let sum_year_month_node = compiler_context.ast.create_node( + let sum_year_month_node = ast.create_node( AstAction::Plus, year_node.into(), month_node.into(), AstPtr::ConvDate, ); - compiler_context.ast.create_node( + ast.create_node( AstAction::Plus, sum_year_month_node.into(), day_node.into(), @@ -639,7 +653,9 @@ pub fn expressions_expression_single( compiler_context: &mut CompilerContext, ) -> Expressions { compiler_context.write_to_parser_file(" -> "); - let Some(statement_node) = compiler_context.ast.statement_stack.pop() else { + + let ast = &mut compiler_context.ast; + let Some(statement_node) = ast.statement_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -650,9 +666,8 @@ pub fn expressions_expression_single( compiler_context, ) }; - compiler_context - .ast - .assign_node_to_ptr(statement_node.into(), AstPtr::Expressions); + ast.assign_node_to_ptr(statement_node.into(), AstPtr::Expressions); + Expressions::ExpressionSingle(statement) } @@ -664,7 +679,9 @@ pub fn expressions_expression_recursive( compiler_context: &mut CompilerContext, ) -> Expressions { compiler_context.write_to_parser_file(" -> "); - let Some(statement_node) = compiler_context.ast.statement_stack.pop() else { + + let ast = &mut compiler_context.ast; + let Some(statement_node) = ast.statement_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -675,12 +692,13 @@ pub fn expressions_expression_recursive( compiler_context, ) }; - compiler_context.ast.create_node( + ast.create_node( AstAction::S, AstPtr::Expressions.into(), statement_node.into(), AstPtr::Expressions, ); + Expressions::ExpressionRecursive(ExpressionRecursive { statement, expressions: Box::new(expressions), @@ -695,10 +713,11 @@ pub fn statement_statement_assignment( ) -> Statement { compiler_context.write_to_parser_file(" -> "); let assignment_node = 
compiler_context.ast.get_node_from_ptr(AstPtr::Assignment); - compiler_context - .ast - .assign_node_to_ptr(assignment_node.clone().into(), AstPtr::Statement); - compiler_context.ast.statement_stack.push(assignment_node); + + let ast = &mut compiler_context.ast; + ast.assign_node_to_ptr(assignment_node.clone().into(), AstPtr::Statement); + ast.statement_stack.push(assignment_node); + Statement::StatementAssignment(assignment) } @@ -709,11 +728,12 @@ pub fn statement_statement_if_statement( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); - let if_node = compiler_context.ast.get_node_from_ptr(AstPtr::If); - compiler_context - .ast - .assign_node_to_ptr(if_node.clone().into(), AstPtr::Statement); - compiler_context.ast.statement_stack.push(if_node); + + let ast = &mut compiler_context.ast; + let if_node = ast.get_node_from_ptr(AstPtr::If); + ast.assign_node_to_ptr(if_node.clone().into(), AstPtr::Statement); + ast.statement_stack.push(if_node); + Statement::StatementIfStatement(if_statement) } @@ -724,11 +744,12 @@ pub fn statement_statement_while( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); - let while_node = compiler_context.ast.get_node_from_ptr(AstPtr::While); - compiler_context - .ast - .assign_node_to_ptr(while_node.clone().into(), AstPtr::Statement); - compiler_context.ast.statement_stack.push(while_node); + + let ast = &mut compiler_context.ast; + let while_node = ast.get_node_from_ptr(AstPtr::While); + ast.assign_node_to_ptr(while_node.clone().into(), AstPtr::Statement); + ast.statement_stack.push(while_node); + Statement::StatementWhile(while_loop) } @@ -739,11 +760,12 @@ pub fn statement_statement_write( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); - let write_node = compiler_context.ast.get_node_from_ptr(AstPtr::Write); - compiler_context - .ast - .assign_node_to_ptr(write_node.clone().into(), AstPtr::Statement); - compiler_context.ast.statement_stack.push(write_node); + + let ast = &mut compiler_context.ast; + let write_node = ast.get_node_from_ptr(AstPtr::Write); + ast.assign_node_to_ptr(write_node.clone().into(), AstPtr::Statement); + ast.statement_stack.push(write_node); + Statement::StatementWrite(function_write) } @@ -754,11 +776,12 @@ pub fn statement_statement_read( compiler_context: &mut CompilerContext, ) -> Statement { compiler_context.write_to_parser_file(" -> "); - let read_node = compiler_context.ast.get_node_from_ptr(AstPtr::Read); - compiler_context - .ast - .assign_node_to_ptr(read_node.clone().into(), AstPtr::Statement); - compiler_context.ast.statement_stack.push(read_node); + + let ast = &mut compiler_context.ast; + let read_node = ast.get_node_from_ptr(AstPtr::Read); + ast.assign_node_to_ptr(read_node.clone().into(), AstPtr::Statement); + ast.statement_stack.push(read_node); + Statement::StatementRead(function_read) } @@ -773,13 +796,16 @@ pub fn assignment_assignment_expression( compiler_context.write_to_parser_file(&format!( " -> {token_id} {token_assign} " )); + + let ast = &mut compiler_context.ast; let leaf = Node::new_leaf(NodeValue::Value(token_id.clone())); - compiler_context.ast.create_node( + ast.create_node( AstAction::Assign, Rc::new(leaf).into(), AstPtr::SimpleExpression.into(), AstPtr::Assignment, ); + Assignment::AssignmentExpression(AssignmentExpression { token_id, token_assign, @@ -798,13 +824,16 @@ pub fn assignment_assignment_conv_date( 
compiler_context.write_to_parser_file(&format!( " -> {token_id} {token_assign} " )); + + let ast = &mut compiler_context.ast; let leaf = Rc::new(Node::new_leaf(NodeValue::Value(token_id.clone()))); - compiler_context.ast.create_node( + ast.create_node( AstAction::Assign, leaf.into(), AstPtr::ConvDate.into(), AstPtr::Assignment, ); + Assignment::AssignmentConvDate(ConvDate { token_id, token_assign, @@ -858,7 +887,9 @@ pub fn while_loop_while( compiler_context.write_to_parser_file(&format!( " -> {token_while} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" )); - let Some(conjunction_node) = compiler_context.ast.conjunction_stack.pop() else { + + let ast = &mut compiler_context.ast; + let Some(conjunction_node) = ast.conjunction_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -870,12 +901,13 @@ pub fn while_loop_while( compiler_context, ) }; - compiler_context.ast.create_node( + ast.create_node( AstAction::While, conjunction_node.into(), AstPtr::Body.into(), AstPtr::While, ); + WhileLoop { token_while, token_par_open, @@ -903,7 +935,9 @@ pub fn if_statement_if_statement( compiler_context.write_to_parser_file(&format!( " -> {token_if} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose}" )); - let Some(conjunction_node) = compiler_context.ast.conjunction_stack.pop() else { + + let ast = &mut compiler_context.ast; + let Some(conjunction_node) = ast.conjunction_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -915,12 +949,13 @@ pub fn if_statement_if_statement( compiler_context, ) }; - compiler_context.ast.create_node( + ast.create_node( AstAction::If, conjunction_node.into(), AstPtr::Body.into(), AstPtr::If, ); + IfStatement::IfStatementIfStatement(IfStatementIfStatement { token_if, token_par_open, @@ -949,7 +984,9 @@ pub fn if_statement_if_statement_else_statement( compiler_context.write_to_parser_file(&format!( " -> {token_if} {token_par_open} {token_par_close} {token_cbopen} {token_cbclose} " )); - let Some(if_true_body) = compiler_context.ast.if_body_stack.pop() else { + + let ast = &mut compiler_context.ast; + let Some(if_true_body) = ast.if_body_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -961,13 +998,13 @@ pub fn if_statement_if_statement_else_statement( compiler_context, ) }; - let else_node = compiler_context.ast.create_node( + let else_node = ast.create_node( AstAction::Else, if_true_body.into(), AstPtr::Body.into(), AstPtr::If, ); - let Some(conjunction_node) = compiler_context.ast.conjunction_stack.pop() else { + let Some(conjunction_node) = ast.conjunction_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -979,12 +1016,13 @@ pub fn if_statement_if_statement_else_statement( compiler_context, ) }; - compiler_context.ast.create_node( + ast.create_node( AstAction::If, conjunction_node.into(), else_node.into(), AstPtr::If, ); + IfStatement::IfStatementElseStatement(IfStatementElseStatement { token_if, token_par_open, @@ -1000,8 +1038,11 @@ pub fn if_statement_if_statement_else_statement( /// Parses the rule ` -> EMPTY` pub fn dummy_else_empty(_ctx: &Ctx, compiler_context: &mut CompilerContext) -> DummyElse { compiler_context.write_to_parser_file(" -> EMPTY"); - let body_node = compiler_context.ast.get_node_from_ptr(AstPtr::Body); - compiler_context.ast.if_body_stack.push(body_node); + + let ast = &mut compiler_context.ast; + let body_node = ast.get_node_from_ptr(AstPtr::Body); + ast.if_body_stack.push(body_node); + None } @@ 
-1036,7 +1077,9 @@ pub fn boolean_expression_boolean_expression_simple_expression( compiler_context.write_to_parser_file( " -> ", ); - let Some(left_child) = compiler_context.ast.comparision_expressions_stack.pop() else { + + let ast = &mut compiler_context.ast; + let Some(left_child) = ast.comparision_expressions_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1048,7 +1091,7 @@ pub fn boolean_expression_boolean_expression_simple_expression( compiler_context, ) }; - let Some(operator) = compiler_context.ast.comparision_op_stack.pop() else { + let Some(operator) = ast.comparision_op_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1060,13 +1103,14 @@ pub fn boolean_expression_boolean_expression_simple_expression( compiler_context, ) }; - let node = compiler_context.ast.create_node( + let node = ast.create_node( operator.into(), left_child.into(), AstPtr::SimpleExpression.into(), AstPtr::BooleanExpression, ); - compiler_context.ast.boolean_expression_stack.push(node); + ast.boolean_expression_stack.push(node); + BooleanExpression::BooleanExpressionSimpleExpression(BooleanExpressionSimpleExpression { simple_expression, comparison_op, @@ -1081,10 +1125,11 @@ pub fn boolean_expression_boolean_expression_true( compiler_context: &mut CompilerContext, ) -> BooleanExpression { compiler_context.write_to_parser_file(&format!(" -> {token_true}")); - let node = compiler_context - .ast - .create_leaf(token_true.clone(), AstPtr::BooleanExpression); - compiler_context.ast.boolean_expression_stack.push(node); + + let ast = &mut compiler_context.ast; + let node = ast.create_leaf(token_true.clone(), AstPtr::BooleanExpression); + ast.boolean_expression_stack.push(node); + BooleanExpression::BooleanExpressionTrue(token_true) } @@ -1095,10 +1140,11 @@ pub fn boolean_expression_boolean_expression_false( compiler_context: &mut CompilerContext, ) -> BooleanExpression { compiler_context.write_to_parser_file(&format!(" -> {token_false}")); - let node = compiler_context - .ast - .create_leaf(token_false.clone(), AstPtr::BooleanExpression); - compiler_context.ast.boolean_expression_stack.push(node); + + let ast = &mut compiler_context.ast; + let node = ast.create_leaf(token_false.clone(), AstPtr::BooleanExpression); + ast.boolean_expression_stack.push(node); + BooleanExpression::BooleanExpressionFalse(token_false) } @@ -1109,10 +1155,11 @@ pub fn boolean_expression_boolean_expression_token_id( compiler_context: &mut CompilerContext, ) -> BooleanExpression { compiler_context.write_to_parser_file(&format!(" -> {token_id}")); - let node = compiler_context - .ast - .create_leaf(token_id.clone(), AstPtr::BooleanExpression); - compiler_context.ast.boolean_expression_stack.push(node); + + let ast = &mut compiler_context.ast; + let node = ast.create_leaf(token_id.clone(), AstPtr::BooleanExpression); + ast.boolean_expression_stack.push(node); + BooleanExpression::BooleanExpressionTokenId(token_id) } @@ -1123,8 +1170,11 @@ pub fn boolean_expression_boolean_expression_not_statement( compiler_context: &mut CompilerContext, ) -> BooleanExpression { compiler_context.write_to_parser_file(" -> "); - let node = compiler_context.ast.get_node_from_ptr(AstPtr::Not); - compiler_context.ast.boolean_expression_stack.push(node); + + let ast = &mut compiler_context.ast; + let node = ast.get_node_from_ptr(AstPtr::Not); + ast.boolean_expression_stack.push(node); + BooleanExpression::BooleanExpressionNotStatement(not_statement) } @@ -1135,8 +1185,11 @@ pub fn 
boolean_expression_boolean_expression_is_zero( compiler_context: &mut CompilerContext, ) -> BooleanExpression { compiler_context.write_to_parser_file(" -> "); - let node = compiler_context.ast.get_node_from_ptr(AstPtr::IsZero); - compiler_context.ast.boolean_expression_stack.push(node); + + let ast = &mut compiler_context.ast; + let node = ast.get_node_from_ptr(AstPtr::IsZero); + ast.boolean_expression_stack.push(node); + BooleanExpression::BooleanExpressionIsZero(function_is_zero) } @@ -1147,10 +1200,13 @@ pub fn simple_expression_simple_expression_arithmetic( compiler_context: &mut CompilerContext, ) -> SimpleExpression { compiler_context.write_to_parser_file(" -> "); - compiler_context.ast.assign_node_to_ptr( + + let ast = &mut compiler_context.ast; + ast.assign_node_to_ptr( AstPtr::ArithmeticExpression.into(), AstPtr::SimpleExpression, ); + SimpleExpression::SimpleExpressionArithmeticExpression(arithmetic_expression) } @@ -1162,9 +1218,10 @@ pub fn simple_expression_simple_expression_string( ) -> SimpleExpression { compiler_context.push_to_symbol_table(token_string_literal.clone().into()); compiler_context.write_to_parser_file(&format!(" -> {token_string_literal}")); - compiler_context - .ast - .create_leaf(token_string_literal.clone(), AstPtr::SimpleExpression); + + let ast = &mut compiler_context.ast; + ast.create_leaf(token_string_literal.clone(), AstPtr::SimpleExpression); + SimpleExpression::SimpleExpressionString(token_string_literal) } @@ -1179,7 +1236,9 @@ pub fn conjunction_conjunction_and( compiler_context.write_to_parser_file(&format!( " -> {token_and} " )); - let Some(boolean_expression_node) = compiler_context.ast.boolean_expression_stack.pop() else { + + let ast = &mut compiler_context.ast; + let Some(boolean_expression_node) = ast.boolean_expression_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1191,8 +1250,7 @@ pub fn conjunction_conjunction_and( compiler_context, ) }; - - let Some(conjunction_node) = compiler_context.ast.conjunction_stack.pop() else { + let Some(conjunction_node) = ast.conjunction_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1204,16 +1262,13 @@ pub fn conjunction_conjunction_and( compiler_context, ) }; - let conjunction_node = compiler_context.ast.create_node( + let conjunction_node = ast.create_node( AstAction::And, boolean_expression_node.into(), conjunction_node.into(), AstPtr::Conjunction, ); - compiler_context - .ast - .conjunction_stack - .push(conjunction_node); + ast.conjunction_stack.push(conjunction_node); Conjunction::ConjunctionAnd(ConjunctionAnd { boolean_expression, @@ -1233,7 +1288,9 @@ pub fn conjunction_conjunction_or( compiler_context.write_to_parser_file(&format!( " -> {token_or} " )); - let Some(boolean_expression_node) = compiler_context.ast.boolean_expression_stack.pop() else { + + let ast = &mut compiler_context.ast; + let Some(boolean_expression_node) = ast.boolean_expression_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1245,7 +1302,7 @@ pub fn conjunction_conjunction_or( compiler_context, ) }; - let Some(conjunction_node) = compiler_context.ast.conjunction_stack.pop() else { + let Some(conjunction_node) = ast.conjunction_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1257,16 +1314,14 @@ pub fn conjunction_conjunction_or( compiler_context, ) }; - let conjunction_node = compiler_context.ast.create_node( + let conjunction_node = ast.create_node( AstAction::Or, boolean_expression_node.into(), 
conjunction_node.into(), AstPtr::Conjunction, ); - compiler_context - .ast - .conjunction_stack - .push(conjunction_node); + ast.conjunction_stack.push(conjunction_node); + Conjunction::ConjunctionOr(ConjunctionOr { boolean_expression, token_or, @@ -1281,7 +1336,9 @@ pub fn conjunction_conjunction_boolean_expression( compiler_context: &mut CompilerContext, ) -> Conjunction { compiler_context.write_to_parser_file(" -> "); - let Some(boolean_expression_node) = compiler_context.ast.boolean_expression_stack.pop() else { + + let ast = &mut compiler_context.ast; + let Some(boolean_expression_node) = ast.boolean_expression_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1293,10 +1350,8 @@ pub fn conjunction_conjunction_boolean_expression( compiler_context, ) }; - compiler_context - .ast - .conjunction_stack - .push(boolean_expression_node); + ast.conjunction_stack.push(boolean_expression_node); + Conjunction::ConjunctionBooleanExpression(boolean_expression) } @@ -1308,15 +1363,12 @@ pub fn comparison_op_comparison_op_equal( ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_equal}")); let result = ComparisonOp::ComparisonOpEqual(token_equal); - compiler_context - .ast - .comparision_op_stack - .push(result.clone()); - compiler_context.ast.comparision_expressions_stack.push( - compiler_context - .ast - .get_node_from_ptr(AstPtr::SimpleExpression), - ); + + let ast = &mut compiler_context.ast; + ast.comparision_op_stack.push(result.clone()); + ast.comparision_expressions_stack + .push(ast.get_node_from_ptr(AstPtr::SimpleExpression)); + result } @@ -1328,15 +1380,12 @@ pub fn comparison_op_comparison_op_not_equal( ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_not_equal}")); let result = ComparisonOp::ComparisonOpNotEqual(token_not_equal); - compiler_context - .ast - .comparision_op_stack - .push(result.clone()); - compiler_context.ast.comparision_expressions_stack.push( - compiler_context - .ast - .get_node_from_ptr(AstPtr::SimpleExpression), - ); + + let ast = &mut compiler_context.ast; + ast.comparision_op_stack.push(result.clone()); + ast.comparision_expressions_stack + .push(ast.get_node_from_ptr(AstPtr::SimpleExpression)); + result } @@ -1348,15 +1397,12 @@ pub fn comparison_op_comparison_op_less( ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_less}")); let result = ComparisonOp::ComparisonOpLess(token_less); - compiler_context - .ast - .comparision_op_stack - .push(result.clone()); - compiler_context.ast.comparision_expressions_stack.push( - compiler_context - .ast - .get_node_from_ptr(AstPtr::SimpleExpression), - ); + + let ast = &mut compiler_context.ast; + ast.comparision_op_stack.push(result.clone()); + ast.comparision_expressions_stack + .push(ast.get_node_from_ptr(AstPtr::SimpleExpression)); + result } @@ -1368,15 +1414,12 @@ pub fn comparison_op_comparison_op_less_equal( ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_less_equal}")); let result = ComparisonOp::ComparisonOpLessEqual(token_less_equal); - compiler_context - .ast - .comparision_op_stack - .push(result.clone()); - compiler_context.ast.comparision_expressions_stack.push( - compiler_context - .ast - .get_node_from_ptr(AstPtr::SimpleExpression), - ); + + let ast = &mut compiler_context.ast; + ast.comparision_op_stack.push(result.clone()); + ast.comparision_expressions_stack + .push(ast.get_node_from_ptr(AstPtr::SimpleExpression)); + result } @@ -1388,15 +1431,12 @@ pub fn 
comparison_op_comparison_op_greater( ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_greater}")); let result = ComparisonOp::ComparisonOpGreater(token_greater); - compiler_context - .ast - .comparision_op_stack - .push(result.clone()); - compiler_context.ast.comparision_expressions_stack.push( - compiler_context - .ast - .get_node_from_ptr(AstPtr::SimpleExpression), - ); + + let ast = &mut compiler_context.ast; + ast.comparision_op_stack.push(result.clone()); + ast.comparision_expressions_stack + .push(ast.get_node_from_ptr(AstPtr::SimpleExpression)); + result } @@ -1408,15 +1448,12 @@ pub fn comparison_op_comparison_op_greater_equal( ) -> ComparisonOp { compiler_context.write_to_parser_file(&format!(" -> {token_greater_equal}")); let result = ComparisonOp::ComparisonOpGreaterEqual(token_greater_equal); - compiler_context - .ast - .comparision_op_stack - .push(result.clone()); - compiler_context.ast.comparision_expressions_stack.push( - compiler_context - .ast - .get_node_from_ptr(AstPtr::SimpleExpression), - ); + + let ast = &mut compiler_context.ast; + ast.comparision_op_stack.push(result.clone()); + ast.comparision_expressions_stack + .push(ast.get_node_from_ptr(AstPtr::SimpleExpression)); + result } @@ -1428,9 +1465,10 @@ pub fn number_number_int( ) -> Number { compiler_context.push_to_symbol_table(token_int_literal.into()); compiler_context.write_to_parser_file(&format!(" -> {token_int_literal}")); - compiler_context - .ast - .create_leaf(token_int_literal.to_string(), AstPtr::Number); + + let ast = &mut compiler_context.ast; + ast.create_leaf(token_int_literal.to_string(), AstPtr::Number); + Number::NumberInt(token_int_literal) } @@ -1442,9 +1480,10 @@ pub fn number_number_float( ) -> Number { compiler_context.push_to_symbol_table(token_float_literal.clone().into()); compiler_context.write_to_parser_file(&format!(" -> {}", token_float_literal.original)); - compiler_context - .ast - .create_leaf(token_float_literal.original.clone(), AstPtr::Number); + + let ast = &mut compiler_context.ast; + ast.create_leaf(token_float_literal.original.clone(), AstPtr::Number); + Number::NumberFloat(token_float_literal) } @@ -1458,16 +1497,19 @@ pub fn number_number_negative_int( let value: i64 = format!("{token_sub}{token_int_literal}").parse().unwrap(); compiler_context.push_to_symbol_table(value.into()); compiler_context.write_to_parser_file(&format!(" -> {token_sub} {token_int_literal}")); + + let ast = &mut compiler_context.ast; let leaf = Rc::new(Node::new_leaf(NodeValue::Value( token_int_literal.to_string(), ))); let noop = Rc::new(Node::new_leaf(NodeValue::Action(AstAction::Noop))); - compiler_context.ast.create_node( + ast.create_node( AstAction::Negative, leaf.into(), noop.into(), AstPtr::Number, ); + Number::NumberInt(value) } @@ -1485,11 +1527,13 @@ pub fn number_number_negative_float( " -> {token_sub} {}", token_float_literal.original )); + + let ast = &mut compiler_context.ast; let leaf = Rc::new(Node::new_leaf(NodeValue::Value( token_float_literal.original.clone(), ))); let noop = Rc::new(Node::new_leaf(NodeValue::Action(AstAction::Noop))); - compiler_context.ast.create_node( + ast.create_node( AstAction::Negative, leaf.into(), noop.into(), @@ -1509,7 +1553,9 @@ pub fn not_statement_not( compiler_context.write_to_parser_file(&format!( " -> {token_not} " )); - let Some(boolean_expression_node) = compiler_context.ast.boolean_expression_stack.pop() else { + + let ast = &mut compiler_context.ast; + let Some(boolean_expression_node) = 
ast.boolean_expression_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1523,12 +1569,13 @@ pub fn not_statement_not( }; let dummy = Node::new_leaf(NodeValue::Action(AstAction::Noop)); - compiler_context.ast.create_node( + ast.create_node( AstAction::Not, boolean_expression_node.into(), Rc::new(dummy).into(), AstPtr::Not, ); + NotStatement { token_not, boolean_expression: Box::new(boolean_expression), @@ -1546,7 +1593,9 @@ pub fn arithmetic_expression_arithmetic_expression_sum_term( compiler_context.write_to_parser_file(&format!( " -> {token_sum} " )); - let Some(node) = compiler_context.ast.pop_e_stack() else { + + let ast = &mut compiler_context.ast; + let Some(node) = ast.expression_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1558,12 +1607,13 @@ pub fn arithmetic_expression_arithmetic_expression_sum_term( compiler_context, ) }; - compiler_context.ast.create_node( + ast.create_node( AstAction::Plus, node.into(), AstPtr::Term.into(), AstPtr::ArithmeticExpression, ); + ArithmeticExpression::ArithmeticExpressionSumTerm(ArithmeticExpressionSumTerm { arithmetic_expression: Box::new(arithmetic_expression), token_sum, @@ -1582,7 +1632,9 @@ pub fn arithmetic_expression_arithmetic_expression_sub_term( compiler_context.write_to_parser_file(&format!( " -> {token_sub} " )); - let Some(node) = compiler_context.ast.pop_e_stack() else { + + let ast = &mut compiler_context.ast; + let Some(node) = ast.expression_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1600,6 +1652,7 @@ pub fn arithmetic_expression_arithmetic_expression_sub_term( AstPtr::Term.into(), AstPtr::ArithmeticExpression, ); + ArithmeticExpression::ArithmeticExpressionSubTerm(ArithmeticExpressionSubTerm { arithmetic_expression: Box::new(arithmetic_expression), token_sub, @@ -1609,9 +1662,10 @@ pub fn arithmetic_expression_arithmetic_expression_sub_term( // Parses the rule ` -> EMPTY` pub fn dummy_ae_empty(_ctx: &Ctx, compiler_context: &mut CompilerContext) -> DummyAE { - compiler_context - .ast - .push_e_stack(AstPtr::ArithmeticExpression.into()); + let ast = &mut compiler_context.ast; + let node = ast.get_node_from_ptr(AstPtr::ArithmeticExpression); + ast.expression_stack.push(node); + None } @@ -1622,9 +1676,10 @@ pub fn arithmetic_expression_arithmetic_expression_term( compiler_context: &mut CompilerContext, ) -> ArithmeticExpression { compiler_context.write_to_parser_file(" -> "); - compiler_context - .ast - .assign_node_to_ptr(AstPtr::Term.into(), AstPtr::ArithmeticExpression); + + let ast = &mut compiler_context.ast; + ast.assign_node_to_ptr(AstPtr::Term.into(), AstPtr::ArithmeticExpression); + ArithmeticExpression::ArithmeticExpressionTerm(term) } @@ -1638,7 +1693,9 @@ pub fn term_term_mul_factor( ) -> Term { compiler_context .write_to_parser_file(&format!(" -> {token_mul} ")); - let Some(node) = compiler_context.ast.pop_t_stack() else { + + let ast = &mut compiler_context.ast; + let Some(node) = ast.term_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1650,12 +1707,13 @@ pub fn term_term_mul_factor( compiler_context, ) }; - compiler_context.ast.create_node( + ast.create_node( AstAction::Mult, node.into(), AstPtr::Factor.into(), AstPtr::Term, ); + Term::TermMulFactor(TermMulFactor { term: Box::new(term), token_mul, @@ -1672,8 +1730,10 @@ pub fn term_term_div_factor( compiler_context: &mut CompilerContext, ) -> Term { compiler_context - .write_to_parser_file(&format!(" -> {token_div} ")); - let 
Some(node) = compiler_context.ast.pop_t_stack() else { + .write_to_parser_file(&format!(" -> {token_div} ")); + + let ast = &mut compiler_context.ast; + let Some(node) = ast.term_stack.pop() else { log_error_and_exit( ctx.range(), CompilerError::Internal( @@ -1685,12 +1745,13 @@ pub fn term_term_div_factor( compiler_context, ) }; - compiler_context.ast.create_node( + ast.create_node( AstAction::Div, node.into(), AstPtr::Factor.into(), AstPtr::Term, ); + Term::TermDivFactor(TermDivFactor { term: Box::new(term), token_div, @@ -1700,7 +1761,10 @@ pub fn term_term_div_factor( // Parses the rule ` -> EMPTY` pub fn dummy_t_empty(_ctx: &Ctx, compiler_context: &mut CompilerContext) -> DummyT { - compiler_context.ast.push_t_stack(AstPtr::Term.into()); + let ast = &mut compiler_context.ast; + let node = ast.get_node_from_ptr(AstPtr::Term); + compiler_context.ast.term_stack.push(node); + None } @@ -1711,9 +1775,10 @@ pub fn term_term_factor( compiler_context: &mut CompilerContext, ) -> Term { compiler_context.write_to_parser_file(" -> "); - compiler_context - .ast - .assign_node_to_ptr(AstPtr::Factor.into(), AstPtr::Term); + + let ast = &mut compiler_context.ast; + ast.assign_node_to_ptr(AstPtr::Factor.into(), AstPtr::Term); + Term::TermFactor(factor) } @@ -1724,9 +1789,10 @@ pub fn factor_factor_id( compiler_context: &mut CompilerContext, ) -> Factor { compiler_context.write_to_parser_file(&format!(" -> {token_id}")); - compiler_context - .ast - .create_leaf(token_id.clone(), AstPtr::Factor); + + let ast = &mut compiler_context.ast; + ast.create_leaf(token_id.clone(), AstPtr::Factor); + Factor::FactorId(token_id) } @@ -1737,9 +1803,10 @@ pub fn factor_factor_number( compiler_context: &mut CompilerContext, ) -> Factor { compiler_context.write_to_parser_file(" -> "); - compiler_context - .ast - .assign_node_to_ptr(AstPtr::Number.into(), AstPtr::Factor); + + let ast = &mut compiler_context.ast; + ast.assign_node_to_ptr(AstPtr::Number.into(), AstPtr::Factor); + Factor::FactorNumber(number) } @@ -1754,9 +1821,10 @@ pub fn factor_factor_paren( compiler_context.write_to_parser_file(&format!( " -> {token_par_open} {token_par_close}" )); - compiler_context - .ast - .assign_node_to_ptr(AstPtr::ArithmeticExpression.into(), AstPtr::Factor); + + let ast = &mut compiler_context.ast; + ast.assign_node_to_ptr(AstPtr::ArithmeticExpression.into(), AstPtr::Factor); + Factor::FactorParen(FactorParen { token_par_open, arithmetic_expression: Box::new(arithmetic_expression), From f2ae3dfb18988171f418b34b24c6723e2217182a Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Mon, 27 Oct 2025 23:37:39 -0300 Subject: [PATCH 24/25] refactor: move wrap log_error_and_exit into log_ast_error --- src/grammar/rules_actions.rs | 228 ++++++++++++----------------------- 1 file changed, 75 insertions(+), 153 deletions(-) diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 045ff39..8a499cc 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -656,13 +656,9 @@ pub fn expressions_expression_single( let ast = &mut compiler_context.ast; let Some(statement_node) = ast.statement_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "Statement stack was empty when parsing ` -> `".into(), - ), - 0, - true, + log_ast_error( + "Statement stack was empty when parsing ` -> `", + ctx, compiler_context, ) }; @@ -682,13 +678,9 @@ pub fn expressions_expression_recursive( let ast = &mut compiler_context.ast; let 
Some(statement_node) = ast.statement_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "Statement stack was empty when parsing ` -> `".into(), - ), - 0, - true, + log_ast_error( + "Statement stack was empty when parsing ` -> `", + ctx, compiler_context, ) }; @@ -890,14 +882,9 @@ pub fn while_loop_while( let ast = &mut compiler_context.ast; let Some(conjunction_node) = ast.conjunction_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "Conjunction stack was empty when parsing ` -> TokenWhile TokenParOpen TokenParClose TokenCBOpen TokenCBClose`" - .into(), - ), - 0, - true, + log_ast_error( + "Conjunction stack was empty when parsing ` -> TokenWhile TokenParOpen TokenParClose TokenCBOpen TokenCBClose`", + ctx, compiler_context, ) }; @@ -938,16 +925,11 @@ pub fn if_statement_if_statement( let ast = &mut compiler_context.ast; let Some(conjunction_node) = ast.conjunction_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "Conjunction stack was empty when parsing ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose`" - .into(), - ), - 0, - true, + log_ast_error( + "Conjunction stack was empty when parsing ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose`", + ctx, compiler_context, - ) + ); }; ast.create_node( AstAction::If, @@ -987,14 +969,9 @@ pub fn if_statement_if_statement_else_statement( let ast = &mut compiler_context.ast; let Some(if_true_body) = ast.if_body_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "IfBody stack was empty when parsing ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose `" - .into(), - ), - 0, - true, + log_ast_error( + "IfBody stack was empty when parsing ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose `", + ctx, compiler_context, ) }; @@ -1005,14 +982,9 @@ pub fn if_statement_if_statement_else_statement( AstPtr::If, ); let Some(conjunction_node) = ast.conjunction_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "Conjunction stack was empty when parsing ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose `" - .into(), - ), - 0, - true, + log_ast_error( + "Conjunction stack was empty when parsing ` -> TokenIf TokenParOpen TokenParClose TokenCBOpen TokenCBClose `", + ctx, compiler_context, ) }; @@ -1080,28 +1052,18 @@ pub fn boolean_expression_boolean_expression_simple_expression( let ast = &mut compiler_context.ast; let Some(left_child) = ast.comparision_expressions_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "ComparisonExpressions stack was empty when parsing ` -> `" - .into(), - ), - 0, - true, + log_ast_error( + "ComparisonExpressions stack was empty when parsing ` -> `", + ctx, compiler_context, - ) + ); }; let Some(operator) = ast.comparision_op_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "ComparisonOperator stack was empty when parsing ` -> `" - .into(), - ), - 0, - true, + log_ast_error( + "ComparisonOperator stack was empty when parsing ` -> `", + ctx, compiler_context, - ) + ); }; let node = ast.create_node( operator.into(), @@ -1239,26 +1201,16 @@ pub fn conjunction_conjunction_and( let ast = &mut compiler_context.ast; let Some(boolean_expression_node) = ast.boolean_expression_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "BooleanExpression stack was empty when parsing ` -> \"and\" `" - .into(), 
- ), - 0, - true, + log_ast_error( + "BooleanExpression stack was empty when parsing ` -> \"and\" `", + ctx, compiler_context, - ) + ); }; let Some(conjunction_node) = ast.conjunction_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "Conjunction stack was empty when parsing ` -> \"and\" `" - .into(), - ), - 0, - true, + log_ast_error( + "Conjunction stack was empty when parsing ` -> \"and\" `", + ctx, compiler_context, ) }; @@ -1291,28 +1243,18 @@ pub fn conjunction_conjunction_or( let ast = &mut compiler_context.ast; let Some(boolean_expression_node) = ast.boolean_expression_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "BooleanExpression stack was empty when parsing ` -> \"or\" `" - .into(), - ), - 0, - true, + log_ast_error( + "BooleanExpression stack was empty when parsing ` -> \"or\" `", + ctx, compiler_context, - ) + ); }; let Some(conjunction_node) = ast.conjunction_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "Conjunction stack was empty when parsing ` -> \"or\" `" - .into(), - ), - 0, - true, + log_ast_error( + "Conjunction stack was empty when parsing ` -> \"or\" `", + ctx, compiler_context, - ) + ); }; let conjunction_node = ast.create_node( AstAction::Or, @@ -1339,16 +1281,11 @@ pub fn conjunction_conjunction_boolean_expression( let ast = &mut compiler_context.ast; let Some(boolean_expression_node) = ast.boolean_expression_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "BooleanExpression stack was empty when parsing ` -> `" - .into(), - ), - 0, - true, + log_ast_error( + "BooleanExpression stack was empty when parsing ` -> `", + ctx, compiler_context, - ) + ); }; ast.conjunction_stack.push(boolean_expression_node); @@ -1556,16 +1493,11 @@ pub fn not_statement_not( let ast = &mut compiler_context.ast; let Some(boolean_expression_node) = ast.boolean_expression_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "BooleanExpression stack was empty when parsing ` -> TokenNot `" - .into(), - ), - 0, - true, + log_ast_error( + "BooleanExpression stack was empty when parsing ` -> TokenNot `", + ctx, compiler_context, - ) + ); }; let dummy = Node::new_leaf(NodeValue::Action(AstAction::Noop)); @@ -1596,16 +1528,11 @@ pub fn arithmetic_expression_arithmetic_expression_sum_term( let ast = &mut compiler_context.ast; let Some(node) = ast.expression_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "ArithmeticExpression stack was empty when parsing ` -> TokenSum `" - .into(), - ), - 0, - true, + log_ast_error( + "ArithmeticExpression stack was empty when parsing ` -> TokenSum `", + ctx, compiler_context, - ) + ); }; ast.create_node( AstAction::Plus, @@ -1635,14 +1562,9 @@ pub fn arithmetic_expression_arithmetic_expression_sub_term( let ast = &mut compiler_context.ast; let Some(node) = ast.expression_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "ArithmeticExpression stack was empty when parsing ` -> TokenSub `" - .into(), - ), - 0, - true, + log_ast_error( + "ArithmeticExpression stack was empty when parsing ` -> TokenSub `", + ctx, compiler_context, ) }; @@ -1696,16 +1618,11 @@ pub fn term_term_mul_factor( let ast = &mut compiler_context.ast; let Some(node) = ast.term_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "Term stack was empty when parsing ` -> TokenMul `" - .into(), - ), - 0, - true, + 
log_ast_error( + "Term stack was empty when parsing ` -> TokenMul `", + ctx, compiler_context, - ) + ); }; ast.create_node( AstAction::Mult, @@ -1734,16 +1651,11 @@ pub fn term_term_div_factor( let ast = &mut compiler_context.ast; let Some(node) = ast.term_stack.pop() else { - log_error_and_exit( - ctx.range(), - CompilerError::Internal( - "Term stack was empty when parsing ` -> TokenDiv `" - .into(), - ), - 0, - true, + log_ast_error( + "Term stack was empty when parsing ` -> TokenDiv `", + ctx, compiler_context, - ) + ); }; ast.create_node( AstAction::Div, @@ -1831,3 +1743,13 @@ pub fn factor_factor_paren( token_par_close, }) } + +fn log_ast_error(error: &str, ctx: &Ctx, compiler_context: &mut CompilerContext) -> ! { + log_error_and_exit( + ctx.range(), + CompilerError::Internal(error.into()), + 0, + true, + compiler_context, + ) +} From 7357639937aa72d051a32ccbb0014c62442d5f8d Mon Sep 17 00:00:00 2001 From: LeanSerra <46695152+LeanSerra@users.noreply.github.com> Date: Tue, 28 Oct 2025 22:08:53 -0300 Subject: [PATCH 25/25] fix: comment in term_term_div_factor --- src/grammar/rules_actions.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/grammar/rules_actions.rs b/src/grammar/rules_actions.rs index 0012c84..78452c5 100644 --- a/src/grammar/rules_actions.rs +++ b/src/grammar/rules_actions.rs @@ -1646,7 +1646,7 @@ pub fn term_term_div_factor( compiler_context: &mut CompilerContext, ) -> Term { compiler_context - .write_to_parser_file(&format!(" -> {token_div} ")); + .write_to_parser_file(&format!(" -> {token_div} ")); let ast = &mut compiler_context.ast; let Some(node) = ast.term_stack.pop() else {