From a5f8e41d3692c8db3214ce8d9d1ff6fc29443091 Mon Sep 17 00:00:00 2001
From: azjezz
Date: Thu, 1 Dec 2022 00:40:13 +0100
Subject: [PATCH] chore: refactor project structure

Signed-off-by: azjezz
---
 .github/workflows/tests.yml | 2 +-
 bin/snapshot.rs | 18 +-
 build.rs | 60 +-
 src/lexer/error.rs | 58 +
 src/lexer/lexer.rs | 1776 ------------------------------
 src/lexer/macros.rs | 15 +
 src/lexer/mod.rs | 1170 +++++++++++++++++++-
 src/lexer/token.rs | 2 +-
 src/lib.rs | 12 +-
 src/main.rs | 42 +-
 src/{ => parser}/ast.rs | 15 +-
 src/parser/block.rs | 11 +-
 src/parser/classish.rs | 29 +-
 src/parser/classish_statement.rs | 34 +-
 src/parser/comments.rs | 12 +-
 src/parser/error.rs | 12 +-
 src/parser/flags.rs | 16 +-
 src/parser/functions.rs | 24 +-
 src/parser/ident.rs | 24 +-
 src/parser/mod.rs | 23 +-
 src/parser/params.rs | 24 +-
 src/parser/precedence.rs | 2 +-
 src/parser/punc.rs | 23 +-
 src/parser/vars.rs | 11 +-
 src/prelude.rs | 8 +
 tests/0001/tokens.txt | 452 ++++++++
 tests/0002/tokens.txt | 126 +++
 tests/0003/tokens.txt | 157 +++
 tests/0004/tokens.txt | 166 +++
 tests/0005/tokens.txt | 143 +++
 tests/0006/tokens.txt | 103 ++
 tests/0007/tokens.txt | 215 ++++
 tests/0008/tokens.txt | 395 +++++++
 tests/0009/tokens.txt | 167 +++
 tests/0010/tokens.txt | 1083 ++++++++++++++++++
 tests/0011/tokens.txt | 1152 +++++++++++++++++++
 tests/0012/tokens.txt | 1300 ++++++++++++++++++++++
 tests/0013/tokens.txt | 420 +++++++
 tests/0014/tokens.txt | 434 ++++++++
 tests/0015/tokens.txt | 230 ++++
 tests/0016/tokens.txt | 78 ++
 tests/0017/tokens.txt | 220 ++++
 tests/0018/tokens.txt | 235 ++++
 tests/0019/tokens.txt | 1601 +++++++++++++++++++++
 tests/0020/tokens.txt | 811 ++++++++++++++
 tests/0021/tokens.txt | 552 ++++++++++
 tests/0022/tokens.txt | 265 +++++
 tests/0023/tokens.txt | 505 +++++++++
 tests/0024/tokens.txt | 414 +++++++
 tests/0025/tokens.txt | 25 +
 tests/0026/tokens.txt | 34 +
 tests/0027/tokens.txt | 25 +
 tests/0028/tokens.txt | 34 +
 tests/0029/tokens.txt | 91 ++
 tests/0030/tokens.txt | 193 ++++
 tests/0031/tokens.txt | 159 +++
 tests/0032/tokens.txt | 186 ++++
 tests/0033/tokens.txt | 241 ++++
 tests/0034/tokens.txt | 59 +
 tests/0035/tokens.txt | 55 +
 tests/0036/tokens.txt | 64 ++
 tests/0037/tokens.txt | 80 ++
 tests/0038/tokens.txt | 264 +++++
 tests/0039/tokens.txt | 64 ++
 tests/0040/tokens.txt | 122 ++
 tests/0041/tokens.txt | 189 ++++
 tests/0042/tokens.txt | 34 +
 tests/0043/tokens.txt | 41 +
 tests/0044/tokens.txt | 90 ++
 tests/0045/tokens.txt | 108 ++
 tests/0046/tokens.txt | 57 +
 tests/0047/tokens.txt | 73 ++
 tests/0048/tokens.txt | 73 ++
 tests/0049/tokens.txt | 121 ++
 tests/0050/tokens.txt | 71 ++
 tests/0051/tokens.txt | 80 ++
 tests/0052/tokens.txt | 103 ++
 tests/0053/tokens.txt | 80 ++
 tests/0054/tokens.txt | 89 ++
 tests/0055/tokens.txt | 105 ++
 tests/0056/tokens.txt | 89 ++
 tests/0057/tokens.txt | 105 ++
 tests/0058/tokens.txt | 71 ++
 tests/0059/tokens.txt | 71 ++
 tests/0060/tokens.txt | 46 +
 tests/0061/tokens.txt | 85 ++
 tests/0062/tokens.txt | 62 ++
 tests/0063/tokens.txt | 78 ++
 tests/0064/tokens.txt | 97 ++
 tests/0065/tokens.txt | 25 +
 tests/0066/tokens.txt | 41 +
 tests/0067/tokens.txt | 18 +
 tests/0068/tokens.txt | 73 ++
 tests/0069/tokens.txt | 69 ++
 tests/0070/tokens.txt | 90 ++
 tests/0071/tokens.txt | 27 +
 tests/0072/tokens.txt | 48 +
 tests/0073/tokens.txt | 43 +
 tests/0074/tokens.txt | 57 +
 tests/0075/tokens.txt | 66 ++
 tests/0076/tokens.txt | 73 ++
 tests/0077/tokens.txt | 50 +
 tests/0078/tokens.txt | 82 ++
 tests/0079/tokens.txt | 34 +
 tests/0080/tokens.txt | 50 +
 tests/0081/tokens.txt | 64 ++
 tests/0082/tokens.txt | 96 ++
 tests/0083/tokens.txt | 94 ++
 tests/0084/tokens.txt | 80 ++
 tests/0085/tokens.txt | 48 +
 tests/0086/tokens.txt | 71 ++
 tests/0087/tokens.txt | 119 ++
 tests/0088/tokens.txt | 55 +
 tests/0089/tokens.txt | 64 ++
 tests/0090/tokens.txt | 64 ++
 tests/0091/tokens.txt | 69 ++
 tests/0092/tokens.txt | 69 ++
 tests/0093/tokens.txt | 48 +
 tests/0094/tokens.txt | 62 ++
 tests/0095/tokens.txt | 34 +
 tests/0096/tokens.txt | 50 +
 tests/0097/tokens.txt | 64 ++
 tests/0098/tokens.txt | 50 +
 tests/0099/tokens.txt | 64 ++
 tests/0100/tokens.txt | 71 ++
 tests/0101/tokens.txt | 53 +
 tests/0102/tokens.txt | 99 ++
 tests/0103/tokens.txt | 34 +
 tests/0105/tokens.txt | 25 +
 tests/0106/tokens.txt | 34 +
 tests/0107/tokens.txt | 50 +
 tests/0108/tokens.txt | 41 +
 tests/0109/tokens.txt | 239 ++++
 tests/0110/tokens.txt | 232 ++++
 tests/0111/tokens.txt | 129 +++
 tests/0112/tokens.txt | 129 +++
 tests/0113/tokens.txt | 124 +++
 tests/0114/tokens.txt | 117 ++
 tests/0115/tokens.txt | 131 +++
 tests/0116/tokens.txt | 124 +++
 tests/0117/tokens.txt | 135 +++
 tests/0118/tokens.txt | 87 ++
 tests/0119/tokens.txt | 135 +++
 tests/0120/tokens.txt | 85 ++
 tests/0121/tokens.txt | 87 ++
 tests/0122/tokens.txt | 87 ++
 tests/0123/tokens.txt | 87 ++
 tests/0124/tokens.txt | 99 ++
 tests/0125/tokens.txt | 99 ++
 tests/0126/tokens.txt | 94 ++
 tests/0127/tokens.txt | 147 +++
 tests/0128/tokens.txt | 138 +++
 tests/0129/tokens.txt | 161 +++
 tests/0130/tokens.txt | 99 ++
 tests/0131/tokens.txt | 62 ++
 tests/0133/code.php | 5 +
 tests/0133/lexer-error.txt | 1 +
 tests/0134/code.php | 4 +
 tests/0134/lexer-error.txt | 1 +
 tests/0135/code.php | 4 +
 tests/0135/lexer-error.txt | 1 +
 tests/0136/code.php | 4 +
 tests/0136/lexer-error.txt | 1 +
 tests/0137/code.php | 4 +
 tests/0137/lexer-error.txt | 1 +
 tests/0138/code.php | 3 +
 tests/0138/lexer-error.txt | 1 +
 tests/0139/code.php | 3 +
 tests/0139/lexer-error.txt | 1 +
 tests/0140/code.php | 3 +
 tests/0140/lexer-error.txt | 1 +
 tests/third_party_tests.rs | 6 +-
 172 files changed, 21798 insertions(+), 1952 deletions(-)
 create mode 100644 src/lexer/error.rs
 delete mode 100644 src/lexer/lexer.rs
 create mode 100644 src/lexer/macros.rs
 rename src/{ => parser}/ast.rs (98%)
 create mode 100644 src/prelude.rs
 create mode 100644 tests/0001/tokens.txt
 create mode 100644 tests/0002/tokens.txt
 create mode 100644 tests/0003/tokens.txt
 create mode 100644 tests/0004/tokens.txt
 create mode 100644 tests/0005/tokens.txt
 create mode 100644 tests/0006/tokens.txt
 create mode 100644 tests/0007/tokens.txt
 create mode 100644 tests/0008/tokens.txt
 create mode 100644 tests/0009/tokens.txt
 create mode 100644 tests/0010/tokens.txt
 create mode 100644 tests/0011/tokens.txt
 create mode 100644 tests/0012/tokens.txt
 create mode 100644 tests/0013/tokens.txt
 create mode 100644 tests/0014/tokens.txt
 create mode 100644 tests/0015/tokens.txt
 create mode 100644 tests/0016/tokens.txt
 create mode 100644 tests/0017/tokens.txt
 create mode 100644 tests/0018/tokens.txt
 create mode 100644 tests/0019/tokens.txt
 create mode 100644 tests/0020/tokens.txt
 create mode 100644 tests/0021/tokens.txt
 create mode 100644 tests/0022/tokens.txt
 create mode 100644 tests/0023/tokens.txt
 create mode 100644 tests/0024/tokens.txt
 create mode 100644 tests/0025/tokens.txt
 create mode 100644 tests/0026/tokens.txt
 create mode 100644 tests/0027/tokens.txt
 create mode 100644 tests/0028/tokens.txt
 create mode 100644 tests/0029/tokens.txt
 create mode 100644 tests/0030/tokens.txt
 create mode 100644 tests/0031/tokens.txt
 create mode 100644 tests/0032/tokens.txt
 create mode 100644 tests/0033/tokens.txt
 create mode 100644 tests/0034/tokens.txt
 create mode 100644 tests/0035/tokens.txt
 create mode 100644 tests/0036/tokens.txt
 create mode 100644 tests/0037/tokens.txt
 create mode 100644 tests/0038/tokens.txt
 create mode 100644 tests/0039/tokens.txt
 create mode 100644 tests/0040/tokens.txt
 create mode 100644 tests/0041/tokens.txt
 create mode 100644 tests/0042/tokens.txt
 create mode 100644 tests/0043/tokens.txt
 create mode 100644 tests/0044/tokens.txt
 create mode 100644 tests/0045/tokens.txt
 create mode 100644 tests/0046/tokens.txt
 create mode 100644 tests/0047/tokens.txt
 create mode 100644 tests/0048/tokens.txt
 create mode 100644 tests/0049/tokens.txt
 create mode 100644 tests/0050/tokens.txt
 create mode 100644 tests/0051/tokens.txt
 create mode 100644 tests/0052/tokens.txt
 create mode 100644 tests/0053/tokens.txt
 create mode 100644 tests/0054/tokens.txt
 create mode 100644 tests/0055/tokens.txt
 create mode 100644 tests/0056/tokens.txt
 create mode 100644 tests/0057/tokens.txt
 create mode 100644 tests/0058/tokens.txt
 create mode 100644 tests/0059/tokens.txt
 create mode 100644 tests/0060/tokens.txt
 create mode 100644 tests/0061/tokens.txt
 create mode 100644 tests/0062/tokens.txt
 create mode 100644 tests/0063/tokens.txt
 create mode 100644 tests/0064/tokens.txt
 create mode 100644 tests/0065/tokens.txt
 create mode 100644 tests/0066/tokens.txt
 create mode 100644 tests/0067/tokens.txt
 create mode 100644 tests/0068/tokens.txt
 create mode 100644 tests/0069/tokens.txt
 create mode 100644 tests/0070/tokens.txt
 create mode 100644 tests/0071/tokens.txt
 create mode 100644 tests/0072/tokens.txt
 create mode 100644 tests/0073/tokens.txt
 create mode 100644 tests/0074/tokens.txt
 create mode 100644 tests/0075/tokens.txt
 create mode 100644 tests/0076/tokens.txt
 create mode 100644 tests/0077/tokens.txt
 create mode 100644 tests/0078/tokens.txt
 create mode 100644 tests/0079/tokens.txt
 create mode 100644 tests/0080/tokens.txt
 create mode 100644 tests/0081/tokens.txt
 create mode 100644 tests/0082/tokens.txt
 create mode 100644 tests/0083/tokens.txt
 create mode 100644 tests/0084/tokens.txt
 create mode 100644 tests/0085/tokens.txt
 create mode 100644 tests/0086/tokens.txt
 create mode 100644 tests/0087/tokens.txt
 create mode 100644 tests/0088/tokens.txt
 create mode 100644 tests/0089/tokens.txt
 create mode 100644 tests/0090/tokens.txt
 create mode 100644 tests/0091/tokens.txt
 create mode 100644 tests/0092/tokens.txt
 create mode 100644 tests/0093/tokens.txt
 create mode 100644 tests/0094/tokens.txt
 create mode 100644 tests/0095/tokens.txt
 create mode 100644 tests/0096/tokens.txt
 create mode 100644 tests/0097/tokens.txt
 create mode 100644 tests/0098/tokens.txt
 create mode 100644 tests/0099/tokens.txt
 create mode 100644 tests/0100/tokens.txt
 create mode 100644 tests/0101/tokens.txt
 create mode 100644 tests/0102/tokens.txt
 create mode 100644 tests/0103/tokens.txt
 create mode 100644 tests/0105/tokens.txt
 create mode 100644 tests/0106/tokens.txt
 create mode 100644 tests/0107/tokens.txt
 create mode 100644 tests/0108/tokens.txt
 create mode 100644 tests/0109/tokens.txt
 create mode 100644 tests/0110/tokens.txt
 create mode 100644 tests/0111/tokens.txt
 create mode 100644 tests/0112/tokens.txt
 create mode 100644 tests/0113/tokens.txt
 create mode 100644 tests/0114/tokens.txt
 create mode 100644 tests/0115/tokens.txt
 create mode 100644 tests/0116/tokens.txt
 create mode 100644 tests/0117/tokens.txt
 create mode 100644 tests/0118/tokens.txt
 create mode 100644 tests/0119/tokens.txt
 create mode 100644
tests/0120/tokens.txt create mode 100644 tests/0121/tokens.txt create mode 100644 tests/0122/tokens.txt create mode 100644 tests/0123/tokens.txt create mode 100644 tests/0124/tokens.txt create mode 100644 tests/0125/tokens.txt create mode 100644 tests/0126/tokens.txt create mode 100644 tests/0127/tokens.txt create mode 100644 tests/0128/tokens.txt create mode 100644 tests/0129/tokens.txt create mode 100644 tests/0130/tokens.txt create mode 100644 tests/0131/tokens.txt create mode 100644 tests/0133/code.php create mode 100644 tests/0133/lexer-error.txt create mode 100644 tests/0134/code.php create mode 100644 tests/0134/lexer-error.txt create mode 100644 tests/0135/code.php create mode 100644 tests/0135/lexer-error.txt create mode 100644 tests/0136/code.php create mode 100644 tests/0136/lexer-error.txt create mode 100644 tests/0137/code.php create mode 100644 tests/0137/lexer-error.txt create mode 100644 tests/0138/code.php create mode 100644 tests/0138/lexer-error.txt create mode 100644 tests/0139/code.php create mode 100644 tests/0139/lexer-error.txt create mode 100644 tests/0140/code.php create mode 100644 tests/0140/lexer-error.txt diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 543371a4..2b5a364d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -37,6 +37,6 @@ jobs: run: | cargo fmt --all -- --check cargo clippy - + - name: test run: ./meta/test --all -- --skip third_party diff --git a/bin/snapshot.rs b/bin/snapshot.rs index 4f1efae1..378af610 100644 --- a/bin/snapshot.rs +++ b/bin/snapshot.rs @@ -1,4 +1,4 @@ -use php_parser_rs::{Lexer, Parser}; +use php_parser_rs::prelude::{Lexer, Parser}; use std::env; use std::fs::read_dir; use std::path::PathBuf; @@ -21,6 +21,7 @@ fn main() { for entry in entries { let code_filename = entry.join("code.php"); let ast_filename = entry.join("ast.txt"); + let tokens_filename = entry.join("tokens.txt"); let lexer_error_filename = entry.join("lexer-error.txt"); let parser_error_filename = entry.join("parser-error.txt"); @@ -32,6 +33,10 @@ fn main() { std::fs::remove_file(&ast_filename).unwrap(); } + if tokens_filename.exists() { + std::fs::remove_file(&tokens_filename).unwrap(); + } + if lexer_error_filename.exists() { std::fs::remove_file(&lexer_error_filename).unwrap(); } @@ -41,11 +46,17 @@ fn main() { } let code = std::fs::read_to_string(&code_filename).unwrap(); - let mut lexer = Lexer::new(None); + let mut lexer = Lexer::new(); let tokens = lexer.tokenize(code.as_bytes()); match tokens { Ok(tokens) => { + std::fs::write(tokens_filename, format!("{:#?}\n", tokens)).unwrap(); + println!( + "✅ generated `tokens.txt` for `{}`", + entry.to_string_lossy() + ); + let mut parser = Parser::new(None); let ast = parser.parse(tokens); match ast { @@ -67,7 +78,8 @@ fn main() { } } Err(error) => { - std::fs::write(lexer_error_filename, format!("{:?}\n", error)).unwrap(); + std::fs::write(lexer_error_filename, format!("{:?} -> {}\n", error, error)) + .unwrap(); println!( "✅ generated `lexer-error.txt` for `{}`", entry.to_string_lossy() diff --git a/build.rs b/build.rs index 7d63c68d..a4d3e13e 100644 --- a/build.rs +++ b/build.rs @@ -3,14 +3,19 @@ use std::fs::read_dir; use std::path::PathBuf; fn main() { - println!("cargo:rerun-if-changed=tests"); + let manifest = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); + let tests = manifest.join("tests"); + let snapshot = manifest.join("bin").join("snapshot.rs"); + + println!("cargo:rerun-if-changed={}", tests.to_string_lossy()); + 
println!("cargo:rerun-if-changed={}", snapshot.to_string_lossy()); + println!("cargo:rerun-if-env-changed=BUILD_INTEGRATION_TESTS"); if env::var("BUILD_INTEGRATION_TESTS").unwrap_or_else(|_| "0".to_string()) == "0" { return; } - let manifest = PathBuf::from(env::var("CARGO_MANIFEST_DIR").unwrap()); - let mut entries = read_dir(manifest.join("tests")) + let mut entries = read_dir(tests) .unwrap() .flatten() .map(|entry| entry.path()) @@ -26,6 +31,7 @@ fn main() { for entry in entries { let code_filename = entry.join("code.php"); let ast_filename = entry.join("ast.txt"); + let tokens_filename = entry.join("tokens.txt"); let lexer_error_filename = entry.join("lexer-error.txt"); let parser_error_filename = entry.join("parser-error.txt"); @@ -45,7 +51,12 @@ fn main() { entry.to_string_lossy() ); - content.push_str(&build_success_test(entry, code_filename, ast_filename)) + content.push_str(&build_success_test( + entry, + code_filename, + ast_filename, + tokens_filename, + )) } else if lexer_error_filename.exists() { assert!( !parser_error_filename.exists(), @@ -69,6 +80,7 @@ fn main() { entry, code_filename, parser_error_filename, + tokens_filename, )) } } @@ -77,21 +89,32 @@ fn main() { std::fs::write(dest, content).expect("failed to write to file"); } -fn build_success_test(entry: PathBuf, code_filename: PathBuf, ast_filename: PathBuf) -> String { +fn build_success_test( + entry: PathBuf, + code_filename: PathBuf, + ast_filename: PathBuf, + tokens_filename: PathBuf, +) -> String { format!( r#"#[test] fn test_success_{}() {{ - use php_parser_rs::{{Lexer, Parser}}; + use php_parser_rs::prelude::Parser; + use php_parser_rs::prelude::Lexer; use pretty_assertions::assert_str_eq; let code_filename = "{}"; let ast_filename = "{}"; + let tokens_filename = "{}"; let code = std::fs::read_to_string(&code_filename).unwrap(); let expected_ast = std::fs::read_to_string(&ast_filename).unwrap(); + let expected_tokens = std::fs::read_to_string(&tokens_filename).unwrap(); - let mut lexer = Lexer::new(None); + let mut lexer = Lexer::new(); let tokens = lexer.tokenize(code.as_bytes()).unwrap(); + + assert_str_eq!(expected_tokens.trim(), format!("{{:#?}}", tokens)); + let mut parser = Parser::new(None); let ast = parser.parse(tokens).unwrap(); @@ -101,7 +124,8 @@ fn test_success_{}() {{ "#, entry.file_name().unwrap().to_string_lossy(), code_filename.to_string_lossy(), - ast_filename.to_string_lossy() + ast_filename.to_string_lossy(), + tokens_filename.to_string_lossy(), ) } @@ -113,7 +137,7 @@ fn build_lexer_error_test( format!( r#"#[test] fn test_lexer_error_{}() {{ - use php_parser_rs::Lexer; + use php_parser_rs::prelude::Lexer; use pretty_assertions::assert_str_eq; let code_filename = "{}"; @@ -122,10 +146,13 @@ fn test_lexer_error_{}() {{ let code = std::fs::read_to_string(&code_filename).unwrap(); let expected_error = std::fs::read_to_string(&lexer_error_filename).unwrap(); - let mut lexer = Lexer::new(None); + let mut lexer = Lexer::new(); let error = lexer.tokenize(code.as_bytes()).err().unwrap(); - assert_str_eq!(expected_error.trim(), format!("{{:?}}", error)); + assert_str_eq!( + expected_error.trim(), + format!("{{:?}} -> {{}}", error, error.to_string()) + ); }} "#, @@ -139,22 +166,28 @@ fn build_parser_error_test( entry: PathBuf, code_filename: PathBuf, parser_error_filename: PathBuf, + tokens_filename: PathBuf, ) -> String { format!( r#"#[test] fn test_paser_error_{}() {{ - use php_parser_rs::{{Lexer, Parser}}; + use php_parser_rs::prelude::Parser; + use php_parser_rs::prelude::Lexer; use 
pretty_assertions::assert_str_eq; let code_filename = "{}"; + let tokens_filename = "{}"; let parser_error_filename = "{}"; let code = std::fs::read_to_string(&code_filename).unwrap(); + let expected_tokens = std::fs::read_to_string(&tokens_filename).unwrap(); let expected_error = std::fs::read_to_string(&parser_error_filename).unwrap(); - let mut lexer = Lexer::new(None); + let mut lexer = Lexer::new(); let tokens = lexer.tokenize(code.as_bytes()).unwrap(); + assert_str_eq!(expected_tokens.trim(), format!("{{:#?}}", tokens)); + let mut parser = Parser::new(None); let error = parser.parse(tokens).err().unwrap(); @@ -167,6 +200,7 @@ fn test_paser_error_{}() {{ "#, entry.file_name().unwrap().to_string_lossy(), code_filename.to_string_lossy(), + tokens_filename.to_string_lossy(), parser_error_filename.to_string_lossy() ) } diff --git a/src/lexer/error.rs b/src/lexer/error.rs new file mode 100644 index 00000000..5576a58b --- /dev/null +++ b/src/lexer/error.rs @@ -0,0 +1,58 @@ +use std::fmt::Display; + +use crate::lexer::token::Span; + +pub type LexResult = Result; + +#[derive(Debug, Eq, PartialEq)] +pub enum SyntaxError { + UnexpectedEndOfFile(Span), + UnexpectedError(Span), + UnexpectedCharacter(u8, Span), + InvalidHaltCompiler(Span), + InvalidOctalEscape(Span), + InvalidOctalLiteral(Span), + InvalidUnicodeEscape(Span), +} + +impl Display for SyntaxError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::UnexpectedEndOfFile(span) => write!( + f, + "Syntax Error: unexpected end of file on line {} column {}", + span.0, span.1 + ), + Self::UnexpectedError(span) => write!( + f, + "Syntax Error: unexpected error on line {} column {}", + span.0, span.1 + ), + Self::UnexpectedCharacter(char, span) => write!( + f, + "Syntax Error: unexpected character `{:?}` on line {} column {}", + *char as char, span.0, span.1 + ), + Self::InvalidHaltCompiler(span) => write!( + f, + "Syntax Error: invalid halt compiler on line {} column {}", + span.0, span.1 + ), + Self::InvalidOctalEscape(span) => write!( + f, + "Syntax Error: invalid octal escape on line {} column {}", + span.0, span.1 + ), + Self::InvalidOctalLiteral(span) => write!( + f, + "Syntax Error: invalid octal literal on line {} column {}", + span.0, span.1 + ), + Self::InvalidUnicodeEscape(span) => write!( + f, + "Syntax Error: invalid unicode escape on line {} column {}", + span.0, span.1 + ), + } + } +} diff --git a/src/lexer/lexer.rs b/src/lexer/lexer.rs deleted file mode 100644 index 131547cd..00000000 --- a/src/lexer/lexer.rs +++ /dev/null @@ -1,1776 +0,0 @@ -use std::num::IntErrorKind; - -use crate::{ByteString, OpenTagKind, Token, TokenKind}; - -#[derive(Debug, PartialEq, Eq)] -pub enum LexerState { - Initial, - Scripting, - Halted, - DoubleQuote, - LookingForVarname, - LookingForProperty, - VarOffset, -} - -#[allow(dead_code)] -#[derive(Default)] -pub struct LexerConfig { - short_tags: bool, -} - -#[allow(dead_code)] -pub struct Lexer { - config: LexerConfig, - state_stack: Vec, - chars: Vec, - cursor: usize, - current: Option, - col: usize, - line: usize, -} - -// Reusable pattern for the first byte of an identifier. -macro_rules! ident_start { - () => { - b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'\x80'..=b'\xff' - }; -} - -// Reusable pattern for identifier after the first byte. -macro_rules! 
ident { - () => { - b'0'..=b'9' | b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'\x80'..=b'\xff' - }; -} - -impl Lexer { - pub fn new(config: Option) -> Self { - Self { - config: config.unwrap_or_default(), - state_stack: vec![LexerState::Initial], - chars: Vec::new(), - cursor: 0, - current: None, - line: 1, - col: 1, - } - } - - pub fn tokenize>( - &mut self, - input: &B, - ) -> Result, LexerError> { - let mut tokens = Vec::new(); - self.chars = input.as_ref().to_vec(); - - self.current = self.chars.first().copied(); - - while self.current.is_some() { - match self.state_stack.last().unwrap() { - // The "Initial" state is used to parse inline HTML. It is essentially a catch-all - // state that will build up a single token buffer until it encounters an open tag - // of some description. - LexerState::Initial => { - tokens.append(&mut self.initial()?); - } - // The scripting state is entered when an open tag is encountered in the source code. - // This tells the lexer to start analysing characters at PHP tokens instead of inline HTML. - LexerState::Scripting => { - self.skip_whitespace(); - - // If we have consumed whitespace and then reached the end of the file, we should break. - if self.current.is_none() { - break; - } - - tokens.push(self.scripting()?); - } - // The "Halted" state is entered when the `__halt_compiler` token is encountered. - // In this state, all the text that follows is no longer parsed as PHP as is collected - // into a single "InlineHtml" token (kind of cheating, oh well). - LexerState::Halted => { - tokens.push(Token { - kind: TokenKind::InlineHtml(self.chars[self.cursor..].into()), - span: (self.line, self.col), - }); - break; - } - // The double quote state is entered when inside a double-quoted string that - // contains variables. - LexerState::DoubleQuote => tokens.extend(self.double_quote()?), - // LookingForProperty is entered inside double quotes, - // backticks, or a heredoc, expecting a variable name. - // If one isn't found, it switches to scripting. - LexerState::LookingForVarname => { - if let Some(token) = self.looking_for_varname() { - tokens.push(token); - } - } - // LookingForProperty is entered inside double quotes, - // backticks, or a heredoc, expecting an arrow followed by a - // property name. - LexerState::LookingForProperty => { - tokens.push(self.looking_for_property()?); - } - LexerState::VarOffset => { - if self.current.is_none() { - break; - } - - tokens.push(self.var_offset()?); - } - } - } - - Ok(tokens) - } - - fn skip_whitespace(&mut self) { - while let Some(b' ' | b'\n' | b'\r' | b'\t') = self.current { - self.next(); - } - } - - fn initial(&mut self) -> Result, LexerError> { - let inline_span = (self.line, self.col); - let mut buffer = Vec::new(); - while let Some(char) = self.current { - if self.try_read(b" Result { - let span = (self.line, self.col); - let kind = match self.peek_buf() { - [b'@', ..] => { - self.next(); - - TokenKind::At - } - [b'!', b'=', b'=', ..] => { - self.skip(3); - TokenKind::BangDoubleEquals - } - [b'!', b'=', ..] => { - self.skip(2); - TokenKind::BangEquals - } - [b'!', ..] => { - self.next(); - TokenKind::Bang - } - [b'&', b'&', ..] => { - self.skip(2); - TokenKind::BooleanAnd - } - [b'&', b'=', ..] => { - self.skip(2); - TokenKind::AmpersandEquals - } - [b'&', ..] => { - self.next(); - TokenKind::Ampersand - } - [b'?', b'>', ..] => { - // This is a close tag, we can enter "Initial" mode again. - self.skip(2); - - self.enter_state(LexerState::Initial); - - TokenKind::CloseTag - } - [b'?', b'?', b'=', ..] 
=> { - self.skip(3); - TokenKind::CoalesceEqual - } - [b'?', b'?', ..] => { - self.skip(2); - TokenKind::Coalesce - } - [b'?', b':', ..] => { - self.skip(2); - TokenKind::QuestionColon - } - [b'?', b'-', b'>', ..] => { - self.skip(3); - TokenKind::NullsafeArrow - } - [b'?', ..] => { - self.next(); - TokenKind::Question - } - [b'=', b'>', ..] => { - self.skip(2); - TokenKind::DoubleArrow - } - [b'=', b'=', b'=', ..] => { - self.skip(3); - TokenKind::TripleEquals - } - [b'=', b'=', ..] => { - self.skip(2); - TokenKind::DoubleEquals - } - [b'=', ..] => { - self.next(); - TokenKind::Equals - } - // Single quoted string. - [b'\'', ..] => { - self.next(); - self.tokenize_single_quote_string()? - } - [b'b' | b'B', b'\'', ..] => { - self.skip(2); - self.tokenize_single_quote_string()? - } - [b'"', ..] => { - self.next(); - self.tokenize_double_quote_string()? - } - [b'b' | b'B', b'"', ..] => { - self.skip(2); - self.tokenize_double_quote_string()? - } - [b'$', ident_start!(), ..] => { - self.next(); - self.tokenize_variable() - } - [b'$', ..] => { - self.next(); - TokenKind::Dollar - } - [b'.', b'=', ..] => { - self.skip(2); - TokenKind::DotEquals - } - [b'.', b'0'..=b'9', ..] => self.tokenize_number()?, - [b'.', b'.', b'.', ..] => { - self.skip(3); - TokenKind::Ellipsis - } - [b'.', ..] => { - self.next(); - TokenKind::Dot - } - &[b'0'..=b'9', ..] => self.tokenize_number()?, - &[b'\\', ident_start!(), ..] => { - self.next(); - - match self.scripting()? { - Token { - kind: - TokenKind::Identifier(ByteString(mut i)) - | TokenKind::QualifiedIdentifier(ByteString(mut i)), - .. - } => { - i.insert(0, b'\\'); - TokenKind::FullyQualifiedIdentifier(i.into()) - } - s => unreachable!("{:?}", s), - } - } - [b'\\', ..] => { - self.next(); - TokenKind::NamespaceSeparator - } - &[b @ ident_start!(), ..] => { - self.next(); - let mut qualified = false; - let mut last_was_slash = false; - - let mut buffer = vec![b]; - while let Some(next) = self.current { - if next.is_ascii_alphanumeric() || next == b'_' { - buffer.push(next); - self.next(); - last_was_slash = false; - continue; - } - - if next == b'\\' && !last_was_slash { - qualified = true; - last_was_slash = true; - buffer.push(next); - self.next(); - continue; - } - - break; - } - - if qualified { - TokenKind::QualifiedIdentifier(buffer.into()) - } else { - let kind = identifier_to_keyword(&buffer) - .unwrap_or_else(|| TokenKind::Identifier(buffer.into())); - - if kind == TokenKind::HaltCompiler { - match self.peek_buf() { - [b'(', b')', b';', ..] => { - self.skip(3); - self.enter_state(LexerState::Halted); - } - _ => return Err(LexerError::InvalidHaltCompiler), - } - } - - kind - } - } - [b'/', b'*', ..] => { - self.next(); - let mut buffer = vec![b'/']; - - while self.current.is_some() { - match self.peek_buf() { - [b'*', b'/', ..] => { - self.skip(2); - buffer.extend_from_slice(b"*/"); - break; - } - &[t, ..] => { - self.next(); - buffer.push(t); - } - [] => unreachable!(), - } - } - self.next(); - - if buffer.starts_with(b"/**") { - TokenKind::DocComment(buffer.into()) - } else { - TokenKind::Comment(buffer.into()) - } - } - [b'#', b'[', ..] => { - self.skip(2); - TokenKind::Attribute - } - &[ch @ b'/', b'/', ..] | &[ch @ b'#', ..] => { - let mut buffer = if ch == b'/' { - self.skip(2); - b"//".to_vec() - } else { - self.next(); - b"#".to_vec() - }; - - while let Some(c) = self.current { - if c == b'\n' { - break; - } - - buffer.push(c); - self.next(); - } - - self.next(); - - TokenKind::Comment(buffer.into()) - } - [b'/', b'=', ..] 
=> { - self.skip(2); - TokenKind::SlashEquals - } - [b'/', ..] => { - self.next(); - TokenKind::Slash - } - [b'*', b'*', b'=', ..] => { - self.skip(3); - TokenKind::PowEquals - } - [b'*', b'*', ..] => { - self.skip(2); - TokenKind::Pow - } - [b'*', b'=', ..] => { - self.skip(2); - TokenKind::AsteriskEqual - } - [b'*', ..] => { - self.next(); - TokenKind::Asterisk - } - [b'|', b'|', ..] => { - self.skip(2); - TokenKind::Pipe - } - [b'|', b'=', ..] => { - self.skip(2); - TokenKind::PipeEquals - } - [b'|', ..] => { - self.next(); - TokenKind::Pipe - } - [b'^', b'=', ..] => { - self.skip(2); - TokenKind::CaretEquals - } - [b'^', ..] => { - self.next(); - TokenKind::Caret - } - [b'{', ..] => { - self.next(); - self.push_state(LexerState::Scripting); - TokenKind::LeftBrace - } - [b'}', ..] => { - self.next(); - self.pop_state(); - TokenKind::RightBrace - } - [b'(', ..] => { - self.next(); - - if self.try_read(b"int)") { - self.skip(4); - TokenKind::IntCast - } else if self.try_read(b"integer)") { - self.skip(8); - TokenKind::IntegerCast - } else if self.try_read(b"bool)") { - self.skip(5); - TokenKind::BoolCast - } else if self.try_read(b"boolean)") { - self.skip(8); - TokenKind::BooleanCast - } else if self.try_read(b"float)") { - self.skip(6); - TokenKind::FloatCast - } else if self.try_read(b"double)") { - self.skip(7); - TokenKind::DoubleCast - } else if self.try_read(b"real)") { - self.skip(5); - TokenKind::RealCast - } else if self.try_read(b"string)") { - self.skip(7); - TokenKind::StringCast - } else if self.try_read(b"binary)") { - self.skip(7); - TokenKind::BinaryCast - } else if self.try_read(b"array)") { - self.skip(6); - TokenKind::ArrayCast - } else if self.try_read(b"object)") { - self.skip(7); - TokenKind::ObjectCast - } else if self.try_read(b"unset)") { - self.skip(6); - TokenKind::UnsetCast - } else { - TokenKind::LeftParen - } - } - [b')', ..] => { - self.next(); - TokenKind::RightParen - } - [b';', ..] => { - self.next(); - TokenKind::SemiColon - } - [b'+', b'+', ..] => { - self.skip(2); - TokenKind::Increment - } - [b'+', b'=', ..] => { - self.skip(2); - TokenKind::PlusEquals - } - [b'+', ..] => { - self.next(); - TokenKind::Plus - } - [b'%', b'=', ..] => { - self.skip(2); - TokenKind::PercentEquals - } - [b'%', ..] => { - self.next(); - TokenKind::Percent - } - [b'-', b'-', ..] => { - self.skip(2); - TokenKind::Decrement - } - [b'-', b'>', ..] => { - self.skip(2); - TokenKind::Arrow - } - [b'-', b'=', ..] => { - self.skip(2); - TokenKind::MinusEquals - } - [b'-', ..] => { - self.next(); - TokenKind::Minus - } - [b'<', b'<', b'<', ..] => { - // TODO: Handle both heredocs and nowdocs. - self.skip(3); - - todo!("heredocs & nowdocs"); - } - [b'<', b'<', b'=', ..] => { - self.skip(3); - - TokenKind::LeftShiftEquals - } - [b'<', b'<', ..] => { - self.skip(2); - TokenKind::LeftShift - } - [b'<', b'=', b'>', ..] => { - self.skip(3); - TokenKind::Spaceship - } - [b'<', b'=', ..] => { - self.skip(2); - TokenKind::LessThanEquals - } - [b'<', b'>', ..] => { - self.skip(2); - TokenKind::AngledLeftRight - } - [b'<', ..] => { - self.next(); - TokenKind::LessThan - } - [b'>', b'>', b'=', ..] => { - self.skip(3); - TokenKind::RightShiftEquals - } - [b'>', b'>', ..] => { - self.skip(2); - TokenKind::RightShift - } - [b'>', b'=', ..] => { - self.skip(2); - TokenKind::GreaterThanEquals - } - [b'>', ..] => { - self.next(); - TokenKind::GreaterThan - } - [b',', ..] => { - self.next(); - TokenKind::Comma - } - [b'[', ..] => { - self.next(); - TokenKind::LeftBracket - } - [b']', ..] 
=> { - self.next(); - TokenKind::RightBracket - } - [b':', b':', ..] => { - self.skip(2); - TokenKind::DoubleColon - } - [b':', ..] => { - self.next(); - TokenKind::Colon - } - &[b'~', ..] => { - self.next(); - TokenKind::BitwiseNot - } - &[b, ..] => unimplemented!( - " char: {}, line: {}, col: {}", - b as char, - self.line, - self.col - ), - // We should never reach this point since we have the empty checks surrounding - // the call to this function, but it's better to be safe than sorry. - [] => return Err(LexerError::UnexpectedEndOfFile), - }; - - Ok(Token { kind, span }) - } - - fn double_quote(&mut self) -> Result, LexerError> { - let span = (self.line, self.col); - let mut buffer = Vec::new(); - let kind = loop { - match self.peek_buf() { - [b'$', b'{', ..] => { - self.skip(2); - self.push_state(LexerState::LookingForVarname); - break TokenKind::DollarLeftBrace; - } - [b'{', b'$', ..] => { - // Intentionally only consume the left brace. - self.next(); - self.push_state(LexerState::Scripting); - break TokenKind::LeftBrace; - } - [b'"', ..] => { - self.next(); - self.enter_state(LexerState::Scripting); - break TokenKind::DoubleQuote; - } - [b'$', ident_start!(), ..] => { - self.next(); - let ident = self.consume_identifier(); - - match self.peek_buf() { - [b'[', ..] => self.push_state(LexerState::VarOffset), - [b'-', b'>', ident_start!(), ..] - | [b'?', b'-', b'>', ident_start!(), ..] => { - self.push_state(LexerState::LookingForProperty) - } - _ => {} - } - - break TokenKind::Variable(ident.into()); - } - &[b, ..] => { - self.next(); - buffer.push(b); - } - [] => return Err(LexerError::UnexpectedEndOfFile), - } - }; - - let mut tokens = Vec::new(); - if !buffer.is_empty() { - tokens.push(Token { - kind: TokenKind::StringPart(buffer.into()), - span, - }) - } - - tokens.push(Token { kind, span }); - Ok(tokens) - } - - fn looking_for_varname(&mut self) -> Option { - if let Some(ident) = self.peek_identifier() { - if let Some(b'[' | b'}') = self.peek_byte(ident.len()) { - let ident = ident.to_vec(); - let span = (self.line, self.col); - self.skip(ident.len()); - self.enter_state(LexerState::Scripting); - return Some(Token { - kind: TokenKind::Identifier(ident.into()), - span, - }); - } - } - - self.enter_state(LexerState::Scripting); - None - } - - fn looking_for_property(&mut self) -> Result { - let span = (self.line, self.col); - let kind = match self.peek_buf() { - [b'-', b'>', ..] => { - self.skip(2); - TokenKind::Arrow - } - [b'?', b'-', b'>', ..] => { - self.skip(3); - TokenKind::NullsafeArrow - } - &[ident_start!(), ..] => { - let buffer = self.consume_identifier(); - self.pop_state(); - TokenKind::Identifier(buffer.into()) - } - // Should be impossible as we already looked ahead this far inside double_quote. - _ => unreachable!(), - }; - Ok(Token { kind, span }) - } - - fn var_offset(&mut self) -> Result { - let span = (self.line, self.col); - let kind = match self.peek_buf() { - [b'$', ident_start!(), ..] => { - self.next(); - self.tokenize_variable() - } - &[b'0'..=b'9', ..] => { - // TODO: all integer literals are allowed, but only decimal integers with no underscores - // are actually treated as numbers. Others are treated as strings. - // Float literals are not allowed, but that could be handled in the parser. - self.tokenize_number()? - } - [b'[', ..] => { - self.next(); - TokenKind::LeftBracket - } - [b'-', ..] => { - self.next(); - TokenKind::Minus - } - [b']', ..] => { - self.next(); - self.pop_state(); - TokenKind::RightBracket - } - &[ident_start!(), ..] 
=> { - let label = self.consume_identifier(); - TokenKind::Identifier(label.into()) - } - &[b, ..] => unimplemented!( - " char: {}, line: {}, col: {}", - b as char, - self.line, - self.col - ), - [] => return Err(LexerError::UnexpectedEndOfFile), - }; - Ok(Token { kind, span }) - } - - fn tokenize_single_quote_string(&mut self) -> Result { - let mut buffer = Vec::new(); - - loop { - match self.peek_buf() { - [b'\'', ..] => { - self.next(); - break; - } - &[b'\\', b @ b'\'' | b @ b'\\', ..] => { - self.skip(2); - buffer.push(b); - } - &[b, ..] => { - self.next(); - buffer.push(b); - } - [] => return Err(LexerError::UnexpectedEndOfFile), - } - } - - Ok(TokenKind::LiteralString(buffer.into())) - } - - fn tokenize_double_quote_string(&mut self) -> Result { - let mut buffer = Vec::new(); - - let constant = loop { - match self.peek_buf() { - [b'"', ..] => { - self.next(); - break true; - } - &[b'\\', b @ (b'"' | b'\\' | b'$'), ..] => { - self.skip(2); - buffer.push(b); - } - &[b'\\', b'n', ..] => { - self.skip(2); - buffer.push(b'\n'); - } - &[b'\\', b'r', ..] => { - self.skip(2); - buffer.push(b'\r'); - } - &[b'\\', b't', ..] => { - self.skip(2); - buffer.push(b'\t'); - } - &[b'\\', b'v', ..] => { - self.skip(2); - buffer.push(b'\x0b'); - } - &[b'\\', b'e', ..] => { - self.skip(2); - buffer.push(b'\x1b'); - } - &[b'\\', b'f', ..] => { - self.skip(2); - buffer.push(b'\x0c'); - } - &[b'\\', b'x', b @ (b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'), ..] => { - self.skip(3); - - let mut hex = String::from(b as char); - if let Some(b @ (b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F')) = self.current { - self.next(); - hex.push(b as char); - } - - let b = u8::from_str_radix(&hex, 16).unwrap(); - buffer.push(b); - } - &[b'\\', b'u', b'{', ..] => { - self.skip(3); - - let mut code_point = String::new(); - while let Some(b @ (b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F')) = self.current { - self.next(); - code_point.push(b as char); - } - - if code_point.is_empty() || self.current != Some(b'}') { - return Err(LexerError::InvalidUnicodeEscape); - } - self.next(); - - let c = if let Ok(c) = u32::from_str_radix(&code_point, 16) { - c - } else { - return Err(LexerError::InvalidUnicodeEscape); - }; - - if let Some(c) = char::from_u32(c) { - let mut tmp = [0; 4]; - let bytes = c.encode_utf8(&mut tmp); - buffer.extend(bytes.as_bytes()); - } else { - return Err(LexerError::InvalidUnicodeEscape); - } - } - &[b'\\', b @ b'0'..=b'7', ..] => { - self.skip(2); - - let mut octal = String::from(b as char); - if let Some(b @ b'0'..=b'7') = self.current { - self.next(); - octal.push(b as char); - } - if let Some(b @ b'0'..=b'7') = self.current { - self.next(); - octal.push(b as char); - } - - if let Ok(b) = u8::from_str_radix(&octal, 8) { - buffer.push(b); - } else { - return Err(LexerError::InvalidOctalEscape); - } - } - [b'$', ident_start!(), ..] | [b'{', b'$', ..] | [b'$', b'{', ..] => { - break false; - } - &[b, ..] 
=> { - self.next(); - buffer.push(b); - } - [] => return Err(LexerError::UnexpectedEndOfFile), - } - }; - - Ok(if constant { - TokenKind::LiteralString(buffer.into()) - } else { - self.enter_state(LexerState::DoubleQuote); - TokenKind::StringPart(buffer.into()) - }) - } - - fn peek_identifier(&self) -> Option<&[u8]> { - let mut cursor = self.cursor; - if let Some(ident_start!()) = self.chars.get(cursor) { - cursor += 1; - while let Some(ident!()) = self.chars.get(cursor) { - cursor += 1; - } - Some(&self.chars[self.cursor..cursor]) - } else { - None - } - } - - fn consume_identifier(&mut self) -> Vec { - let ident = self.peek_identifier().unwrap().to_vec(); - self.skip(ident.len()); - - ident - } - - fn tokenize_variable(&mut self) -> TokenKind { - TokenKind::Variable(self.consume_identifier().into()) - } - - fn tokenize_number(&mut self) -> Result { - let mut buffer = String::new(); - - let (base, kind) = match self.peek_buf() { - [b'0', b'B' | b'b', ..] => { - self.skip(2); - (2, NumberKind::Int) - } - [b'0', b'O' | b'o', ..] => { - self.skip(2); - (8, NumberKind::Int) - } - [b'0', b'X' | b'x', ..] => { - self.skip(2); - (16, NumberKind::Int) - } - [b'0', ..] => (10, NumberKind::OctalOrFloat), - [b'.', ..] => (10, NumberKind::Float), - _ => (10, NumberKind::IntOrFloat), - }; - - if kind != NumberKind::Float { - self.read_digits(&mut buffer, base); - if kind == NumberKind::Int { - return parse_int(&buffer, base as u32); - } - } - - // Remaining cases: decimal integer, legacy octal integer, or float. - let is_float = matches!( - self.peek_buf(), - [b'.', ..] - | [b'e' | b'E', b'-' | b'+', b'0'..=b'9', ..] - | [b'e' | b'E', b'0'..=b'9', ..] - ); - if !is_float { - let base = if kind == NumberKind::OctalOrFloat { - 8 - } else { - 10 - }; - return parse_int(&buffer, base as u32); - } - - if self.current == Some(b'.') { - buffer.push('.'); - self.next(); - self.read_digits(&mut buffer, 10); - } - - if let Some(b'e' | b'E') = self.current { - buffer.push('e'); - self.next(); - if let Some(b @ (b'-' | b'+')) = self.current { - buffer.push(b as char); - self.next(); - } - self.read_digits(&mut buffer, 10); - } - - Ok(TokenKind::LiteralFloat(buffer.parse().unwrap())) - } - - fn read_digits(&mut self, buffer: &mut String, base: usize) { - if base == 16 { - self.read_digits_fn(buffer, u8::is_ascii_hexdigit); - } else { - let max = b'0' + base as u8; - self.read_digits_fn(buffer, |b| (b'0'..max).contains(b)); - }; - } - - fn read_digits_fn bool>(&mut self, buffer: &mut String, is_digit: F) { - if let Some(b) = self.current { - if is_digit(&b) { - self.next(); - buffer.push(b as char); - } else { - return; - } - } - loop { - match *self.peek_buf() { - [b, ..] if is_digit(&b) => { - self.next(); - buffer.push(b as char); - } - [b'_', b, ..] if is_digit(&b) => { - self.next(); - self.next(); - buffer.push(b as char); - } - _ => { - break; - } - } - } - } - - fn enter_state(&mut self, state: LexerState) { - *self.state_stack.last_mut().unwrap() = state; - } - - fn push_state(&mut self, state: LexerState) { - self.state_stack.push(state); - } - - fn pop_state(&mut self) { - self.state_stack.pop(); - } - - fn peek_buf(&self) -> &[u8] { - &self.chars[self.cursor..] 
- } - - fn peek_byte(&self, delta: usize) -> Option { - self.chars.get(self.cursor + delta).copied() - } - - fn try_read(&self, search: &'static [u8]) -> bool { - self.peek_buf().starts_with(search) - } - - fn skip(&mut self, count: usize) { - for _ in 0..count { - self.next(); - } - } - - fn next(&mut self) { - match self.current { - Some(b'\n') => { - self.line += 1; - self.col = 1; - } - Some(_) => self.col += 1, - _ => {} - } - self.cursor += 1; - self.current = self.chars.get(self.cursor).copied(); - } -} - -// Parses an integer literal in the given base and converts errors to LexerError. -// It returns a float token instead on overflow. -fn parse_int(buffer: &str, base: u32) -> Result { - match i64::from_str_radix(buffer, base) { - Ok(i) => Ok(TokenKind::LiteralInteger(i)), - Err(err) if err.kind() == &IntErrorKind::InvalidDigit => { - // The InvalidDigit error is only possible for legacy octal literals. - Err(LexerError::InvalidOctalLiteral) - } - Err(err) if err.kind() == &IntErrorKind::PosOverflow => { - // Parse as i128 so we can handle other bases. - // This means there's an upper limit on how large the literal can be. - let i = i128::from_str_radix(buffer, base).unwrap(); - Ok(TokenKind::LiteralFloat(i as f64)) - } - _ => Err(LexerError::UnexpectedError), - } -} - -fn identifier_to_keyword(ident: &[u8]) -> Option { - Some(match ident { - b"enddeclare" => TokenKind::EndDeclare, - b"endswitch" => TokenKind::EndSwitch, - b"endfor" => TokenKind::EndFor, - b"endwhile" => TokenKind::EndWhile, - b"endforeach" => TokenKind::EndForeach, - b"endif" => TokenKind::EndIf, - b"from" => TokenKind::From, - b"and" => TokenKind::LogicalAnd, - b"or" => TokenKind::LogicalOr, - b"xor" => TokenKind::LogicalXor, - b"print" => TokenKind::Print, - b"__halt_compiler" | b"__HALT_COMPILER" => TokenKind::HaltCompiler, - b"readonly" => TokenKind::Readonly, - b"global" => TokenKind::Global, - b"match" => TokenKind::Match, - b"abstract" => TokenKind::Abstract, - b"array" => TokenKind::Array, - b"as" => TokenKind::As, - b"break" => TokenKind::Break, - b"case" => TokenKind::Case, - b"catch" => TokenKind::Catch, - b"class" => TokenKind::Class, - b"clone" => TokenKind::Clone, - b"continue" => TokenKind::Continue, - b"const" => TokenKind::Const, - b"declare" => TokenKind::Declare, - b"default" => TokenKind::Default, - b"do" => TokenKind::Do, - b"echo" => TokenKind::Echo, - b"else" => TokenKind::Else, - b"elseif" => TokenKind::ElseIf, - b"enum" => TokenKind::Enum, - b"extends" => TokenKind::Extends, - b"false" | b"FALSE" => TokenKind::False, - b"final" => TokenKind::Final, - b"finally" => TokenKind::Finally, - b"fn" => TokenKind::Fn, - b"for" => TokenKind::For, - b"foreach" => TokenKind::Foreach, - b"function" => TokenKind::Function, - b"goto" => TokenKind::Goto, - b"if" => TokenKind::If, - b"include" => TokenKind::Include, - b"include_once" => TokenKind::IncludeOnce, - b"implements" => TokenKind::Implements, - b"interface" => TokenKind::Interface, - b"instanceof" => TokenKind::Instanceof, - b"namespace" => TokenKind::Namespace, - b"new" => TokenKind::New, - b"null" | b"NULL" => TokenKind::Null, - b"private" => TokenKind::Private, - b"protected" => TokenKind::Protected, - b"public" => TokenKind::Public, - b"require" => TokenKind::Require, - b"require_once" => TokenKind::RequireOnce, - b"return" => TokenKind::Return, - b"static" => TokenKind::Static, - b"switch" => TokenKind::Switch, - b"throw" => TokenKind::Throw, - b"trait" => TokenKind::Trait, - b"true" | b"TRUE" => TokenKind::True, - b"try" => TokenKind::Try, - 
b"use" => TokenKind::Use, - b"var" => TokenKind::Var, - b"yield" => TokenKind::Yield, - b"__DIR__" => TokenKind::DirConstant, - b"while" => TokenKind::While, - b"insteadof" => TokenKind::Insteadof, - _ => return None, - }) -} - -#[derive(Debug, Eq, PartialEq)] -enum NumberKind { - Int, - Float, - IntOrFloat, - OctalOrFloat, -} - -#[derive(Debug, Eq, PartialEq)] -pub enum LexerError { - UnexpectedEndOfFile, - UnexpectedError, - UnexpectedCharacter(u8), - InvalidHaltCompiler, - InvalidOctalEscape, - InvalidOctalLiteral, - InvalidUnicodeEscape, -} - -#[cfg(test)] -mod tests { - use super::Lexer; - use crate::{ByteString, LexerError, OpenTagKind, Token, TokenKind}; - - macro_rules! open { - () => { - TokenKind::OpenTag(OpenTagKind::Full) - }; - ($kind:expr) => { - TokenKind::OpenTag($kind) - }; - } - macro_rules! int { - ($i:expr) => { - TokenKind::LiteralInteger($i) - }; - } - - fn var>(v: B) -> TokenKind { - TokenKind::Variable(v.into()) - } - - #[test] - fn basic_tokens() { - assert_tokens("", &[open!(), TokenKind::CloseTag]); - } - - #[test] - fn close_tag_followed_by_content() { - assert_tokens( - " ", - &[ - open!(), - TokenKind::CloseTag, - TokenKind::InlineHtml(" ".into()), - ], - ); - } - - #[test] - fn inline_html() { - assert_tokens( - "Hello, world!\nb}" "#, - &[ - open!(), - TokenKind::StringPart("".into()), - TokenKind::LeftBrace, - TokenKind::Variable("a".into()), - TokenKind::Arrow, - TokenKind::Identifier("b".into()), - TokenKind::RightBrace, - TokenKind::DoubleQuote, - ], - ); - assert_tokens( - r#"b" "#, - &[ - open!(), - TokenKind::StringPart("".into()), - TokenKind::Variable("a".into()), - TokenKind::Arrow, - TokenKind::Identifier("b".into()), - TokenKind::DoubleQuote, - ], - ); - assert_tokens( - r#"" "#, - &[ - open!(), - TokenKind::StringPart("".into()), - TokenKind::Variable("a".into()), - TokenKind::StringPart("->".into()), - TokenKind::DoubleQuote, - ], - ); - assert_tokens( - r#"b" "#, - &[ - open!(), - TokenKind::StringPart("".into()), - TokenKind::Variable("a".into()), - TokenKind::NullsafeArrow, - TokenKind::Identifier("b".into()), - TokenKind::DoubleQuote, - ], - ); - assert_tokens( - r#"" "#, - &[ - open!(), - TokenKind::StringPart("".into()), - TokenKind::Variable("a".into()), - TokenKind::StringPart("?->".into()), - TokenKind::DoubleQuote, - ], - ); - assert_tokens( - r#" $", - &[open!(), TokenKind::Arrow, TokenKind::Dollar], - ); - } - - #[test] - fn math() { - assert_tokens( - ">(source: &B, expected: LexerError) { - let mut lexer = Lexer::new(None); - assert_eq!(lexer.tokenize(source), Err(expected)); - } - - fn assert_tokens>(source: &B, expected: &[TokenKind]) { - let mut kinds = vec![]; - - for token in get_tokens(source) { - kinds.push(token.kind); - } - - assert_eq!(kinds, expected); - } - - fn get_spans(source: &str) -> Vec<(usize, usize)> { - let tokens = get_tokens(source); - let mut spans = vec![]; - - for token in tokens { - spans.push(token.span); - } - - spans - } - - fn get_tokens>(source: &B) -> Vec { - let mut lexer = Lexer::new(None); - lexer.tokenize(source).unwrap() - } -} diff --git a/src/lexer/macros.rs b/src/lexer/macros.rs new file mode 100644 index 00000000..d8d2eb15 --- /dev/null +++ b/src/lexer/macros.rs @@ -0,0 +1,15 @@ +// Reusable pattern for the first byte of an identifier. +#[macro_export] +macro_rules! ident_start { + () => { + b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'\x80'..=b'\xff' + }; +} + +// Reusable pattern for identifier after the first byte. +#[macro_export] +macro_rules! 
ident { + () => { + b'0'..=b'9' | b'a'..=b'z' | b'A'..=b'Z' | b'_' | b'\x80'..=b'\xff' + }; +} diff --git a/src/lexer/mod.rs b/src/lexer/mod.rs index cea155ad..46dc993b 100644 --- a/src/lexer/mod.rs +++ b/src/lexer/mod.rs @@ -1,9 +1,1165 @@ -#![allow(clippy::module_inception)] +pub mod byte_string; +pub mod error; +mod macros; +pub mod token; -mod byte_string; -mod lexer; -mod token; +use std::num::IntErrorKind; -pub use byte_string::ByteString; -pub use lexer::{Lexer, LexerError}; -pub use token::{OpenTagKind, Span, Token, TokenKind}; +use crate::lexer::byte_string::ByteString; +use crate::lexer::error::SyntaxError; +use crate::lexer::token::OpenTagKind; +use crate::lexer::token::Span; +use crate::lexer::token::Token; +use crate::lexer::token::TokenKind; + +use crate::ident; +use crate::ident_start; + +#[derive(Debug, PartialEq, Eq)] +pub enum LexerState { + Initial, + Scripting, + Halted, + DoubleQuote, + LookingForVarname, + LookingForProperty, + VarOffset, +} + +pub struct Lexer { + state_stack: Vec, + chars: Vec, + cursor: usize, + current: Option, + span: Span, +} + +impl Default for Lexer { + fn default() -> Self { + Self::new() + } +} + +impl Lexer { + pub fn new() -> Self { + Self { + state_stack: vec![LexerState::Initial], + chars: Vec::new(), + cursor: 0, + current: None, + span: (1, 1), + } + } + + pub fn tokenize>( + &mut self, + input: &B, + ) -> Result, SyntaxError> { + let mut tokens = Vec::new(); + self.chars = input.as_ref().to_vec(); + + self.current = self.chars.first().copied(); + + while self.current.is_some() { + match self.state_stack.last().unwrap() { + // The "Initial" state is used to parse inline HTML. It is essentially a catch-all + // state that will build up a single token buffer until it encounters an open tag + // of some description. + LexerState::Initial => { + tokens.append(&mut self.initial()?); + } + // The scripting state is entered when an open tag is encountered in the source code. + // This tells the lexer to start analysing characters at PHP tokens instead of inline HTML. + LexerState::Scripting => { + self.skip_whitespace(); + + // If we have consumed whitespace and then reached the end of the file, we should break. + if self.current.is_none() { + break; + } + + tokens.push(self.scripting()?); + } + // The "Halted" state is entered when the `__halt_compiler` token is encountered. + // In this state, all the text that follows is no longer parsed as PHP as is collected + // into a single "InlineHtml" token (kind of cheating, oh well). + LexerState::Halted => { + tokens.push(Token { + kind: TokenKind::InlineHtml(self.chars[self.cursor..].into()), + span: self.span, + }); + break; + } + // The double quote state is entered when inside a double-quoted string that + // contains variables. + LexerState::DoubleQuote => tokens.extend(self.double_quote()?), + // LookingForProperty is entered inside double quotes, + // backticks, or a heredoc, expecting a variable name. + // If one isn't found, it switches to scripting. + LexerState::LookingForVarname => { + if let Some(token) = self.looking_for_varname() { + tokens.push(token); + } + } + // LookingForProperty is entered inside double quotes, + // backticks, or a heredoc, expecting an arrow followed by a + // property name. 
+ LexerState::LookingForProperty => { + tokens.push(self.looking_for_property()?); + } + LexerState::VarOffset => { + if self.current.is_none() { + break; + } + + tokens.push(self.var_offset()?); + } + } + } + + Ok(tokens) + } + + fn skip_whitespace(&mut self) { + while let Some(b' ' | b'\n' | b'\r' | b'\t') = self.current { + self.next(); + } + } + + fn initial(&mut self) -> Result, SyntaxError> { + let inline_span = self.span; + let mut buffer = Vec::new(); + while let Some(char) = self.current { + if self.try_read(b" Result { + let span = self.span; + let kind = match self.peek_buf() { + [b'@', ..] => { + self.next(); + + TokenKind::At + } + [b'!', b'=', b'=', ..] => { + self.skip(3); + TokenKind::BangDoubleEquals + } + [b'!', b'=', ..] => { + self.skip(2); + TokenKind::BangEquals + } + [b'!', ..] => { + self.next(); + TokenKind::Bang + } + [b'&', b'&', ..] => { + self.skip(2); + TokenKind::BooleanAnd + } + [b'&', b'=', ..] => { + self.skip(2); + TokenKind::AmpersandEquals + } + [b'&', ..] => { + self.next(); + TokenKind::Ampersand + } + [b'?', b'>', ..] => { + // This is a close tag, we can enter "Initial" mode again. + self.skip(2); + + self.enter_state(LexerState::Initial); + + TokenKind::CloseTag + } + [b'?', b'?', b'=', ..] => { + self.skip(3); + TokenKind::CoalesceEqual + } + [b'?', b'?', ..] => { + self.skip(2); + TokenKind::Coalesce + } + [b'?', b':', ..] => { + self.skip(2); + TokenKind::QuestionColon + } + [b'?', b'-', b'>', ..] => { + self.skip(3); + TokenKind::NullsafeArrow + } + [b'?', ..] => { + self.next(); + TokenKind::Question + } + [b'=', b'>', ..] => { + self.skip(2); + TokenKind::DoubleArrow + } + [b'=', b'=', b'=', ..] => { + self.skip(3); + TokenKind::TripleEquals + } + [b'=', b'=', ..] => { + self.skip(2); + TokenKind::DoubleEquals + } + [b'=', ..] => { + self.next(); + TokenKind::Equals + } + // Single quoted string. + [b'\'', ..] => { + self.next(); + self.tokenize_single_quote_string()? + } + [b'b' | b'B', b'\'', ..] => { + self.skip(2); + self.tokenize_single_quote_string()? + } + [b'"', ..] => { + self.next(); + self.tokenize_double_quote_string()? + } + [b'b' | b'B', b'"', ..] => { + self.skip(2); + self.tokenize_double_quote_string()? + } + [b'$', ident_start!(), ..] => { + self.next(); + self.tokenize_variable() + } + [b'$', ..] => { + self.next(); + TokenKind::Dollar + } + [b'.', b'=', ..] => { + self.skip(2); + TokenKind::DotEquals + } + [b'.', b'0'..=b'9', ..] => self.tokenize_number()?, + [b'.', b'.', b'.', ..] => { + self.skip(3); + TokenKind::Ellipsis + } + [b'.', ..] => { + self.next(); + TokenKind::Dot + } + &[b'0'..=b'9', ..] => self.tokenize_number()?, + &[b'\\', ident_start!(), ..] => { + self.next(); + + match self.scripting()? { + Token { + kind: + TokenKind::Identifier(ByteString(mut i)) + | TokenKind::QualifiedIdentifier(ByteString(mut i)), + .. + } => { + i.insert(0, b'\\'); + TokenKind::FullyQualifiedIdentifier(i.into()) + } + s => unreachable!("{:?}", s), + } + } + [b'\\', ..] => { + self.next(); + TokenKind::NamespaceSeparator + } + &[b @ ident_start!(), ..] 
=> { + self.next(); + let mut qualified = false; + let mut last_was_slash = false; + + let mut buffer = vec![b]; + while let Some(next) = self.current { + if next.is_ascii_alphanumeric() || next == b'_' { + buffer.push(next); + self.next(); + last_was_slash = false; + continue; + } + + if next == b'\\' && !last_was_slash { + qualified = true; + last_was_slash = true; + buffer.push(next); + self.next(); + continue; + } + + break; + } + + if qualified { + TokenKind::QualifiedIdentifier(buffer.into()) + } else { + let kind = identifier_to_keyword(&buffer) + .unwrap_or_else(|| TokenKind::Identifier(buffer.into())); + + if kind == TokenKind::HaltCompiler { + match self.peek_buf() { + [b'(', b')', b';', ..] => { + self.skip(3); + self.enter_state(LexerState::Halted); + } + _ => return Err(SyntaxError::InvalidHaltCompiler(self.span)), + } + } + + kind + } + } + [b'/', b'*', ..] => { + self.next(); + let mut buffer = vec![b'/']; + + while self.current.is_some() { + match self.peek_buf() { + [b'*', b'/', ..] => { + self.skip(2); + buffer.extend_from_slice(b"*/"); + break; + } + &[t, ..] => { + self.next(); + buffer.push(t); + } + [] => unreachable!(), + } + } + self.next(); + + if buffer.starts_with(b"/**") { + TokenKind::DocComment(buffer.into()) + } else { + TokenKind::Comment(buffer.into()) + } + } + [b'#', b'[', ..] => { + self.skip(2); + TokenKind::Attribute + } + &[ch @ b'/', b'/', ..] | &[ch @ b'#', ..] => { + let mut buffer = if ch == b'/' { + self.skip(2); + b"//".to_vec() + } else { + self.next(); + b"#".to_vec() + }; + + while let Some(c) = self.current { + if c == b'\n' { + break; + } + + buffer.push(c); + self.next(); + } + + self.next(); + + TokenKind::Comment(buffer.into()) + } + [b'/', b'=', ..] => { + self.skip(2); + TokenKind::SlashEquals + } + [b'/', ..] => { + self.next(); + TokenKind::Slash + } + [b'*', b'*', b'=', ..] => { + self.skip(3); + TokenKind::PowEquals + } + [b'*', b'*', ..] => { + self.skip(2); + TokenKind::Pow + } + [b'*', b'=', ..] => { + self.skip(2); + TokenKind::AsteriskEqual + } + [b'*', ..] => { + self.next(); + TokenKind::Asterisk + } + [b'|', b'|', ..] => { + self.skip(2); + TokenKind::Pipe + } + [b'|', b'=', ..] => { + self.skip(2); + TokenKind::PipeEquals + } + [b'|', ..] => { + self.next(); + TokenKind::Pipe + } + [b'^', b'=', ..] => { + self.skip(2); + TokenKind::CaretEquals + } + [b'^', ..] => { + self.next(); + TokenKind::Caret + } + [b'{', ..] => { + self.next(); + self.push_state(LexerState::Scripting); + TokenKind::LeftBrace + } + [b'}', ..] => { + self.next(); + self.pop_state(); + TokenKind::RightBrace + } + [b'(', ..] 
=> { + self.next(); + + if self.try_read(b"int)") { + self.skip(4); + TokenKind::IntCast + } else if self.try_read(b"integer)") { + self.skip(8); + TokenKind::IntegerCast + } else if self.try_read(b"bool)") { + self.skip(5); + TokenKind::BoolCast + } else if self.try_read(b"boolean)") { + self.skip(8); + TokenKind::BooleanCast + } else if self.try_read(b"float)") { + self.skip(6); + TokenKind::FloatCast + } else if self.try_read(b"double)") { + self.skip(7); + TokenKind::DoubleCast + } else if self.try_read(b"real)") { + self.skip(5); + TokenKind::RealCast + } else if self.try_read(b"string)") { + self.skip(7); + TokenKind::StringCast + } else if self.try_read(b"binary)") { + self.skip(7); + TokenKind::BinaryCast + } else if self.try_read(b"array)") { + self.skip(6); + TokenKind::ArrayCast + } else if self.try_read(b"object)") { + self.skip(7); + TokenKind::ObjectCast + } else if self.try_read(b"unset)") { + self.skip(6); + TokenKind::UnsetCast + } else { + TokenKind::LeftParen + } + } + [b')', ..] => { + self.next(); + TokenKind::RightParen + } + [b';', ..] => { + self.next(); + TokenKind::SemiColon + } + [b'+', b'+', ..] => { + self.skip(2); + TokenKind::Increment + } + [b'+', b'=', ..] => { + self.skip(2); + TokenKind::PlusEquals + } + [b'+', ..] => { + self.next(); + TokenKind::Plus + } + [b'%', b'=', ..] => { + self.skip(2); + TokenKind::PercentEquals + } + [b'%', ..] => { + self.next(); + TokenKind::Percent + } + [b'-', b'-', ..] => { + self.skip(2); + TokenKind::Decrement + } + [b'-', b'>', ..] => { + self.skip(2); + TokenKind::Arrow + } + [b'-', b'=', ..] => { + self.skip(2); + TokenKind::MinusEquals + } + [b'-', ..] => { + self.next(); + TokenKind::Minus + } + [b'<', b'<', b'<', ..] => { + // TODO: Handle both heredocs and nowdocs. + self.skip(3); + + todo!("heredocs & nowdocs"); + } + [b'<', b'<', b'=', ..] => { + self.skip(3); + + TokenKind::LeftShiftEquals + } + [b'<', b'<', ..] => { + self.skip(2); + TokenKind::LeftShift + } + [b'<', b'=', b'>', ..] => { + self.skip(3); + TokenKind::Spaceship + } + [b'<', b'=', ..] => { + self.skip(2); + TokenKind::LessThanEquals + } + [b'<', b'>', ..] => { + self.skip(2); + TokenKind::AngledLeftRight + } + [b'<', ..] => { + self.next(); + TokenKind::LessThan + } + [b'>', b'>', b'=', ..] => { + self.skip(3); + TokenKind::RightShiftEquals + } + [b'>', b'>', ..] => { + self.skip(2); + TokenKind::RightShift + } + [b'>', b'=', ..] => { + self.skip(2); + TokenKind::GreaterThanEquals + } + [b'>', ..] => { + self.next(); + TokenKind::GreaterThan + } + [b',', ..] => { + self.next(); + TokenKind::Comma + } + [b'[', ..] => { + self.next(); + TokenKind::LeftBracket + } + [b']', ..] => { + self.next(); + TokenKind::RightBracket + } + [b':', b':', ..] => { + self.skip(2); + TokenKind::DoubleColon + } + [b':', ..] => { + self.next(); + TokenKind::Colon + } + &[b'~', ..] => { + self.next(); + TokenKind::BitwiseNot + } + &[b, ..] => unimplemented!( + " char: {}, line: {}, col: {}", + b as char, + self.span.0, + self.span.1 + ), + // We should never reach this point since we have the empty checks surrounding + // the call to this function, but it's better to be safe than sorry. + [] => return Err(SyntaxError::UnexpectedEndOfFile(self.span)), + }; + + Ok(Token { kind, span }) + } + + fn double_quote(&mut self) -> Result, SyntaxError> { + let span = self.span; + let mut buffer = Vec::new(); + let kind = loop { + match self.peek_buf() { + [b'$', b'{', ..] 
=> { + self.skip(2); + self.push_state(LexerState::LookingForVarname); + break TokenKind::DollarLeftBrace; + } + [b'{', b'$', ..] => { + // Intentionally only consume the left brace. + self.next(); + self.push_state(LexerState::Scripting); + break TokenKind::LeftBrace; + } + [b'"', ..] => { + self.next(); + self.enter_state(LexerState::Scripting); + break TokenKind::DoubleQuote; + } + [b'$', ident_start!(), ..] => { + self.next(); + let ident = self.consume_identifier(); + + match self.peek_buf() { + [b'[', ..] => self.push_state(LexerState::VarOffset), + [b'-', b'>', ident_start!(), ..] + | [b'?', b'-', b'>', ident_start!(), ..] => { + self.push_state(LexerState::LookingForProperty) + } + _ => {} + } + + break TokenKind::Variable(ident.into()); + } + &[b, ..] => { + self.next(); + buffer.push(b); + } + [] => return Err(SyntaxError::UnexpectedEndOfFile(self.span)), + } + }; + + let mut tokens = Vec::new(); + if !buffer.is_empty() { + tokens.push(Token { + kind: TokenKind::StringPart(buffer.into()), + span, + }) + } + + tokens.push(Token { kind, span }); + Ok(tokens) + } + + fn looking_for_varname(&mut self) -> Option { + if let Some(ident) = self.peek_identifier() { + if let Some(b'[' | b'}') = self.peek_byte(ident.len()) { + let ident = ident.to_vec(); + let span = self.span; + self.skip(ident.len()); + self.enter_state(LexerState::Scripting); + return Some(Token { + kind: TokenKind::Identifier(ident.into()), + span, + }); + } + } + + self.enter_state(LexerState::Scripting); + None + } + + fn looking_for_property(&mut self) -> Result { + let span = self.span; + let kind = match self.peek_buf() { + [b'-', b'>', ..] => { + self.skip(2); + TokenKind::Arrow + } + [b'?', b'-', b'>', ..] => { + self.skip(3); + TokenKind::NullsafeArrow + } + &[ident_start!(), ..] => { + let buffer = self.consume_identifier(); + self.pop_state(); + TokenKind::Identifier(buffer.into()) + } + // Should be impossible as we already looked ahead this far inside double_quote. + _ => unreachable!(), + }; + Ok(Token { kind, span }) + } + + fn var_offset(&mut self) -> Result { + let span = self.span; + let kind = match self.peek_buf() { + [b'$', ident_start!(), ..] => { + self.next(); + self.tokenize_variable() + } + &[b'0'..=b'9', ..] => { + // TODO: all integer literals are allowed, but only decimal integers with no underscores + // are actually treated as numbers. Others are treated as strings. + // Float literals are not allowed, but that could be handled in the parser. + self.tokenize_number()? + } + [b'[', ..] => { + self.next(); + TokenKind::LeftBracket + } + [b'-', ..] => { + self.next(); + TokenKind::Minus + } + [b']', ..] => { + self.next(); + self.pop_state(); + TokenKind::RightBracket + } + &[ident_start!(), ..] => { + let label = self.consume_identifier(); + TokenKind::Identifier(label.into()) + } + &[b, ..] => unimplemented!( + " char: {}, line: {}, col: {}", + b as char, + self.span.0, + self.span.1 + ), + [] => return Err(SyntaxError::UnexpectedEndOfFile(self.span)), + }; + Ok(Token { kind, span }) + } + + fn tokenize_single_quote_string(&mut self) -> Result { + let mut buffer = Vec::new(); + + loop { + match self.peek_buf() { + [b'\'', ..] => { + self.next(); + break; + } + &[b'\\', b @ b'\'' | b @ b'\\', ..] => { + self.skip(2); + buffer.push(b); + } + &[b, ..] 
=> { + self.next(); + buffer.push(b); + } + [] => return Err(SyntaxError::UnexpectedEndOfFile(self.span)), + } + } + + Ok(TokenKind::LiteralString(buffer.into())) + } + + fn tokenize_double_quote_string(&mut self) -> Result { + let mut buffer = Vec::new(); + + let constant = loop { + match self.peek_buf() { + [b'"', ..] => { + self.next(); + break true; + } + &[b'\\', b @ (b'"' | b'\\' | b'$'), ..] => { + self.skip(2); + buffer.push(b); + } + &[b'\\', b'n', ..] => { + self.skip(2); + buffer.push(b'\n'); + } + &[b'\\', b'r', ..] => { + self.skip(2); + buffer.push(b'\r'); + } + &[b'\\', b't', ..] => { + self.skip(2); + buffer.push(b'\t'); + } + &[b'\\', b'v', ..] => { + self.skip(2); + buffer.push(b'\x0b'); + } + &[b'\\', b'e', ..] => { + self.skip(2); + buffer.push(b'\x1b'); + } + &[b'\\', b'f', ..] => { + self.skip(2); + buffer.push(b'\x0c'); + } + &[b'\\', b'x', b @ (b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F'), ..] => { + self.skip(3); + + let mut hex = String::from(b as char); + if let Some(b @ (b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F')) = self.current { + self.next(); + hex.push(b as char); + } + + let b = u8::from_str_radix(&hex, 16).unwrap(); + buffer.push(b); + } + &[b'\\', b'u', b'{', ..] => { + self.skip(3); + + let mut code_point = String::new(); + while let Some(b @ (b'0'..=b'9' | b'a'..=b'f' | b'A'..=b'F')) = self.current { + self.next(); + code_point.push(b as char); + } + + if code_point.is_empty() || self.current != Some(b'}') { + return Err(SyntaxError::InvalidUnicodeEscape(self.span)); + } + self.next(); + + let c = if let Ok(c) = u32::from_str_radix(&code_point, 16) { + c + } else { + return Err(SyntaxError::InvalidUnicodeEscape(self.span)); + }; + + if let Some(c) = char::from_u32(c) { + let mut tmp = [0; 4]; + let bytes = c.encode_utf8(&mut tmp); + buffer.extend(bytes.as_bytes()); + } else { + return Err(SyntaxError::InvalidUnicodeEscape(self.span)); + } + } + &[b'\\', b @ b'0'..=b'7', ..] => { + self.skip(2); + + let mut octal = String::from(b as char); + if let Some(b @ b'0'..=b'7') = self.current { + self.next(); + octal.push(b as char); + } + if let Some(b @ b'0'..=b'7') = self.current { + self.next(); + octal.push(b as char); + } + + if let Ok(b) = u8::from_str_radix(&octal, 8) { + buffer.push(b); + } else { + return Err(SyntaxError::InvalidOctalEscape(self.span)); + } + } + [b'$', ident_start!(), ..] | [b'{', b'$', ..] | [b'$', b'{', ..] => { + break false; + } + &[b, ..] => { + self.next(); + buffer.push(b); + } + [] => return Err(SyntaxError::UnexpectedEndOfFile(self.span)), + } + }; + + Ok(if constant { + TokenKind::LiteralString(buffer.into()) + } else { + self.enter_state(LexerState::DoubleQuote); + TokenKind::StringPart(buffer.into()) + }) + } + + fn peek_identifier(&self) -> Option<&[u8]> { + let mut cursor = self.cursor; + if let Some(ident_start!()) = self.chars.get(cursor) { + cursor += 1; + while let Some(ident!()) = self.chars.get(cursor) { + cursor += 1; + } + Some(&self.chars[self.cursor..cursor]) + } else { + None + } + } + + fn consume_identifier(&mut self) -> Vec { + let ident = self.peek_identifier().unwrap().to_vec(); + self.skip(ident.len()); + + ident + } + + fn tokenize_variable(&mut self) -> TokenKind { + TokenKind::Variable(self.consume_identifier().into()) + } + + fn tokenize_number(&mut self) -> Result { + let mut buffer = String::new(); + + let (base, kind) = match self.peek_buf() { + [b'0', b'B' | b'b', ..] => { + self.skip(2); + (2, NumberKind::Int) + } + [b'0', b'O' | b'o', ..] 
=> { + self.skip(2); + (8, NumberKind::Int) + } + [b'0', b'X' | b'x', ..] => { + self.skip(2); + (16, NumberKind::Int) + } + [b'0', ..] => (10, NumberKind::OctalOrFloat), + [b'.', ..] => (10, NumberKind::Float), + _ => (10, NumberKind::IntOrFloat), + }; + + if kind != NumberKind::Float { + self.read_digits(&mut buffer, base); + if kind == NumberKind::Int { + return parse_int(&buffer, base as u32, self.span); + } + } + + // Remaining cases: decimal integer, legacy octal integer, or float. + let is_float = matches!( + self.peek_buf(), + [b'.', ..] + | [b'e' | b'E', b'-' | b'+', b'0'..=b'9', ..] + | [b'e' | b'E', b'0'..=b'9', ..] + ); + if !is_float { + let base = if kind == NumberKind::OctalOrFloat { + 8 + } else { + 10 + }; + return parse_int(&buffer, base as u32, self.span); + } + + if self.current == Some(b'.') { + buffer.push('.'); + self.next(); + self.read_digits(&mut buffer, 10); + } + + if let Some(b'e' | b'E') = self.current { + buffer.push('e'); + self.next(); + if let Some(b @ (b'-' | b'+')) = self.current { + buffer.push(b as char); + self.next(); + } + self.read_digits(&mut buffer, 10); + } + + Ok(TokenKind::LiteralFloat(buffer.parse().unwrap())) + } + + fn read_digits(&mut self, buffer: &mut String, base: usize) { + if base == 16 { + self.read_digits_fn(buffer, u8::is_ascii_hexdigit); + } else { + let max = b'0' + base as u8; + self.read_digits_fn(buffer, |b| (b'0'..max).contains(b)); + }; + } + + fn read_digits_fn bool>(&mut self, buffer: &mut String, is_digit: F) { + if let Some(b) = self.current { + if is_digit(&b) { + self.next(); + buffer.push(b as char); + } else { + return; + } + } + loop { + match *self.peek_buf() { + [b, ..] if is_digit(&b) => { + self.next(); + buffer.push(b as char); + } + [b'_', b, ..] if is_digit(&b) => { + self.next(); + self.next(); + buffer.push(b as char); + } + _ => { + break; + } + } + } + } + + fn enter_state(&mut self, state: LexerState) { + *self.state_stack.last_mut().unwrap() = state; + } + + fn push_state(&mut self, state: LexerState) { + self.state_stack.push(state); + } + + fn pop_state(&mut self) { + self.state_stack.pop(); + } + + fn peek_buf(&self) -> &[u8] { + &self.chars[self.cursor..] + } + + fn peek_byte(&self, delta: usize) -> Option { + self.chars.get(self.cursor + delta).copied() + } + + fn try_read(&self, search: &'static [u8]) -> bool { + self.peek_buf().starts_with(search) + } + + fn skip(&mut self, count: usize) { + for _ in 0..count { + self.next(); + } + } + + fn next(&mut self) { + match self.current { + Some(b'\n') => { + self.span.0 += 1; + self.span.1 = 1; + } + Some(_) => self.span.1 += 1, + _ => {} + } + self.cursor += 1; + self.current = self.chars.get(self.cursor).copied(); + } +} + +// Parses an integer literal in the given base and converts errors to SyntaxError. +// It returns a float token instead on overflow. +fn parse_int(buffer: &str, base: u32, span: Span) -> Result { + match i64::from_str_radix(buffer, base) { + Ok(i) => Ok(TokenKind::LiteralInteger(i)), + Err(err) if err.kind() == &IntErrorKind::InvalidDigit => { + // The InvalidDigit error is only possible for legacy octal literals. + Err(SyntaxError::InvalidOctalLiteral(span)) + } + Err(err) if err.kind() == &IntErrorKind::PosOverflow => { + // Parse as i128 so we can handle other bases. + // This means there's an upper limit on how large the literal can be. 
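Aside: the overflow branch of parse_int, which continues just below, mirrors PHP's behaviour of quietly turning an integer literal that no longer fits in an i64 into a float. A minimal standalone sketch of that fallback, assuming nothing from this crate (the literal_to_number name is purely illustrative):

use std::num::IntErrorKind;

// Illustrative only: the same overflow-to-float fallback PHP applies to
// oversized integer literals. Not part of this crate.
fn literal_to_number(digits: &str, base: u32) -> Result<f64, String> {
    match i64::from_str_radix(digits, base) {
        // Fits in an i64 (the real lexer emits TokenKind::LiteralInteger here);
        // keep it as a number.
        Ok(i) => Ok(i as f64),
        // Too large for an i64: PHP converts the literal to a float, so
        // re-parse with a wider integer type and cast.
        Err(err) if err.kind() == &IntErrorKind::PosOverflow => {
            i128::from_str_radix(digits, base)
                .map(|i| i as f64)
                .map_err(|err| err.to_string())
        }
        Err(err) => Err(err.to_string()),
    }
}

fn main() {
    assert_eq!(literal_to_number("42", 10), Ok(42.0));
    // i64::MAX is 9223372036854775807; one more and the value becomes a float.
    assert_eq!(
        literal_to_number("9223372036854775808", 10),
        Ok(9_223_372_036_854_775_808_f64)
    );
}

Re-parsing with i128 keeps the binary, octal and hex bases working, at the cost of capping how large a literal can get before the i128 parse itself overflows, which is the trade-off the comment above calls out.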
+ let i = i128::from_str_radix(buffer, base).unwrap(); + Ok(TokenKind::LiteralFloat(i as f64)) + } + _ => Err(SyntaxError::UnexpectedError(span)), + } +} + +fn identifier_to_keyword(ident: &[u8]) -> Option { + Some(match ident { + b"enddeclare" => TokenKind::EndDeclare, + b"endswitch" => TokenKind::EndSwitch, + b"endfor" => TokenKind::EndFor, + b"endwhile" => TokenKind::EndWhile, + b"endforeach" => TokenKind::EndForeach, + b"endif" => TokenKind::EndIf, + b"from" => TokenKind::From, + b"and" => TokenKind::LogicalAnd, + b"or" => TokenKind::LogicalOr, + b"xor" => TokenKind::LogicalXor, + b"print" => TokenKind::Print, + b"__halt_compiler" | b"__HALT_COMPILER" => TokenKind::HaltCompiler, + b"readonly" => TokenKind::Readonly, + b"global" => TokenKind::Global, + b"match" => TokenKind::Match, + b"abstract" => TokenKind::Abstract, + b"array" => TokenKind::Array, + b"as" => TokenKind::As, + b"break" => TokenKind::Break, + b"case" => TokenKind::Case, + b"catch" => TokenKind::Catch, + b"class" => TokenKind::Class, + b"clone" => TokenKind::Clone, + b"continue" => TokenKind::Continue, + b"const" => TokenKind::Const, + b"declare" => TokenKind::Declare, + b"default" => TokenKind::Default, + b"do" => TokenKind::Do, + b"echo" => TokenKind::Echo, + b"else" => TokenKind::Else, + b"elseif" => TokenKind::ElseIf, + b"enum" => TokenKind::Enum, + b"extends" => TokenKind::Extends, + b"false" | b"FALSE" => TokenKind::False, + b"final" => TokenKind::Final, + b"finally" => TokenKind::Finally, + b"fn" => TokenKind::Fn, + b"for" => TokenKind::For, + b"foreach" => TokenKind::Foreach, + b"function" => TokenKind::Function, + b"goto" => TokenKind::Goto, + b"if" => TokenKind::If, + b"include" => TokenKind::Include, + b"include_once" => TokenKind::IncludeOnce, + b"implements" => TokenKind::Implements, + b"interface" => TokenKind::Interface, + b"instanceof" => TokenKind::Instanceof, + b"namespace" => TokenKind::Namespace, + b"new" => TokenKind::New, + b"null" | b"NULL" => TokenKind::Null, + b"private" => TokenKind::Private, + b"protected" => TokenKind::Protected, + b"public" => TokenKind::Public, + b"require" => TokenKind::Require, + b"require_once" => TokenKind::RequireOnce, + b"return" => TokenKind::Return, + b"static" => TokenKind::Static, + b"switch" => TokenKind::Switch, + b"throw" => TokenKind::Throw, + b"trait" => TokenKind::Trait, + b"true" | b"TRUE" => TokenKind::True, + b"try" => TokenKind::Try, + b"use" => TokenKind::Use, + b"var" => TokenKind::Var, + b"yield" => TokenKind::Yield, + b"__DIR__" => TokenKind::DirConstant, + b"while" => TokenKind::While, + b"insteadof" => TokenKind::Insteadof, + _ => return None, + }) +} + +#[derive(Debug, Eq, PartialEq)] +enum NumberKind { + Int, + Float, + IntOrFloat, + OctalOrFloat, +} diff --git a/src/lexer/token.rs b/src/lexer/token.rs index 14295aa2..fe55a8c1 100644 --- a/src/lexer/token.rs +++ b/src/lexer/token.rs @@ -1,6 +1,6 @@ use std::fmt::Display; -use crate::ByteString; +use crate::lexer::byte_string::ByteString; pub type Span = (usize, usize); diff --git a/src/lib.rs b/src/lib.rs index 1709348e..8f934357 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,9 +1,3 @@ -mod ast; -mod lexer; -mod parser; - -pub use ast::*; -pub use lexer::*; -pub use parser::error::ParseError; -pub use parser::error::ParseResult; -pub use parser::Parser; +pub mod lexer; +pub mod parser; +pub mod prelude; diff --git a/src/main.rs b/src/main.rs index a001e08c..676b7437 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,16 +1,44 @@ -use php_parser_rs::{Lexer, Parser}; +use php_parser_rs::prelude::Lexer; 
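Aside: with the crate now exposing lexer, parser and prelude as public modules, the pipeline in this main.rs boils down to lex, then parse. A condensed consumer sketch using only the calls visible in the patch, with the match-based error reporting collapsed into expect for brevity:

use php_parser_rs::prelude::Lexer;
use php_parser_rs::prelude::Parser;

fn main() {
    // Lex the raw source bytes into tokens, then hand those tokens to the
    // parser to build the AST, the same two steps the patched main.rs performs.
    let source = "<?php echo 1 + 2;";

    let mut lexer = Lexer::new();
    let tokens = lexer.tokenize(source.as_bytes()).expect("lexing failed");

    let mut parser = Parser::new(None);
    let ast = parser.parse(tokens).expect("parsing failed");

    dbg!(ast);
}

The None argument to Parser::new is carried over unchanged from the patched main.rs; only the Lexer constructor lost its argument in this refactor.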
+use php_parser_rs::prelude::Parser; fn main() { - let file = std::env::args().nth(1).unwrap(); - let contents = std::fs::read_to_string(&file).unwrap(); + let file = match std::env::args().nth(1) { + Some(file) => file, + None => { + println!("Usage: php-parser [file]"); - println!("> Parsing {}", file); + ::std::process::exit(0); + } + }; - let mut lexer = Lexer::new(None); - let tokens = lexer.tokenize(contents.as_bytes()).unwrap(); + let contents = match std::fs::read_to_string(&file) { + Ok(contents) => contents, + Err(error) => { + println!("Failed to read file: {}", error); + + ::std::process::exit(1); + } + }; + + let mut lexer = Lexer::new(); + let tokens = match lexer.tokenize(contents.as_bytes()) { + Ok(tokens) => tokens, + Err(error) => { + println!("{}", error); + + ::std::process::exit(1); + } + }; let mut parser = Parser::new(None); - let ast = parser.parse(tokens).unwrap(); + let ast = match parser.parse(tokens) { + Ok(ast) => ast, + Err(error) => { + println!("{}", error); + + ::std::process::exit(1); + } + }; dbg!(ast); } diff --git a/src/ast.rs b/src/parser/ast.rs similarity index 98% rename from src/ast.rs rename to src/parser/ast.rs index 3a982fa2..f61ad1c5 100644 --- a/src/ast.rs +++ b/src/parser/ast.rs @@ -1,6 +1,7 @@ use std::fmt::Display; -use crate::{ByteString, TokenKind}; +use crate::lexer::byte_string::ByteString; +use crate::lexer::token::TokenKind; pub type Block = Vec; pub type Program = Block; @@ -103,18 +104,6 @@ impl From<&ByteString> for Identifier { } } -impl From<&[u8]> for Identifier { - fn from(name: &[u8]) -> Self { - Self::from(ByteString::from(name)) - } -} - -impl From<&str> for Identifier { - fn from(name: &str) -> Self { - Self::from(ByteString::from(name)) - } -} - pub type ParamList = Vec; #[derive(Debug, PartialEq, Clone)] diff --git a/src/parser/block.rs b/src/parser/block.rs index ca9654ce..d1d4a3db 100644 --- a/src/parser/block.rs +++ b/src/parser/block.rs @@ -1,11 +1,10 @@ -use crate::TokenKind; - -use crate::Block; - -use super::{ParseResult, Parser}; +use crate::lexer::token::TokenKind; +use crate::parser::ast::Block; +use crate::parser::error::ParseResult; +use crate::parser::Parser; impl Parser { - pub(crate) fn block(&mut self, until: &TokenKind) -> ParseResult { + pub(in crate::parser) fn block(&mut self, until: &TokenKind) -> ParseResult { self.skip_comments(); let mut block = Block::new(); diff --git a/src/parser/classish.rs b/src/parser/classish.rs index 7c5ae80b..a7c774e8 100644 --- a/src/parser/classish.rs +++ b/src/parser/classish.rs @@ -1,17 +1,18 @@ -use super::ParseResult; +use crate::lexer::token::TokenKind; +use crate::parser::ast::BackedEnumType; +use crate::parser::ast::Block; +use crate::parser::ast::ClassFlag; +use crate::parser::ast::Expression; +use crate::parser::ast::Identifier; +use crate::parser::ast::Statement; +use crate::parser::error::ParseResult; +use crate::parser::Parser; + use crate::expect_token; use crate::expected_token_err; -use crate::BackedEnumType; -use crate::Block; -use crate::ClassFlag; -use crate::Expression; -use crate::Identifier; -use crate::Parser; -use crate::Statement; -use crate::TokenKind; impl Parser { - pub(crate) fn class_definition(&mut self) -> ParseResult { + pub(in crate::parser) fn class_definition(&mut self) -> ParseResult { let flags: Vec = self.class_flags()?.iter().map(|f| f.into()).collect(); expect_token!([TokenKind::Class], self, ["`class`"]); @@ -58,7 +59,7 @@ impl Parser { }) } - pub(crate) fn interface_definition(&mut self) -> ParseResult { + pub(in crate::parser) fn 
interface_definition(&mut self) -> ParseResult { expect_token!([TokenKind::Interface], self, ["`interface`"]); let name = self.ident()?; @@ -95,7 +96,7 @@ impl Parser { }) } - pub(crate) fn trait_definition(&mut self) -> ParseResult { + pub(in crate::parser) fn trait_definition(&mut self) -> ParseResult { expect_token!([TokenKind::Trait], self, ["`trait`"]); let name = self.ident()?; @@ -121,7 +122,7 @@ impl Parser { }) } - pub(crate) fn anonymous_class_definition(&mut self) -> ParseResult { + pub(in crate::parser) fn anonymous_class_definition(&mut self) -> ParseResult { self.next(); expect_token!([TokenKind::Class], self, ["`class`"]); @@ -173,7 +174,7 @@ impl Parser { }) } - pub(crate) fn enum_definition(&mut self) -> ParseResult { + pub(in crate::parser) fn enum_definition(&mut self) -> ParseResult { self.next(); let name = self.ident()?; diff --git a/src/parser/classish_statement.rs b/src/parser/classish_statement.rs index b2234b01..96d086e7 100644 --- a/src/parser/classish_statement.rs +++ b/src/parser/classish_statement.rs @@ -1,15 +1,16 @@ -use super::ParseResult; +use crate::lexer::token::TokenKind; +use crate::parser::ast::ClassFlag; +use crate::parser::ast::Identifier; +use crate::parser::ast::MethodFlag; +use crate::parser::ast::Statement; +use crate::parser::ast::TraitAdaptation; +use crate::parser::error::ParseError; +use crate::parser::error::ParseResult; +use crate::parser::precedence::Precedence; +use crate::parser::Parser; + use crate::expect_token; use crate::expected_token_err; -use crate::parser::precedence::Precedence; -use crate::ClassFlag; -use crate::Identifier; -use crate::MethodFlag; -use crate::ParseError; -use crate::Parser; -use crate::Statement; -use crate::TokenKind; -use crate::TraitAdaptation; #[derive(Debug)] pub enum ClassishDefinitionType { @@ -21,11 +22,14 @@ pub enum ClassishDefinitionType { } impl Parser { - pub(crate) fn class_statement(&mut self, flags: Vec) -> ParseResult { + pub(in crate::parser) fn class_statement( + &mut self, + flags: Vec, + ) -> ParseResult { self.complete_class_statement(ClassishDefinitionType::Class(flags)) } - pub(crate) fn interface_statement(&mut self) -> ParseResult { + pub(in crate::parser) fn interface_statement(&mut self) -> ParseResult { if self.current.kind == TokenKind::Const { return self.parse_classish_const(vec![]); } @@ -46,15 +50,15 @@ impl Parser { } } - pub(crate) fn trait_statement(&mut self) -> ParseResult { + pub(in crate::parser) fn trait_statement(&mut self) -> ParseResult { self.complete_class_statement(ClassishDefinitionType::Trait) } - pub(crate) fn anonymous_class_statement(&mut self) -> ParseResult { + pub(in crate::parser) fn anonymous_class_statement(&mut self) -> ParseResult { self.complete_class_statement(ClassishDefinitionType::AnonymousClass) } - pub(crate) fn enum_statement(&mut self, backed: bool) -> ParseResult { + pub(in crate::parser) fn enum_statement(&mut self, backed: bool) -> ParseResult { if self.current.kind == TokenKind::Case { self.next(); diff --git a/src/parser/comments.rs b/src/parser/comments.rs index 174550b5..29cdc313 100644 --- a/src/parser/comments.rs +++ b/src/parser/comments.rs @@ -1,9 +1,9 @@ -use crate::{Token, TokenKind}; - -use crate::Parser; +use crate::lexer::token::Token; +use crate::lexer::token::TokenKind; +use crate::parser::Parser; impl Parser { - pub(crate) fn skip_comments(&mut self) { + pub(in crate::parser) fn skip_comments(&mut self) { while matches!( self.current.kind, TokenKind::Comment(_) | TokenKind::DocComment(_) @@ -12,7 +12,7 @@ impl Parser { } } 
- pub(crate) fn gather_comments(&mut self) { + pub(in crate::parser) fn gather_comments(&mut self) { while matches!( self.current.kind, TokenKind::Comment(_) | TokenKind::DocComment(_) @@ -22,7 +22,7 @@ impl Parser { } } - pub(crate) fn clear_comments(&mut self) -> Vec { + pub(in crate::parser) fn clear_comments(&mut self) -> Vec { let c = self.comments.clone(); self.comments = vec![]; c diff --git a/src/parser/error.rs b/src/parser/error.rs index abfb2fdd..8dd99607 100644 --- a/src/parser/error.rs +++ b/src/parser/error.rs @@ -1,11 +1,11 @@ use std::fmt::Display; -use crate::Span; -use crate::Type; +use crate::lexer::token::Span; +use crate::parser::ast::Type; pub type ParseResult = Result; -#[derive(Debug)] +#[derive(Debug, Eq, PartialEq)] pub enum ParseError { ExpectedToken(Vec, Option, Span), MultipleModifiers(String, Span), @@ -13,10 +13,7 @@ pub enum ParseError { UnexpectedToken(String, Span), UnexpectedEndOfFile, StandaloneTypeUsedInCombination(Type, Span), - InvalidClassStatement(String, Span), - ConstantInTrait(Span), TryWithoutCatchOrFinally(Span), - InvalidCatchArgumentType(Span), VariadicPromotedProperty(Span), PromotedPropertyOutsideConstructor(Span), PromotedPropertyOnAbstractConstructor(Span), @@ -49,15 +46,12 @@ impl Display for ParseError { Self::MultipleModifiers(modifier, span) => write!(f, "Parse Error: Multiple {} modifiers are not allowed on line {} column {}", modifier, span.0, span.1), Self::MultipleAccessModifiers( span) => write!(f, "Parse Error: Multiple access type modifiers are not allowed on line {} column {}", span.0, span.1), Self::UnexpectedToken(message, span) => write!(f, "Parse error: unexpected token {} on line {} column {}", message, span.0, span.1), - Self::InvalidClassStatement(message, span) => write!(f, "Parse error: {} on line {} column {}", message, span.0, span.1), Self::UnexpectedEndOfFile => write!(f, "Parse error: unexpected end of file."), Self::FinalModifierOnAbstractClassMember(span) => write!(f, "Parse error: Cannot use the final modifier on an abstract class member on line {} column {}", span.0, span.1), Self::StaticModifierOnConstant(span) => write!(f, "Parse error: Cannot use 'static' as constant modifier on line {} column {}", span.0, span.1), Self::ReadonlyModifierOnConstant(span) => write!(f, "Parse error: Cannot use 'readonly' as constant modifier on line {} column {}", span.0, span.1), Self::FinalModifierOnPrivateConstant(span) => write!(f, "Parse error: Private constant cannot be final as it is not visible to other classes on line {} column {}", span.0, span.1), - Self::ConstantInTrait(span) => write!(f, "Parse error: traits cannot contain constants on line {} column {}", span.0, span.1), Self::TryWithoutCatchOrFinally(span) => write!(f, "Parse error: cannot use try without catch or finally on line {} column {}", span.0, span.1), - Self::InvalidCatchArgumentType(span) => write!(f, "Parse error: catch types must either describe a single type or union of types on line {} column {}", span.0, span.1), Self::StandaloneTypeUsedInCombination(r#type, span) => write!(f, "Parse error: {} can only be used as a standalone type on line {} column {}", r#type, span.0, span.1), Self::VariadicPromotedProperty(span) => write!(f, "Parse error: Cannot declare variadic promoted property on line {} column {}", span.0, span.1), Self::PromotedPropertyOutsideConstructor(span) => write!(f, "Parse error: Cannot declare promoted property outside a constructor on line {} column {}", span.0, span.1), diff --git a/src/parser/flags.rs b/src/parser/flags.rs 
index 643f86c6..278b8094 100644 --- a/src/parser/flags.rs +++ b/src/parser/flags.rs @@ -1,7 +1,7 @@ -use super::ParseResult; -use crate::ParseError; -use crate::Parser; -use crate::TokenKind; +use crate::lexer::token::TokenKind; +use crate::parser::error::ParseError; +use crate::parser::error::ParseResult; +use crate::parser::Parser; enum FlagTarget { Class, @@ -11,14 +11,14 @@ enum FlagTarget { } impl Parser { - pub(crate) fn class_flags(&mut self) -> ParseResult> { + pub(in crate::parser) fn class_flags(&mut self) -> ParseResult> { self.collect( vec![TokenKind::Final, TokenKind::Abstract, TokenKind::Readonly], FlagTarget::Class, ) } - pub(crate) fn class_members_flags(&mut self) -> ParseResult> { + pub(in crate::parser) fn class_members_flags(&mut self) -> ParseResult> { self.collect( vec![ TokenKind::Final, @@ -33,7 +33,7 @@ impl Parser { ) } - pub(crate) fn enum_members_flags(&mut self) -> ParseResult> { + pub(in crate::parser) fn enum_members_flags(&mut self) -> ParseResult> { self.collect( vec![ TokenKind::Final, @@ -46,7 +46,7 @@ impl Parser { ) } - pub(crate) fn promoted_property_flags(&mut self) -> ParseResult> { + pub(in crate::parser) fn promoted_property_flags(&mut self) -> ParseResult> { self.collect( vec![ TokenKind::Private, diff --git a/src/parser/functions.rs b/src/parser/functions.rs index 57f88f86..eac96e29 100644 --- a/src/parser/functions.rs +++ b/src/parser/functions.rs @@ -1,16 +1,16 @@ -use super::classish_statement::ClassishDefinitionType; -use super::params::ParamPosition; -use super::ParseResult; -use crate::ByteString; -use crate::ClassFlag; -use crate::MethodFlag; -use crate::ParseError; -use crate::Parser; -use crate::Statement; -use crate::TokenKind; +use crate::lexer::byte_string::ByteString; +use crate::lexer::token::TokenKind; +use crate::parser::ast::ClassFlag; +use crate::parser::ast::MethodFlag; +use crate::parser::ast::Statement; +use crate::parser::classish_statement::ClassishDefinitionType; +use crate::parser::error::ParseError; +use crate::parser::error::ParseResult; +use crate::parser::params::ParamPosition; +use crate::parser::Parser; impl Parser { - pub(crate) fn function(&mut self) -> ParseResult { + pub(in crate::parser) fn function(&mut self) -> ParseResult { self.next(); let by_ref = if self.current.kind == TokenKind::Ampersand { @@ -51,7 +51,7 @@ impl Parser { }) } - pub(crate) fn method( + pub(in crate::parser) fn method( &mut self, class_type: ClassishDefinitionType, flags: Vec, diff --git a/src/parser/ident.rs b/src/parser/ident.rs index 7b4f854c..21aa8f50 100644 --- a/src/parser/ident.rs +++ b/src/parser/ident.rs @@ -1,18 +1,20 @@ -use super::ParseResult; +use crate::lexer::byte_string::ByteString; +use crate::lexer::token::TokenKind; +use crate::parser::error::ParseResult; +use crate::parser::Parser; + use crate::expect_token; -use crate::Parser; -use crate::{ByteString, TokenKind}; impl Parser { /// Expect an unqualified identifier such as Foo or Bar. - pub(crate) fn ident(&mut self) -> ParseResult { + pub(in crate::parser) fn ident(&mut self) -> ParseResult { Ok(expect_token!([ TokenKind::Identifier(identifier) => identifier, ], self, "an identifier")) } /// Expect an unqualified or qualified identifier such as Foo, Bar or Foo\Bar. 
- pub(crate) fn name(&mut self) -> ParseResult { + pub(in crate::parser) fn name(&mut self) -> ParseResult { Ok(expect_token!([ TokenKind::Identifier(identifier) => identifier, TokenKind::QualifiedIdentifier(qualified) => qualified, @@ -20,7 +22,7 @@ impl Parser { } /// Expect an unqualified, qualified or fully qualified identifier such as Foo, Foo\Bar or \Foo\Bar. - pub(crate) fn full_name(&mut self) -> ParseResult { + pub(in crate::parser) fn full_name(&mut self) -> ParseResult { Ok(expect_token!([ TokenKind::Identifier(identifier) => identifier, TokenKind::QualifiedIdentifier(qualified) => qualified, @@ -28,13 +30,13 @@ impl Parser { ], self, "an identifier")) } - pub(crate) fn var(&mut self) -> ParseResult { + pub(in crate::parser) fn var(&mut self) -> ParseResult { Ok(expect_token!([ TokenKind::Variable(v) => v, ], self, "a variable")) } - pub(crate) fn full_name_maybe_type_keyword(&mut self) -> ParseResult { + pub(in crate::parser) fn full_name_maybe_type_keyword(&mut self) -> ParseResult { match self.current.kind { TokenKind::Array | TokenKind::Callable => { let r = Ok(self.current.kind.to_string().into()); @@ -45,7 +47,7 @@ impl Parser { } } - pub(crate) fn type_with_static(&mut self) -> ParseResult { + pub(in crate::parser) fn type_with_static(&mut self) -> ParseResult { Ok(match self.current.kind { TokenKind::Static | TokenKind::Null | TokenKind::True | TokenKind::False => { let str = self.current.kind.to_string(); @@ -56,7 +58,7 @@ impl Parser { }) } - pub(crate) fn ident_maybe_reserved(&mut self) -> ParseResult { + pub(in crate::parser) fn ident_maybe_reserved(&mut self) -> ParseResult { match self.current.kind { _ if is_reserved_ident(&self.current.kind) => { let string = self.current.kind.to_string().into(); @@ -68,7 +70,7 @@ impl Parser { } } -pub(crate) fn is_reserved_ident(kind: &TokenKind) -> bool { +pub fn is_reserved_ident(kind: &TokenKind) -> bool { matches!( kind, TokenKind::Static diff --git a/src/parser/mod.rs b/src/parser/mod.rs index 253f30d3..54b4d660 100644 --- a/src/parser/mod.rs +++ b/src/parser/mod.rs @@ -3,22 +3,21 @@ use std::vec::IntoIter; use crate::expect_literal; use crate::expect_token; use crate::expected_token_err; -use crate::lexer::{Token, TokenKind}; +use crate::lexer::byte_string::ByteString; +use crate::lexer::token::Token; +use crate::lexer::token::TokenKind; +use crate::parser::ast::{ + ArrayItem, Block, Case, Catch, ClosureUse, Constant, DeclareItem, ElseIf, Expression, + IncludeKind, MagicConst, MatchArm, Program, Statement, StaticVar, StringPart, + TryBlockCaughtType, Type, Use, UseKind, +}; use crate::parser::error::ParseError; use crate::parser::error::ParseResult; +use crate::parser::ident::is_reserved_ident; +use crate::parser::params::ParamPosition; use crate::parser::precedence::{Associativity, Precedence}; -use crate::{ - ast::{ - ArrayItem, ClosureUse, Constant, DeclareItem, ElseIf, IncludeKind, MagicConst, StaticVar, - StringPart, Use, UseKind, - }, - Block, Case, Catch, Expression, MatchArm, Program, Statement, Type, -}; -use crate::{ByteString, TryBlockCaughtType}; - -use self::ident::is_reserved_ident; -use self::params::ParamPosition; +pub mod ast; pub mod error; mod block; diff --git a/src/parser/params.rs b/src/parser/params.rs index 549a8bbf..1d1a7dba 100644 --- a/src/parser/params.rs +++ b/src/parser/params.rs @@ -1,12 +1,15 @@ -use crate::expect_token; +use crate::lexer::token::TokenKind; +use crate::parser::ast::Arg; +use crate::parser::ast::Expression; +use crate::parser::ast::Param; +use crate::parser::ast::ParamList; 
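Aside: ident(), name(), var() and the other helpers above all lean on the same expect-token pattern, namely look at the current token, take its payload when the kind matches, and otherwise report what was expected. A toy, self-contained sketch of that pattern follows; it uses a throwaway Tok enum and plain string errors instead of the crate's expect_token! macro, TokenKind and span-carrying ParseError:

#[derive(Debug)]
enum Tok {
    Identifier(String),
    SemiColon,
}

// Expect-token pattern: consume the next token if it is an identifier and
// return its name, otherwise describe what was expected and what was found.
fn expect_ident(tokens: &mut std::vec::IntoIter<Tok>) -> Result<String, String> {
    match tokens.next() {
        Some(Tok::Identifier(name)) => Ok(name),
        Some(other) => Err(format!("expected an identifier, found {:?}", other)),
        None => Err("expected an identifier, found end of file".to_string()),
    }
}

fn main() {
    let mut tokens = vec![Tok::Identifier("Foo".to_string()), Tok::SemiColon].into_iter();
    assert_eq!(expect_ident(&mut tokens), Ok("Foo".to_string()));
    assert!(expect_ident(&mut tokens).is_err()); // the next token is the semicolon
}

The real helpers additionally accept qualified and fully qualified names (name, full_name) and reserved keywords (ident_maybe_reserved), but the control flow has the same shape.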
+use crate::parser::ast::PropertyFlag; use crate::parser::error::ParseError; -use crate::TokenKind; -use crate::{ - ast::{Arg, ParamList, PropertyFlag}, - Expression, Param, -}; +use crate::parser::error::ParseResult; +use crate::parser::precedence::Precedence; +use crate::parser::Parser; -use super::{precedence::Precedence, ParseResult, Parser}; +use crate::expect_token; #[derive(Debug)] pub enum ParamPosition { @@ -16,7 +19,10 @@ pub enum ParamPosition { } impl Parser { - pub(crate) fn param_list(&mut self, position: ParamPosition) -> Result { + pub(in crate::parser) fn param_list( + &mut self, + position: ParamPosition, + ) -> Result { let mut params = ParamList::new(); while !self.is_eof() && self.current.kind != TokenKind::RightParen { @@ -105,7 +111,7 @@ impl Parser { Ok(params) } - pub(crate) fn args_list(&mut self) -> ParseResult> { + pub(in crate::parser) fn args_list(&mut self) -> ParseResult> { let mut args = Vec::new(); while !self.is_eof() && self.current.kind != TokenKind::RightParen { diff --git a/src/parser/precedence.rs b/src/parser/precedence.rs index fa5c10e9..16cf1adf 100644 --- a/src/parser/precedence.rs +++ b/src/parser/precedence.rs @@ -1,4 +1,4 @@ -use crate::TokenKind; +use crate::lexer::token::TokenKind; #[allow(dead_code)] #[derive(Debug, PartialEq, Eq, PartialOrd, Ord)] diff --git a/src/parser/punc.rs b/src/parser/punc.rs index a8d6bb78..1eb5b9dc 100644 --- a/src/parser/punc.rs +++ b/src/parser/punc.rs @@ -1,42 +1,41 @@ -use crate::TokenKind; +use crate::lexer::token::TokenKind; +use crate::parser::error::ParseResult; +use crate::parser::Parser; use crate::expect_token; -use crate::Parser; - -use super::ParseResult; impl Parser { - pub(crate) fn semi(&mut self) -> ParseResult<()> { + pub(in crate::parser) fn semi(&mut self) -> ParseResult<()> { expect_token!([TokenKind::SemiColon], self, "`;`"); Ok(()) } - pub(crate) fn lbrace(&mut self) -> ParseResult<()> { + pub(in crate::parser) fn lbrace(&mut self) -> ParseResult<()> { expect_token!([TokenKind::LeftBrace], self, "`{`"); Ok(()) } - pub(crate) fn rbrace(&mut self) -> ParseResult<()> { + pub(in crate::parser) fn rbrace(&mut self) -> ParseResult<()> { expect_token!([TokenKind::RightBrace], self, "`}`"); Ok(()) } - pub(crate) fn lparen(&mut self) -> ParseResult<()> { + pub(in crate::parser) fn lparen(&mut self) -> ParseResult<()> { expect_token!([TokenKind::LeftParen], self, "`(`"); Ok(()) } - pub(crate) fn rparen(&mut self) -> ParseResult<()> { + pub(in crate::parser) fn rparen(&mut self) -> ParseResult<()> { expect_token!([TokenKind::RightParen], self, "`)`"); Ok(()) } - pub(crate) fn rbracket(&mut self) -> ParseResult<()> { + pub(in crate::parser) fn rbracket(&mut self) -> ParseResult<()> { expect_token!([TokenKind::RightBracket], self, "`]`"); Ok(()) } - pub(crate) fn optional_comma(&mut self) -> ParseResult<()> { + pub(in crate::parser) fn optional_comma(&mut self) -> ParseResult<()> { if self.current.kind == TokenKind::Comma { expect_token!([TokenKind::Comma], self, "`,`"); } @@ -44,7 +43,7 @@ impl Parser { Ok(()) } - pub(crate) fn colon(&mut self) -> ParseResult<()> { + pub(in crate::parser) fn colon(&mut self) -> ParseResult<()> { expect_token!([TokenKind::Colon], self, "`:`"); Ok(()) diff --git a/src/parser/vars.rs b/src/parser/vars.rs index 021859a5..29da2f78 100644 --- a/src/parser/vars.rs +++ b/src/parser/vars.rs @@ -1,9 +1,12 @@ -use super::{ParseError, ParseResult, Precedence}; -use crate::TokenKind; -use crate::{Expression, Parser}; +use crate::lexer::token::TokenKind; +use 
crate::parser::ast::Expression; +use crate::parser::error::ParseError; +use crate::parser::error::ParseResult; +use crate::parser::precedence::Precedence; +use crate::parser::Parser; impl Parser { - pub(crate) fn dynamic_variable(&mut self) -> ParseResult { + pub(in crate::parser) fn dynamic_variable(&mut self) -> ParseResult { self.next(); Ok(match &self.current.kind { diff --git a/src/prelude.rs b/src/prelude.rs new file mode 100644 index 00000000..8298ab6a --- /dev/null +++ b/src/prelude.rs @@ -0,0 +1,8 @@ +pub use crate::lexer::byte_string::*; +pub use crate::lexer::error::*; +pub use crate::lexer::token::*; +pub use crate::lexer::*; + +pub use crate::parser::ast::*; +pub use crate::parser::error::*; +pub use crate::parser::*; diff --git a/tests/0001/tokens.txt b/tests/0001/tokens.txt new file mode 100644 index 00000000..148e2e88 --- /dev/null +++ b/tests/0001/tokens.txt @@ -0,0 +1,452 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 13, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 3, + 14, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 3, + 21, + ), + }, + Token { + kind: Equals, + span: ( + 3, + 24, + ), + }, + Token { + kind: LiteralString( + "", + ), + span: ( + 3, + 26, + ), + }, + Token { + kind: Comma, + span: ( + 3, + 28, + ), + }, + Token { + kind: Array, + span: ( + 3, + 30, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 3, + 36, + ), + }, + Token { + kind: Equals, + span: ( + 3, + 39, + ), + }, + Token { + kind: LeftBracket, + span: ( + 3, + 41, + ), + }, + Token { + kind: RightBracket, + span: ( + 3, + 42, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 43, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 44, + ), + }, + Token { + kind: Identifier( + "never", + ), + span: ( + 3, + 46, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 52, + ), + }, + Token { + kind: Identifier( + "exit", + ), + span: ( + 4, + 5, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 9, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 4, + 10, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 11, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 12, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, + Token { + kind: Function, + span: ( + 7, + 1, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 7, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 7, + 13, + ), + }, + Token { + kind: Identifier( + "int", + ), + span: ( + 7, + 14, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 7, + 18, + ), + }, + Token { + kind: Comma, + span: ( + 7, + 20, + ), + }, + Token { + kind: Identifier( + "float", + ), + span: ( + 7, + 22, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 7, + 28, + ), + }, + Token { + kind: Comma, + span: ( + 7, + 30, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 7, + 32, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 7, + 39, + ), + }, + Token { + kind: Comma, + span: ( + 7, + 41, + ), + }, + Token { + kind: True, + span: ( + 7, + 43, + ), + }, + Token { + kind: Variable( + "d", + ), + span: ( + 7, + 48, + ), + }, + Token { + kind: Comma, + span: ( + 7, + 50, + ), + }, + Token { + kind: False, + span: ( + 7, + 52, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( 
+ 7, + 58, + ), + }, + Token { + kind: Comma, + span: ( + 7, + 60, + ), + }, + Token { + kind: Null, + span: ( + 7, + 62, + ), + }, + Token { + kind: Variable( + "f", + ), + span: ( + 7, + 67, + ), + }, + Token { + kind: RightParen, + span: ( + 7, + 69, + ), + }, + Token { + kind: Colon, + span: ( + 7, + 70, + ), + }, + Token { + kind: Null, + span: ( + 7, + 72, + ), + }, + Token { + kind: Pipe, + span: ( + 7, + 76, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 7, + 77, + ), + }, + Token { + kind: Pipe, + span: ( + 7, + 83, + ), + }, + Token { + kind: Identifier( + "int", + ), + span: ( + 7, + 84, + ), + }, + Token { + kind: Pipe, + span: ( + 7, + 87, + ), + }, + Token { + kind: Identifier( + "float", + ), + span: ( + 7, + 88, + ), + }, + Token { + kind: LeftBrace, + span: ( + 7, + 94, + ), + }, + Token { + kind: Return, + span: ( + 8, + 5, + ), + }, + Token { + kind: Null, + span: ( + 8, + 12, + ), + }, + Token { + kind: SemiColon, + span: ( + 8, + 16, + ), + }, + Token { + kind: RightBrace, + span: ( + 9, + 1, + ), + }, +] diff --git a/tests/0002/tokens.txt b/tests/0002/tokens.txt new file mode 100644 index 00000000..1386d00c --- /dev/null +++ b/tests/0002/tokens.txt @@ -0,0 +1,126 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 13, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 14, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 15, + ), + }, + Token { + kind: Identifier( + "never", + ), + span: ( + 3, + 17, + ), + }, + Token { + kind: Pipe, + span: ( + 3, + 22, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 3, + 23, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 30, + ), + }, + Token { + kind: Identifier( + "exit", + ), + span: ( + 4, + 5, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 9, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 4, + 10, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 11, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 12, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, +] diff --git a/tests/0003/tokens.txt b/tests/0003/tokens.txt new file mode 100644 index 00000000..623865d5 --- /dev/null +++ b/tests/0003/tokens.txt @@ -0,0 +1,157 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 13, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 14, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 15, + ), + }, + Token { + kind: Identifier( + "never", + ), + span: ( + 3, + 17, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 23, + ), + }, + Token { + kind: Try, + span: ( + 4, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 9, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 5, + 9, + ), + }, + Token { + kind: LeftParen, + span: ( + 5, + 12, + ), + }, + Token { + kind: RightParen, + span: ( + 5, + 13, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 14, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 5, + ), + }, + Token { + kind: Catch, + span: ( + 6, + 7, + ), + }, + Token { + kind: LeftParen, + span: ( + 6, + 13, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 14, + 
), + }, + Token { + kind: LeftBrace, + span: ( + 6, + 16, + ), + }, + Token { + kind: RightBrace, + span: ( + 8, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 9, + 1, + ), + }, +] diff --git a/tests/0004/tokens.txt b/tests/0004/tokens.txt new file mode 100644 index 00000000..544f8ad6 --- /dev/null +++ b/tests/0004/tokens.txt @@ -0,0 +1,166 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 13, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 14, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 15, + ), + }, + Token { + kind: Identifier( + "never", + ), + span: ( + 3, + 17, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 23, + ), + }, + Token { + kind: Try, + span: ( + 4, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 9, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 5, + 9, + ), + }, + Token { + kind: LeftParen, + span: ( + 5, + 12, + ), + }, + Token { + kind: RightParen, + span: ( + 5, + 13, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 14, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 5, + ), + }, + Token { + kind: Catch, + span: ( + 6, + 7, + ), + }, + Token { + kind: LeftParen, + span: ( + 6, + 13, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 6, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 16, + ), + }, + Token { + kind: LeftBrace, + span: ( + 6, + 18, + ), + }, + Token { + kind: RightBrace, + span: ( + 8, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 9, + 1, + ), + }, +] diff --git a/tests/0005/tokens.txt b/tests/0005/tokens.txt new file mode 100644 index 00000000..f9820af6 --- /dev/null +++ b/tests/0005/tokens.txt @@ -0,0 +1,143 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 13, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 14, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 15, + ), + }, + Token { + kind: Identifier( + "never", + ), + span: ( + 3, + 17, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 23, + ), + }, + Token { + kind: Try, + span: ( + 4, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 9, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 5, + 9, + ), + }, + Token { + kind: LeftParen, + span: ( + 5, + 12, + ), + }, + Token { + kind: RightParen, + span: ( + 5, + 13, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 14, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 5, + ), + }, + Token { + kind: Catch, + span: ( + 6, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 6, + 13, + ), + }, + Token { + kind: RightBrace, + span: ( + 8, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 9, + 1, + ), + }, +] diff --git a/tests/0006/tokens.txt b/tests/0006/tokens.txt new file mode 100644 index 00000000..caa19f0e --- /dev/null +++ b/tests/0006/tokens.txt @@ -0,0 +1,103 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Include, + span: ( + 3, + 1, + ), + }, + Token { + kind: LiteralString( + "foo.php", + ), + span: ( + 3, + 9, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 18, + ), + }, + Token { + kind: IncludeOnce, + 
span: ( + 5, + 1, + ), + }, + Token { + kind: LiteralString( + "bar.php", + ), + span: ( + 5, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 23, + ), + }, + Token { + kind: Require, + span: ( + 7, + 1, + ), + }, + Token { + kind: LiteralString( + "baz.php", + ), + span: ( + 7, + 9, + ), + }, + Token { + kind: SemiColon, + span: ( + 7, + 18, + ), + }, + Token { + kind: RequireOnce, + span: ( + 9, + 1, + ), + }, + Token { + kind: LiteralString( + "qux.php", + ), + span: ( + 9, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 23, + ), + }, +] diff --git a/tests/0007/tokens.txt b/tests/0007/tokens.txt new file mode 100644 index 00000000..cfd0d2b3 --- /dev/null +++ b/tests/0007/tokens.txt @@ -0,0 +1,215 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 3, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 3, + 6, + ), + }, + Token { + kind: Identifier( + "give_me_foo", + ), + span: ( + 3, + 8, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 21, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 5, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 5, + 4, + ), + }, + Token { + kind: LeftBracket, + span: ( + 5, + 6, + ), + }, + Token { + kind: LiteralString( + "single", + ), + span: ( + 6, + 5, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 6, + 14, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 6, + 17, + ), + }, + Token { + kind: Instanceof, + span: ( + 6, + 22, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 6, + 33, + ), + }, + Token { + kind: Comma, + span: ( + 6, + 36, + ), + }, + Token { + kind: LiteralString( + "multiple", + ), + span: ( + 7, + 5, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 7, + 16, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 7, + 19, + ), + }, + Token { + kind: Instanceof, + span: ( + 7, + 24, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 7, + 35, + ), + }, + Token { + kind: BooleanAnd, + span: ( + 7, + 39, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 7, + 42, + ), + }, + Token { + kind: Instanceof, + span: ( + 7, + 47, + ), + }, + Token { + kind: Identifier( + "Baz", + ), + span: ( + 7, + 58, + ), + }, + Token { + kind: RightBracket, + span: ( + 8, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 8, + 2, + ), + }, +] diff --git a/tests/0008/tokens.txt b/tests/0008/tokens.txt new file mode 100644 index 00000000..4402f38a --- /dev/null +++ b/tests/0008/tokens.txt @@ -0,0 +1,395 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 3, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 3, + 4, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 3, + 6, + ), + }, + Token { + kind: Pow, + span: ( + 3, + 8, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 3, + 11, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 12, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 5, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 5, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 5, + 6, + ), + }, + Token { + kind: Question, + span: ( + 5, + 8, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 5, + 10, + ), + }, + Token { + kind: Colon, + span: ( + 5, + 12, + ), 
+ }, + Token { + kind: LiteralInteger( + 3, + ), + span: ( + 5, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 15, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 7, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 7, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 7, + 6, + ), + }, + Token { + kind: Question, + span: ( + 7, + 8, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 7, + 10, + ), + }, + Token { + kind: Question, + span: ( + 7, + 12, + ), + }, + Token { + kind: LiteralInteger( + 3, + ), + span: ( + 7, + 14, + ), + }, + Token { + kind: Colon, + span: ( + 7, + 16, + ), + }, + Token { + kind: LiteralInteger( + 4, + ), + span: ( + 7, + 18, + ), + }, + Token { + kind: Colon, + span: ( + 7, + 20, + ), + }, + Token { + kind: LiteralInteger( + 5, + ), + span: ( + 7, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 7, + 23, + ), + }, + Token { + kind: Variable( + "d", + ), + span: ( + 9, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 9, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 9, + 6, + ), + }, + Token { + kind: QuestionColon, + span: ( + 9, + 8, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 9, + 11, + ), + }, + Token { + kind: QuestionColon, + span: ( + 9, + 13, + ), + }, + Token { + kind: LiteralInteger( + 3, + ), + span: ( + 9, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 17, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 11, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 11, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 11, + 6, + ), + }, + Token { + kind: Coalesce, + span: ( + 11, + 8, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 11, + 11, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 12, + ), + }, + Token { + kind: Variable( + "f", + ), + span: ( + 13, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 13, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 13, + 6, + ), + }, + Token { + kind: Coalesce, + span: ( + 13, + 8, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 13, + 11, + ), + }, + Token { + kind: Coalesce, + span: ( + 13, + 13, + ), + }, + Token { + kind: LiteralInteger( + 3, + ), + span: ( + 13, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 13, + 17, + ), + }, +] diff --git a/tests/0009/tokens.txt b/tests/0009/tokens.txt new file mode 100644 index 00000000..24810b5d --- /dev/null +++ b/tests/0009/tokens.txt @@ -0,0 +1,167 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftBracket, + span: ( + 3, + 5, + ), + }, + Token { + kind: LiteralString( + "bar", + ), + span: ( + 3, + 6, + ), + }, + Token { + kind: RightBracket, + span: ( + 3, + 11, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 12, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 5, + 1, + ), + }, + Token { + kind: LeftBracket, + span: ( + 5, + 5, + ), + }, + Token { + kind: LiteralString( + "bar", + ), + span: ( + 5, + 6, + ), + }, + Token { + kind: RightBracket, + span: ( + 5, + 11, + ), + }, + Token { + kind: LeftBracket, + span: ( + 5, + 12, + ), + }, + Token { + kind: LiteralString( + "baz", + ), + span: ( + 5, + 13, + ), + }, + Token { + kind: RightBracket, + span: ( + 5, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 19, + ), + }, + Token { + kind: Variable( + "foo", + ), + 
span: ( + 7, + 1, + ), + }, + Token { + kind: LeftBracket, + span: ( + 7, + 5, + ), + }, + Token { + kind: LiteralString( + "bar", + ), + span: ( + 7, + 6, + ), + }, + Token { + kind: RightBracket, + span: ( + 7, + 11, + ), + }, + Token { + kind: Equals, + span: ( + 7, + 13, + ), + }, + Token { + kind: LiteralString( + "baz", + ), + span: ( + 7, + 15, + ), + }, + Token { + kind: SemiColon, + span: ( + 7, + 20, + ), + }, +] diff --git a/tests/0010/tokens.txt b/tests/0010/tokens.txt new file mode 100644 index 00000000..49662042 --- /dev/null +++ b/tests/0010/tokens.txt @@ -0,0 +1,1083 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 3, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 3, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 3, + 13, + ), + }, + Token { + kind: DoubleEquals, + span: ( + 3, + 15, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 3, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 19, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 20, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 4, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 4, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 4, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 4, + 13, + ), + }, + Token { + kind: TripleEquals, + span: ( + 4, + 15, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 4, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 21, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 5, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 5, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 5, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 5, + 13, + ), + }, + Token { + kind: BangEquals, + span: ( + 5, + 15, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 5, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 5, + 19, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 20, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 6, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 6, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 6, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 6, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 6, + 13, + ), + }, + Token { + kind: BangDoubleEquals, + span: ( + 6, + 15, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 6, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 21, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 7, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 7, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 7, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 7, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 7, + 13, + ), + }, + Token { + kind: Plus, + span: ( + 7, + 15, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 7, + 17, + ), + }, + Token { 
+ kind: RightParen, + span: ( + 7, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 7, + 19, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 8, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 8, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 8, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 8, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 8, + 13, + ), + }, + Token { + kind: Minus, + span: ( + 8, + 15, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 8, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 8, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 8, + 19, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 9, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 9, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 9, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 9, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 9, + 13, + ), + }, + Token { + kind: Slash, + span: ( + 9, + 15, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 9, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 9, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 19, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 10, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 10, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 10, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 10, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 10, + 13, + ), + }, + Token { + kind: Caret, + span: ( + 10, + 15, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 10, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 10, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 10, + 19, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 11, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 11, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 11, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 11, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 11, + 13, + ), + }, + Token { + kind: Asterisk, + span: ( + 11, + 15, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 11, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 11, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 19, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 12, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 12, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 12, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 12, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 12, + 13, + ), + }, + Token { + kind: RightShift, + span: ( + 12, + 15, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 12, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 12, + 19, + ), + }, + Token { + kind: SemiColon, + span: ( + 12, + 20, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 13, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 13, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 13, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 13, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 13, + 13, + ), + }, + Token { + kind: LeftShift, + span: ( + 13, + 15, + ), + }, + Token { + kind: 
LiteralInteger( + 2, + ), + span: ( + 13, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 13, + 19, + ), + }, + Token { + kind: SemiColon, + span: ( + 13, + 20, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 14, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 14, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 14, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 14, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 14, + 13, + ), + }, + Token { + kind: Pipe, + span: ( + 14, + 15, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 14, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 14, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 14, + 19, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 15, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 15, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 15, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 15, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 15, + 13, + ), + }, + Token { + kind: Ampersand, + span: ( + 15, + 15, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 15, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 15, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 15, + 19, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 16, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 16, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 16, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 16, + 11, + ), + }, + Token { + kind: BitwiseNot, + span: ( + 16, + 13, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 16, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 16, + 15, + ), + }, + Token { + kind: SemiColon, + span: ( + 16, + 16, + ), + }, + Token { + kind: Echo, + span: ( + 18, + 1, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 18, + 6, + ), + }, + Token { + kind: Plus, + span: ( + 18, + 8, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 18, + 10, + ), + }, + Token { + kind: Asterisk, + span: ( + 18, + 12, + ), + }, + Token { + kind: LiteralInteger( + 3, + ), + span: ( + 18, + 14, + ), + }, + Token { + kind: Slash, + span: ( + 18, + 16, + ), + }, + Token { + kind: LiteralInteger( + 4, + ), + span: ( + 18, + 18, + ), + }, + Token { + kind: Minus, + span: ( + 18, + 20, + ), + }, + Token { + kind: LiteralInteger( + 5, + ), + span: ( + 18, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 18, + 23, + ), + }, +] diff --git a/tests/0011/tokens.txt b/tests/0011/tokens.txt new file mode 100644 index 00000000..a93dbf48 --- /dev/null +++ b/tests/0011/tokens.txt @@ -0,0 +1,1152 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 3, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 3, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 3, + 13, + ), + }, + Token { + kind: DoubleEquals, + span: ( + 3, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 3, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 21, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 22, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 4, + 1, + ), + 
}, + Token { + kind: LeftParen, + span: ( + 4, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 4, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 4, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 4, + 13, + ), + }, + Token { + kind: TripleEquals, + span: ( + 4, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 4, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 23, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 5, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 5, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 5, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 5, + 13, + ), + }, + Token { + kind: BangEquals, + span: ( + 5, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 5, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 5, + 21, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 22, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 6, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 6, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 6, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 6, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 6, + 13, + ), + }, + Token { + kind: BangDoubleEquals, + span: ( + 6, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 6, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 23, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 7, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 7, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 7, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 7, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 7, + 13, + ), + }, + Token { + kind: Plus, + span: ( + 7, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 7, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 7, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 7, + 21, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 8, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 8, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 8, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 8, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 8, + 13, + ), + }, + Token { + kind: Minus, + span: ( + 8, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 8, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 8, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 8, + 21, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 9, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 9, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 9, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 9, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 9, + 13, + ), + }, + Token { + kind: Slash, + span: ( + 9, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 9, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 9, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 21, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 10, + 1, + ), + }, + Token { + 
kind: LeftParen, + span: ( + 10, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 10, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 10, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 10, + 13, + ), + }, + Token { + kind: Caret, + span: ( + 10, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 10, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 10, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 10, + 21, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 11, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 11, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 11, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 11, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 11, + 13, + ), + }, + Token { + kind: Asterisk, + span: ( + 11, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 11, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 11, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 21, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 12, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 12, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 12, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 12, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 12, + 13, + ), + }, + Token { + kind: RightShift, + span: ( + 12, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 12, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 12, + 21, + ), + }, + Token { + kind: SemiColon, + span: ( + 12, + 22, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 13, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 13, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 13, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 13, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 13, + 13, + ), + }, + Token { + kind: LeftShift, + span: ( + 13, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 13, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 13, + 21, + ), + }, + Token { + kind: SemiColon, + span: ( + 13, + 22, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 14, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 14, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 14, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 14, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 14, + 13, + ), + }, + Token { + kind: Pipe, + span: ( + 14, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 14, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 14, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 14, + 21, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 15, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 15, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 15, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 15, + 11, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 15, + 13, + ), + }, + Token { + kind: Ampersand, + span: ( + 15, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 15, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 15, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 15, + 21, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 
16, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 16, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 16, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 16, + 11, + ), + }, + Token { + kind: BitwiseNot, + span: ( + 16, + 13, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 16, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 16, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 16, + 17, + ), + }, + Token { + kind: Echo, + span: ( + 18, + 1, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 18, + 6, + ), + }, + Token { + kind: Plus, + span: ( + 18, + 9, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 18, + 11, + ), + }, + Token { + kind: Asterisk, + span: ( + 18, + 14, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 18, + 16, + ), + }, + Token { + kind: Slash, + span: ( + 18, + 19, + ), + }, + Token { + kind: Variable( + "d", + ), + span: ( + 18, + 21, + ), + }, + Token { + kind: Minus, + span: ( + 18, + 24, + ), + }, + Token { + kind: Dollar, + span: ( + 18, + 26, + ), + }, + Token { + kind: LeftBrace, + span: ( + 18, + 27, + ), + }, + Token { + kind: LiteralString( + "foo", + ), + span: ( + 18, + 28, + ), + }, + Token { + kind: Dot, + span: ( + 18, + 34, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 18, + 36, + ), + }, + Token { + kind: Question, + span: ( + 18, + 39, + ), + }, + Token { + kind: LiteralInteger( + 4, + ), + span: ( + 18, + 41, + ), + }, + Token { + kind: Colon, + span: ( + 18, + 43, + ), + }, + Token { + kind: LiteralInteger( + 3, + ), + span: ( + 18, + 45, + ), + }, + Token { + kind: RightBrace, + span: ( + 18, + 46, + ), + }, + Token { + kind: SemiColon, + span: ( + 18, + 47, + ), + }, +] diff --git a/tests/0012/tokens.txt b/tests/0012/tokens.txt new file mode 100644 index 00000000..cb595aed --- /dev/null +++ b/tests/0012/tokens.txt @@ -0,0 +1,1300 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 3, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 3, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 3, + 14, + ), + }, + Token { + kind: DoubleEquals, + span: ( + 3, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 3, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 22, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 23, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 24, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 4, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 4, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 4, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 4, + 14, + ), + }, + Token { + kind: TripleEquals, + span: ( + 4, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 23, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 24, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 25, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 5, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 5, + 7, + 
), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 5, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 5, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 5, + 14, + ), + }, + Token { + kind: BangEquals, + span: ( + 5, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 5, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 5, + 22, + ), + }, + Token { + kind: RightParen, + span: ( + 5, + 23, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 24, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 6, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 6, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 6, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 6, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 6, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 6, + 14, + ), + }, + Token { + kind: BangDoubleEquals, + span: ( + 6, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 6, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 23, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 24, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 25, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 7, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 7, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 7, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 7, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 7, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 7, + 14, + ), + }, + Token { + kind: Plus, + span: ( + 7, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 7, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 7, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 7, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 7, + 23, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 8, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 8, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 8, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 8, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 8, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 8, + 14, + ), + }, + Token { + kind: Minus, + span: ( + 8, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 8, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 8, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 8, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 8, + 23, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 9, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 9, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 9, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 9, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 9, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 9, + 14, + ), + }, + Token { + kind: Slash, + span: ( + 9, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 9, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 9, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 9, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 23, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 10, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 10, + 7, + ), 
+ }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 10, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 10, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 10, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 10, + 14, + ), + }, + Token { + kind: Caret, + span: ( + 10, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 10, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 10, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 10, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 10, + 23, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 11, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 11, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 11, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 11, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 11, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 11, + 14, + ), + }, + Token { + kind: Asterisk, + span: ( + 11, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 11, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 11, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 11, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 23, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 12, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 12, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 12, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 12, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 12, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 12, + 14, + ), + }, + Token { + kind: RightShift, + span: ( + 12, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 12, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 12, + 22, + ), + }, + Token { + kind: RightParen, + span: ( + 12, + 23, + ), + }, + Token { + kind: SemiColon, + span: ( + 12, + 24, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 13, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 13, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 13, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 13, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 13, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 13, + 14, + ), + }, + Token { + kind: LeftShift, + span: ( + 13, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 13, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 13, + 22, + ), + }, + Token { + kind: RightParen, + span: ( + 13, + 23, + ), + }, + Token { + kind: SemiColon, + span: ( + 13, + 24, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 14, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 14, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 14, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 14, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 14, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 14, + 14, + ), + }, + Token { + kind: Pipe, + span: ( + 14, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 14, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 14, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 14, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 14, + 23, + ), + }, + Token { + kind: Identifier( + "define", + ), + span: ( + 15, + 1, + ), + }, + Token 
{ + kind: LeftParen, + span: ( + 15, + 7, + ), + }, + Token { + kind: LiteralString( + "a", + ), + span: ( + 15, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 15, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 15, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 15, + 14, + ), + }, + Token { + kind: Ampersand, + span: ( + 15, + 17, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 15, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 15, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 15, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 15, + 23, + ), + }, + Token { + kind: Echo, + span: ( + 17, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 17, + 6, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 17, + 7, + ), + }, + Token { + kind: Plus, + span: ( + 17, + 10, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 17, + 12, + ), + }, + Token { + kind: RightParen, + span: ( + 17, + 14, + ), + }, + Token { + kind: Asterisk, + span: ( + 17, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 17, + 18, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 17, + 19, + ), + }, + Token { + kind: Slash, + span: ( + 17, + 22, + ), + }, + Token { + kind: Variable( + "d", + ), + span: ( + 17, + 24, + ), + }, + Token { + kind: Minus, + span: ( + 17, + 27, + ), + }, + Token { + kind: Dollar, + span: ( + 17, + 29, + ), + }, + Token { + kind: LeftBrace, + span: ( + 17, + 30, + ), + }, + Token { + kind: LiteralString( + "foo", + ), + span: ( + 17, + 31, + ), + }, + Token { + kind: Dot, + span: ( + 17, + 37, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 17, + 39, + ), + }, + Token { + kind: Question, + span: ( + 17, + 42, + ), + }, + Token { + kind: LiteralInteger( + 4, + ), + span: ( + 17, + 44, + ), + }, + Token { + kind: Colon, + span: ( + 17, + 46, + ), + }, + Token { + kind: LiteralInteger( + 3, + ), + span: ( + 17, + 48, + ), + }, + Token { + kind: RightBrace, + span: ( + 17, + 49, + ), + }, + Token { + kind: RightParen, + span: ( + 17, + 50, + ), + }, + Token { + kind: SemiColon, + span: ( + 17, + 51, + ), + }, +] diff --git a/tests/0013/tokens.txt b/tests/0013/tokens.txt new file mode 100644 index 00000000..c78b5eff --- /dev/null +++ b/tests/0013/tokens.txt @@ -0,0 +1,420 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Use, + span: ( + 3, + 1, + ), + }, + Token { + kind: QualifiedIdentifier( + "Foo\", + ), + span: ( + 3, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 9, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: Comma, + span: ( + 3, + 13, + ), + }, + Token { + kind: Identifier( + "Baz", + ), + span: ( + 3, + 15, + ), + }, + Token { + kind: As, + span: ( + 3, + 19, + ), + }, + Token { + kind: Identifier( + "Bob", + ), + span: ( + 3, + 22, + ), + }, + Token { + kind: Comma, + span: ( + 3, + 25, + ), + }, + Token { + kind: Identifier( + "Car", + ), + span: ( + 3, + 27, + ), + }, + Token { + kind: RightBrace, + span: ( + 3, + 30, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 31, + ), + }, + Token { + kind: Use, + span: ( + 4, + 1, + ), + }, + Token { + kind: QualifiedIdentifier( + "Bar\", + ), + span: ( + 4, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 9, + ), + }, + Token { + kind: Identifier( + "Bar0", + ), + span: ( + 4, + 10, + ), + }, + Token { + kind: Comma, + span: ( + 4, + 14, + ), + }, + Token { + kind: Identifier( + 
"Baz0", + ), + span: ( + 4, + 16, + ), + }, + Token { + kind: Comma, + span: ( + 4, + 20, + ), + }, + Token { + kind: Identifier( + "Car0", + ), + span: ( + 4, + 22, + ), + }, + Token { + kind: RightBrace, + span: ( + 4, + 26, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 27, + ), + }, + Token { + kind: Use, + span: ( + 5, + 1, + ), + }, + Token { + kind: Identifier( + "Foo1", + ), + span: ( + 5, + 5, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 9, + ), + }, + Token { + kind: Identifier( + "Bar1", + ), + span: ( + 5, + 11, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 15, + ), + }, + Token { + kind: Identifier( + "Baz1", + ), + span: ( + 5, + 17, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 21, + ), + }, + Token { + kind: Use, + span: ( + 6, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 6, + 5, + ), + }, + Token { + kind: As, + span: ( + 6, + 9, + ), + }, + Token { + kind: Identifier( + "Qux", + ), + span: ( + 6, + 12, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 15, + ), + }, + Token { + kind: Use, + span: ( + 7, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 7, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 7, + 8, + ), + }, + Token { + kind: Use, + span: ( + 9, + 1, + ), + }, + Token { + kind: Const, + span: ( + 9, + 5, + ), + }, + Token { + kind: Identifier( + "FOO", + ), + span: ( + 9, + 11, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 14, + ), + }, + Token { + kind: Use, + span: ( + 10, + 1, + ), + }, + Token { + kind: Const, + span: ( + 10, + 5, + ), + }, + Token { + kind: QualifiedIdentifier( + "Foo\Bar\Baz\QUX", + ), + span: ( + 10, + 11, + ), + }, + Token { + kind: SemiColon, + span: ( + 10, + 26, + ), + }, + Token { + kind: Use, + span: ( + 12, + 1, + ), + }, + Token { + kind: Function, + span: ( + 12, + 5, + ), + }, + Token { + kind: Identifier( + "f", + ), + span: ( + 12, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 12, + 15, + ), + }, + Token { + kind: Use, + span: ( + 13, + 1, + ), + }, + Token { + kind: Const, + span: ( + 13, + 5, + ), + }, + Token { + kind: QualifiedIdentifier( + "Pop\Bar\f", + ), + span: ( + 13, + 11, + ), + }, + Token { + kind: SemiColon, + span: ( + 13, + 20, + ), + }, +] diff --git a/tests/0014/tokens.txt b/tests/0014/tokens.txt new file mode 100644 index 00000000..9cb194fd --- /dev/null +++ b/tests/0014/tokens.txt @@ -0,0 +1,434 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "Foo2", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 12, + ), + }, + Token { + kind: Use, + span: ( + 4, + 5, + ), + }, + Token { + kind: Identifier( + "B", + ), + span: ( + 4, + 9, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 11, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 5, + 9, + ), + }, + Token { + kind: As, + span: ( + 5, + 13, + ), + }, + Token { + kind: Protected, + span: ( + 5, + 16, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 5, + 26, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 29, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, + Token { + kind: Class, + span: ( + 9, + 1, + ), + }, + Token { + kind: Identifier( + "Bar2", + ), + span: ( + 9, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 9, + 12, + ), + }, + Token { + kind: Use, + span: ( + 
10, + 5, + ), + }, + Token { + kind: Identifier( + "B", + ), + span: ( + 10, + 9, + ), + }, + Token { + kind: Comma, + span: ( + 10, + 10, + ), + }, + Token { + kind: Identifier( + "C", + ), + span: ( + 11, + 9, + ), + }, + Token { + kind: LeftBrace, + span: ( + 11, + 11, + ), + }, + Token { + kind: Identifier( + "B", + ), + span: ( + 12, + 13, + ), + }, + Token { + kind: DoubleColon, + span: ( + 12, + 14, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 12, + 16, + ), + }, + Token { + kind: Insteadof, + span: ( + 12, + 20, + ), + }, + Token { + kind: Identifier( + "C", + ), + span: ( + 12, + 30, + ), + }, + Token { + kind: SemiColon, + span: ( + 12, + 31, + ), + }, + Token { + kind: RightBrace, + span: ( + 13, + 9, + ), + }, + Token { + kind: RightBrace, + span: ( + 14, + 1, + ), + }, + Token { + kind: Class, + span: ( + 16, + 1, + ), + }, + Token { + kind: Identifier( + "Bar3", + ), + span: ( + 16, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 16, + 12, + ), + }, + Token { + kind: Use, + span: ( + 17, + 5, + ), + }, + Token { + kind: Identifier( + "B", + ), + span: ( + 17, + 9, + ), + }, + Token { + kind: LeftBrace, + span: ( + 17, + 11, + ), + }, + Token { + kind: Identifier( + "B", + ), + span: ( + 17, + 13, + ), + }, + Token { + kind: DoubleColon, + span: ( + 17, + 14, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 17, + 16, + ), + }, + Token { + kind: As, + span: ( + 17, + 20, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 17, + 23, + ), + }, + Token { + kind: SemiColon, + span: ( + 17, + 26, + ), + }, + Token { + kind: RightBrace, + span: ( + 17, + 28, + ), + }, + Token { + kind: RightBrace, + span: ( + 18, + 1, + ), + }, + Token { + kind: Class, + span: ( + 20, + 1, + ), + }, + Token { + kind: Identifier( + "Bar4", + ), + span: ( + 20, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 20, + 12, + ), + }, + Token { + kind: Use, + span: ( + 21, + 5, + ), + }, + Token { + kind: Identifier( + "B", + ), + span: ( + 21, + 9, + ), + }, + Token { + kind: LeftBrace, + span: ( + 21, + 11, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 21, + 13, + ), + }, + Token { + kind: As, + span: ( + 21, + 17, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 21, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 21, + 23, + ), + }, + Token { + kind: RightBrace, + span: ( + 21, + 25, + ), + }, + Token { + kind: RightBrace, + span: ( + 22, + 1, + ), + }, +] diff --git a/tests/0015/tokens.txt b/tests/0015/tokens.txt new file mode 100644 index 00000000..da4378b6 --- /dev/null +++ b/tests/0015/tokens.txt @@ -0,0 +1,230 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Declare, + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 8, + ), + }, + Token { + kind: Identifier( + "a", + ), + span: ( + 3, + 9, + ), + }, + Token { + kind: Equals, + span: ( + 3, + 10, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 3, + 11, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 12, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 4, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 7, + ), + }, + Token { + kind: EndDeclare, + span: ( + 5, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 11, + ), + }, + Token { + kind: Declare, + span: ( + 7, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 7, + 8, + ), + }, + Token { + kind: 
Identifier( + "b", + ), + span: ( + 7, + 9, + ), + }, + Token { + kind: Equals, + span: ( + 7, + 10, + ), + }, + Token { + kind: LiteralString( + "9", + ), + span: ( + 7, + 11, + ), + }, + Token { + kind: RightParen, + span: ( + 7, + 14, + ), + }, + Token { + kind: Colon, + span: ( + 7, + 15, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 8, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 8, + 7, + ), + }, + Token { + kind: EndDeclare, + span: ( + 9, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 11, + ), + }, + Token { + kind: Declare, + span: ( + 11, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 11, + 8, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 11, + 9, + ), + }, + Token { + kind: Equals, + span: ( + 11, + 12, + ), + }, + Token { + kind: LiteralFloat( + 1.42, + ), + span: ( + 11, + 13, + ), + }, + Token { + kind: RightParen, + span: ( + 11, + 17, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 18, + ), + }, +] diff --git a/tests/0016/tokens.txt b/tests/0016/tokens.txt new file mode 100644 index 00000000..7e058e9a --- /dev/null +++ b/tests/0016/tokens.txt @@ -0,0 +1,78 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Declare, + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 8, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: Equals, + span: ( + 3, + 14, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 3, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 23, + ), + }, +] diff --git a/tests/0017/tokens.txt b/tests/0017/tokens.txt new file mode 100644 index 00000000..dff611e7 --- /dev/null +++ b/tests/0017/tokens.txt @@ -0,0 +1,220 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 3, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 3, + 4, + ), + }, + Token { + kind: New, + span: ( + 3, + 6, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 13, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 15, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 4, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 4, + 4, + ), + }, + Token { + kind: Plus, + span: ( + 4, + 6, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 4, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 8, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 5, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 5, + 4, + ), + }, + Token { + kind: BitwiseNot, + span: ( + 5, + 6, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 5, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 8, + ), + }, + Token { + kind: Variable( + "d", + ), + span: ( + 6, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 6, + 4, + ), + }, + Token { + kind: Decrement, + span: ( + 6, + 6, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 6, + 8, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 10, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 7, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 7, + 4, + ), + 
}, + Token { + kind: Increment, + span: ( + 7, + 6, + ), + }, + Token { + kind: Variable( + "d", + ), + span: ( + 7, + 8, + ), + }, + Token { + kind: SemiColon, + span: ( + 7, + 10, + ), + }, +] diff --git a/tests/0018/tokens.txt b/tests/0018/tokens.txt new file mode 100644 index 00000000..7445a347 --- /dev/null +++ b/tests/0018/tokens.txt @@ -0,0 +1,235 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "a", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 11, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 12, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 13, + ), + }, + Token { + kind: Null, + span: ( + 3, + 15, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 20, + ), + }, + Token { + kind: Echo, + span: ( + 4, + 5, + ), + }, + Token { + kind: LiteralString( + "looping..\n", + ), + span: ( + 4, + 10, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 23, + ), + }, + Token { + kind: Return, + span: ( + 6, + 5, + ), + }, + Token { + kind: Null, + span: ( + 6, + 12, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 16, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 9, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 9, + 6, + ), + }, + Token { + kind: Identifier( + "a", + ), + span: ( + 9, + 8, + ), + }, + Token { + kind: LeftParen, + span: ( + 9, + 9, + ), + }, + Token { + kind: Ellipsis, + span: ( + 9, + 10, + ), + }, + Token { + kind: RightParen, + span: ( + 9, + 13, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 14, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 11, + 1, + ), + }, + Token { + kind: Colon, + span: ( + 11, + 4, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 12, + 5, + ), + }, + Token { + kind: LeftParen, + span: ( + 12, + 9, + ), + }, + Token { + kind: RightParen, + span: ( + 12, + 10, + ), + }, + Token { + kind: SemiColon, + span: ( + 12, + 11, + ), + }, + Token { + kind: Goto, + span: ( + 13, + 5, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 13, + 10, + ), + }, + Token { + kind: SemiColon, + span: ( + 13, + 13, + ), + }, +] diff --git a/tests/0019/tokens.txt b/tests/0019/tokens.txt new file mode 100644 index 00000000..674e68ef --- /dev/null +++ b/tests/0019/tokens.txt @@ -0,0 +1,1601 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Namespace, + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Function, + span: ( + 4, + 5, + ), + }, + Token { + kind: Identifier( + "globalFunc", + ), + span: ( + 4, + 14, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 24, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 25, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 27, + ), + }, + Token { + kind: RightBrace, + span: ( + 4, + 28, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, + Token { + kind: Namespace, + span: ( + 7, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 7, + 11, + ), + }, + Token { + kind: LeftBrace, + span: ( + 7, + 15, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 8, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 8, + 8, + ), + }, + Token { + kind: Function, + span: ( + 8, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 8, + 19, + ), + }, + Token 
{ + kind: RightParen, + span: ( + 8, + 20, + ), + }, + Token { + kind: LeftBrace, + span: ( + 8, + 22, + ), + }, + Token { + kind: RightBrace, + span: ( + 8, + 23, + ), + }, + Token { + kind: SemiColon, + span: ( + 8, + 24, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 9, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 9, + 8, + ), + }, + Token { + kind: Function, + span: ( + 9, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 9, + 19, + ), + }, + Token { + kind: Ampersand, + span: ( + 9, + 20, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 9, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 9, + 23, + ), + }, + Token { + kind: LeftBrace, + span: ( + 9, + 25, + ), + }, + Token { + kind: RightBrace, + span: ( + 9, + 26, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 27, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 10, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 10, + 8, + ), + }, + Token { + kind: Function, + span: ( + 10, + 10, + ), + }, + Token { + kind: Ampersand, + span: ( + 10, + 19, + ), + }, + Token { + kind: LeftParen, + span: ( + 10, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 10, + 21, + ), + }, + Token { + kind: LeftBrace, + span: ( + 10, + 23, + ), + }, + Token { + kind: RightBrace, + span: ( + 10, + 24, + ), + }, + Token { + kind: SemiColon, + span: ( + 10, + 25, + ), + }, + Token { + kind: Variable( + "d", + ), + span: ( + 11, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 11, + 8, + ), + }, + Token { + kind: Function, + span: ( + 11, + 10, + ), + }, + Token { + kind: Ampersand, + span: ( + 11, + 19, + ), + }, + Token { + kind: LeftParen, + span: ( + 11, + 20, + ), + }, + Token { + kind: Ampersand, + span: ( + 11, + 21, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 11, + 22, + ), + }, + Token { + kind: RightParen, + span: ( + 11, + 24, + ), + }, + Token { + kind: LeftBrace, + span: ( + 11, + 26, + ), + }, + Token { + kind: Return, + span: ( + 11, + 28, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 11, + 35, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 37, + ), + }, + Token { + kind: RightBrace, + span: ( + 11, + 39, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 40, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 12, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 12, + 8, + ), + }, + Token { + kind: Fn, + span: ( + 12, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 12, + 13, + ), + }, + Token { + kind: RightParen, + span: ( + 12, + 14, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 12, + 16, + ), + }, + Token { + kind: Null, + span: ( + 12, + 19, + ), + }, + Token { + kind: SemiColon, + span: ( + 12, + 23, + ), + }, + Token { + kind: Variable( + "f", + ), + span: ( + 13, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 13, + 8, + ), + }, + Token { + kind: Fn, + span: ( + 13, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 13, + 13, + ), + }, + Token { + kind: Ampersand, + span: ( + 13, + 14, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 13, + 15, + ), + }, + Token { + kind: RightParen, + span: ( + 13, + 17, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 13, + 19, + ), + }, + Token { + kind: Null, + span: ( + 13, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 13, + 26, + ), + }, + Token { + kind: Variable( + "g", + ), + span: ( + 14, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 14, + 8, + ), + }, + Token { + kind: Fn, + span: ( + 
14, + 10, + ), + }, + Token { + kind: Ampersand, + span: ( + 14, + 13, + ), + }, + Token { + kind: LeftParen, + span: ( + 14, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 14, + 15, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 14, + 17, + ), + }, + Token { + kind: Null, + span: ( + 14, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 14, + 24, + ), + }, + Token { + kind: Variable( + "h", + ), + span: ( + 15, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 15, + 8, + ), + }, + Token { + kind: Fn, + span: ( + 15, + 10, + ), + }, + Token { + kind: Ampersand, + span: ( + 15, + 13, + ), + }, + Token { + kind: LeftParen, + span: ( + 15, + 14, + ), + }, + Token { + kind: Ampersand, + span: ( + 15, + 15, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 15, + 16, + ), + }, + Token { + kind: RightParen, + span: ( + 15, + 18, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 15, + 20, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 15, + 23, + ), + }, + Token { + kind: SemiColon, + span: ( + 15, + 25, + ), + }, + Token { + kind: RightBrace, + span: ( + 16, + 1, + ), + }, + Token { + kind: Namespace, + span: ( + 18, + 1, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 18, + 11, + ), + }, + Token { + kind: LeftBrace, + span: ( + 18, + 15, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 19, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 19, + 8, + ), + }, + Token { + kind: Static, + span: ( + 19, + 10, + ), + }, + Token { + kind: Function, + span: ( + 19, + 17, + ), + }, + Token { + kind: LeftParen, + span: ( + 19, + 26, + ), + }, + Token { + kind: RightParen, + span: ( + 19, + 27, + ), + }, + Token { + kind: LeftBrace, + span: ( + 19, + 29, + ), + }, + Token { + kind: RightBrace, + span: ( + 19, + 30, + ), + }, + Token { + kind: SemiColon, + span: ( + 19, + 31, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 20, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 20, + 8, + ), + }, + Token { + kind: Static, + span: ( + 20, + 10, + ), + }, + Token { + kind: Function, + span: ( + 20, + 17, + ), + }, + Token { + kind: LeftParen, + span: ( + 20, + 26, + ), + }, + Token { + kind: Ampersand, + span: ( + 20, + 27, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 20, + 28, + ), + }, + Token { + kind: RightParen, + span: ( + 20, + 30, + ), + }, + Token { + kind: LeftBrace, + span: ( + 20, + 32, + ), + }, + Token { + kind: RightBrace, + span: ( + 20, + 33, + ), + }, + Token { + kind: SemiColon, + span: ( + 20, + 34, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 21, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 21, + 8, + ), + }, + Token { + kind: Static, + span: ( + 21, + 10, + ), + }, + Token { + kind: Function, + span: ( + 21, + 17, + ), + }, + Token { + kind: Ampersand, + span: ( + 21, + 26, + ), + }, + Token { + kind: LeftParen, + span: ( + 21, + 27, + ), + }, + Token { + kind: RightParen, + span: ( + 21, + 28, + ), + }, + Token { + kind: LeftBrace, + span: ( + 21, + 30, + ), + }, + Token { + kind: RightBrace, + span: ( + 21, + 31, + ), + }, + Token { + kind: SemiColon, + span: ( + 21, + 32, + ), + }, + Token { + kind: Variable( + "d", + ), + span: ( + 22, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 22, + 8, + ), + }, + Token { + kind: Static, + span: ( + 22, + 10, + ), + }, + Token { + kind: Function, + span: ( + 22, + 17, + ), + }, + Token { + kind: Ampersand, + span: ( + 22, + 26, + ), + }, + Token { + kind: LeftParen, + span: ( + 22, + 27, + ), + 
}, + Token { + kind: Ampersand, + span: ( + 22, + 28, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 22, + 29, + ), + }, + Token { + kind: RightParen, + span: ( + 22, + 31, + ), + }, + Token { + kind: LeftBrace, + span: ( + 22, + 33, + ), + }, + Token { + kind: Return, + span: ( + 22, + 35, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 22, + 42, + ), + }, + Token { + kind: SemiColon, + span: ( + 22, + 44, + ), + }, + Token { + kind: RightBrace, + span: ( + 22, + 46, + ), + }, + Token { + kind: SemiColon, + span: ( + 22, + 47, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 23, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 23, + 8, + ), + }, + Token { + kind: Static, + span: ( + 23, + 10, + ), + }, + Token { + kind: Fn, + span: ( + 23, + 17, + ), + }, + Token { + kind: LeftParen, + span: ( + 23, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 23, + 21, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 23, + 23, + ), + }, + Token { + kind: Null, + span: ( + 23, + 26, + ), + }, + Token { + kind: SemiColon, + span: ( + 23, + 30, + ), + }, + Token { + kind: Variable( + "f", + ), + span: ( + 24, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 24, + 8, + ), + }, + Token { + kind: Static, + span: ( + 24, + 10, + ), + }, + Token { + kind: Fn, + span: ( + 24, + 17, + ), + }, + Token { + kind: LeftParen, + span: ( + 24, + 20, + ), + }, + Token { + kind: Ampersand, + span: ( + 24, + 21, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 24, + 22, + ), + }, + Token { + kind: RightParen, + span: ( + 24, + 24, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 24, + 26, + ), + }, + Token { + kind: Null, + span: ( + 24, + 29, + ), + }, + Token { + kind: SemiColon, + span: ( + 24, + 33, + ), + }, + Token { + kind: Variable( + "g", + ), + span: ( + 25, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 25, + 8, + ), + }, + Token { + kind: Static, + span: ( + 25, + 10, + ), + }, + Token { + kind: Fn, + span: ( + 25, + 17, + ), + }, + Token { + kind: Ampersand, + span: ( + 25, + 20, + ), + }, + Token { + kind: LeftParen, + span: ( + 25, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 25, + 22, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 25, + 24, + ), + }, + Token { + kind: Null, + span: ( + 25, + 27, + ), + }, + Token { + kind: SemiColon, + span: ( + 25, + 31, + ), + }, + Token { + kind: Variable( + "h", + ), + span: ( + 26, + 5, + ), + }, + Token { + kind: Equals, + span: ( + 26, + 8, + ), + }, + Token { + kind: Static, + span: ( + 26, + 10, + ), + }, + Token { + kind: Fn, + span: ( + 26, + 17, + ), + }, + Token { + kind: Ampersand, + span: ( + 26, + 20, + ), + }, + Token { + kind: LeftParen, + span: ( + 26, + 21, + ), + }, + Token { + kind: Ampersand, + span: ( + 26, + 22, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 26, + 23, + ), + }, + Token { + kind: RightParen, + span: ( + 26, + 25, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 26, + 27, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 26, + 30, + ), + }, + Token { + kind: SemiColon, + span: ( + 26, + 32, + ), + }, + Token { + kind: RightBrace, + span: ( + 27, + 1, + ), + }, + Token { + kind: Namespace, + span: ( + 29, + 1, + ), + }, + Token { + kind: Identifier( + "baz", + ), + span: ( + 29, + 11, + ), + }, + Token { + kind: LeftBrace, + span: ( + 29, + 15, + ), + }, + Token { + kind: Function, + span: ( + 30, + 5, + ), + }, + Token { + kind: Identifier( + "a", + ), + span: ( + 30, + 14, + ), + }, + Token { + 
kind: LeftParen, + span: ( + 30, + 15, + ), + }, + Token { + kind: Ampersand, + span: ( + 30, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 30, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 30, + 19, + ), + }, + Token { + kind: LeftBrace, + span: ( + 30, + 21, + ), + }, + Token { + kind: RightBrace, + span: ( + 30, + 22, + ), + }, + Token { + kind: Function, + span: ( + 31, + 5, + ), + }, + Token { + kind: Ampersand, + span: ( + 31, + 14, + ), + }, + Token { + kind: Identifier( + "b", + ), + span: ( + 31, + 15, + ), + }, + Token { + kind: LeftParen, + span: ( + 31, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 31, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 31, + 19, + ), + }, + Token { + kind: LeftBrace, + span: ( + 31, + 21, + ), + }, + Token { + kind: Return, + span: ( + 31, + 23, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 31, + 30, + ), + }, + Token { + kind: SemiColon, + span: ( + 31, + 32, + ), + }, + Token { + kind: RightBrace, + span: ( + 31, + 34, + ), + }, + Token { + kind: Function, + span: ( + 32, + 5, + ), + }, + Token { + kind: Ampersand, + span: ( + 32, + 14, + ), + }, + Token { + kind: Identifier( + "c", + ), + span: ( + 32, + 15, + ), + }, + Token { + kind: LeftParen, + span: ( + 32, + 16, + ), + }, + Token { + kind: RightParen, + span: ( + 32, + 17, + ), + }, + Token { + kind: LeftBrace, + span: ( + 32, + 19, + ), + }, + Token { + kind: Return, + span: ( + 32, + 21, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 32, + 28, + ), + }, + Token { + kind: SemiColon, + span: ( + 32, + 30, + ), + }, + Token { + kind: RightBrace, + span: ( + 32, + 32, + ), + }, + Token { + kind: RightBrace, + span: ( + 33, + 1, + ), + }, +] diff --git a/tests/0020/tokens.txt b/tests/0020/tokens.txt new file mode 100644 index 00000000..cd5bd21f --- /dev/null +++ b/tests/0020/tokens.txt @@ -0,0 +1,811 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 3, + 1, + ), + }, + Token { + kind: Percent, + span: ( + 3, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 3, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 6, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 4, + 1, + ), + }, + Token { + kind: LeftShift, + span: ( + 4, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 4, + 6, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 7, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 5, + 1, + ), + }, + Token { + kind: RightShift, + span: ( + 5, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 5, + 6, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 7, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 6, + 1, + ), + }, + Token { + kind: Ampersand, + span: ( + 6, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 6, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 6, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 7, + 1, + ), + }, + Token { + kind: Pipe, + span: ( + 7, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 7, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 7, + 6, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 8, + 1, + ), + }, + Token { + kind: Caret, + span: ( + 8, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 8, + 5, + ), + }, + Token { + kind: 
SemiColon, + span: ( + 8, + 6, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 9, + 1, + ), + }, + Token { + kind: AngledLeftRight, + span: ( + 9, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 9, + 6, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 7, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 10, + 1, + ), + }, + Token { + kind: Spaceship, + span: ( + 10, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 10, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 10, + 8, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 11, + 1, + ), + }, + Token { + kind: LogicalAnd, + span: ( + 11, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 11, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 8, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 12, + 1, + ), + }, + Token { + kind: LogicalOr, + span: ( + 12, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 12, + 6, + ), + }, + Token { + kind: SemiColon, + span: ( + 12, + 7, + ), + }, + Token { + kind: LiteralInteger( + 6, + ), + span: ( + 13, + 1, + ), + }, + Token { + kind: LogicalXor, + span: ( + 13, + 3, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 13, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 13, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 14, + 1, + ), + }, + Token { + kind: Equals, + span: ( + 14, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 14, + 6, + ), + }, + Token { + kind: SemiColon, + span: ( + 14, + 7, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 15, + 1, + ), + }, + Token { + kind: PlusEquals, + span: ( + 15, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 15, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 15, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 16, + 1, + ), + }, + Token { + kind: MinusEquals, + span: ( + 16, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 16, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 16, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 17, + 1, + ), + }, + Token { + kind: AsteriskEqual, + span: ( + 17, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 17, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 17, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 18, + 1, + ), + }, + Token { + kind: PowEquals, + span: ( + 18, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 18, + 8, + ), + }, + Token { + kind: SemiColon, + span: ( + 18, + 9, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 19, + 1, + ), + }, + Token { + kind: SlashEquals, + span: ( + 19, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 19, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 19, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 20, + 1, + ), + }, + Token { + kind: DotEquals, + span: ( + 20, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 20, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 20, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 21, + 1, + ), + }, + Token { + kind: PercentEquals, + span: ( + 21, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 21, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 21, + 8, + ), + }, + Token { + kind: 
Variable( + "a", + ), + span: ( + 22, + 1, + ), + }, + Token { + kind: AmpersandEquals, + span: ( + 22, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 22, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 22, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 23, + 1, + ), + }, + Token { + kind: PipeEquals, + span: ( + 23, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 23, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 23, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 24, + 1, + ), + }, + Token { + kind: CaretEquals, + span: ( + 24, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 24, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 24, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 25, + 1, + ), + }, + Token { + kind: LeftShiftEquals, + span: ( + 25, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 25, + 8, + ), + }, + Token { + kind: SemiColon, + span: ( + 25, + 9, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 26, + 1, + ), + }, + Token { + kind: RightShiftEquals, + span: ( + 26, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 26, + 8, + ), + }, + Token { + kind: SemiColon, + span: ( + 26, + 9, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 27, + 1, + ), + }, + Token { + kind: CoalesceEqual, + span: ( + 27, + 4, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 27, + 8, + ), + }, + Token { + kind: SemiColon, + span: ( + 27, + 9, + ), + }, +] diff --git a/tests/0021/tokens.txt b/tests/0021/tokens.txt new file mode 100644 index 00000000..5399aed8 --- /dev/null +++ b/tests/0021/tokens.txt @@ -0,0 +1,552 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: If, + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 4, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 3, + 5, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 7, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 4, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 7, + ), + }, + Token { + kind: EndIf, + span: ( + 5, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 6, + ), + }, + Token { + kind: If, + span: ( + 7, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 7, + 4, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 7, + 5, + ), + }, + Token { + kind: RightParen, + span: ( + 7, + 7, + ), + }, + Token { + kind: Colon, + span: ( + 7, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 8, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 8, + 7, + ), + }, + Token { + kind: Else, + span: ( + 9, + 1, + ), + }, + Token { + kind: Colon, + span: ( + 9, + 5, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 10, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 10, + 7, + ), + }, + Token { + kind: EndIf, + span: ( + 11, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 6, + ), + }, + Token { + kind: If, + span: ( + 13, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 13, + 4, + ), + }, + Token { + kind: True, + span: ( + 13, + 5, + ), + }, + Token { + kind: RightParen, + span: ( + 13, + 9, + ), + }, + Token { + kind: Colon, + span: ( + 13, + 10, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 14, + 5, + ), + }, + Token { + kind: SemiColon, + 
span: ( + 14, + 7, + ), + }, + Token { + kind: ElseIf, + span: ( + 15, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 15, + 8, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 15, + 9, + ), + }, + Token { + kind: Arrow, + span: ( + 15, + 13, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 15, + 15, + ), + }, + Token { + kind: LeftParen, + span: ( + 15, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 15, + 19, + ), + }, + Token { + kind: BooleanAnd, + span: ( + 15, + 21, + ), + }, + Token { + kind: Variable( + "baz", + ), + span: ( + 15, + 24, + ), + }, + Token { + kind: Arrow, + span: ( + 15, + 28, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 15, + 30, + ), + }, + Token { + kind: NullsafeArrow, + span: ( + 15, + 33, + ), + }, + Token { + kind: Identifier( + "qux", + ), + span: ( + 15, + 36, + ), + }, + Token { + kind: LeftParen, + span: ( + 15, + 39, + ), + }, + Token { + kind: RightParen, + span: ( + 15, + 40, + ), + }, + Token { + kind: RightParen, + span: ( + 15, + 41, + ), + }, + Token { + kind: Colon, + span: ( + 15, + 42, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 16, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 16, + 7, + ), + }, + Token { + kind: EndIf, + span: ( + 17, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 17, + 6, + ), + }, + Token { + kind: If, + span: ( + 19, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 19, + 4, + ), + }, + Token { + kind: True, + span: ( + 19, + 5, + ), + }, + Token { + kind: RightParen, + span: ( + 19, + 9, + ), + }, + Token { + kind: Colon, + span: ( + 19, + 10, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 20, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 20, + 7, + ), + }, + Token { + kind: ElseIf, + span: ( + 21, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 21, + 8, + ), + }, + Token { + kind: True, + span: ( + 21, + 9, + ), + }, + Token { + kind: RightParen, + span: ( + 21, + 13, + ), + }, + Token { + kind: Colon, + span: ( + 21, + 14, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 22, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 22, + 7, + ), + }, + Token { + kind: ElseIf, + span: ( + 23, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 23, + 8, + ), + }, + Token { + kind: True, + span: ( + 23, + 9, + ), + }, + Token { + kind: RightParen, + span: ( + 23, + 13, + ), + }, + Token { + kind: Colon, + span: ( + 23, + 14, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 24, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 24, + 7, + ), + }, + Token { + kind: EndIf, + span: ( + 25, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 25, + 6, + ), + }, +] diff --git a/tests/0022/tokens.txt b/tests/0022/tokens.txt new file mode 100644 index 00000000..34bd8b78 --- /dev/null +++ b/tests/0022/tokens.txt @@ -0,0 +1,265 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Foreach, + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 9, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: As, + span: ( + 3, + 13, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 3, + 16, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 18, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 19, + ), + }, + Token { + kind: Echo, + span: ( + 4, + 5, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 4, + 10, + ), + }, + Token { 
+ kind: SemiColon, + span: ( + 4, + 12, + ), + }, + Token { + kind: EndForeach, + span: ( + 5, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 11, + ), + }, + Token { + kind: While, + span: ( + 7, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 7, + 7, + ), + }, + Token { + kind: True, + span: ( + 7, + 8, + ), + }, + Token { + kind: RightParen, + span: ( + 7, + 12, + ), + }, + Token { + kind: Colon, + span: ( + 7, + 13, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 8, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 8, + 7, + ), + }, + Token { + kind: EndWhile, + span: ( + 9, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 9, + 9, + ), + }, + Token { + kind: For, + span: ( + 11, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 11, + 5, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 11, + 6, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 8, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 11, + 10, + ), + }, + Token { + kind: SemiColon, + span: ( + 11, + 12, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 11, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 11, + 16, + ), + }, + Token { + kind: Colon, + span: ( + 11, + 17, + ), + }, + Token { + kind: Variable( + "d", + ), + span: ( + 12, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 12, + 7, + ), + }, + Token { + kind: EndFor, + span: ( + 13, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 13, + 7, + ), + }, +] diff --git a/tests/0023/tokens.txt b/tests/0023/tokens.txt new file mode 100644 index 00000000..ede73488 --- /dev/null +++ b/tests/0023/tokens.txt @@ -0,0 +1,505 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Foreach, + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 9, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: As, + span: ( + 3, + 13, + ), + }, + Token { + kind: Ampersand, + span: ( + 3, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 3, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 19, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 21, + ), + }, + Token { + kind: RightBrace, + span: ( + 3, + 22, + ), + }, + Token { + kind: Foreach, + span: ( + 5, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 5, + 9, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 5, + 10, + ), + }, + Token { + kind: As, + span: ( + 5, + 13, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 5, + 16, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 5, + 19, + ), + }, + Token { + kind: Ampersand, + span: ( + 5, + 22, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 5, + 23, + ), + }, + Token { + kind: RightParen, + span: ( + 5, + 25, + ), + }, + Token { + kind: LeftBrace, + span: ( + 5, + 27, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 28, + ), + }, + Token { + kind: Switch, + span: ( + 7, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 7, + 8, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 7, + 9, + ), + }, + Token { + kind: RightParen, + span: ( + 7, + 11, + ), + }, + Token { + kind: LeftBrace, + span: ( + 7, + 13, + ), + }, + Token { + kind: Case, + span: ( + 8, + 5, + ), + }, + Token { + kind: LiteralInteger( + 0, + ), + span: ( + 8, + 10, + ), + }, + Token { + kind: Colon, + span: ( + 8, + 11, + ), + }, + Token { + kind: Break, + span: ( + 9, + 9, + ), + }, + Token { 
+ kind: SemiColon, + span: ( + 9, + 14, + ), + }, + Token { + kind: Case, + span: ( + 10, + 5, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 10, + 10, + ), + }, + Token { + kind: SemiColon, + span: ( + 10, + 11, + ), + }, + Token { + kind: Default, + span: ( + 11, + 5, + ), + }, + Token { + kind: Colon, + span: ( + 11, + 12, + ), + }, + Token { + kind: RightBrace, + span: ( + 12, + 1, + ), + }, + Token { + kind: Foreach, + span: ( + 14, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 14, + 9, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 14, + 10, + ), + }, + Token { + kind: As, + span: ( + 14, + 15, + ), + }, + Token { + kind: LeftBracket, + span: ( + 14, + 18, + ), + }, + Token { + kind: Variable( + "baz", + ), + span: ( + 14, + 19, + ), + }, + Token { + kind: Comma, + span: ( + 14, + 23, + ), + }, + Token { + kind: Variable( + "car", + ), + span: ( + 14, + 25, + ), + }, + Token { + kind: RightBracket, + span: ( + 14, + 29, + ), + }, + Token { + kind: RightParen, + span: ( + 14, + 30, + ), + }, + Token { + kind: LeftBrace, + span: ( + 14, + 32, + ), + }, + Token { + kind: RightBrace, + span: ( + 14, + 33, + ), + }, + Token { + kind: Foreach, + span: ( + 16, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 16, + 9, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 16, + 10, + ), + }, + Token { + kind: As, + span: ( + 16, + 15, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 16, + 18, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 16, + 23, + ), + }, + Token { + kind: Variable( + "baz", + ), + span: ( + 16, + 26, + ), + }, + Token { + kind: RightParen, + span: ( + 16, + 30, + ), + }, + Token { + kind: LeftBrace, + span: ( + 16, + 32, + ), + }, + Token { + kind: RightBrace, + span: ( + 16, + 33, + ), + }, + Token { + kind: Foreach, + span: ( + 18, + 1, + ), + }, + Token { + kind: LeftParen, + span: ( + 18, + 9, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 18, + 10, + ), + }, + Token { + kind: As, + span: ( + 18, + 15, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 18, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 18, + 22, + ), + }, + Token { + kind: LeftBrace, + span: ( + 18, + 24, + ), + }, + Token { + kind: RightBrace, + span: ( + 18, + 25, + ), + }, +] diff --git a/tests/0024/tokens.txt b/tests/0024/tokens.txt new file mode 100644 index 00000000..1cdb16c0 --- /dev/null +++ b/tests/0024/tokens.txt @@ -0,0 +1,414 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Try, + span: ( + 3, + 1, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, + Token { + kind: Catch, + span: ( + 5, + 3, + ), + }, + Token { + kind: LeftParen, + span: ( + 5, + 9, + ), + }, + Token { + kind: Identifier( + "Exception", + ), + span: ( + 5, + 10, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 5, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 5, + 22, + ), + }, + Token { + kind: LeftBrace, + span: ( + 5, + 24, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, + Token { + kind: Catch, + span: ( + 7, + 3, + ), + }, + Token { + kind: LeftParen, + span: ( + 7, + 9, + ), + }, + Token { + kind: Identifier( + "CustomException", + ), + span: ( + 7, + 10, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 7, + 26, + ), + }, + Token { + kind: RightParen, + span: ( + 7, + 28, + ), + }, + Token { + kind: LeftBrace, + 
span: ( + 7, + 30, + ), + }, + Token { + kind: RightBrace, + span: ( + 9, + 1, + ), + }, + Token { + kind: Try, + span: ( + 11, + 1, + ), + }, + Token { + kind: LeftBrace, + span: ( + 11, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 13, + 1, + ), + }, + Token { + kind: Catch, + span: ( + 13, + 3, + ), + }, + Token { + kind: LeftParen, + span: ( + 13, + 9, + ), + }, + Token { + kind: Identifier( + "Exception", + ), + span: ( + 13, + 10, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 13, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 13, + 22, + ), + }, + Token { + kind: LeftBrace, + span: ( + 13, + 24, + ), + }, + Token { + kind: RightBrace, + span: ( + 15, + 1, + ), + }, + Token { + kind: Finally, + span: ( + 15, + 3, + ), + }, + Token { + kind: LeftBrace, + span: ( + 15, + 11, + ), + }, + Token { + kind: RightBrace, + span: ( + 17, + 1, + ), + }, + Token { + kind: Try, + span: ( + 19, + 1, + ), + }, + Token { + kind: LeftBrace, + span: ( + 19, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 21, + 1, + ), + }, + Token { + kind: Finally, + span: ( + 21, + 3, + ), + }, + Token { + kind: LeftBrace, + span: ( + 21, + 11, + ), + }, + Token { + kind: RightBrace, + span: ( + 21, + 12, + ), + }, + Token { + kind: Try, + span: ( + 23, + 1, + ), + }, + Token { + kind: LeftBrace, + span: ( + 23, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 25, + 1, + ), + }, + Token { + kind: Catch, + span: ( + 25, + 3, + ), + }, + Token { + kind: LeftParen, + span: ( + 25, + 9, + ), + }, + Token { + kind: Identifier( + "Exception", + ), + span: ( + 25, + 10, + ), + }, + Token { + kind: RightParen, + span: ( + 25, + 19, + ), + }, + Token { + kind: LeftBrace, + span: ( + 25, + 21, + ), + }, + Token { + kind: RightBrace, + span: ( + 27, + 1, + ), + }, + Token { + kind: Try, + span: ( + 29, + 1, + ), + }, + Token { + kind: LeftBrace, + span: ( + 29, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 31, + 1, + ), + }, + Token { + kind: Catch, + span: ( + 31, + 3, + ), + }, + Token { + kind: LeftParen, + span: ( + 31, + 9, + ), + }, + Token { + kind: Identifier( + "Exception", + ), + span: ( + 31, + 10, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 31, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 31, + 22, + ), + }, + Token { + kind: LeftBrace, + span: ( + 31, + 24, + ), + }, + Token { + kind: RightBrace, + span: ( + 33, + 1, + ), + }, +] diff --git a/tests/0025/tokens.txt b/tests/0025/tokens.txt new file mode 100644 index 00000000..1370bdb2 --- /dev/null +++ b/tests/0025/tokens.txt @@ -0,0 +1,25 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Break, + span: ( + 1, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 12, + ), + }, +] diff --git a/tests/0026/tokens.txt b/tests/0026/tokens.txt new file mode 100644 index 00000000..f7b2e071 --- /dev/null +++ b/tests/0026/tokens.txt @@ -0,0 +1,34 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Break, + span: ( + 1, + 7, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 14, + ), + }, +] diff --git a/tests/0027/tokens.txt b/tests/0027/tokens.txt new file mode 100644 index 00000000..c5c03edd --- /dev/null +++ b/tests/0027/tokens.txt @@ -0,0 +1,25 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Continue, + span: ( + 1, + 7, + ), + }, + Token { + kind: SemiColon, + 
span: ( + 1, + 15, + ), + }, +] diff --git a/tests/0028/tokens.txt b/tests/0028/tokens.txt new file mode 100644 index 00000000..dfd6b4b2 --- /dev/null +++ b/tests/0028/tokens.txt @@ -0,0 +1,34 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Continue, + span: ( + 1, + 7, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 17, + ), + }, +] diff --git a/tests/0029/tokens.txt b/tests/0029/tokens.txt new file mode 100644 index 00000000..17cb705a --- /dev/null +++ b/tests/0029/tokens.txt @@ -0,0 +1,91 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 11, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 16, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 18, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 22, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 1, + 24, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 27, + ), + }, + Token { + kind: Identifier( + "baz", + ), + span: ( + 1, + 29, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 32, + ), + }, +] diff --git a/tests/0030/tokens.txt b/tests/0030/tokens.txt new file mode 100644 index 00000000..09f6ae56 --- /dev/null +++ b/tests/0030/tokens.txt @@ -0,0 +1,193 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 11, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 16, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 17, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 18, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 24, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 1, + 26, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 29, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 30, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 31, + ), + }, + Token { + kind: Identifier( + "baz", + ), + span: ( + 1, + 33, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 36, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 37, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 38, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 40, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 44, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 1, + 46, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 49, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 50, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 51, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 52, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 53, + ), + }, +] diff --git a/tests/0031/tokens.txt b/tests/0031/tokens.txt new file mode 100644 index 00000000..ae701cbb --- /dev/null +++ b/tests/0031/tokens.txt @@ -0,0 +1,159 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: StringPart( + "", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 8, + ), + }, + Token { + kind: StringPart( + " abc ", + 
), + span: ( + 1, + 12, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 1, + 12, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 21, + ), + }, + Token { + kind: Identifier( + "a", + ), + span: ( + 1, + 23, + ), + }, + Token { + kind: StringPart( + " def ", + ), + span: ( + 1, + 24, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 1, + 24, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 33, + ), + }, + Token { + kind: LiteralInteger( + 0, + ), + span: ( + 1, + 34, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 35, + ), + }, + Token { + kind: StringPart( + " ghi ", + ), + span: ( + 1, + 36, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 1, + 36, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 45, + ), + }, + Token { + kind: Identifier( + "baz", + ), + span: ( + 1, + 46, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 49, + ), + }, + Token { + kind: DoubleQuote, + span: ( + 1, + 50, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 51, + ), + }, +] diff --git a/tests/0032/tokens.txt b/tests/0032/tokens.txt new file mode 100644 index 00000000..52e759fd --- /dev/null +++ b/tests/0032/tokens.txt @@ -0,0 +1,186 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: StringPart( + "", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: DollarLeftBrace, + span: ( + 1, + 8, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 10, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 13, + ), + }, + Token { + kind: DollarLeftBrace, + span: ( + 1, + 14, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 19, + ), + }, + Token { + kind: LiteralInteger( + 0, + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 21, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 22, + ), + }, + Token { + kind: DollarLeftBrace, + span: ( + 1, + 23, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 25, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 28, + ), + }, + Token { + kind: LiteralString( + "bar", + ), + span: ( + 1, + 29, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 34, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 35, + ), + }, + Token { + kind: DollarLeftBrace, + span: ( + 1, + 36, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 38, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 39, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 43, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 44, + ), + }, + Token { + kind: DoubleQuote, + span: ( + 1, + 45, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 46, + ), + }, +] diff --git a/tests/0033/tokens.txt b/tests/0033/tokens.txt new file mode 100644 index 00000000..17e5ae2a --- /dev/null +++ b/tests/0033/tokens.txt @@ -0,0 +1,241 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: StringPart( + "", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 8, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 9, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 13, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 14, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 15, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 19, + ), + }, + Token { + kind: LiteralInteger( 
+ 0, + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 21, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 22, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 23, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 24, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 28, + ), + }, + Token { + kind: LiteralString( + "bar", + ), + span: ( + 1, + 29, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 34, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 35, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 36, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 37, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 41, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 1, + 43, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 46, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 47, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 48, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 52, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 1, + 54, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 57, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 58, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 59, + ), + }, + Token { + kind: DoubleQuote, + span: ( + 1, + 60, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 61, + ), + }, +] diff --git a/tests/0034/tokens.txt b/tests/0034/tokens.txt new file mode 100644 index 00000000..210443e2 --- /dev/null +++ b/tests/0034/tokens.txt @@ -0,0 +1,59 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: LiteralString( + "foo", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: Dot, + span: ( + 1, + 13, + ), + }, + Token { + kind: LiteralString( + "bar", + ), + span: ( + 1, + 15, + ), + }, + Token { + kind: Dot, + span: ( + 1, + 21, + ), + }, + Token { + kind: LiteralString( + "baz", + ), + span: ( + 1, + 23, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 28, + ), + }, +] diff --git a/tests/0035/tokens.txt b/tests/0035/tokens.txt new file mode 100644 index 00000000..d605f2f9 --- /dev/null +++ b/tests/0035/tokens.txt @@ -0,0 +1,55 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 20, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 22, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 23, + ), + }, +] diff --git a/tests/0036/tokens.txt b/tests/0036/tokens.txt new file mode 100644 index 00000000..abcb15ad --- /dev/null +++ b/tests/0036/tokens.txt @@ -0,0 +1,64 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Variable( + "n", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 22, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 24, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 25, + ), + }, +] diff --git a/tests/0037/tokens.txt b/tests/0037/tokens.txt new file mode 100644 index 00000000..267408aa --- /dev/null +++ b/tests/0037/tokens.txt @@ -0,0 
+1,80 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Variable( + "n", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 22, + ), + }, + Token { + kind: Variable( + "m", + ), + span: ( + 1, + 24, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 26, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 28, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 29, + ), + }, +] diff --git a/tests/0038/tokens.txt b/tests/0038/tokens.txt new file mode 100644 index 00000000..60b47224 --- /dev/null +++ b/tests/0038/tokens.txt @@ -0,0 +1,264 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "fib", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 13, + ), + }, + Token { + kind: Variable( + "n", + ), + span: ( + 3, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 16, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 18, + ), + }, + Token { + kind: If, + span: ( + 4, + 5, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 8, + ), + }, + Token { + kind: Variable( + "n", + ), + span: ( + 4, + 9, + ), + }, + Token { + kind: LessThan, + span: ( + 4, + 12, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 4, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 15, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 17, + ), + }, + Token { + kind: Return, + span: ( + 5, + 9, + ), + }, + Token { + kind: Variable( + "n", + ), + span: ( + 5, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 18, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 5, + ), + }, + Token { + kind: Return, + span: ( + 8, + 5, + ), + }, + Token { + kind: Identifier( + "fib", + ), + span: ( + 8, + 12, + ), + }, + Token { + kind: LeftParen, + span: ( + 8, + 15, + ), + }, + Token { + kind: Variable( + "n", + ), + span: ( + 8, + 16, + ), + }, + Token { + kind: Minus, + span: ( + 8, + 19, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 8, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 8, + 22, + ), + }, + Token { + kind: Plus, + span: ( + 8, + 24, + ), + }, + Token { + kind: Identifier( + "fib", + ), + span: ( + 8, + 26, + ), + }, + Token { + kind: LeftParen, + span: ( + 8, + 29, + ), + }, + Token { + kind: Variable( + "n", + ), + span: ( + 8, + 30, + ), + }, + Token { + kind: Minus, + span: ( + 8, + 33, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 8, + 35, + ), + }, + Token { + kind: RightParen, + span: ( + 8, + 36, + ), + }, + Token { + kind: SemiColon, + span: ( + 8, + 37, + ), + }, + Token { + kind: RightBrace, + span: ( + 9, + 1, + ), + }, +] diff --git a/tests/0039/tokens.txt b/tests/0039/tokens.txt new file mode 100644 index 00000000..e7d8868a --- /dev/null +++ b/tests/0039/tokens.txt @@ -0,0 +1,64 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: If, + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 9, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 10, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 14, + ), + }, + Token { + kind: Return, + span: ( + 1, + 16, + ), + }, + Token { + kind: 
Variable( + "foo", + ), + span: ( + 1, + 23, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 27, + ), + }, +] diff --git a/tests/0040/tokens.txt b/tests/0040/tokens.txt new file mode 100644 index 00000000..92b0e474 --- /dev/null +++ b/tests/0040/tokens.txt @@ -0,0 +1,122 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: If, + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 9, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 10, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 14, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 16, + ), + }, + Token { + kind: Return, + span: ( + 1, + 18, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 25, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 29, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 31, + ), + }, + Token { + kind: Else, + span: ( + 1, + 33, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 38, + ), + }, + Token { + kind: Return, + span: ( + 1, + 40, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 47, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 51, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 53, + ), + }, +] diff --git a/tests/0041/tokens.txt b/tests/0041/tokens.txt new file mode 100644 index 00000000..a53585cd --- /dev/null +++ b/tests/0041/tokens.txt @@ -0,0 +1,189 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: If, + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 9, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 10, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 14, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 16, + ), + }, + Token { + kind: Return, + span: ( + 1, + 18, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 25, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 29, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 31, + ), + }, + Token { + kind: ElseIf, + span: ( + 1, + 33, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 39, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 40, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 44, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 46, + ), + }, + Token { + kind: Return, + span: ( + 1, + 48, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 55, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 59, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 61, + ), + }, + Token { + kind: Else, + span: ( + 1, + 63, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 68, + ), + }, + Token { + kind: Return, + span: ( + 1, + 70, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 77, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 81, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 83, + ), + }, +] diff --git a/tests/0042/tokens.txt b/tests/0042/tokens.txt new file mode 100644 index 00000000..37a9effe --- /dev/null +++ b/tests/0042/tokens.txt @@ -0,0 +1,34 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Echo, + span: ( + 1, + 7, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 1, + 12, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 13, + ), + }, +] diff --git a/tests/0043/tokens.txt b/tests/0043/tokens.txt new file mode 100644 index 00000000..0773dadf --- /dev/null 
+++ b/tests/0043/tokens.txt @@ -0,0 +1,41 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 17, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 18, + ), + }, +] diff --git a/tests/0044/tokens.txt b/tests/0044/tokens.txt new file mode 100644 index 00000000..31248f1b --- /dev/null +++ b/tests/0044/tokens.txt @@ -0,0 +1,90 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 17, + ), + }, + Token { + kind: Public, + span: ( + 1, + 19, + ), + }, + Token { + kind: Fn, + span: ( + 1, + 26, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 28, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 29, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 31, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 32, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 33, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 35, + ), + }, +] diff --git a/tests/0045/tokens.txt b/tests/0045/tokens.txt new file mode 100644 index 00000000..8b5e07fe --- /dev/null +++ b/tests/0045/tokens.txt @@ -0,0 +1,108 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Function, + span: ( + 4, + 5, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 4, + 14, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 18, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 20, + ), + }, + Token { + kind: Echo, + span: ( + 5, + 9, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 5, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 15, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0046/tokens.txt b/tests/0046/tokens.txt new file mode 100644 index 00000000..e1d9a5cb --- /dev/null +++ b/tests/0046/tokens.txt @@ -0,0 +1,57 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: Extends, + span: ( + 3, + 11, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 3, + 19, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 23, + ), + }, + Token { + kind: RightBrace, + span: ( + 3, + 24, + ), + }, +] diff --git a/tests/0047/tokens.txt b/tests/0047/tokens.txt new file mode 100644 index 00000000..9751c619 --- /dev/null +++ b/tests/0047/tokens.txt @@ -0,0 +1,73 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: Implements, + span: ( + 3, + 11, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 3, + 22, + ), + }, + Token { + kind: Comma, + span: ( + 3, + 25, + ), + }, + Token { + kind: Identifier( + "Baz", + ), + 
span: ( + 3, + 27, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 31, + ), + }, + Token { + kind: RightBrace, + span: ( + 3, + 32, + ), + }, +] diff --git a/tests/0048/tokens.txt b/tests/0048/tokens.txt new file mode 100644 index 00000000..52490450 --- /dev/null +++ b/tests/0048/tokens.txt @@ -0,0 +1,73 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 1, + 27, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 29, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 31, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 32, + ), + }, +] diff --git a/tests/0049/tokens.txt b/tests/0049/tokens.txt new file mode 100644 index 00000000..3ff44d8f --- /dev/null +++ b/tests/0049/tokens.txt @@ -0,0 +1,121 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: Pipe, + span: ( + 1, + 26, + ), + }, + Token { + kind: Identifier( + "ArrAy", + ), + span: ( + 1, + 27, + ), + }, + Token { + kind: Pipe, + span: ( + 1, + 32, + ), + }, + Token { + kind: Identifier( + "iterable", + ), + span: ( + 1, + 33, + ), + }, + Token { + kind: Pipe, + span: ( + 1, + 41, + ), + }, + Token { + kind: Identifier( + "CALLABLE", + ), + span: ( + 1, + 42, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 1, + 51, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 53, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 55, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 56, + ), + }, +] diff --git a/tests/0050/tokens.txt b/tests/0050/tokens.txt new file mode 100644 index 00000000..4f6c0904 --- /dev/null +++ b/tests/0050/tokens.txt @@ -0,0 +1,71 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Ellipsis, + span: ( + 1, + 20, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 1, + 23, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 27, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 29, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 30, + ), + }, +] diff --git a/tests/0051/tokens.txt b/tests/0051/tokens.txt new file mode 100644 index 00000000..48af358e --- /dev/null +++ b/tests/0051/tokens.txt @@ -0,0 +1,80 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: Ellipsis, + span: ( + 1, + 27, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 1, + 30, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 34, + ), + }, + Token { + kind: 
LeftBrace, + span: ( + 1, + 36, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 37, + ), + }, +] diff --git a/tests/0052/tokens.txt b/tests/0052/tokens.txt new file mode 100644 index 00000000..80ae5fee --- /dev/null +++ b/tests/0052/tokens.txt @@ -0,0 +1,103 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 24, + ), + }, + Token { + kind: Variable( + "baz", + ), + span: ( + 1, + 26, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 30, + ), + }, + Token { + kind: Ellipsis, + span: ( + 1, + 32, + ), + }, + Token { + kind: Variable( + "car", + ), + span: ( + 1, + 35, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 39, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 41, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 42, + ), + }, +] diff --git a/tests/0053/tokens.txt b/tests/0053/tokens.txt new file mode 100644 index 00000000..a52e218c --- /dev/null +++ b/tests/0053/tokens.txt @@ -0,0 +1,80 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Question, + span: ( + 1, + 20, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 1, + 21, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 1, + 28, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 30, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 32, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 33, + ), + }, +] diff --git a/tests/0054/tokens.txt b/tests/0054/tokens.txt new file mode 100644 index 00000000..1cbe0b82 --- /dev/null +++ b/tests/0054/tokens.txt @@ -0,0 +1,89 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Identifier( + "int", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: Pipe, + span: ( + 1, + 23, + ), + }, + Token { + kind: Identifier( + "float", + ), + span: ( + 1, + 24, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 1, + 30, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 32, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 34, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 35, + ), + }, +] diff --git a/tests/0055/tokens.txt b/tests/0055/tokens.txt new file mode 100644 index 00000000..2578d445 --- /dev/null +++ b/tests/0055/tokens.txt @@ -0,0 +1,105 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: Pipe, + span: ( + 1, + 26, + ), + }, + Token { + kind: Identifier( + "int", + ), + span: ( + 1, + 27, + ), + }, + Token { + kind: Pipe, + span: ( + 1, + 30, + ), + }, + Token { + kind: Identifier( + 
"float", + ), + span: ( + 1, + 31, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 1, + 37, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 39, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 41, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 42, + ), + }, +] diff --git a/tests/0056/tokens.txt b/tests/0056/tokens.txt new file mode 100644 index 00000000..b7534301 --- /dev/null +++ b/tests/0056/tokens.txt @@ -0,0 +1,89 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: Ampersand, + span: ( + 1, + 23, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 1, + 24, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 1, + 28, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 30, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 32, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 33, + ), + }, +] diff --git a/tests/0057/tokens.txt b/tests/0057/tokens.txt new file mode 100644 index 00000000..83249100 --- /dev/null +++ b/tests/0057/tokens.txt @@ -0,0 +1,105 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: Ampersand, + span: ( + 1, + 23, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 1, + 24, + ), + }, + Token { + kind: Ampersand, + span: ( + 1, + 27, + ), + }, + Token { + kind: Identifier( + "Baz", + ), + span: ( + 1, + 28, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 1, + 32, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 34, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 36, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 37, + ), + }, +] diff --git a/tests/0058/tokens.txt b/tests/0058/tokens.txt new file mode 100644 index 00000000..ba3ca39b --- /dev/null +++ b/tests/0058/tokens.txt @@ -0,0 +1,71 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 20, + ), + }, + Token { + kind: Colon, + span: ( + 1, + 21, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 1, + 23, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 30, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 31, + ), + }, +] diff --git a/tests/0059/tokens.txt b/tests/0059/tokens.txt new file mode 100644 index 00000000..57b1fc98 --- /dev/null +++ b/tests/0059/tokens.txt @@ -0,0 +1,71 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 20, + ), + }, + Token { + kind: Colon, + span: ( + 1, + 21, + ), + }, + Token { + kind: Identifier( 
+ "void", + ), + span: ( + 1, + 23, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 28, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 29, + ), + }, +] diff --git a/tests/0060/tokens.txt b/tests/0060/tokens.txt new file mode 100644 index 00000000..989e2cb5 --- /dev/null +++ b/tests/0060/tokens.txt @@ -0,0 +1,46 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: New, + span: ( + 1, + 7, + ), + }, + Token { + kind: Class, + span: ( + 1, + 11, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 16, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 17, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 18, + ), + }, +] diff --git a/tests/0061/tokens.txt b/tests/0061/tokens.txt new file mode 100644 index 00000000..9a34b3f4 --- /dev/null +++ b/tests/0061/tokens.txt @@ -0,0 +1,85 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: New, + span: ( + 1, + 7, + ), + }, + Token { + kind: Class, + span: ( + 1, + 11, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 16, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 1, + 17, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 18, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 21, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 23, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 24, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 25, + ), + }, +] diff --git a/tests/0062/tokens.txt b/tests/0062/tokens.txt new file mode 100644 index 00000000..023916d1 --- /dev/null +++ b/tests/0062/tokens.txt @@ -0,0 +1,62 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: New, + span: ( + 1, + 7, + ), + }, + Token { + kind: Class, + span: ( + 1, + 11, + ), + }, + Token { + kind: Extends, + span: ( + 1, + 17, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 25, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 29, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 30, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 31, + ), + }, +] diff --git a/tests/0063/tokens.txt b/tests/0063/tokens.txt new file mode 100644 index 00000000..ea04e7e4 --- /dev/null +++ b/tests/0063/tokens.txt @@ -0,0 +1,78 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: New, + span: ( + 1, + 7, + ), + }, + Token { + kind: Class, + span: ( + 1, + 11, + ), + }, + Token { + kind: Implements, + span: ( + 1, + 17, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 28, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 31, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 1, + 33, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 37, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 38, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 39, + ), + }, +] diff --git a/tests/0064/tokens.txt b/tests/0064/tokens.txt new file mode 100644 index 00000000..9431c68e --- /dev/null +++ b/tests/0064/tokens.txt @@ -0,0 +1,97 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: New, + span: ( + 3, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: 
Identifier( + "foo", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 24, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 25, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 27, + ), + }, + Token { + kind: RightBrace, + span: ( + 4, + 28, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 2, + ), + }, +] diff --git a/tests/0065/tokens.txt b/tests/0065/tokens.txt new file mode 100644 index 00000000..fc9a7389 --- /dev/null +++ b/tests/0065/tokens.txt @@ -0,0 +1,25 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 7, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 8, + ), + }, +] diff --git a/tests/0066/tokens.txt b/tests/0066/tokens.txt new file mode 100644 index 00000000..85220962 --- /dev/null +++ b/tests/0066/tokens.txt @@ -0,0 +1,41 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 7, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 9, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 11, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 13, + ), + }, +] diff --git a/tests/0067/tokens.txt b/tests/0067/tokens.txt new file mode 100644 index 00000000..ae245f7a --- /dev/null +++ b/tests/0067/tokens.txt @@ -0,0 +1,18 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 7, + ), + }, +] diff --git a/tests/0068/tokens.txt b/tests/0068/tokens.txt new file mode 100644 index 00000000..ef6ca6b1 --- /dev/null +++ b/tests/0068/tokens.txt @@ -0,0 +1,73 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 2, + 1, + ), + }, + Token { + kind: Identifier( + "MyClass", + ), + span: ( + 2, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 2, + 15, + ), + }, + Token { + kind: Protected, + span: ( + 3, + 5, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 3, + 15, + ), + }, + Token { + kind: SemiColon, + span: ( + 3, + 17, + ), + }, + Token { + kind: Comment( + "// my comment", + ), + span: ( + 4, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, +] diff --git a/tests/0069/tokens.txt b/tests/0069/tokens.txt new file mode 100644 index 00000000..86fffe49 --- /dev/null +++ b/tests/0069/tokens.txt @@ -0,0 +1,69 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Do, + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 10, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 12, + ), + }, + Token { + kind: While, + span: ( + 1, + 14, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 20, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 21, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 23, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 24, + ), + }, +] diff --git a/tests/0070/tokens.txt b/tests/0070/tokens.txt new file mode 100644 index 00000000..b73101eb --- /dev/null +++ b/tests/0070/tokens.txt @@ -0,0 +1,90 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Do, + span: ( + 2, + 1, + ), + }, + Token { + kind: LeftBrace, + span: ( + 2, + 4, + ), + }, + Token { + kind: Echo, + span: ( + 3, + 5, + ), + }, + Token { + kind: LiteralString( + "Hi!", + ), + span: ( + 3, + 10, + ), + }, + Token { + kind: 
SemiColon, + span: ( + 3, + 15, + ), + }, + Token { + kind: RightBrace, + span: ( + 4, + 1, + ), + }, + Token { + kind: While, + span: ( + 4, + 3, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 9, + ), + }, + Token { + kind: True, + span: ( + 4, + 10, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 15, + ), + }, +] diff --git a/tests/0071/tokens.txt b/tests/0071/tokens.txt new file mode 100644 index 00000000..9144537b --- /dev/null +++ b/tests/0071/tokens.txt @@ -0,0 +1,27 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: CloseTag, + span: ( + 1, + 7, + ), + }, + Token { + kind: InlineHtml( + " ", + ), + span: ( + 1, + 9, + ), + }, +] diff --git a/tests/0072/tokens.txt b/tests/0072/tokens.txt new file mode 100644 index 00000000..a9e39d83 --- /dev/null +++ b/tests/0072/tokens.txt @@ -0,0 +1,48 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: At, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "hello", + ), + span: ( + 1, + 8, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 13, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 15, + ), + }, +] diff --git a/tests/0073/tokens.txt b/tests/0073/tokens.txt new file mode 100644 index 00000000..0b4cdb01 --- /dev/null +++ b/tests/0073/tokens.txt @@ -0,0 +1,43 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: NullsafeArrow, + span: ( + 1, + 9, + ), + }, + Token { + kind: Identifier( + "b", + ), + span: ( + 1, + 12, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 13, + ), + }, +] diff --git a/tests/0074/tokens.txt b/tests/0074/tokens.txt new file mode 100644 index 00000000..fda48f29 --- /dev/null +++ b/tests/0074/tokens.txt @@ -0,0 +1,57 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: NullsafeArrow, + span: ( + 1, + 9, + ), + }, + Token { + kind: Identifier( + "b", + ), + span: ( + 1, + 12, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 13, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 15, + ), + }, +] diff --git a/tests/0075/tokens.txt b/tests/0075/tokens.txt new file mode 100644 index 00000000..e1d59e28 --- /dev/null +++ b/tests/0075/tokens.txt @@ -0,0 +1,66 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: NullsafeArrow, + span: ( + 1, + 9, + ), + }, + Token { + kind: Identifier( + "b", + ), + span: ( + 1, + 12, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 13, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 1, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 17, + ), + }, +] diff --git a/tests/0076/tokens.txt b/tests/0076/tokens.txt new file mode 100644 index 00000000..08ee306f --- /dev/null +++ b/tests/0076/tokens.txt @@ -0,0 +1,73 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: NullsafeArrow, + span: ( + 1, + 9, + ), + }, + Token { + kind: Identifier( + "b", + 
), + span: ( + 1, + 12, + ), + }, + Token { + kind: NullsafeArrow, + span: ( + 1, + 13, + ), + }, + Token { + kind: Identifier( + "c", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 19, + ), + }, +] diff --git a/tests/0077/tokens.txt b/tests/0077/tokens.txt new file mode 100644 index 00000000..46af1005 --- /dev/null +++ b/tests/0077/tokens.txt @@ -0,0 +1,50 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Const, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "FOO", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: Equals, + span: ( + 1, + 17, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 1, + 19, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 20, + ), + }, +] diff --git a/tests/0078/tokens.txt b/tests/0078/tokens.txt new file mode 100644 index 00000000..327d6ca7 --- /dev/null +++ b/tests/0078/tokens.txt @@ -0,0 +1,82 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Const, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "FOO", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: Equals, + span: ( + 1, + 17, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 1, + 19, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 20, + ), + }, + Token { + kind: Identifier( + "BAR", + ), + span: ( + 1, + 22, + ), + }, + Token { + kind: Equals, + span: ( + 1, + 26, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 1, + 28, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 29, + ), + }, +] diff --git a/tests/0079/tokens.txt b/tests/0079/tokens.txt new file mode 100644 index 00000000..eee50ba0 --- /dev/null +++ b/tests/0079/tokens.txt @@ -0,0 +1,34 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Global, + span: ( + 1, + 7, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 16, + ), + }, +] diff --git a/tests/0080/tokens.txt b/tests/0080/tokens.txt new file mode 100644 index 00000000..d0b2bd43 --- /dev/null +++ b/tests/0080/tokens.txt @@ -0,0 +1,50 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Global, + span: ( + 1, + 7, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 14, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 16, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 1, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 20, + ), + }, +] diff --git a/tests/0081/tokens.txt b/tests/0081/tokens.txt new file mode 100644 index 00000000..18131e11 --- /dev/null +++ b/tests/0081/tokens.txt @@ -0,0 +1,64 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Declare, + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 14, + ), + }, + Token { + kind: Identifier( + "A", + ), + span: ( + 1, + 15, + ), + }, + Token { + kind: Equals, + span: ( + 1, + 16, + ), + }, + Token { + kind: LiteralString( + "B", + ), + span: ( + 1, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 21, + ), + }, +] diff --git a/tests/0082/tokens.txt b/tests/0082/tokens.txt new file mode 100644 index 00000000..e3060d08 --- /dev/null +++ b/tests/0082/tokens.txt 
@@ -0,0 +1,96 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Declare, + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 14, + ), + }, + Token { + kind: Identifier( + "A", + ), + span: ( + 1, + 15, + ), + }, + Token { + kind: Equals, + span: ( + 1, + 16, + ), + }, + Token { + kind: LiteralString( + "B", + ), + span: ( + 1, + 17, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 20, + ), + }, + Token { + kind: Identifier( + "C", + ), + span: ( + 1, + 22, + ), + }, + Token { + kind: Equals, + span: ( + 1, + 23, + ), + }, + Token { + kind: LiteralString( + "D", + ), + span: ( + 1, + 24, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 27, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 28, + ), + }, +] diff --git a/tests/0083/tokens.txt b/tests/0083/tokens.txt new file mode 100644 index 00000000..d098fff8 --- /dev/null +++ b/tests/0083/tokens.txt @@ -0,0 +1,94 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Declare, + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 14, + ), + }, + Token { + kind: Identifier( + "A", + ), + span: ( + 1, + 15, + ), + }, + Token { + kind: Equals, + span: ( + 1, + 16, + ), + }, + Token { + kind: LiteralString( + "B", + ), + span: ( + 1, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 20, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 22, + ), + }, + Token { + kind: Echo, + span: ( + 1, + 24, + ), + }, + Token { + kind: LiteralString( + "Hello, world!", + ), + span: ( + 1, + 29, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 44, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 46, + ), + }, +] diff --git a/tests/0084/tokens.txt b/tests/0084/tokens.txt new file mode 100644 index 00000000..d3610797 --- /dev/null +++ b/tests/0084/tokens.txt @@ -0,0 +1,80 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 7, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 1, + 8, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 9, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 1, + 11, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 12, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 14, + ), + }, + Token { + kind: LiteralInteger( + 4, + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 17, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 18, + ), + }, +] diff --git a/tests/0085/tokens.txt b/tests/0085/tokens.txt new file mode 100644 index 00000000..57db28e4 --- /dev/null +++ b/tests/0085/tokens.txt @@ -0,0 +1,48 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Readonly, + span: ( + 1, + 7, + ), + }, + Token { + kind: Class, + span: ( + 1, + 16, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 22, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 26, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 27, + ), + }, +] diff --git a/tests/0086/tokens.txt b/tests/0086/tokens.txt new file mode 100644 index 00000000..b870fd57 --- /dev/null +++ b/tests/0086/tokens.txt @@ -0,0 +1,71 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 17, + ), + }, + Token { + 
kind: Public, + span: ( + 1, + 19, + ), + }, + Token { + kind: Readonly, + span: ( + 1, + 26, + ), + }, + Token { + kind: Variable( + "bar", + ), + span: ( + 1, + 35, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 39, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 41, + ), + }, +] diff --git a/tests/0087/tokens.txt b/tests/0087/tokens.txt new file mode 100644 index 00000000..3a2ca180 --- /dev/null +++ b/tests/0087/tokens.txt @@ -0,0 +1,119 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Enum, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 3, + 6, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 9, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 3, + 11, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 1, + ), + }, + Token { + kind: Case, + span: ( + 5, + 5, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 5, + 10, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 13, + ), + }, + Token { + kind: Case, + span: ( + 6, + 5, + ), + }, + Token { + kind: Identifier( + "Baz", + ), + span: ( + 6, + 10, + ), + }, + Token { + kind: Equals, + span: ( + 6, + 14, + ), + }, + Token { + kind: LiteralString( + "Baz", + ), + span: ( + 6, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 21, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0088/tokens.txt b/tests/0088/tokens.txt new file mode 100644 index 00000000..9d0cfec4 --- /dev/null +++ b/tests/0088/tokens.txt @@ -0,0 +1,55 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 10, + ), + }, + Token { + kind: Ellipsis, + span: ( + 1, + 11, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 14, + ), + }, + Token { + kind: Class, + span: ( + 1, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 21, + ), + }, +] diff --git a/tests/0089/tokens.txt b/tests/0089/tokens.txt new file mode 100644 index 00000000..1fd1cb63 --- /dev/null +++ b/tests/0089/tokens.txt @@ -0,0 +1,64 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "this", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 12, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 14, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 17, + ), + }, + Token { + kind: Ellipsis, + span: ( + 1, + 18, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 21, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 22, + ), + }, +] diff --git a/tests/0090/tokens.txt b/tests/0090/tokens.txt new file mode 100644 index 00000000..f4594c57 --- /dev/null +++ b/tests/0090/tokens.txt @@ -0,0 +1,64 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Identifier( + "A", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: DoubleColon, + span: ( + 1, + 8, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 13, + ), + }, + Token { + kind: Ellipsis, + span: ( + 1, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 17, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 18, + ), + }, +] diff --git a/tests/0091/tokens.txt b/tests/0091/tokens.txt new file mode 100644 index 00000000..bd5167d3 --- /dev/null +++ 
b/tests/0091/tokens.txt @@ -0,0 +1,69 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "a", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 18, + ), + }, + Token { + kind: Colon, + span: ( + 1, + 19, + ), + }, + Token { + kind: True, + span: ( + 1, + 21, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 26, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 27, + ), + }, +] diff --git a/tests/0092/tokens.txt b/tests/0092/tokens.txt new file mode 100644 index 00000000..bcd39ae8 --- /dev/null +++ b/tests/0092/tokens.txt @@ -0,0 +1,69 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 1, + 7, + ), + }, + Token { + kind: Identifier( + "a", + ), + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 18, + ), + }, + Token { + kind: Colon, + span: ( + 1, + 19, + ), + }, + Token { + kind: False, + span: ( + 1, + 21, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 27, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 28, + ), + }, +] diff --git a/tests/0093/tokens.txt b/tests/0093/tokens.txt new file mode 100644 index 00000000..dcebfe01 --- /dev/null +++ b/tests/0093/tokens.txt @@ -0,0 +1,48 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Dollar, + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 8, + ), + }, + Token { + kind: LiteralString( + "foo", + ), + span: ( + 1, + 9, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 15, + ), + }, +] diff --git a/tests/0094/tokens.txt b/tests/0094/tokens.txt new file mode 100644 index 00000000..30159a6f --- /dev/null +++ b/tests/0094/tokens.txt @@ -0,0 +1,62 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Dollar, + span: ( + 1, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 8, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 1, + 9, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 12, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 13, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 14, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 15, + ), + }, +] diff --git a/tests/0095/tokens.txt b/tests/0095/tokens.txt new file mode 100644 index 00000000..7264fce3 --- /dev/null +++ b/tests/0095/tokens.txt @@ -0,0 +1,34 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Dollar, + span: ( + 1, + 7, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 8, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 10, + ), + }, +] diff --git a/tests/0096/tokens.txt b/tests/0096/tokens.txt new file mode 100644 index 00000000..942caef7 --- /dev/null +++ b/tests/0096/tokens.txt @@ -0,0 +1,50 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 9, + ), + }, + Token { + kind: Dollar, + span: ( + 1, + 11, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 1, + 12, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 14, + ), + }, +] diff --git 
a/tests/0097/tokens.txt b/tests/0097/tokens.txt new file mode 100644 index 00000000..19319fd5 --- /dev/null +++ b/tests/0097/tokens.txt @@ -0,0 +1,64 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: Arrow, + span: ( + 1, + 9, + ), + }, + Token { + kind: Dollar, + span: ( + 1, + 11, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 1, + 12, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 15, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 16, + ), + }, +] diff --git a/tests/0098/tokens.txt b/tests/0098/tokens.txt new file mode 100644 index 00000000..81fe0d2b --- /dev/null +++ b/tests/0098/tokens.txt @@ -0,0 +1,50 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: DoubleColon, + span: ( + 1, + 10, + ), + }, + Token { + kind: Dollar, + span: ( + 1, + 12, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 15, + ), + }, +] diff --git a/tests/0099/tokens.txt b/tests/0099/tokens.txt new file mode 100644 index 00000000..217fd2c0 --- /dev/null +++ b/tests/0099/tokens.txt @@ -0,0 +1,64 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: DoubleColon, + span: ( + 1, + 10, + ), + }, + Token { + kind: Dollar, + span: ( + 1, + 12, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 15, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 17, + ), + }, +] diff --git a/tests/0100/tokens.txt b/tests/0100/tokens.txt new file mode 100644 index 00000000..97f8979b --- /dev/null +++ b/tests/0100/tokens.txt @@ -0,0 +1,71 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 1, + 7, + ), + }, + Token { + kind: DoubleColon, + span: ( + 1, + 10, + ), + }, + Token { + kind: LeftBrace, + span: ( + 1, + 12, + ), + }, + Token { + kind: LiteralString( + "foo", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: RightBrace, + span: ( + 1, + 18, + ), + }, + Token { + kind: LeftParen, + span: ( + 1, + 19, + ), + }, + Token { + kind: RightParen, + span: ( + 1, + 20, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 21, + ), + }, +] diff --git a/tests/0101/tokens.txt b/tests/0101/tokens.txt new file mode 100644 index 00000000..d0c96f1b --- /dev/null +++ b/tests/0101/tokens.txt @@ -0,0 +1,53 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 7, + ), + }, + Token { + kind: Ellipsis, + span: ( + 1, + 8, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 11, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 12, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 13, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 14, + ), + }, +] diff --git a/tests/0102/tokens.txt b/tests/0102/tokens.txt new file mode 100644 index 00000000..78934905 --- /dev/null +++ b/tests/0102/tokens.txt @@ -0,0 +1,99 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: LeftBracket, + span: ( + 
1, + 7, + ), + }, + Token { + kind: Ellipsis, + span: ( + 1, + 8, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 11, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 1, + 12, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 13, + ), + }, + Token { + kind: Comma, + span: ( + 1, + 14, + ), + }, + Token { + kind: Ellipsis, + span: ( + 1, + 16, + ), + }, + Token { + kind: LeftBracket, + span: ( + 1, + 19, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 1, + 20, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 21, + ), + }, + Token { + kind: RightBracket, + span: ( + 1, + 22, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 23, + ), + }, +] diff --git a/tests/0103/tokens.txt b/tests/0103/tokens.txt new file mode 100644 index 00000000..6a9e54f6 --- /dev/null +++ b/tests/0103/tokens.txt @@ -0,0 +1,34 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Print, + span: ( + 1, + 7, + ), + }, + Token { + kind: Variable( + "foo", + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 17, + ), + }, +] diff --git a/tests/0105/tokens.txt b/tests/0105/tokens.txt new file mode 100644 index 00000000..74885706 --- /dev/null +++ b/tests/0105/tokens.txt @@ -0,0 +1,25 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Yield, + span: ( + 1, + 7, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 12, + ), + }, +] diff --git a/tests/0106/tokens.txt b/tests/0106/tokens.txt new file mode 100644 index 00000000..82468553 --- /dev/null +++ b/tests/0106/tokens.txt @@ -0,0 +1,34 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Yield, + span: ( + 1, + 7, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 14, + ), + }, +] diff --git a/tests/0107/tokens.txt b/tests/0107/tokens.txt new file mode 100644 index 00000000..a8ccac0d --- /dev/null +++ b/tests/0107/tokens.txt @@ -0,0 +1,50 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Yield, + span: ( + 1, + 7, + ), + }, + Token { + kind: LiteralInteger( + 0, + ), + span: ( + 1, + 13, + ), + }, + Token { + kind: DoubleArrow, + span: ( + 1, + 15, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 1, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 19, + ), + }, +] diff --git a/tests/0108/tokens.txt b/tests/0108/tokens.txt new file mode 100644 index 00000000..3a6e77dd --- /dev/null +++ b/tests/0108/tokens.txt @@ -0,0 +1,41 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Yield, + span: ( + 1, + 7, + ), + }, + Token { + kind: From, + span: ( + 1, + 13, + ), + }, + Token { + kind: LiteralInteger( + 1, + ), + span: ( + 1, + 18, + ), + }, + Token { + kind: SemiColon, + span: ( + 1, + 19, + ), + }, +] diff --git a/tests/0109/tokens.txt b/tests/0109/tokens.txt new file mode 100644 index 00000000..fd1aea2e --- /dev/null +++ b/tests/0109/tokens.txt @@ -0,0 +1,239 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: Identifier( 
+ "__construct", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 32, + ), + }, + Token { + kind: Public, + span: ( + 5, + 9, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 5, + 16, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 5, + 23, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 25, + ), + }, + Token { + kind: Public, + span: ( + 6, + 9, + ), + }, + Token { + kind: Readonly, + span: ( + 6, + 16, + ), + }, + Token { + kind: Identifier( + "int", + ), + span: ( + 6, + 25, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 6, + 29, + ), + }, + Token { + kind: Comma, + span: ( + 6, + 31, + ), + }, + Token { + kind: Public, + span: ( + 7, + 9, + ), + }, + Token { + kind: Readonly, + span: ( + 7, + 16, + ), + }, + Token { + kind: Identifier( + "float", + ), + span: ( + 7, + 25, + ), + }, + Token { + kind: Ampersand, + span: ( + 7, + 31, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 7, + 32, + ), + }, + Token { + kind: Comma, + span: ( + 7, + 34, + ), + }, + Token { + kind: Ampersand, + span: ( + 8, + 9, + ), + }, + Token { + kind: Ellipsis, + span: ( + 8, + 10, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 8, + 13, + ), + }, + Token { + kind: Comma, + span: ( + 8, + 15, + ), + }, + Token { + kind: RightParen, + span: ( + 9, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 9, + 7, + ), + }, + Token { + kind: RightBrace, + span: ( + 9, + 8, + ), + }, + Token { + kind: RightBrace, + span: ( + 10, + 1, + ), + }, +] diff --git a/tests/0110/tokens.txt b/tests/0110/tokens.txt new file mode 100644 index 00000000..b6701aaa --- /dev/null +++ b/tests/0110/tokens.txt @@ -0,0 +1,232 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: Identifier( + "__construct", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 32, + ), + }, + Token { + kind: Public, + span: ( + 5, + 9, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 5, + 16, + ), + }, + Token { + kind: Variable( + "a", + ), + span: ( + 5, + 23, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 25, + ), + }, + Token { + kind: Public, + span: ( + 6, + 9, + ), + }, + Token { + kind: Readonly, + span: ( + 6, + 16, + ), + }, + Token { + kind: Identifier( + "int", + ), + span: ( + 6, + 25, + ), + }, + Token { + kind: Variable( + "b", + ), + span: ( + 6, + 29, + ), + }, + Token { + kind: Comma, + span: ( + 6, + 31, + ), + }, + Token { + kind: Public, + span: ( + 7, + 9, + ), + }, + Token { + kind: Readonly, + span: ( + 7, + 16, + ), + }, + Token { + kind: Identifier( + "float", + ), + span: ( + 7, + 25, + ), + }, + Token { + kind: Ampersand, + span: ( + 7, + 31, + ), + }, + Token { + kind: Variable( + "c", + ), + span: ( + 7, + 32, + ), + }, + Token { + kind: Comma, + span: ( + 7, + 34, + ), + }, + Token { + kind: Ellipsis, + span: ( + 8, + 9, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 8, + 12, + ), + }, + Token { + kind: Comma, + span: ( + 8, + 14, + ), + }, + Token { + kind: RightParen, + span: ( + 9, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 9, + 7, + ), + }, + Token { + kind: RightBrace, + span: ( + 9, + 8, 
+ ), + }, + Token { + kind: RightBrace, + span: ( + 10, + 1, + ), + }, +] diff --git a/tests/0111/tokens.txt b/tests/0111/tokens.txt new file mode 100644 index 00000000..874980b2 --- /dev/null +++ b/tests/0111/tokens.txt @@ -0,0 +1,129 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: Identifier( + "__construct", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 32, + ), + }, + Token { + kind: Public, + span: ( + 5, + 9, + ), + }, + Token { + kind: Readonly, + span: ( + 5, + 16, + ), + }, + Token { + kind: Ellipsis, + span: ( + 5, + 25, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 5, + 28, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 30, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 6, + 7, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 8, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0112/tokens.txt b/tests/0112/tokens.txt new file mode 100644 index 00000000..2421bdcc --- /dev/null +++ b/tests/0112/tokens.txt @@ -0,0 +1,129 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: Identifier( + "__construct", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 32, + ), + }, + Token { + kind: Public, + span: ( + 5, + 9, + ), + }, + Token { + kind: Readonly, + span: ( + 5, + 16, + ), + }, + Token { + kind: Ampersand, + span: ( + 5, + 25, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 5, + 26, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 28, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 6, + 7, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 8, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0113/tokens.txt b/tests/0113/tokens.txt new file mode 100644 index 00000000..9266f2a5 --- /dev/null +++ b/tests/0113/tokens.txt @@ -0,0 +1,124 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 24, + ), + }, + Token { + kind: Public, + span: ( + 5, + 9, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 5, + 16, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 5, + 23, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 25, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 5, + ), + }, + Token { + kind: 
LeftBrace, + span: ( + 6, + 7, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 8, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0114/tokens.txt b/tests/0114/tokens.txt new file mode 100644 index 00000000..1ac51b4e --- /dev/null +++ b/tests/0114/tokens.txt @@ -0,0 +1,117 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Interface, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 11, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 15, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: Identifier( + "__construct", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 32, + ), + }, + Token { + kind: Public, + span: ( + 5, + 9, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 5, + 16, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 5, + 23, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 25, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 6, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0115/tokens.txt b/tests/0115/tokens.txt new file mode 100644 index 00000000..7bbeec35 --- /dev/null +++ b/tests/0115/tokens.txt @@ -0,0 +1,131 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Abstract, + span: ( + 3, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 10, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 16, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 20, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Abstract, + span: ( + 4, + 12, + ), + }, + Token { + kind: Function, + span: ( + 4, + 21, + ), + }, + Token { + kind: Identifier( + "__construct", + ), + span: ( + 4, + 30, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 41, + ), + }, + Token { + kind: Public, + span: ( + 5, + 9, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 5, + 16, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 5, + 23, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 25, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 6, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0116/tokens.txt b/tests/0116/tokens.txt new file mode 100644 index 00000000..6a5140a4 --- /dev/null +++ b/tests/0116/tokens.txt @@ -0,0 +1,124 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Trait, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Abstract, + span: ( + 4, + 12, + ), + }, + Token { + kind: Function, + span: ( + 4, + 21, + ), + }, + Token { + kind: Identifier( + "__construct", + ), + span: ( + 4, + 30, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 41, + ), + }, + Token { + kind: Public, + span: ( + 5, + 9, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 5, + 16, + ), + }, + Token { + kind: Variable( + "e", + ), + span: ( + 5, + 23, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 25, + ), + }, + 
Token { + kind: RightParen, + span: ( + 6, + 5, + ), + }, + Token { + kind: SemiColon, + span: ( + 6, + 6, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0117/tokens.txt b/tests/0117/tokens.txt new file mode 100644 index 00000000..8d58559a --- /dev/null +++ b/tests/0117/tokens.txt @@ -0,0 +1,135 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Enum, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 3, + 6, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 9, + ), + }, + Token { + kind: Identifier( + "int", + ), + span: ( + 3, + 11, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 15, + ), + }, + Token { + kind: Case, + span: ( + 4, + 5, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 4, + 10, + ), + }, + Token { + kind: Equals, + span: ( + 4, + 14, + ), + }, + Token { + kind: LiteralInteger( + 2, + ), + span: ( + 4, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 17, + ), + }, + Token { + kind: Case, + span: ( + 5, + 5, + ), + }, + Token { + kind: Identifier( + "Baz", + ), + span: ( + 5, + 10, + ), + }, + Token { + kind: Equals, + span: ( + 5, + 14, + ), + }, + Token { + kind: LiteralInteger( + 4, + ), + span: ( + 5, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 17, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 1, + ), + }, +] diff --git a/tests/0118/tokens.txt b/tests/0118/tokens.txt new file mode 100644 index 00000000..25378b80 --- /dev/null +++ b/tests/0118/tokens.txt @@ -0,0 +1,87 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Enum, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 3, + 6, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 10, + ), + }, + Token { + kind: Case, + span: ( + 4, + 5, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 4, + 10, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 13, + ), + }, + Token { + kind: Case, + span: ( + 5, + 5, + ), + }, + Token { + kind: Identifier( + "Baz", + ), + span: ( + 5, + 10, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 13, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 1, + ), + }, +] diff --git a/tests/0119/tokens.txt b/tests/0119/tokens.txt new file mode 100644 index 00000000..048a48e4 --- /dev/null +++ b/tests/0119/tokens.txt @@ -0,0 +1,135 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Enum, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "Foo", + ), + span: ( + 3, + 6, + ), + }, + Token { + kind: Colon, + span: ( + 3, + 9, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 3, + 11, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 18, + ), + }, + Token { + kind: Case, + span: ( + 4, + 5, + ), + }, + Token { + kind: Identifier( + "Bar", + ), + span: ( + 4, + 10, + ), + }, + Token { + kind: Equals, + span: ( + 4, + 14, + ), + }, + Token { + kind: LiteralString( + "3", + ), + span: ( + 4, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 19, + ), + }, + Token { + kind: Case, + span: ( + 5, + 5, + ), + }, + Token { + kind: Identifier( + "Baz", + ), + span: ( + 5, + 10, + ), + }, + Token { + kind: Equals, + span: ( + 5, + 14, + ), + }, + Token { + kind: LiteralString( + "g", + ), + span: ( + 5, + 16, + ), + }, + Token { + kind: SemiColon, + span: ( + 5, + 19, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, 
+ 1, + ), + }, +] diff --git a/tests/0120/tokens.txt b/tests/0120/tokens.txt new file mode 100644 index 00000000..92f74f19 --- /dev/null +++ b/tests/0120/tokens.txt @@ -0,0 +1,85 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Abstract, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 14, + ), + }, + Token { + kind: Identifier( + "bar", + ), + span: ( + 4, + 23, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 26, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 27, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 28, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, +] diff --git a/tests/0121/tokens.txt b/tests/0121/tokens.txt new file mode 100644 index 00000000..4a180c1d --- /dev/null +++ b/tests/0121/tokens.txt @@ -0,0 +1,87 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Static, + span: ( + 4, + 5, + ), + }, + Token { + kind: Const, + span: ( + 4, + 12, + ), + }, + Token { + kind: Identifier( + "BAR", + ), + span: ( + 4, + 18, + ), + }, + Token { + kind: Equals, + span: ( + 4, + 22, + ), + }, + Token { + kind: LiteralInteger( + 34, + ), + span: ( + 4, + 24, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 26, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, +] diff --git a/tests/0122/tokens.txt b/tests/0122/tokens.txt new file mode 100644 index 00000000..b04ae41c --- /dev/null +++ b/tests/0122/tokens.txt @@ -0,0 +1,87 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Const, + span: ( + 4, + 5, + ), + }, + Token { + kind: Static, + span: ( + 4, + 11, + ), + }, + Token { + kind: Identifier( + "BAR", + ), + span: ( + 4, + 18, + ), + }, + Token { + kind: Equals, + span: ( + 4, + 22, + ), + }, + Token { + kind: LiteralInteger( + 34, + ), + span: ( + 4, + 24, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 26, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, +] diff --git a/tests/0123/tokens.txt b/tests/0123/tokens.txt new file mode 100644 index 00000000..9746f121 --- /dev/null +++ b/tests/0123/tokens.txt @@ -0,0 +1,87 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Readonly, + span: ( + 4, + 5, + ), + }, + Token { + kind: Const, + span: ( + 4, + 14, + ), + }, + Token { + kind: Identifier( + "BAR", + ), + span: ( + 4, + 20, + ), + }, + Token { + kind: Equals, + span: ( + 4, + 24, + ), + }, + Token { + kind: LiteralInteger( + 34, + ), + span: ( + 4, + 26, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 28, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, +] diff --git a/tests/0124/tokens.txt b/tests/0124/tokens.txt new 
file mode 100644 index 00000000..f45fc289 --- /dev/null +++ b/tests/0124/tokens.txt @@ -0,0 +1,99 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Final, + span: ( + 4, + 5, + ), + }, + Token { + kind: Abstract, + span: ( + 4, + 11, + ), + }, + Token { + kind: Function, + span: ( + 4, + 20, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 4, + 29, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 32, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 33, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 35, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0125/tokens.txt b/tests/0125/tokens.txt new file mode 100644 index 00000000..df0510c1 --- /dev/null +++ b/tests/0125/tokens.txt @@ -0,0 +1,99 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Final, + span: ( + 3, + 1, + ), + }, + Token { + kind: Abstract, + span: ( + 3, + 7, + ), + }, + Token { + kind: Class, + span: ( + 3, + 16, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 22, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 26, + ), + }, + Token { + kind: Function, + span: ( + 4, + 5, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 4, + 14, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 18, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 20, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0126/tokens.txt b/tests/0126/tokens.txt new file mode 100644 index 00000000..20991cea --- /dev/null +++ b/tests/0126/tokens.txt @@ -0,0 +1,94 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Final, + span: ( + 4, + 5, + ), + }, + Token { + kind: Private, + span: ( + 4, + 11, + ), + }, + Token { + kind: Const, + span: ( + 4, + 19, + ), + }, + Token { + kind: Identifier( + "BAR", + ), + span: ( + 4, + 25, + ), + }, + Token { + kind: Equals, + span: ( + 4, + 29, + ), + }, + Token { + kind: LiteralInteger( + 3, + ), + span: ( + 4, + 31, + ), + }, + Token { + kind: SemiColon, + span: ( + 4, + 32, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, +] diff --git a/tests/0127/tokens.txt b/tests/0127/tokens.txt new file mode 100644 index 00000000..3722780a --- /dev/null +++ b/tests/0127/tokens.txt @@ -0,0 +1,147 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: Identifier( + "__construct", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 32, + ), + }, + Token { + kind: Readonly, + span: ( + 5, + 9, + 
), + }, + Token { + kind: Public, + span: ( + 5, + 18, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 5, + 25, + ), + }, + Token { + kind: Variable( + "s", + ), + span: ( + 5, + 32, + ), + }, + Token { + kind: Equals, + span: ( + 5, + 35, + ), + }, + Token { + kind: LiteralString( + "h", + ), + span: ( + 5, + 37, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 40, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 6, + 7, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 8, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0128/tokens.txt b/tests/0128/tokens.txt new file mode 100644 index 00000000..e468c251 --- /dev/null +++ b/tests/0128/tokens.txt @@ -0,0 +1,138 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: Identifier( + "__construct", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 32, + ), + }, + Token { + kind: Readonly, + span: ( + 5, + 9, + ), + }, + Token { + kind: Public, + span: ( + 5, + 18, + ), + }, + Token { + kind: Variable( + "s", + ), + span: ( + 5, + 25, + ), + }, + Token { + kind: Equals, + span: ( + 5, + 28, + ), + }, + Token { + kind: LiteralString( + "h", + ), + span: ( + 5, + 30, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 33, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 6, + 7, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 8, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0129/tokens.txt b/tests/0129/tokens.txt new file mode 100644 index 00000000..2bef352d --- /dev/null +++ b/tests/0129/tokens.txt @@ -0,0 +1,161 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: Identifier( + "__construct", + ), + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 32, + ), + }, + Token { + kind: Readonly, + span: ( + 5, + 9, + ), + }, + Token { + kind: Public, + span: ( + 5, + 18, + ), + }, + Token { + kind: Protected, + span: ( + 5, + 25, + ), + }, + Token { + kind: Private, + span: ( + 5, + 35, + ), + }, + Token { + kind: Identifier( + "string", + ), + span: ( + 5, + 43, + ), + }, + Token { + kind: Variable( + "s", + ), + span: ( + 5, + 50, + ), + }, + Token { + kind: Equals, + span: ( + 5, + 53, + ), + }, + Token { + kind: LiteralString( + "h", + ), + span: ( + 5, + 55, + ), + }, + Token { + kind: Comma, + span: ( + 5, + 58, + ), + }, + Token { + kind: RightParen, + span: ( + 6, + 5, + ), + }, + Token { + kind: LeftBrace, + span: ( + 6, + 7, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 8, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0130/tokens.txt b/tests/0130/tokens.txt new file mode 100644 index 00000000..618d48f6 --- 
/dev/null +++ b/tests/0130/tokens.txt @@ -0,0 +1,99 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Class, + span: ( + 3, + 1, + ), + }, + Token { + kind: Identifier( + "foo", + ), + span: ( + 3, + 7, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 11, + ), + }, + Token { + kind: Public, + span: ( + 4, + 5, + ), + }, + Token { + kind: Function, + span: ( + 4, + 12, + ), + }, + Token { + kind: Foreach, + span: ( + 4, + 21, + ), + }, + Token { + kind: LeftParen, + span: ( + 4, + 28, + ), + }, + Token { + kind: RightParen, + span: ( + 4, + 29, + ), + }, + Token { + kind: LeftBrace, + span: ( + 4, + 31, + ), + }, + Token { + kind: Comment( + "//", + ), + span: ( + 5, + 9, + ), + }, + Token { + kind: RightBrace, + span: ( + 6, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 7, + 1, + ), + }, +] diff --git a/tests/0131/tokens.txt b/tests/0131/tokens.txt new file mode 100644 index 00000000..cbbb8979 --- /dev/null +++ b/tests/0131/tokens.txt @@ -0,0 +1,62 @@ +[ + Token { + kind: OpenTag( + Full, + ), + span: ( + 1, + 1, + ), + }, + Token { + kind: Function, + span: ( + 3, + 1, + ), + }, + Token { + kind: Foreach, + span: ( + 3, + 10, + ), + }, + Token { + kind: LeftParen, + span: ( + 3, + 17, + ), + }, + Token { + kind: RightParen, + span: ( + 3, + 18, + ), + }, + Token { + kind: LeftBrace, + span: ( + 3, + 20, + ), + }, + Token { + kind: Comment( + "//", + ), + span: ( + 4, + 5, + ), + }, + Token { + kind: RightBrace, + span: ( + 5, + 1, + ), + }, +] diff --git a/tests/0133/code.php b/tests/0133/code.php new file mode 100644 index 00000000..4e450cc5 --- /dev/null +++ b/tests/0133/code.php @@ -0,0 +1,5 @@ + Syntax Error: invalid octal escape on line 5 column 11 diff --git a/tests/0134/code.php b/tests/0134/code.php new file mode 100644 index 00000000..8723c612 --- /dev/null +++ b/tests/0134/code.php @@ -0,0 +1,4 @@ + Syntax Error: invalid unicode escape on line 4 column 10 diff --git a/tests/0135/code.php b/tests/0135/code.php new file mode 100644 index 00000000..6c300cd9 --- /dev/null +++ b/tests/0135/code.php @@ -0,0 +1,4 @@ + Syntax Error: invalid unicode escape on line 4 column 10 diff --git a/tests/0136/code.php b/tests/0136/code.php new file mode 100644 index 00000000..cf559e58 --- /dev/null +++ b/tests/0136/code.php @@ -0,0 +1,4 @@ + Syntax Error: invalid unicode escape on line 4 column 12 diff --git a/tests/0137/code.php b/tests/0137/code.php new file mode 100644 index 00000000..0d7d6b3e --- /dev/null +++ b/tests/0137/code.php @@ -0,0 +1,4 @@ + Syntax Error: invalid unicode escape on line 4 column 17 diff --git a/tests/0138/code.php b/tests/0138/code.php new file mode 100644 index 00000000..745e22ca --- /dev/null +++ b/tests/0138/code.php @@ -0,0 +1,3 @@ + Syntax Error: unexpected end of file on line 4 column 1 diff --git a/tests/0139/code.php b/tests/0139/code.php new file mode 100644 index 00000000..a81511aa --- /dev/null +++ b/tests/0139/code.php @@ -0,0 +1,3 @@ + Syntax Error: unexpected end of file on line 4 column 1 diff --git a/tests/0140/code.php b/tests/0140/code.php new file mode 100644 index 00000000..323a509c --- /dev/null +++ b/tests/0140/code.php @@ -0,0 +1,3 @@ + Syntax Error: invalid octal literal on line 3 column 8 diff --git a/tests/third_party_tests.rs b/tests/third_party_tests.rs index ca3d098b..6cc27983 100644 --- a/tests/third_party_tests.rs +++ b/tests/third_party_tests.rs @@ -3,8 +3,8 @@ use std::fs; use std::path::PathBuf; use std::process::Command; -use php_parser_rs::Lexer; -use php_parser_rs::Parser; 
+use php_parser_rs::prelude::Lexer; +use php_parser_rs::prelude::Parser; #[test] fn third_party_php_standard_library() { @@ -73,7 +73,7 @@ fn test_directory(root: PathBuf, directory: PathBuf) { fn test_file(name: &str, filename: PathBuf) { let code = std::fs::read_to_string(&filename).unwrap(); - Lexer::new(None) + Lexer::new() .tokenize(code.as_bytes()) .map(|tokens| { Parser::new(None)
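The final hunk above captures the two API changes the rest of this patch leans on: Lexer and Parser are now re-exported through php_parser_rs::prelude, and Lexer::new() no longer takes an argument. A minimal sketch of driving the lexer after this refactor follows; it assumes tokenize() returns a Result and infers the token fields (a kind plus a (line, column) span) and the error formatting from the tokens.txt and lexer-error.txt fixtures above, not from the crate's documentation.

    use php_parser_rs::prelude::{Lexer, Parser};

    fn main() {
        let code = "<?php $a?->b();";

        // Lexer::new() takes no argument after this refactor.
        match Lexer::new().tokenize(code.as_bytes()) {
            Ok(tokens) => {
                // Each token mirrors the snapshots above: a kind plus a
                // (line, column) span, e.g. NullsafeArrow at (1, 9).
                for token in &tokens {
                    println!("{:?} at {:?}", token.kind, token.span);
                }
                // tests/third_party_tests.rs hands these tokens to a
                // Parser::new(None) next; the parse step is omitted here.
                let _parser = Parser::new(None);
            }
            // The lexer-error fixtures suggest failures carry messages such as
            // "Syntax Error: invalid octal escape on line 5 column 11".
            Err(error) => eprintln!("{:?}", error),
        }
    }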