diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index fd33f9589..d82810528 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -394,10 +394,21 @@ impl<'a> Tokenizer<'a> {
                         )
                     }
                 }
-                // numbers
-                '0'..='9' => {
-                    // TODO: https://jakewheat.github.io/sql-overview/sql-2011-foundation-grammar.html#unsigned-numeric-literal
-                    let s = peeking_take_while(chars, |ch| matches!(ch, '0'..='9' | '.'));
+                // numbers and period
+                '0'..='9' | '.' => {
+                    let mut s = peeking_take_while(chars, |ch| matches!(ch, '0'..='9'));
+                    // match one period
+                    if let Some('.') = chars.peek() {
+                        s.push('.');
+                        chars.next();
+                    }
+                    s += &peeking_take_while(chars, |ch| matches!(ch, '0'..='9'));
+
+                    // No number -> Token::Period
+                    if s == "." {
+                        return Ok(Some(Token::Period));
+                    }
+
                     let long = if chars.peek() == Some(&'L') {
                         chars.next();
                         true
@@ -470,7 +481,6 @@ impl<'a> Tokenizer<'a> {
                         _ => Ok(Some(Token::Eq)),
                     }
                 }
-                '.' => self.consume_and_return(chars, Token::Period),
                 '!' => {
                     chars.next(); // consume
                     match chars.peek() {
@@ -667,6 +677,22 @@ mod tests {
         compare(expected, tokens);
     }
 
+    #[test]
+    fn tokenize_select_float() {
+        let sql = String::from("SELECT .1");
+        let dialect = GenericDialect {};
+        let mut tokenizer = Tokenizer::new(&dialect, &sql);
+        let tokens = tokenizer.tokenize().unwrap();
+
+        let expected = vec![
+            Token::make_keyword("SELECT"),
+            Token::Whitespace(Whitespace::Space),
+            Token::Number(String::from(".1"), false),
+        ];
+
+        compare(expected, tokens);
+    }
+
     #[test]
     fn tokenize_scalar_function() {
         let sql = String::from("SELECT sqrt(1)");
diff --git a/tests/sqlparser_regression.rs b/tests/sqlparser_regression.rs
index 1fc35d99c..e869e0932 100644
--- a/tests/sqlparser_regression.rs
+++ b/tests/sqlparser_regression.rs
@@ -26,10 +26,7 @@ macro_rules! tpch_tests {
             fn $name() {
                 let dialect = GenericDialect {};
                 let res = Parser::parse_sql(&dialect, QUERIES[$value -1]);
-                // Ignore 6.sql
-                if $value != 6 {
                 assert!(res.is_ok());
-                }
             }
         )*
     }