diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index b05667c2b..289e9830f 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -759,6 +759,7 @@ impl<'a> Tokenizer<'a> {
             '=' => {
                 chars.next(); // consume
                 match chars.peek() {
+                    Some('=') => self.consume_and_return(chars, Token::DoubleEq),
                     Some('>') => self.consume_and_return(chars, Token::RArrow),
                     _ => Ok(Some(Token::Eq)),
                 }
@@ -1622,6 +1623,24 @@ mod tests {
         compare(expected, tokens);
     }
 
+    #[test]
+    fn tokenize_double_eq() {
+        let sql = String::from("a == 123");
+        let dialect = GenericDialect {};
+        let mut tokenizer = Tokenizer::new(&dialect, &sql);
+        let tokens = tokenizer.tokenize().unwrap();
+
+        let expected = vec![
+            Token::make_word("a", None),
+            Token::Whitespace(Whitespace::Space),
+            Token::DoubleEq,
+            Token::Whitespace(Whitespace::Space),
+            Token::Number(String::from("123"), false),
+        ];
+
+        compare(expected, tokens);
+    }
+
     #[test]
     fn tokenize_comment() {
         let sql = String::from("0--this is a comment\n1");