diff --git a/src/dialect/postgresql.rs b/src/dialect/postgresql.rs
index 0c2eb99f0..87d9fd9d4 100644
--- a/src/dialect/postgresql.rs
+++ b/src/dialect/postgresql.rs
@@ -20,7 +20,7 @@ impl Dialect for PostgreSqlDialect {
         // See https://www.postgresql.org/docs/11/sql-syntax-lexical.html#SQL-SYNTAX-IDENTIFIERS
         // We don't yet support identifiers beginning with "letters with
         // diacritical marks and non-Latin letters"
-        ('a'..='z').contains(&ch) || ('A'..='Z').contains(&ch) || ch == '_'
+        ('a'..='z').contains(&ch) || ('A'..='Z').contains(&ch) || ch == '_' || ch == '#'
     }
 
     fn is_identifier_part(&self, ch: char) -> bool {
@@ -29,5 +29,6 @@ impl Dialect for PostgreSqlDialect {
             || ('0'..='9').contains(&ch)
             || ch == '$'
             || ch == '_'
+            || ch == '#'
     }
 }
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 566deacec..525652ba1 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -361,6 +361,25 @@ impl<'a> Tokenizer<'a> {
         Ok(tokens)
     }
 
+    fn consume_sharp(
+        &self,
+        chars: &mut Peekable<Chars<'_>>,
+    ) -> Result<Option<Token>, TokenizerError> {
+        match chars.peek() {
+            Some('>') => {
+                chars.next();
+                match chars.peek() {
+                    Some('>') => {
+                        chars.next();
+                        Ok(Some(Token::HashLongArrow))
+                    }
+                    _ => Ok(Some(Token::HashArrow)),
+                }
+            }
+            _ => Ok(Some(Token::Sharp)),
+        }
+    }
+
     /// Get the next token or return None
     fn next_token(&self, chars: &mut Peekable<Chars<'_>>) -> Result<Option<Token>, TokenizerError> {
         //println!("next_token: {:?}", chars.peek());
@@ -422,7 +441,11 @@ impl<'a> Tokenizer<'a> {
                         s += s2.as_str();
                         return Ok(Some(Token::Number(s, false)));
                     }
-                    Ok(Some(Token::make_word(&s, None)))
+                    if s == "#" {
+                        self.consume_sharp(chars)
+                    } else {
+                        Ok(Some(Token::make_word(&s, None)))
+                    }
                 }
                 // string
                 '\'' => {
@@ -624,19 +647,7 @@ impl<'a> Tokenizer<'a> {
                 }
                 '#' => {
                     chars.next();
-                    match chars.peek() {
-                        Some('>') => {
-                            chars.next();
-                            match chars.peek() {
-                                Some('>') => {
-                                    chars.next();
-                                    Ok(Some(Token::HashLongArrow))
-                                }
-                                _ => Ok(Some(Token::HashArrow)),
-                            }
-                        }
-                        _ => Ok(Some(Token::Sharp)),
-                    }
+                    self.consume_sharp(chars)
                 }
                 '@' => self.consume_and_return(chars, Token::AtSign),
                 '?' => self.consume_and_return(chars, Token::Placeholder(String::from("?"))),
diff --git a/tests/sqlparser_postgres.rs b/tests/sqlparser_postgres.rs
index eb42edc8d..c60e06c06 100644
--- a/tests/sqlparser_postgres.rs
+++ b/tests/sqlparser_postgres.rs
@@ -1373,3 +1373,13 @@ fn pg_and_generic() -> TestedDialects {
         dialects: vec![Box::new(PostgreSqlDialect {}), Box::new(GenericDialect {})],
     }
 }
+
+#[test]
+fn test_sharp() {
+    let sql = "SELECT #_of_values";
+    let select = pg().verified_only_select(sql);
+    assert_eq!(
+        SelectItem::UnnamedExpr(Expr::Identifier(Ident::new("#_of_values"))),
+        select.projection[0]
+    );
+}
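// -----------------------------------------------------------------------------
// Not part of the patch above: a minimal sketch of how the new `#` handling
// could be exercised at the token level. It assumes the crate's public
// `Tokenizer::new(&dialect, sql)` / `tokenize()` API and the exported
// `PostgreSqlDialect`; names such as `data` and the sample inputs are
// illustrative only.
// -----------------------------------------------------------------------------
use sqlparser::dialect::PostgreSqlDialect;
use sqlparser::tokenizer::Tokenizer;

fn main() {
    let dialect = PostgreSqlDialect {};

    // With `#` accepted as an identifier start and identifier part, the whole
    // name is produced as a single word token instead of `Sharp` plus a word.
    let mut tokenizer = Tokenizer::new(&dialect, "SELECT #_of_values");
    println!("{:?}", tokenizer.tokenize().unwrap());

    // A `#` whose following character is not an identifier part still reaches
    // `consume_sharp`, so `#>` / `#>>` keep producing `HashArrow` /
    // `HashLongArrow`, and a lone `#` still produces `Sharp`.
    let mut tokenizer = Tokenizer::new(&dialect, "data #>> '{a,b}'");
    println!("{:?}", tokenizer.tokenize().unwrap());
}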