Add support for aliased expressions (sqlparser-rs#153)
andygrove committed Mar 25, 2020
1 parent 30de48c commit ce0a255
Showing 10 changed files with 68 additions and 20 deletions.
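In short, the parser now recognizes `expr AS alias` in projection lists and wraps the result in a new `ASTNode::SQLAliasedExpr(Box<ASTNode>, String)` variant; the commit also moves the crate to Rust 2018 idioms (`crate::` imports, `&dyn Dialect`, `..=` range patterns). A minimal usage sketch, assuming the public module paths the test files below rely on (`sqlparser::dialect`, `sqlparser::sqlast`, `sqlparser::sqlparser`):

```rust
use sqlparser::dialect::GenericSqlDialect;
use sqlparser::sqlast::ASTNode;
use sqlparser::sqlparser::Parser;

fn main() {
    let dialect = GenericSqlDialect {};
    let sql = String::from("SELECT id AS aliased_id FROM customer");
    // Parser::parse_sql now takes `&dyn Dialect` (see src/sqlparser.rs below).
    let ast = Parser::parse_sql(&dialect, sql).unwrap();
    if let ASTNode::SQLSelect { projection, .. } = ast {
        // The aliased projection item is wrapped in the new SQLAliasedExpr variant.
        match &projection[0] {
            ASTNode::SQLAliasedExpr(expr, alias) => {
                println!("expr: {}, alias: {}", expr.to_string(), alias);
            }
            other => println!("unexpected projection item: {:?}", other),
        }
    }
}
```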
3 changes: 2 additions & 1 deletion Cargo.toml
@@ -1,7 +1,8 @@
[package]
name = "sqlparser"
description = "Extensible SQL Lexer and Parser with support for ANSI SQL:2011"
- version = "0.2.4"
+ version = "0.2.5"
+ edition = "2018"
authors = ["Andy Grove <andygrove73@gmail.com>"]
homepage = "https://github.com/andygrove/sqlparser-rs"
documentation = "https://docs.rs/sqlparser/"
5 changes: 2 additions & 3 deletions src/dialect/ansi_sql.rs
@@ -1,6 +1,5 @@
- use dialect::Dialect;
-
- use dialect::keywords::*;
+ use crate::dialect::keywords::*;
+ use crate::dialect::Dialect;

pub struct AnsiSqlDialect {}

4 changes: 2 additions & 2 deletions src/dialect/generic_sql.rs
@@ -1,6 +1,6 @@
- use dialect::Dialect;
+ use crate::dialect::keywords::*;
+ use crate::dialect::Dialect;

- use dialect::keywords::*;
pub struct GenericSqlDialect {}

impl Dialect for GenericSqlDialect {
2 changes: 1 addition & 1 deletion src/dialect/keywords.rs
@@ -6,7 +6,7 @@ macro_rules! keyword!
}
}

- /// enumerate all the keywords here for all dialects to support in this project
+ // enumerate all the keywords here for all dialects to support in this project
keyword!(
ABS,
ADD,
5 changes: 2 additions & 3 deletions src/dialect/postgresql.rs
@@ -1,6 +1,5 @@
- use dialect::Dialect;
-
- use dialect::keywords::*;
+ use crate::dialect::keywords::*;
+ use crate::dialect::Dialect;

pub struct PostgreSqlDialect {}

5 changes: 5 additions & 0 deletions src/sqlast/mod.rs
@@ -30,6 +30,8 @@ pub use self::sql_operator::SQLOperator;
pub enum ASTNode {
/// Identifier e.g. table name or column name
SQLIdentifier(String),
+ /// Aliased expression
+ SQLAliasedExpr(Box<ASTNode>, String),
/// Wildcard e.g. `*`
SQLWildcard,
/// Multi part identifier e.g. `myschema.dbo.mytable`
@@ -139,6 +141,9 @@ pub enum ASTNode {
impl ToString for ASTNode {
fn to_string(&self) -> String {
match self {
+ ASTNode::SQLAliasedExpr(ast, alias) => {
+ format!("{} AS {}", ast.as_ref().to_string(), alias)
+ }
ASTNode::SQLIdentifier(s) => s.to_string(),
ASTNode::SQLWildcard => "*".to_string(),
ASTNode::SQLCompoundIdentifier(s) => s.join("."),
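A small sketch of how the new variant renders through the `ToString` impl above; the construction is purely illustrative, with values chosen to mirror the test added later in this commit:

```rust
use sqlparser::sqlast::ASTNode;

fn main() {
    // Box the inner expression and attach the alias, per the new enum variant.
    let aliased = ASTNode::SQLAliasedExpr(
        Box::new(ASTNode::SQLIdentifier("id".to_string())),
        "aliased_id".to_string(),
    );
    // The match arm above formats this as "{expr} AS {alias}".
    assert_eq!("id AS aliased_id", aliased.to_string());
}
```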
29 changes: 27 additions & 2 deletions src/sqlparser.rs
@@ -53,7 +53,7 @@ impl Parser {
}

/// Parse a SQL statement and produce an Abstract Syntax Tree (AST)
- pub fn parse_sql(dialect: &Dialect, sql: String) -> Result<ASTNode, ParserError> {
+ pub fn parse_sql(dialect: &dyn Dialect, sql: String) -> Result<ASTNode, ParserError> {
let mut tokenizer = Tokenizer::new(dialect, &sql);
let tokens = tokenizer.tokenize()?;
let mut parser = Parser::new(tokens);
@@ -256,6 +256,11 @@ impl Parser {
debug!("parsing infix");
match self.next_token() {
Some(tok) => match tok {
+ // Token::Keyword(ref k) if k == "AS" => {
+ // aliased expressions and CAST expr AS ident
+ // Ok(Some(ASTNode::SQLAliasedExpr(Box::new(expr), self.parse_identifier()?)))
+ // Ok(None)
+ // }
Token::Keyword(ref k) if k == "IS" => {
if self.parse_keywords(vec!["NULL"]) {
Ok(Some(ASTNode::SQLIsNull(Box::new(expr))))
@@ -342,6 +347,7 @@ impl Parser {
debug!("get_precedence() {:?}", tok);

match tok {
+ //&Token::Keyword(ref k) if k == "AS" => Ok(4),
&Token::Keyword(ref k) if k == "OR" => Ok(5),
&Token::Keyword(ref k) if k == "AND" => Ok(10),
&Token::Keyword(ref k) if k == "NOT" => Ok(15),
@@ -1015,6 +1021,14 @@ impl Parser {
}
}

+ pub fn parse_identifier(&mut self) -> Result<String, ParserError> {
+ let identifier = self.parse_compound_identifier(&Token::Period)?;
+ match identifier {
+ ASTNode::SQLCompoundIdentifier(idents) => Ok(idents.join(".")),
+ other => parser_err!(format!("Expecting identifier, found: {:?}", other)),
+ }
+ }
+
pub fn parse_column_names(&mut self) -> Result<Vec<String>, ParserError> {
let identifier = self.parse_compound_identifier(&Token::Comma)?;
match identifier {
@@ -1300,7 +1314,18 @@ impl Parser {
pub fn parse_expr_list(&mut self) -> Result<Vec<ASTNode>, ParserError> {
let mut expr_list: Vec<ASTNode> = vec![];
loop {
- expr_list.push(self.parse_expr(0)?);
+ let expr = self.parse_expr(0)?;
+ match self.peek_token() {
+ Some(Token::Keyword(k)) if k.as_str() == "AS" => {
+ self.next_token();
+ expr_list.push(ASTNode::SQLAliasedExpr(
+ Box::new(expr),
+ self.parse_identifier()?,
+ ));
+ }
+ _ => expr_list.push(expr),
+ }
+
if let Some(t) = self.peek_token() {
if t == Token::Comma {
self.next_token();
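Because `parse_expr_list` now checks for `AS` after each expression, aliased and unaliased items can be mixed freely in one projection list. A hedged sketch of the resulting shapes, under the same assumed crate paths as the first example:

```rust
use sqlparser::dialect::GenericSqlDialect;
use sqlparser::sqlast::ASTNode;
use sqlparser::sqlparser::Parser;

fn main() {
    let sql = String::from("SELECT first_name AS fname, last_name FROM customer");
    let ast = Parser::parse_sql(&GenericSqlDialect {}, sql).unwrap();
    if let ASTNode::SQLSelect { projection, .. } = ast {
        for item in &projection {
            match item {
                // Items followed by AS take the keyword arm of the new match.
                ASTNode::SQLAliasedExpr(expr, alias) => {
                    println!("aliased: {} AS {}", expr.to_string(), alias);
                }
                // Everything else falls through to the plain-expression arm.
                other => println!("plain: {}", other.to_string()),
            }
        }
    }
}
```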
9 changes: 4 additions & 5 deletions src/sqltokenizer.rs
@@ -151,15 +151,15 @@ pub struct TokenizerError(String);

/// SQL Tokenizer
pub struct Tokenizer<'a> {
- dialect: &'a Dialect,
+ dialect: &'a dyn Dialect,
pub query: String,
pub line: u64,
pub col: u64,
}

impl<'a> Tokenizer<'a> {
/// Create a new SQL tokenizer for the specified SQL statement
- pub fn new(dialect: &'a Dialect, query: &str) -> Self {
+ pub fn new(dialect: &'a dyn Dialect, query: &str) -> Self {
Self {
dialect,
query: query.to_string(),
@@ -278,11 +278,11 @@ impl<'a> Tokenizer<'a> {
Ok(Some(Token::DoubleQuotedString(s)))
}
// numbers
- '0'...'9' => {
+ '0'..='9' => {
let mut s = String::new();
while let Some(&ch) = chars.peek() {
match ch {
- '0'...'9' | '.' => {
+ '0'..='9' | '.' => {
chars.next(); // consume
s.push(ch);
}
@@ -550,5 +550,4 @@ mod tests {
//println!("------------------------------");
assert_eq!(expected, actual);
}
-
}
20 changes: 19 additions & 1 deletion tests/sqlparser_generic.rs
@@ -508,6 +508,24 @@ fn parse_select_with_semi_colon() {
}
}

+ #[test]
+ fn parse_select_with_alias() {
+ let sql = String::from("SELECT id AS aliased_id FROM customer");
+ let ast = parse_sql(&sql);
+ match ast {
+ ASTNode::SQLSelect { projection, .. } => {
+ assert_eq!(1, projection.len());
+ match &projection[0] {
+ ASTNode::SQLAliasedExpr(_, alias) => {
+ assert_eq!("aliased_id", alias.as_str());
+ }
+ _ => assert!(false),
+ }
+ }
+ _ => assert!(false),
+ }
+ }
+
#[test]
fn parse_delete_with_semi_colon() {
let sql: &str = "DELETE FROM 'table';";
@@ -676,7 +694,7 @@ fn parse_sql(sql: &str) -> ASTNode {
generic_ast
}

- fn parse_sql_with(sql: &str, dialect: &Dialect) -> ASTNode {
+ fn parse_sql_with(sql: &str, dialect: &dyn Dialect) -> ASTNode {
let mut tokenizer = Tokenizer::new(dialect, &sql);
let tokens = tokenizer.tokenize().unwrap();
let mut parser = Parser::new(tokens);
6 changes: 4 additions & 2 deletions tests/sqlparser_postgres.rs
@@ -313,7 +313,8 @@ fn parse_alter_table_constraint_foreign_key()

#[test]
fn parse_copy_example() {
- let sql = String::from(r#"COPY public.actor (actor_id, first_name, last_name, last_update, value) FROM stdin;
+ let sql = String::from(
+ r#"COPY public.actor (actor_id, first_name, last_name, last_update, value) FROM stdin;
1 PENELOPE GUINESS 2006-02-15 09:34:33 0.11111
2 NICK WAHLBERG 2006-02-15 09:34:33 0.22222
3 ED CHASE 2006-02-15 09:34:33 0.312323
@@ -332,7 +333,8 @@ Kwara & Kogi
'awe':5 'awe-inspir':4 'barbarella':1 'cat':13 'conquer':16 'dog':18 'feminist':10 'inspir':6 'monasteri':21 'must':15 'stori':7 'streetcar':2
PHP ₱ USD $
\N Some other value
- \\."#);
+ \\."#,
+ );
let ast = parse_sql(&sql);
println!("{:#?}", ast);
//assert_eq!(sql, ast.to_string());