Make clippy happy (sqlparser-rs#330)
Signed-off-by: koushiro <koushiro.cqx@gmail.com>
koushiro committed Aug 19, 2021
1 parent e5991f3 commit 67e17b2
Showing 14 changed files with 56 additions and 43 deletions.
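Most of the changes below remove borrows that clippy flags as redundant: call sites such as `Tokenizer::new(dialect, &sql)` pass `&sql` where `sql` is already a `&str`, so the extra `&` only creates a `&&str` that the compiler immediately auto-derefs. A minimal sketch of the pattern (likely `clippy::needless_borrow`; the helper function here is hypothetical, standing in for `Tokenizer::new` / `Parser::parse_sql`):

```rust
#![warn(clippy::all)]

// Hypothetical stand-in for a function that takes `&str`.
fn token_count(sql: &str) -> usize {
    sql.split_whitespace().count()
}

fn main() {
    let sql: &str = "SELECT 1";
    // Lints: `sql` is already a `&str`, so `&sql` is a `&&str`
    // that deref coercion turns straight back into `&str`.
    let _before = token_count(&sql);
    // The fix applied throughout this commit: pass the reference as-is.
    let _after = token_count(sql);
}
```

The same reasoning covers `display_comma_separated(&columns)` in `src/ast/mod.rs`: the function already takes its argument by reference, so borrowing the borrowed field gains nothing.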
CHANGELOG.md (2 changes: 1 addition & 1 deletion)
@@ -9,7 +9,7 @@ Given that the parser produces a typed AST, any changes to the AST will technically
Check https://github.com/ballista-compute/sqlparser-rs/commits/main for undocumented changes.


-## [0.8.0] 2020-03-21
+## [0.9.0] 2020-03-21

### Added
* Add support for `TRY_CAST` syntax (#299) - Thanks @seddonm1!
examples/cli.rs (3 changes: 2 additions & 1 deletion)
@@ -12,13 +12,14 @@

#![warn(clippy::all)]

-///! A small command-line app to run the parser.
+/// A small command-line app to run the parser.
/// Run with `cargo run --example cli`
use std::fs;

use simple_logger::SimpleLogger;
use sqlparser::dialect::*;
use sqlparser::parser::Parser;

fn main() {
SimpleLogger::new().init().unwrap();

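The `///!` → `///` fix above is about Rust doc-comment syntax: `//!` is an inner doc comment that documents the enclosing module or crate, while `///` is an outer doc comment that documents the item following it, so `///!` parses as an outer doc comment whose text merely begins with `!`. A small illustration of the two intended forms (the function name is hypothetical):

```rust
//! Inner doc comment: documents the enclosing module or crate.

/// Outer doc comment: documents the item that follows it.
pub fn parse_something() {}

// `///! text` is an outer doc comment with the body "! text",
// which is almost always a typo for one of the two forms above.
```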
src/ast/data_type.rs (6 changes: 4 additions & 2 deletions)
@@ -10,10 +10,12 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-use super::ObjectName;
+use std::fmt;
+
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
-use std::fmt;
+
+use crate::ast::ObjectName;

/// SQL data types
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
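The import reshuffle in `src/ast/data_type.rs` repeats across the commit: standard-library imports first, then external crates (keeping any `#[cfg]` feature gates), then this crate's own modules via absolute `crate::` paths instead of `super::`. As a template (a grouping convention the commit appears to adopt; clippy itself does not enforce import order):

```rust
// 1. Standard library
use std::fmt;

// 2. External crates, feature-gated where needed
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

// 3. This crate, addressed via `crate::` rather than relative `super::`
use crate::ast::ObjectName;
```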
src/ast/ddl.rs (10 changes: 6 additions & 4 deletions)
@@ -12,12 +12,14 @@

//! AST types specific to CREATE/ALTER variants of [Statement]
//! (commonly referred to as Data Definition Language, or DDL)
-use super::{display_comma_separated, DataType, Expr, Ident, ObjectName};
-use crate::ast::display_separated;
-use crate::tokenizer::Token;
+
+use std::fmt;
+
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
-use std::fmt;
+
+use crate::ast::{display_comma_separated, display_separated, DataType, Expr, Ident, ObjectName};
+use crate::tokenizer::Token;

/// An `ALTER TABLE` (`Statement::AlterTable`) operation
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
src/ast/mod.rs (13 changes: 7 additions & 6 deletions)
@@ -18,9 +18,10 @@ mod operator;
mod query;
mod value;

+use std::fmt;
+
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
-use std::fmt;

pub use self::data_type::DataType;
pub use self::ddl::{
@@ -993,16 +994,16 @@ impl fmt::Display for Statement {
}
match hive_distribution {
HiveDistributionStyle::PARTITIONED { columns } => {
write!(f, " PARTITIONED BY ({})", display_comma_separated(&columns))?;
write!(f, " PARTITIONED BY ({})", display_comma_separated(columns))?;
}
HiveDistributionStyle::CLUSTERED {
columns,
sorted_by,
num_buckets,
} => {
write!(f, " CLUSTERED BY ({})", display_comma_separated(&columns))?;
write!(f, " CLUSTERED BY ({})", display_comma_separated(columns))?;
if !sorted_by.is_empty() {
write!(f, " SORTED BY ({})", display_comma_separated(&sorted_by))?;
write!(f, " SORTED BY ({})", display_comma_separated(sorted_by))?;
}
if *num_buckets > 0 {
write!(f, " INTO {} BUCKETS", num_buckets)?;
@@ -1016,8 +1017,8 @@
write!(
f,
" SKEWED BY ({})) ON ({})",
-display_comma_separated(&columns),
-display_comma_separated(&on)
+display_comma_separated(columns),
+display_comma_separated(on)
)?;
if *stored_as_directories {
write!(f, " STORED AS DIRECTORIES")?;
src/ast/operator.rs (3 changes: 2 additions & 1 deletion)
@@ -10,9 +10,10 @@
// See the License for the specific language governing permissions and
// limitations under the License.

+use std::fmt;
+
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
-use std::fmt;

/// Unary operators
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
src/ast/query.rs (3 changes: 2 additions & 1 deletion)
@@ -10,10 +10,11 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-use super::*;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};

+use crate::ast::*;
+
/// The most complete variant of a `SELECT` query expression, optionally
/// including `WITH`, `UNION` / other set operations, and `ORDER BY`.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
src/ast/value.rs (3 changes: 2 additions & 1 deletion)
@@ -10,11 +10,12 @@
// See the License for the specific language governing permissions and
// limitations under the License.

+use std::fmt;
+
#[cfg(feature = "bigdecimal")]
use bigdecimal::BigDecimal;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
-use std::fmt;

/// Primitive SQL values such as number and string
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
src/dialect/keywords.rs (2 changes: 1 addition & 1 deletion)
@@ -10,7 +10,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

-///! This module defines
+/// This module defines
/// 1) a list of constants for every keyword that
/// can appear in [Word::keyword]:
/// pub const KEYWORD = "KEYWORD"
src/parser.rs (19 changes: 10 additions & 9 deletions)
@@ -12,15 +12,16 @@

//! SQL Parser

-use log::debug;
-
-use super::ast::*;
-use super::dialect::keywords::Keyword;
-use super::dialect::*;
-use super::tokenizer::*;
use std::error::Error;
use std::fmt;
+
+use log::debug;
+
+use crate::ast::*;
+use crate::dialect::keywords::Keyword;
+use crate::dialect::*;
+use crate::tokenizer::*;

#[derive(Debug, Clone, PartialEq)]
pub enum ParserError {
TokenizerError(String),
@@ -56,7 +57,6 @@ pub enum IsLateral {
NotLateral,
}

-use crate::ast::Statement::CreateVirtualTable;
use IsLateral::*;

impl From<TokenizerError> for ParserError {
@@ -102,7 +102,7 @@ impl<'a> Parser<'a> {

/// Parse a SQL statement and produce an Abstract Syntax Tree (AST)
pub fn parse_sql(dialect: &dyn Dialect, sql: &str) -> Result<Vec<Statement>, ParserError> {
-let mut tokenizer = Tokenizer::new(dialect, &sql);
+let mut tokenizer = Tokenizer::new(dialect, sql);
let tokens = tokenizer.tokenize()?;
let mut parser = Parser::new(tokens, dialect);
let mut stmts = Vec::new();
@@ -297,6 +297,7 @@ impl<'a> Parser<'a> {
}
Ok(expr)
}
+
pub fn parse_assert(&mut self) -> Result<Statement, ParserError> {
let condition = self.parse_expr()?;
let message = if self.parse_keyword(Keyword::AS) {
@@ -1248,7 +1249,7 @@
// definitions in a traditional CREATE TABLE statement", but
// we don't implement that.
let module_args = self.parse_parenthesized_column_list(Optional)?;
-Ok(CreateVirtualTable {
+Ok(Statement::CreateVirtualTable {
name: table_name,
if_not_exists,
module_name,
src/test_utils.rs (14 changes: 7 additions & 7 deletions)
@@ -18,10 +18,10 @@
// via `tests/test_utils/mod.rs`.
use std::fmt::Debug;

-use super::ast::*;
-use super::dialect::*;
-use super::parser::{Parser, ParserError};
-use super::tokenizer::Tokenizer;
+use crate::ast::*;
+use crate::dialect::*;
+use crate::parser::{Parser, ParserError};
+use crate::tokenizer::Tokenizer;

/// Tests use the methods on this struct to invoke the parser on one or
/// multiple dialects.
@@ -64,7 +64,7 @@ impl TestedDialects {
}

pub fn parse_sql_statements(&self, sql: &str) -> Result<Vec<Statement>, ParserError> {
-self.one_of_identical_results(|dialect| Parser::parse_sql(dialect, &sql))
+self.one_of_identical_results(|dialect| Parser::parse_sql(dialect, sql))
// To fail the `ensure_multiple_dialects_are_tested` test:
// Parser::parse_sql(&**self.dialects.first().unwrap(), sql)
}
@@ -75,11 +75,11 @@ impl TestedDialects {
/// tree as parsing `canonical`, and that serializing it back to string
/// results in the `canonical` representation.
pub fn one_statement_parses_to(&self, sql: &str, canonical: &str) -> Statement {
-let mut statements = self.parse_sql_statements(&sql).unwrap();
+let mut statements = self.parse_sql_statements(sql).unwrap();
assert_eq!(statements.len(), 1);

if !canonical.is_empty() && sql != canonical {
-assert_eq!(self.parse_sql_statements(&canonical).unwrap(), statements);
+assert_eq!(self.parse_sql_statements(canonical).unwrap(), statements);
}

let only_statement = statements.pop().unwrap();
src/tokenizer.rs (11 changes: 7 additions & 4 deletions)
@@ -16,15 +16,16 @@
//!
//! The tokens then form the input for the parser, which outputs an Abstract Syntax Tree (AST).

+use std::fmt;
use std::iter::Peekable;
use std::str::Chars;

-use super::dialect::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX};
-use super::dialect::Dialect;
-use super::dialect::SnowflakeDialect;
#[cfg(feature = "serde")]
use serde::{Deserialize, Serialize};
-use std::fmt;
+
+use crate::dialect::keywords::{Keyword, ALL_KEYWORDS, ALL_KEYWORDS_INDEX};
+use crate::dialect::Dialect;
+use crate::dialect::SnowflakeDialect;

/// SQL Token enumeration
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
@@ -184,6 +185,7 @@ impl Token {
pub fn make_keyword(keyword: &str) -> Self {
Token::make_word(keyword, None)
}
+
pub fn make_word(word: &str, quote_style: Option<char>) -> Self {
let word_uppercase = word.to_uppercase();
Token::Word(Word {
@@ -226,6 +228,7 @@ impl fmt::Display for Word {
}
}
}
+
impl Word {
fn matching_end_quote(ch: char) -> char {
match ch {
tests/sqlparser_common.rs (6 changes: 3 additions & 3 deletions)
@@ -102,7 +102,7 @@ fn parse_insert_sqlite() {
let dialect = SQLiteDialect {};

let check = |sql: &str, expected_action: Option<SqliteOnConflict>| match Parser::parse_sql(
-&dialect, &sql,
+&dialect, sql,
)
.unwrap()
.pop()
@@ -340,7 +340,7 @@ fn parse_column_aliases() {
}

// alias without AS is parsed correctly:
one_statement_parses_to("SELECT a.col + 1 newname FROM foo AS a", &sql);
one_statement_parses_to("SELECT a.col + 1 newname FROM foo AS a", sql);
}

#[test]
@@ -2685,7 +2685,7 @@ fn parse_multiple_statements() {
let res = parse_sql_statements(&(sql1.to_owned() + ";" + sql2_kw + sql2_rest));
assert_eq!(
vec![
-one_statement_parses_to(&sql1, ""),
+one_statement_parses_to(sql1, ""),
one_statement_parses_to(&(sql2_kw.to_owned() + sql2_rest), ""),
],
res.unwrap()
tests/sqlparser_snowflake.rs (4 changes: 2 additions & 2 deletions)
@@ -38,7 +38,7 @@ fn test_snowflake_create_table() {
fn test_snowflake_single_line_tokenize() {
let sql = "CREATE TABLE# this is a comment \ntable_1";
let dialect = SnowflakeDialect {};
-let mut tokenizer = Tokenizer::new(&dialect, &sql);
+let mut tokenizer = Tokenizer::new(&dialect, sql);
let tokens = tokenizer.tokenize().unwrap();

let expected = vec![
@@ -55,7 +55,7 @@ fn test_snowflake_single_line_tokenize() {
assert_eq!(expected, tokens);

let sql = "CREATE TABLE// this is a comment \ntable_1";
-let mut tokenizer = Tokenizer::new(&dialect, &sql);
+let mut tokenizer = Tokenizer::new(&dialect, sql);
let tokens = tokenizer.tokenize().unwrap();

let expected = vec![
