From a9992971061268d559e5489d171c6fbeadad75cb Mon Sep 17 00:00:00 2001
From: Joey Hain
Date: Mon, 17 Nov 2025 19:34:49 -0800
Subject: [PATCH] fix tokenization of {{param}}._

---
 src/ast/mod.rs   | 9 ++-------
 src/tokenizer.rs | 16 +++++++++++++++-
 2 files changed, 17 insertions(+), 8 deletions(-)

diff --git a/src/ast/mod.rs b/src/ast/mod.rs
index acf409173..112652c63 100644
--- a/src/ast/mod.rs
+++ b/src/ast/mod.rs
@@ -2732,10 +2732,11 @@ impl fmt::Display for Declare {
 }
 
 /// Sql options of a `CREATE TABLE` statement.
-#[derive(Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
+#[derive(Default, Debug, Clone, PartialEq, PartialOrd, Eq, Ord, Hash)]
 #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
 #[cfg_attr(feature = "visitor", derive(Visit, VisitMut))]
 pub enum CreateTableOptions {
+    #[default]
     None,
     /// Options specified using the `WITH` keyword.
     /// e.g. `WITH (description = "123")`
@@ -2764,12 +2765,6 @@ pub enum CreateTableOptions {
     TableProperties(Vec<SqlOption>),
 }
 
-impl Default for CreateTableOptions {
-    fn default() -> Self {
-        Self::None
-    }
-}
-
 impl fmt::Display for CreateTableOptions {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         match self {
diff --git a/src/tokenizer.rs b/src/tokenizer.rs
index 3740405c0..a26fd4013 100644
--- a/src/tokenizer.rs
+++ b/src/tokenizer.rs
@@ -1236,7 +1236,7 @@ impl<'a> Tokenizer<'a> {
                 // if the prev token is not a word, then this is not a valid sql
                 // word or number.
                 if ch == '.' && chars.peekable.clone().nth(1) == Some('_') {
-                    if let Some(Token::Word(_)) = prev_token {
+                    if let Some(Token::Word(_) | Token::Mustache(_)) = prev_token {
                         chars.next();
                         return Ok(Some(Token::Period));
                     }
@@ -4138,4 +4138,18 @@ mod tests {
             panic!("Tokenizer should have failed on {sql}, but it succeeded with {tokens:?}");
         }
     }
+
+    #[test]
+    fn tokenize_mustache_dot_ident() {
+        all_dialects_where(|d| d.is_identifier_start('_')).tokenizes_to(
+            "SELECT {{schema}}._column",
+            vec![
+                Token::make_keyword("SELECT"),
+                Token::Whitespace(Whitespace::Space),
+                Token::Mustache("schema".to_owned()),
+                Token::Period,
+                Token::make_word("_column", None),
+            ],
+        );
+    }
 }