From e27c66cab43d0197ad8ba95d01120af0ae3cc319 Mon Sep 17 00:00:00 2001 From: Vinod K C Date: Fri, 26 Jun 2015 11:40:09 +0530 Subject: [PATCH] Avoid reinitialization of lexical in parse method --- .../apache/spark/sql/catalyst/AbstractSparkSQLParser.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala index 3fc5891fe3a67..9464b831b26fd 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/AbstractSparkSQLParser.scala @@ -30,12 +30,14 @@ private[sql] abstract class AbstractSparkSQLParser def parse(input: String): LogicalPlan = { // Initialize the Keywords. - lexical.initialize(reservedWords) + initLexical phrase(start)(new lexical.Scanner(input)) match { case Success(plan, _) => plan case failureOrError => sys.error(failureOrError.toString) } } + /* One-time initialization of lexical. This avoids reinitialization of lexical in the parse method. */ + protected lazy val initLexical: Unit = lexical.initialize(reservedWords) protected case class Keyword(str: String) { def normalize: String = lexical.normalizeKeyword(str) @@ -82,10 +84,8 @@ class SqlLexical extends StdLexical { /* This is a work around to support the lazy setting */ def initialize(keywords: Seq[String]): Unit = { - synchronized { reserved.clear() reserved ++= keywords - } } /* Normal the keyword string */