diff --git a/docs/sql-programming-guide.md b/docs/sql-programming-guide.md
index 89fe873851d8c..b9d999a525b1b 100644
--- a/docs/sql-programming-guide.md
+++ b/docs/sql-programming-guide.md
@@ -122,13 +122,6 @@ Spark build. If these dependencies are not a problem for your application then u
 is recommended for the 1.3 release of Spark. Future releases will focus on bringing `SQLContext` up
 to feature parity with a `HiveContext`.
 
-The specific variant of SQL that is used to parse queries can also be selected using the
-`spark.sql.dialect` option. This parameter can be changed using either the `setConf` method on
-a `SQLContext` or by using a `SET key=value` command in SQL. For a `SQLContext`, the only dialect
-available is "sql" which uses a simple SQL parser provided by Spark SQL. In a `HiveContext`, the
-default is "hiveql", though "sql" is also available. Since the HiveQL parser is much more complete,
-this is recommended for most use cases.
-
 ## Creating DataFrames
 
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
index 54dbd6bda555d..a2f592b130ed8 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/SQLContext.scala
@@ -79,7 +79,7 @@ class SQLContext private[sql](
   def this(sparkContext: JavaSparkContext) = this(sparkContext.sc)
 
   // If spark.sql.allowMultipleContexts is true, we will throw an exception if a user
-  // wants to create a new root SQLContext (a SLQContext that is not created by newSession).
+  // wants to create a new root SQLContext (a SQLContext that is not created by newSession).
   private val allowMultipleContexts =
     sparkContext.conf.getBoolean(
       SQLConf.ALLOW_MULTIPLE_CONTEXTS.key,
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
index 10024874472f2..4c64ef969ff93 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveQuerySuite.scala
@@ -270,12 +270,6 @@ class HiveQuerySuite extends HiveComparisonTest with BeforeAndAfter {
     "SELECT 11 % 10, IF((101.1 % 100.0) BETWEEN 1.01 AND 1.11, \"true\", \"false\"), " +
     "(101 / 2) % 10 FROM src LIMIT 1")
 
-  test("Query expressed in SQL") {
-    setConf("spark.sql.dialect", "sql")
-    assert(sql("SELECT 1").collect() === Array(Row(1)))
-    setConf("spark.sql.dialect", "hiveql")
-  }
-
   test("Query expressed in HiveQL") {
     sql("FROM src SELECT key").collect()
   }
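
For reference, a minimal sketch of the dialect-switching behavior this patch removes, reconstructed only from the deleted documentation paragraph and the deleted HiveQuerySuite test. The local SparkContext setup is an assumption added for illustration, and the snippet only makes sense against pre-patch builds where `spark.sql.dialect` still exists:

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.Row
    import org.apache.spark.sql.hive.HiveContext

    // Hypothetical local setup, for illustration only.
    val sc = new SparkContext(new SparkConf().setAppName("dialect-demo").setMaster("local[*]"))
    val hiveContext = new HiveContext(sc)

    // Pre-patch, a HiveContext defaulted to the "hiveql" dialect but could be
    // switched to the simple SQL parser provided by Spark SQL via setConf
    // (this is the round trip the removed test exercised)...
    hiveContext.setConf("spark.sql.dialect", "sql")
    assert(hiveContext.sql("SELECT 1").collect() === Array(Row(1)))

    // ...or equivalently with a SET key=value command issued in SQL,
    // here restoring the HiveContext default:
    hiveContext.sql("SET spark.sql.dialect=hiveql")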