From 42844586a5e53559187aa98797bd82b5c1f9601d Mon Sep 17 00:00:00 2001
From: gengjiaan
Date: Tue, 11 May 2021 11:18:36 +0800
Subject: [PATCH] Update code

---
 .../org/apache/spark/sql/errors/QueryCompilationErrors.scala      | 2 +-
 .../scala/org/apache/spark/sql/streaming/DataStreamReader.scala   | 2 +-
 .../scala/org/apache/spark/sql/streaming/DataStreamWriter.scala   | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
index ec15ae295d0b8..9a87917d0e468 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/errors/QueryCompilationErrors.scala
@@ -1392,7 +1392,7 @@ private[spark] object QueryCompilationErrors {
     new AnalysisException("function is only supported in v1 catalog")
   }
 
-  def operateHiveDataSourceDirectlyError(operation: String): Throwable = {
+  def cannotOperateOnHiveDataSourceFilesError(operation: String): Throwable = {
     new AnalysisException("Hive data source can only be used with tables, you can not " +
       s"$operation files of Hive data source directly.")
   }
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala b/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala
index d86655edf3dde..1798f6e2c88bd 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamReader.scala
@@ -195,7 +195,7 @@ final class DataStreamReader private[sql](sparkSession: SparkSession) extends Lo
 
   private def loadInternal(path: Option[String]): DataFrame = {
     if (source.toLowerCase(Locale.ROOT) == DDLUtils.HIVE_PROVIDER) {
-      throw QueryCompilationErrors.operateHiveDataSourceDirectlyError("read")
+      throw QueryCompilationErrors.cannotOperateOnHiveDataSourceFilesError("read")
     }
 
     val optionsWithPath = if (path.isEmpty) {
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala b/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala
index 51108ef0a6e7d..b25aedbeca79e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/streaming/DataStreamWriter.scala
@@ -384,7 +384,7 @@ final class DataStreamWriter[T] private[sql](ds: Dataset[T]) {
 
   private def startInternal(path: Option[String]): StreamingQuery = {
     if (source.toLowerCase(Locale.ROOT) == DDLUtils.HIVE_PROVIDER) {
-      throw QueryCompilationErrors.operateHiveDataSourceDirectlyError("write")
+      throw QueryCompilationErrors.cannotOperateOnHiveDataSourceFilesError("write")
    }
 
     if (source == SOURCE_NAME_MEMORY) {