From 23d82c5278e209ef0051174834b32cd6ca8c3bb7 Mon Sep 17 00:00:00 2001
From: Andrew Or
Date: Thu, 5 May 2016 17:12:56 -0700
Subject: [PATCH] Expose it

---
 .../main/scala/org/apache/spark/sql/catalog/Catalog.scala  | 8 ++++++++
 .../scala/org/apache/spark/sql/internal/CatalogImpl.scala  | 7 +++++++
 2 files changed, 15 insertions(+)

diff --git a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
index 7a815c1f99336..6aa89f81a577c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/catalog/Catalog.scala
@@ -27,6 +27,14 @@ import org.apache.spark.sql.types.StructType
  */
 abstract class Catalog {
 
+  /**
+   * Returns an identifier for the underlying catalog implementation; currently this is
+   * either 'hive' or 'in-memory'.
+   *
+   * @since 2.0.0
+   */
+  def implementation: String
+
   /**
    * Returns the current default database in this session.
    *
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
index 976c9c53de139..40122ef66e535 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/internal/CatalogImpl.scala
@@ -21,6 +21,7 @@ import scala.collection.JavaConverters._
 import scala.reflect.runtime.universe.TypeTag
 
 import org.apache.spark.annotation.Experimental
+import org.apache.spark.internal.config.CATALOG_IMPLEMENTATION
 import org.apache.spark.sql.{AnalysisException, DataFrame, Dataset, SparkSession}
 import org.apache.spark.sql.catalog.{Catalog, Column, Database, Function, Table}
 import org.apache.spark.sql.catalyst.{DefinedByConstructorParams, TableIdentifier}
@@ -58,6 +59,12 @@ class CatalogImpl(sparkSession: SparkSession) extends Catalog {
     new Dataset[T](sparkSession, queryExecution, enc)
   }
 
+  /**
+   * Returns an identifier for the underlying catalog implementation; currently this is
+   * either 'hive' or 'in-memory'.
+   */
+  override val implementation: String = sparkSession.sparkContext.conf.get(CATALOG_IMPLEMENTATION)
+
   /**
    * Returns the current default database in this session.
    */
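
Usage sketch (not part of the patch): a minimal, self-contained example of how
the new accessor might be read from user code. The object name and the builder
settings below are illustrative assumptions; with a plain local session (no
Hive support), "spark.sql.catalogImplementation" defaults to "in-memory".

    import org.apache.spark.sql.SparkSession

    object CatalogImplementationDemo {
      def main(args: Array[String]): Unit = {
        // Build a plain local session; without .enableHiveSupport() the
        // session falls back to the in-memory catalog implementation.
        val spark = SparkSession.builder()
          .appName("CatalogImplementationDemo")  // hypothetical app name
          .master("local[*]")
          .getOrCreate()

        // The accessor added by this patch: should print "in-memory" here,
        // and "hive" for a Hive-enabled session.
        println(spark.catalog.implementation)

        spark.stop()
      }
    }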