Skip to content

Commit

Permalink
[HOTFIX] Changes to align printing of information in explain command …
Browse files Browse the repository at this point in the history
…based on enable.query.statistics flag
  • Loading branch information
manishgupta88 committed Oct 3, 2018
1 parent d8a51c9 commit 2b03fe9
Show file tree
Hide file tree
Showing 12 changed files with 61 additions and 5 deletions.
Expand Up @@ -26,7 +26,9 @@
import java.util.concurrent.ConcurrentHashMap;

import org.apache.carbondata.common.annotations.InterfaceAudience;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.datamap.dev.expr.DataMapWrapperSimpleInfo;
import org.apache.carbondata.core.util.CarbonProperties;

/**
* An information collector used for EXPLAIN command, to print out
Expand All @@ -52,7 +54,12 @@ public static boolean enabled() {
}

public static void setup() {
INSTANCE = new ExplainCollector();
boolean isQueryStatisticsEnabled = Boolean.parseBoolean(CarbonProperties.getInstance()
.getProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT));
if (isQueryStatisticsEnabled) {
INSTANCE = new ExplainCollector();
}
}

public static void remove() {
Expand Down
2 changes: 1 addition & 1 deletion docs/datamap/datamap-management.md
Expand Up @@ -122,7 +122,7 @@ There is a DataMapCatalog interface to retrieve schema of all datamap, it can be

How can a user know whether a datamap was used in a query?

User can use EXPLAIN command to know, it will print out something like
Users can set `enable.query.statistics = true` and then run the EXPLAIN command; it will print output like the following:

```text
== CarbonData Profiler ==
Expand Down
Expand Up @@ -31,14 +31,18 @@ class BloomFilterDataMapTestCase extends QueryTest with BeforeAndAfterEach with
"yyyy-MM-dd")
CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
"yyyy-MM-dd HH:mm:ss")

CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
}

override protected def afterAll(): Unit = {
  // Restore every property this suite overrode in beforeAll back to its
  // Carbon default, so configuration does not leak into later test suites.
  val carbonProperties = CarbonProperties.getInstance()
  carbonProperties.addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT,
    CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT)
  carbonProperties.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
    CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
  carbonProperties.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
    CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT)
}

private def createAllDataTypeTable(tableName: String): Unit = {
Expand Down
Expand Up @@ -41,6 +41,8 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
val file2 = resourcesPath + "/datamap_input.csv"

override protected def beforeAll(): Unit = {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
new File(CarbonProperties.getInstance().getSystemFolderLocation).delete()
LuceneFineGrainDataMapSuite.createFile(file2)
sql("create database if not exists lucene")
Expand Down Expand Up @@ -921,6 +923,9 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.USE_DISTRIBUTED_DATAMAP,
originDistributedDatamapStatus)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT)
}
}

Expand Down
Expand Up @@ -42,6 +42,8 @@ class LuceneFineGrainDataMapWithSearchModeSuite extends QueryTest with BeforeAnd
sqlContext.sparkSession.asInstanceOf[CarbonSession].startSearchMode()
CarbonProperties
.getInstance().addProperty(CarbonCommonConstants.CARBON_SEARCH_QUERY_TIMEOUT, "100s")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
LuceneFineGrainDataMapSuite.createFile(file2, n)
sql("create database if not exists lucene")
sql("use lucene")
Expand Down Expand Up @@ -296,6 +298,9 @@ class LuceneFineGrainDataMapWithSearchModeSuite extends QueryTest with BeforeAnd
}

override protected def afterAll(): Unit = {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT)
LuceneFineGrainDataMapSuite.deleteFile(file2)
sql("DROP TABLE IF EXISTS datamap_test")
sql("DROP TABLE IF EXISTS datamap_test5")
Expand Down
Expand Up @@ -24,14 +24,18 @@ import org.apache.spark.sql.hive.CarbonRelation
import org.apache.spark.sql.{CarbonDatasourceHadoopRelation, Row}
import org.scalatest.BeforeAndAfterAll

import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.metadata.schema.datamap.DataMapClassProvider.TIMESERIES
import org.apache.carbondata.core.util.CarbonProperties
import org.apache.carbondata.spark.util.SparkQueryTest

class TestPreAggregateTableSelection extends SparkQueryTest with BeforeAndAfterAll {

val timeSeries = TIMESERIES.toString

override def beforeAll: Unit = {
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
sql("drop table if exists mainTable")
sql("drop table if exists mainTableavg")
sql("drop table if exists agg0")
Expand Down Expand Up @@ -454,6 +458,9 @@ class TestPreAggregateTableSelection extends SparkQueryTest with BeforeAndAfterA
sql("DROP TABLE IF EXISTS mainTableavg")
sql("DROP TABLE IF EXISTS filtertable")
sql("DROP TABLE IF EXISTS grouptable")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT)
}

}
Expand Up @@ -20,6 +20,9 @@ package org.apache.carbondata.spark.testsuite.createTable
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll

import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties

/**
* test functionality for alter table with datamap
*/
Expand All @@ -31,6 +34,8 @@ class TestRenameTableWithDataMap extends QueryTest with BeforeAndAfterAll {
sql("DROP TABLE IF EXISTS carbon_table")
sql("DROP TABLE IF EXISTS carbon_tb")
sql("DROP TABLE IF EXISTS fact_table1")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
}

test("Creating a bloomfilter datamap,then table rename") {
Expand Down Expand Up @@ -188,5 +193,8 @@ class TestRenameTableWithDataMap extends QueryTest with BeforeAndAfterAll {
sql("DROP TABLE IF EXISTS carbon_table")
sql("DROP TABLE IF EXISTS carbon_tb")
sql("DROP TABLE IF EXISTS fact_table1")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT)
}
}
Expand Up @@ -361,6 +361,8 @@ class CGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
//n should be about 5000000 of reset if size is default 1024
val n = 150000
CompactionSupportGlobalSortBigFileTest.createFile(file2, n * 4, n)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
sql("DROP TABLE IF EXISTS normal_test")
sql(
"""
Expand Down Expand Up @@ -558,6 +560,9 @@ class CGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
sql("DROP TABLE IF EXISTS datamap_store_test")
sql("DROP TABLE IF EXISTS datamap_store_test1")
sql("DROP TABLE IF EXISTS datamap_store_test2")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT)
}

}
Expand Up @@ -45,7 +45,7 @@ import org.apache.carbondata.core.scan.expression.Expression
import org.apache.carbondata.core.scan.expression.conditional.EqualToExpression
import org.apache.carbondata.core.scan.filter.intf.ExpressionType
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf
import org.apache.carbondata.core.util.ByteUtil
import org.apache.carbondata.core.util.{ByteUtil, CarbonProperties}
import org.apache.carbondata.core.util.path.CarbonTablePath
import org.apache.carbondata.events.Event
import org.apache.carbondata.spark.testsuite.datacompaction.CompactionSupportGlobalSortBigFileTest
Expand Down Expand Up @@ -437,6 +437,8 @@ class FGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
//n should be about 5000000 of reset if size is default 1024
val n = 150000
CompactionSupportGlobalSortBigFileTest.createFile(file2, n * 4, n)
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
sql("DROP TABLE IF EXISTS normal_test")
sql(
"""
Expand Down Expand Up @@ -569,5 +571,8 @@ class FGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
sql("DROP TABLE IF EXISTS normal_test")
sql("DROP TABLE IF EXISTS datamap_test")
sql("DROP TABLE IF EXISTS datamap_testFG")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT)
}
}
Expand Up @@ -115,6 +115,7 @@ class CarbonSession(@transient val sc: SparkContext,
*/
@InterfaceAudience.Developer(Array("DataMap"))
def isDataMapHit(sqlStatement: String, dataMapName: String): Boolean = {
  // NOTE: EXPLAIN only emits the datamap profiler section when
  // enable.query.statistics = true, so this returns false otherwise.
  val explainRows = sql(s"EXPLAIN $sqlStatement").collect()
  val profilerOutput = explainRows(0).getString(0)
  profilerOutput.contains(dataMapName)
}
Expand Down
Expand Up @@ -52,7 +52,11 @@ case class CarbonExplainCommand(
try {
ExplainCollector.setup()
queryExecution.toRdd.partitions
Seq(Row("== CarbonData Profiler ==\n" + ExplainCollector.getFormatedOutput))
if (ExplainCollector.enabled()) {
Seq(Row("== CarbonData Profiler ==\n" + ExplainCollector.getFormatedOutput))
} else {
Seq.empty
}
} finally {
ExplainCollector.remove()
}
Expand Down
Expand Up @@ -41,6 +41,8 @@ class BloomCoarseGrainDataMapSuite extends QueryTest with BeforeAndAfterAll with

override protected def beforeAll(): Unit = {
new File(CarbonProperties.getInstance().getSystemFolderLocation).delete()
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
createFile(bigFile, line = 50000)
createFile(smallFile)
sql(s"DROP TABLE IF EXISTS $normalTable")
Expand Down Expand Up @@ -992,6 +994,9 @@ class BloomCoarseGrainDataMapSuite extends QueryTest with BeforeAndAfterAll with
deleteFile(smallFile)
sql(s"DROP TABLE IF EXISTS $normalTable")
sql(s"DROP TABLE IF EXISTS $bloomDMSampleTable")
CarbonProperties.getInstance()
.addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS,
CarbonCommonConstants.ENABLE_QUERY_STATISTICS_DEFAULT)
}

private def createFile(fileName: String, line: Int = 10000, start: Int = 0) = {
Expand Down

0 comments on commit 2b03fe9

Please sign in to comment.