Skip to content

Commit

Permalink
Merge 73379c6 into 2ecf30c
Browse files Browse the repository at this point in the history
  • Loading branch information
kevinjmh committed Feb 2, 2019
2 parents 2ecf30c + 73379c6 commit aeb2f5e
Show file tree
Hide file tree
Showing 4 changed files with 28 additions and 8 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -106,6 +106,7 @@ class MVCreateTestCase extends QueryTest with BeforeAndAfterAll {
sql("drop datamap if exists datamap1")
sql("create datamap datamap1 using 'mv' as select empname, designation from fact_table1")
sql(s"rebuild datamap datamap1")
checkExistence(sql("show datamap on table fact_table1"), true, "select empname, designation from fact_table1")
val df = sql("select empname,designation from fact_table1")
val analyzed = df.queryExecution.analyzed
assert(verifyMVDataMap(analyzed, "datamap1"))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll {

test("test pre agg create table 1") {
sql("create datamap preagg1 on table PreAggMain using 'preaggregate' as select a,sum(b) from PreAggMain group by a")
checkExistence(sql("SHOW DATAMAP ON TABLE PreAggMain"), true, "select a,sum(b) from PreAggMain group by a")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain_preagg1"), true, "preaggmain_a")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain_preagg1"), true, "preaggmain_b_sum")
sql("drop datamap preagg1 on table PreAggMain")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -229,9 +229,9 @@ class TestDataMapCommand extends QueryTest with BeforeAndAfterAll {
""".stripMargin)
var result = sql(s"show datamap on table $tableName").cache()
checkAnswer(sql(s"show datamap on table $tableName"),
Seq(Row(datamapName, "bloomfilter", s"default.$tableName", "'bloom_fpp'='0.001', 'bloom_size'='32000', 'index_columns'='a'"),
Row(datamapName2, "bloomfilter", s"default.$tableName", "'index_columns'='b'"),
Row(datamapName3, "bloomfilter", s"default.$tableName", "'index_columns'='c'")))
Seq(Row(datamapName, "bloomfilter", s"default.$tableName", "'bloom_fpp'='0.001', 'bloom_size'='32000', 'index_columns'='a'", null),
Row(datamapName2, "bloomfilter", s"default.$tableName", "'index_columns'='b'", null),
Row(datamapName3, "bloomfilter", s"default.$tableName", "'index_columns'='c'", null)))
result.unpersist()
sql(s"drop table if exists $tableName")

Expand All @@ -248,7 +248,8 @@ class TestDataMapCommand extends QueryTest with BeforeAndAfterAll {
| GROUP BY mytime
""".stripMargin)
checkAnswer(sql(s"show datamap on table $tableName"),
Seq(Row("agg0_hour", "timeSeries", s"default.${tableName}_agg0_hour", "'event_time'='mytime', 'hour_granularity'='1'")))
Seq(Row("agg0_hour", "timeSeries", s"default.${tableName}_agg0_hour", "'event_time'='mytime', 'hour_granularity'='1'",
"SELECT mytime, SUM(age) FROM datamapshowtest GROUP BY mytime")))
sql(s"drop table if exists $tableName")

// for preaggregate datamap, the property is empty
Expand All @@ -262,7 +263,8 @@ class TestDataMapCommand extends QueryTest with BeforeAndAfterAll {
| FROM $tableName GROUP BY name
| """.stripMargin)
checkAnswer(sql(s"show datamap on table $tableName"),
Seq(Row("agg0", "preaggregate", s"default.${tableName}_agg0", "")))
Seq(Row("agg0", "preaggregate", s"default.${tableName}_agg0", "",
"SELECT name, count(age) FROM datamapshowtest GROUP BY name")))
sql(s"drop table if exists $tableName")
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,12 @@ import org.apache.spark.sql.{CarbonEnv, Row, SparkSession}
import org.apache.spark.sql.catalyst.TableIdentifier
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.execution.command.{Checker, DataCommand}
import org.apache.spark.sql.execution.command.preaaggregate.PreAggregateUtil
import org.apache.spark.sql.types.StringType

import org.apache.carbondata.core.datamap.DataMapStoreManager
import org.apache.carbondata.core.metadata.schema.datamap.{DataMapClassProvider, DataMapProperty}
import org.apache.carbondata.core.metadata.schema.table.DataMapSchema
import org.apache.carbondata.core.metadata.schema.table.{AggregationDataMapSchema, DataMapSchema}

/**
* Show the datamaps on the table
Expand All @@ -43,7 +44,8 @@ case class CarbonDataMapShowCommand(tableIdentifier: Option[TableIdentifier])
Seq(AttributeReference("DataMapName", StringType, nullable = false)(),
AttributeReference("ClassName", StringType, nullable = false)(),
AttributeReference("Associated Table", StringType, nullable = false)(),
AttributeReference("DataMap Properties", StringType, nullable = false)())
AttributeReference("DataMap Properties", StringType, nullable = false)(),
AttributeReference("Subquery", StringType, nullable = true)())
}

override def processData(sparkSession: SparkSession): Seq[Row] = {
Expand Down Expand Up @@ -92,7 +94,21 @@ case class CarbonDataMapShowCommand(tableIdentifier: Option[TableIdentifier])
.map(p => s"'${ p._1 }'='${ p._2 }'").toSeq
.sorted.mkString(", ")
}
Row(s.getDataMapName, s.getProviderName, table, dmPropertieStr)

val subquery = if (s.getProviderName.equalsIgnoreCase(
DataMapClassProvider.MV.getShortName)) {
s.getCtasQuery
} else if (s.getProviderName.equalsIgnoreCase(
DataMapClassProvider.PREAGGREGATE.getShortName)
|| s.getProviderName.equalsIgnoreCase(
DataMapClassProvider.TIMESERIES.getShortName)) {
PreAggregateUtil.getChildQuery(s.asInstanceOf[AggregationDataMapSchema])
} else {
null
}

Row(s.getDataMapName, s.getProviderName, table, dmPropertieStr,
if (null == subquery) null else subquery.replace("\n", " "))
}
} else {
Seq.empty
Expand Down

0 comments on commit aeb2f5e

Please sign in to comment.