Skip to content

Commit

Permalink
[CARBONDATA-2499][Test] Validate the visible/invisible status of datamap
Browse files Browse the repository at this point in the history
This closes #2325
  • Loading branch information
xubo245 authored and QiangCai committed May 28, 2018
1 parent ddf3e85 commit 1b6ce8c
Show file tree
Hide file tree
Showing 3 changed files with 130 additions and 23 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer

import com.sun.xml.internal.messaging.saaj.util.ByteOutputStream
import org.apache.hadoop.fs.Path
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll

Expand Down Expand Up @@ -401,30 +400,70 @@ class CGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
sql(s"DROP TABLE IF EXISTS $tableName")
sql(
s"""
| CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
| STORED BY 'org.apache.carbondata.format'
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
| CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
| STORED BY 'org.apache.carbondata.format'
| TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
""".stripMargin)
// register datamap writer
sql(s"create datamap $dataMapName1 on table $tableName using '${classOf[CGDataMapFactory].getName}' DMPROPERTIES('index_columns'='name')")
sql(s"create datamap $dataMapName2 on table $tableName using '${classOf[CGDataMapFactory].getName}' DMPROPERTIES('index_columns'='city')")
sql(
s"""
| CREATE DATAMAP $dataMapName1
| ON TABLE $tableName
| USING '${classOf[CGDataMapFactory].getName}'
| DMPROPERTIES('index_columns'='name')
""".stripMargin)
sql(
s"""
| CREATE DATAMAP $dataMapName2
| ON TABLE $tableName
| USING '${classOf[CGDataMapFactory].getName}'
| DMPROPERTIES('index_columns'='city')
""".stripMargin)
sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE $tableName OPTIONS('header'='false')")
val df1 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
assert(df1(0).getString(0).contains("CG DataMap"))
assert(df1(0).getString(0).contains(dataMapName1))
val e11 = intercept[Exception] {
assert(df1(0).getString(0).contains(dataMapName2))
}
assert(e11.getMessage.contains("did not contain \"" + dataMapName2))

// make datamap1 invisible
sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
checkAnswer(sql(s"select * from $tableName where name='n502670' and city='c2670'"),
sql("select * from normal_test where name='n502670' and city='c2670'"))
sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
val df2 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
val e = intercept[Exception] {
assert(df2(0).getString(0).contains(dataMapName1))
}
assert(e.getMessage.contains("did not contain \"" + dataMapName1))
assert(df2(0).getString(0).contains(dataMapName2))
checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))

// also make datamap2 invisible
sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName2 = false")
checkAnswer(sql(s"select * from $tableName where name='n502670' and city='c2670'"),
sql("select * from normal_test where name='n502670' and city='c2670'"))
sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName2 = false")
checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
val df3 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
val e31 = intercept[Exception] {
assert(df3(0).getString(0).contains(dataMapName1))
}
assert(e31.getMessage.contains("did not contain \"" + dataMapName1))
val e32 = intercept[Exception] {
assert(df3(0).getString(0).contains(dataMapName2))
}
assert(e32.getMessage.contains("did not contain \"" + dataMapName2))

// make datamap1,datamap2 visible
sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
sql(s"set ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
checkAnswer(sql(s"select * from $tableName where name='n502670' and city='c2670'"),
sql("select * from normal_test where name='n502670' and city='c2670'"))
sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
val df4 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
assert(df4(0).getString(0).contains(dataMapName1))
val e41 = intercept[Exception] {
assert(df3(0).getString(0).contains(dataMapName2))
}
assert(e41.getMessage.contains("did not contain \"" + dataMapName2))
}

test("test datamap storage in system folder") {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -22,12 +22,10 @@ import scala.collection.JavaConverters._
import scala.collection.mutable.ArrayBuffer

import com.sun.xml.internal.messaging.saaj.util.ByteOutputStream
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll

import org.apache.carbondata.core.datamap.{DataMapDistributable, DataMapMeta}
import org.apache.carbondata.core.datamap.Segment
import org.apache.carbondata.core.datamap.dev.{DataMapModel, DataMapBuilder, DataMapWriter}
import org.apache.carbondata.core.datamap.{DataMapDistributable, DataMapMeta, Segment}
import org.apache.carbondata.core.datamap.dev.{DataMapModel, DataMapBuilder, DataMapWriter}
import org.apache.carbondata.core.datamap.dev.fgdatamap.{FineGrainBlocklet, FineGrainDataMap, FineGrainDataMapFactory}
Expand Down Expand Up @@ -488,9 +486,83 @@ class FGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
sql("select * from normal_test where name='n502670' and city='c2670'"))
}

test("test invisible datamap during query") {
  // Verifies that toggling datamap visibility via the SET command controls
  // whether the FG datamap is used by the query planner, without changing
  // query results (results are always checked against normal_test).
  val tableName = "datamap_testFG"
  val dataMapName1 = "datamap1"
  val dataMapName2 = "datamap2"
  sql(s"DROP TABLE IF EXISTS $tableName")
  sql(
    s"""
       | CREATE TABLE $tableName(id INT, name STRING, city STRING, age INT)
       | STORED BY 'org.apache.carbondata.format'
       | TBLPROPERTIES('SORT_COLUMNS'='city,name', 'SORT_SCOPE'='LOCAL_SORT')
      """.stripMargin)
  // register datamap writer
  sql(
    s"""
       | CREATE DATAMAP $dataMapName1
       | ON TABLE $tableName
       | USING '${classOf[FGDataMapFactory].getName}'
       | DMPROPERTIES('index_columns'='name')
      """.stripMargin)
  sql(
    s"""
       | CREATE DATAMAP $dataMapName2
       | ON TABLE $tableName
       | USING '${classOf[FGDataMapFactory].getName}'
       | DMPROPERTIES('index_columns'='city')
      """.stripMargin)
  sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE $tableName OPTIONS('header'='false')")

  // Both datamaps visible: the planner picks datamap1 (on 'name'), not datamap2.
  val df1 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
  assert(df1(0).getString(0).contains("FG DataMap"))
  assert(df1(0).getString(0).contains(dataMapName1))
  val e11 = intercept[Exception] {
    assert(df1(0).getString(0).contains(dataMapName2))
  }
  assert(e11.getMessage.contains("did not contain \"" + dataMapName2))

  // make datamap1 invisible: the planner must fall back to datamap2.
  sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = false")
  val df2 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
  val e21 = intercept[Exception] {
    assert(df2(0).getString(0).contains(dataMapName1))
  }
  assert(e21.getMessage.contains("did not contain \"" + dataMapName1))
  assert(df2(0).getString(0).contains(dataMapName2))
  checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
    sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))

  // also make datamap2 invisible: no datamap may appear in the plan,
  // but query results must still be correct.
  sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName2 = false")
  checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
    sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
  val df3 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
  val e31 = intercept[Exception] {
    assert(df3(0).getString(0).contains(dataMapName1))
  }
  assert(e31.getMessage.contains("did not contain \"" + dataMapName1))
  val e32 = intercept[Exception] {
    assert(df3(0).getString(0).contains(dataMapName2))
  }
  assert(e32.getMessage.contains("did not contain \"" + dataMapName2))

  // make datamap1,datamap2 visible again: the plan should look like the
  // initial state (datamap1 chosen, datamap2 not).
  // FIX: the second SET previously re-enabled dataMapName1 twice and never
  // restored dataMapName2's visibility.
  sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName1 = true")
  sql(s"SET ${CarbonCommonConstants.CARBON_DATAMAP_VISIBLE}default.$tableName.$dataMapName2 = true")
  checkAnswer(sql(s"SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'"),
    sql("SELECT * FROM normal_test WHERE name='n502670' AND city='c2670'"))
  val df4 = sql(s"EXPLAIN EXTENDED SELECT * FROM $tableName WHERE name='n502670' AND city='c2670'").collect()
  assert(df4(0).getString(0).contains(dataMapName1))
  val e41 = intercept[Exception] {
    // FIX: previously asserted against the stale df3 (both-invisible plan)
    // instead of the freshly collected df4.
    assert(df4(0).getString(0).contains(dataMapName2))
  }
  assert(e41.getMessage.contains("did not contain \"" + dataMapName2))
}

override protected def afterAll(): Unit = {
  // Remove the generated input file and drop every table this suite created.
  CompactionSupportGlobalSortBigFileTest.deleteFile(file2)
  Seq("normal_test", "datamap_test", "datamap_testFG").foreach { table =>
    sql(s"DROP TABLE IF EXISTS $table")
  }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,6 @@

package org.apache.spark.carbondata

import java.io.File

import org.apache.spark.sql.common.util.Spark2QueryTest
import org.apache.spark.sql.hive.HiveContext
import org.scalatest.BeforeAndAfterAll
Expand All @@ -28,8 +26,6 @@ import org.apache.carbondata.core.util.CarbonProperties

/**
* Test Class for detailed query on timestamp datatypes
*
*
*/
class DataLoadFailAllTypeSortTest extends Spark2QueryTest with BeforeAndAfterAll {
var hiveContext: HiveContext = _
Expand Down

0 comments on commit 1b6ce8c

Please sign in to comment.