[CARBONDATA-3087] Improve DESC FORMATTED output
Change the output of DESC FORMATTED to show table properties with human-readable labels (for example "Inverted Index Columns" and "Cached Min/Max Index Columns") instead of raw property keys.

This closes #2908
jackylk authored and ravipesala committed Nov 21, 2018
1 parent 6e62698 commit ab1070b
Showing 34 changed files with 342 additions and 325 deletions.
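
For context, a minimal sketch of the command whose output this commit changes, assuming a CarbonData-enabled SparkSession named spark; the table name and properties are illustrative, not taken from this commit:

// Hypothetical demo table carrying properties whose DESC FORMATTED labels change here.
spark.sql(
  """
    | CREATE TABLE IF NOT EXISTS desc_demo (id INT, name STRING)
    | STORED BY 'carbondata'
    | TBLPROPERTIES ('column_meta_cache'='name', 'cache_level'='BLOCKLET')
  """.stripMargin)

// After this commit the output shows readable rows such as "Cached Min/Max Index Columns",
// "Min/Max Index Cache Level" and "Inverted Index Columns" instead of raw keys like
// COLUMN_META_CACHE, CACHE_LEVEL and NOINVERTEDINDEX.
spark.sql("DESCRIBE FORMATTED desc_demo").show(100, false)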
@@ -61,11 +61,6 @@ private CarbonCommonConstants() {
*/
public static final int BLOCKLET_SIZE_MAX_VAL = 12000000;

/**
* default block size in MB
*/
public static final String BLOCK_SIZE_DEFAULT_VAL = "1024";

/**
* min block size in MB
*/
@@ -438,8 +433,16 @@ private CarbonCommonConstants() {
public static final String COLUMN_PROPERTIES = "columnproperties";
// table block size in MB
public static final String TABLE_BLOCKSIZE = "table_blocksize";

// default block size in MB
public static final String TABLE_BLOCK_SIZE_DEFAULT = "1024";

// table blocklet size in MB
public static final String TABLE_BLOCKLET_SIZE = "table_blocklet_size";

// default blocklet size value in MB
public static final String TABLE_BLOCKLET_SIZE_DEFAULT = "64";

/**
* set in column level to disable inverted index
* @Deprecated :This property is deprecated, it is kept just for compatibility
@@ -15,7 +15,7 @@
* limitations under the License.
*/

package org.apache.carbondata.processing.loading.sort;
package org.apache.carbondata.core.constants;

import org.apache.carbondata.core.constants.CarbonCommonConstants;

@@ -24,12 +24,15 @@
import java.util.Comparator;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
import org.apache.carbondata.common.logging.LogServiceFactory;
import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.constants.CarbonLoadOptionConstants;
import org.apache.carbondata.core.constants.SortScopeOptions;
import org.apache.carbondata.core.datamap.DataMapStoreManager;
import org.apache.carbondata.core.datamap.TableDataMap;
import org.apache.carbondata.core.datamap.dev.DataMapFactory;
@@ -55,13 +58,15 @@
import org.apache.carbondata.core.scan.filter.optimizer.RangeFilterOptmizer;
import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
import org.apache.carbondata.core.scan.model.QueryModel;
import org.apache.carbondata.core.util.CarbonProperties;
import org.apache.carbondata.core.util.CarbonUtil;
import org.apache.carbondata.core.util.DataTypeUtil;
import org.apache.carbondata.core.util.path.CarbonTablePath;

import static org.apache.carbondata.core.metadata.schema.datamap.DataMapClassProvider.MV;
import static org.apache.carbondata.core.util.CarbonUtil.thriftColumnSchemaToWrapperColumnSchema;

import com.google.common.collect.Lists;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;

@@ -841,6 +846,15 @@ public int getBlockSizeInMB() {
return blockSize;
}

public int getBlockletSizeInMB() {
try {
return Integer.parseInt(tableInfo.getFactTable().getTableProperties().get(
CarbonCommonConstants.TABLE_BLOCKLET_SIZE));
} catch (NumberFormatException e) {
return Integer.parseInt(CarbonCommonConstants.TABLE_BLOCKLET_SIZE_DEFAULT);
}
}

/**
* to get the normal dimension or the primitive dimension of the complex type
*
@@ -921,6 +935,10 @@ public List<String> getSortColumns(String tableName) {
return sort_columsList;
}

public List<String> getSortColumns() {
return getSortColumns(getTableName());
}

public int getNumberOfSortColumns() {
return numberOfSortColumns;
}
@@ -1214,16 +1232,22 @@ public List<String> getMinMaxCachedColumnsInCreateOrder() {
String tableName = tableInfo.getFactTable().getTableName();
String cacheColumns =
tableInfo.getFactTable().getTableProperties().get(CarbonCommonConstants.COLUMN_META_CACHE);
if (null != cacheColumns && !cacheColumns.isEmpty()) {
String[] cachedCols = cacheColumns.split(",");
for (String column : cachedCols) {
CarbonColumn carbonColumn = getColumnByName(tableName, column);
if (null != carbonColumn && !carbonColumn.isInvisible()) {
cachedColsList.add(carbonColumn.getColName());
if (null != cacheColumns) {
if (!cacheColumns.isEmpty()) {
String[] cachedCols = cacheColumns.split(",");
for (String column : cachedCols) {
CarbonColumn carbonColumn = getColumnByName(tableName, column);
if (null != carbonColumn && !carbonColumn.isInvisible()) {
cachedColsList.add(carbonColumn.getColName());
}
}
return cachedColsList;
} else {
return new LinkedList<>();
}
} else {
return Lists.newArrayList("All columns");
}
return cachedColsList;
}

/**
@@ -1297,4 +1321,41 @@ public static boolean hasMVDataMap(CarbonTable carbonTable) throws IOException {
}
return false;
}

/**
* Return all inverted index columns in this table
*/
public List<ColumnSchema> getInvertedIndexColumns() {
if (getSortScope() == SortScopeOptions.SortScope.NO_SORT) {
return new LinkedList<>();
}
List<ColumnSchema> columns = new LinkedList<>();
for (ColumnSchema column : tableInfo.getFactTable().getListOfColumns()) {
if (column.isUseInvertedIndex() && column.isSortColumn()) {
columns.add(column);
}
}
return columns;
}

/**
* Return table level sort scope
*/
public SortScopeOptions.SortScope getSortScope() {
String sortScope = tableInfo.getFactTable().getTableProperties().get("sort_scope");
if (sortScope == null) {
if (getNumberOfSortColumns() == 0) {
return SortScopeOptions.SortScope.NO_SORT;
} else {
return SortScopeOptions.getSortScope(
CarbonProperties.getInstance().getProperty(
CarbonLoadOptionConstants.CARBON_OPTIONS_SORT_SCOPE,
CarbonProperties.getInstance().getProperty(
CarbonCommonConstants.LOAD_SORT_SCOPE,
CarbonCommonConstants.LOAD_SORT_SCOPE_DEFAULT)));
}
} else {
return SortScopeOptions.getSortScope(sortScope);
}
}
}
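
The new CarbonTable accessors above (getBlockletSizeInMB, getSortColumns, getSortScope) supply values for the reworked DESC FORMATTED output. A minimal usage sketch, assuming a CarbonTable instance already resolved by the caller; the helper object and its name are hypothetical:

import org.apache.carbondata.core.constants.SortScopeOptions
import org.apache.carbondata.core.metadata.schema.table.CarbonTable

// Hypothetical helper; it only illustrates the accessors added in the hunks above.
object DescFormattedSketch {
  // 'table' is assumed to be a CarbonTable already resolved by the caller.
  def printLayoutInfo(table: CarbonTable): Unit = {
    // Falls back to the 64 MB TABLE_BLOCKLET_SIZE_DEFAULT when the property is unset or invalid.
    val blockletSizeMb = table.getBlockletSizeInMB
    // Sort columns of the fact table (same as getSortColumns(getTableName())).
    val sortColumns = table.getSortColumns
    // Resolved from the table's sort_scope property, falling back to load options and the system default.
    val sortScope: SortScopeOptions.SortScope = table.getSortScope
    println(s"Blocklet size (MB): $blockletSizeMb")
    println(s"Sort columns: $sortColumns")
    println(s"Sort scope: $sortScope")
  }
}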
@@ -257,12 +257,12 @@ int getTableBlockSizeInMB() {
tableBlockSize = tableProperties.get(CarbonCommonConstants.TABLE_BLOCKSIZE);
}
if (null == tableBlockSize) {
tableBlockSize = CarbonCommonConstants.BLOCK_SIZE_DEFAULT_VAL;
tableBlockSize = CarbonCommonConstants.TABLE_BLOCK_SIZE_DEFAULT;
if (LOGGER.isDebugEnabled()) {
LOGGER.debug(
"Table block size not specified for " + getTableUniqueName() +
". Therefore considering the default value " +
CarbonCommonConstants.BLOCK_SIZE_DEFAULT_VAL + " MB");
CarbonCommonConstants.TABLE_BLOCK_SIZE_DEFAULT + " MB");
}
}
return Integer.parseInt(tableBlockSize);
@@ -2513,6 +2513,9 @@ public static Map<String, Long> calculateDataIndexSize(CarbonTable carbonTable,
FileFactory.getCarbonFile(tableStatusPath, FileFactory.getFileType(tableStatusPath))
.getLastModifiedTime();
}
if (!FileFactory.isFileExist(metadataPath)) {
dataSize = FileFactory.getDirectorySize(carbonTable.getTablePath());
}
dataIndexSizeMap
.put(String.valueOf(CarbonCommonConstants.CARBON_TOTAL_DATA_SIZE), dataSize);
dataIndexSizeMap
@@ -283,7 +283,12 @@ class TestNoInvertedIndexLoadAndQuery extends QueryTest with BeforeAndAfterAll {
"""
describe formatted indexFormat
"""),
true,"NOINVERTEDINDEX")
true,"Inverted Index Columns")

sql(
"""
describe formatted indexFormat
""").show(100, false)
}

test("filter query on dictionary and no inverted index column where all values are null"){
@@ -77,7 +77,8 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll {
sql("create datamap preagg11 on table PreAggMain1 using 'preaggregate'as select a,sum(b) from PreAggMain1 group by a")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg11"), true, "preaggmain1_a")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg11"), true, "preaggmain1_b_sum")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg11"), true, "DICTIONARY")
sql("DESCRIBE FORMATTED PreAggMain1_preagg11").show(100, false)
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg11"), true, "Dictionary")
sql("drop datamap preagg11 on table PreAggMain1")
}

@@ -87,7 +88,7 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll {
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg12"), true, "preaggmain1_b_sum")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg12"), false, "preaggmain1_a1")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg12"), false, "preaggmain1_sum")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg12"), true, "DICTIONARY")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg12"), true, "Dictionary")
sql("drop datamap preagg12 on table PreAggMain1")
}

@@ -97,7 +98,7 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll {
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg14"), true, "preaggmain1_b_sum")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg14"), false, "preaggmain1_a1")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg14"), false, "preaggmain1_sum")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg14"), true, "DICTIONARY")
checkExistence(sql("DESCRIBE FORMATTED PreAggMain1_preagg14"), true, "Dictionary")
sql("drop datamap preagg14 on table PreAggMain1")
}

@@ -111,18 +111,18 @@ class TestAlterTableWithColumnMetCacheAndCacheLevelProperty extends QueryTest wi
test("validate unsetting of column_meta_cache when column_meta_cache is already set - alter_column_meta_cache_11") {
sql("Alter table alter_column_meta_cache SET TBLPROPERTIES('column_meta_cache'='c2,c3')")
var descResult = sql("describe formatted alter_column_meta_cache")
checkExistence(descResult, true, "COLUMN_META_CACHE")
checkExistence(descResult, true, "Cached Min/Max Index Columns c2, c3")
sql("Alter table alter_column_meta_cache UNSET TBLPROPERTIES('column_meta_cache')")
descResult = sql("describe formatted alter_column_meta_cache")
checkExistence(descResult, false, "COLUMN_META_CACHE")
checkExistence(descResult, false, "Cached Min/Max Index Columns c2, c3")
}

test("validate unsetting of column_meta_cache when column_meta_cache is not already set - alter_column_meta_cache_12") {
var descResult = sql("describe formatted alter_column_meta_cache")
checkExistence(descResult, false, "COLUMN_META_CACHE")
checkExistence(descResult, false, "c2, c3")
sql("Alter table alter_column_meta_cache UNSET TBLPROPERTIES('column_meta_cache')")
descResult = sql("describe formatted alter_column_meta_cache")
checkExistence(descResult, false, "COLUMN_META_CACHE")
checkExistence(descResult, false, "c2, c3")
}

test("validate cache_level with only empty spaces - ALTER_CACHE_LEVEL_01") {
@@ -150,14 +150,14 @@ class TestAlterTableWithColumnMetCacheAndCacheLevelProperty extends QueryTest wi
test("validate describe formatted command to display cache_level when cache_level is set - ALTER_CACHE_LEVEL_05") {
sql("Alter table cache_level SET TBLPROPERTIES('cache_level'='bloCKlet')")
val descResult = sql("describe formatted cache_level")
checkExistence(descResult, true, "CACHE_LEVEL")
checkExistence(descResult, true, "Min/Max Index Cache Level")
}

test("validate describe formatted command to display cache_level when cache_level is not set - ALTER_CACHE_LEVEL_06") {
sql("Alter table cache_level UNSET TBLPROPERTIES('cache_level')")
val descResult = sql("describe formatted cache_level")
// even though not configured default cache level will be displayed as BLOCK
checkExistence(descResult, true, "CACHE_LEVEL")
checkExistence(descResult, true, "Min/Max Index Cache Level")
}

test("validate column_meta_cache and cache_level on child dataMap- ALTER_CACHE_LEVEL_07") {
@@ -109,14 +109,14 @@ class TestCreateTableWithColumnMetCacheAndCacheLevelProperty extends QueryTest w
sql("drop table if exists column_meta_cache")
sql("create table column_meta_cache(c1 String, c2 String, c3 int, c4 double) stored by 'carbondata' TBLPROPERTIES('COLUMN_meta_CachE'='c2')")
val descResult = sql("describe formatted column_meta_cache")
checkExistence(descResult, true, "COLUMN_META_CACHE")
checkExistence(descResult, true, "Cached Min/Max Index Columns c2")
}

test("validate describe formatted command to display column_meta_cache when column_meta_cache is not set - COLUMN_META_CACHE_11") {
sql("drop table if exists column_meta_cache")
sql("create table column_meta_cache(c1 String, c2 String, c3 int, c4 double) stored by 'carbondata'")
val descResult = sql("describe formatted column_meta_cache")
checkExistence(descResult, false, "COLUMN_META_CACHE")
checkExistence(descResult, false, "Cached Min/Max Index Columns c2")
}

test("validate column_meta_cache after column drop - COLUMN_META_CACHE_12") {
@@ -157,15 +157,15 @@ class TestCreateTableWithColumnMetCacheAndCacheLevelProperty extends QueryTest w
sql("drop table if exists cache_level")
sql("create table cache_level(c1 String) stored by 'carbondata' TBLPROPERTIES('cache_level'='bloCKlet')")
val descResult = sql("describe formatted cache_level")
checkExistence(descResult, true, "CACHE_LEVEL")
checkExistence(descResult, true, "Min/Max Index Cache Level BLOCKLET")
}

test("validate describe formatted command to display cache_level when cache_level is not set - CACHE_LEVEL_06") {
sql("drop table if exists cache_level")
sql("create table cache_level(c1 String) stored by 'carbondata'")
val descResult = sql("describe formatted cache_level")
// even though not configured default cache level will be displayed as BLOCK
checkExistence(descResult, true, "CACHE_LEVEL")
checkExistence(descResult, true, "Min/Max Index Cache Level BLOCK")
}

}
@@ -175,24 +175,4 @@ class TestCreateTableWithCompactionOptions extends QueryTest with BeforeAndAfter
"only int value between 0 and 100 is supported."))
}

test("test create table without compaction options") {
sql(
s"""
| CREATE TABLE $tableWithoutCompactionOptions(
| intField INT,
| stringField STRING
| )
| STORED BY 'carbondata'
""".stripMargin)

val tableOptions = sql(s"DESCRIBE FORMATTED $tableWithoutCompactionOptions")
.collect().map(r => (r.getString(0).trim, r.getString(1).trim)).toMap

assert(!tableOptions.contains("MAJOR_COMPACTION_SIZE"))
assert(!tableOptions.contains("AUTO_LOAD_MERGE"))
assert(!tableOptions.contains("COMPACTION_LEVEL_THRESHOLD"))
assert(!tableOptions.contains("COMPACTION_PRESERVE_SEGMENTS"))
assert(!tableOptions.contains("ALLOWED_COMPACTION_DAYS"))
}

}
