review comments handled and test case fix
akashrn5 authored and akashrn5 committed Dec 19, 2018
1 parent 98fd042 commit 78cc5d3
Showing 20 changed files with 353 additions and 460 deletions.
@@ -21,7 +21,8 @@ public enum TableOperation {
ALTER_RENAME,
ALTER_DROP,
ALTER_ADD_COLUMN,
ALTER_COL_RENAME_AND_CHANGE_DATATYPE,
ALTER_CHANGE_DATATYPE,
ALTER_COLUMN_RENAME,
STREAMING,
UPDATE,
DELETE,
@@ -383,7 +383,9 @@ public boolean willBecomeStale(TableOperation operation) {
return true;
case ALTER_ADD_COLUMN:
return false;
case ALTER_COL_RENAME_AND_CHANGE_DATATYPE:
case ALTER_CHANGE_DATATYPE:
return true;
case ALTER_COLUMN_RENAME:
return true;
case STREAMING:
return false;
@@ -415,7 +417,8 @@ public boolean isOperationBlocked(TableOperation operation, Object... targets) {
}
return false;
}
case ALTER_COL_RENAME_AND_CHANGE_DATATYPE: {
case ALTER_CHANGE_DATATYPE:
case ALTER_COLUMN_RENAME: {
// alter table change datatype or rename of a column
// will be blocked if the column is present in a bloomfilter datamap
String columnToChangeDatatype = (String) targets[0];
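For reference, a minimal sketch of what this blocking looks like from the SQL layer, assuming a hypothetical table `sample` with a bloomfilter datamap on column `city` (the table, datamap, and exact exception type are illustrative, not part of this commit): both operations that now map to the split enum values should be rejected on the indexed column.

// Hypothetical setup: a bloomfilter datamap indexes column `city`.
sql("create datamap dm_city on table sample using 'bloomfilter' dmproperties('INDEX_COLUMNS'='city')")

// ALTER_CHANGE_DATATYPE on the indexed column is expected to be blocked.
intercept[Exception] {
  sql("alter table sample change city city bigint")
}

// ALTER_COLUMN_RENAME on the indexed column is expected to be blocked as well.
intercept[Exception] {
  sql("alter table sample change city town string")
}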
@@ -96,7 +96,9 @@ public DataMapLevel getDataMapLevel() {
return true;
case ALTER_ADD_COLUMN:
return true;
case ALTER_COL_RENAME_AND_CHANGE_DATATYPE:
case ALTER_CHANGE_DATATYPE:
return true;
case ALTER_COLUMN_RENAME:
return true;
case STREAMING:
return false;
@@ -641,7 +641,7 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
val ex3 = intercept[MalformedCarbonCommandException] {
sql("alter table datamap_test7 change id id BIGINT")
}
assert(ex3.getMessage.contains("alter table change datatype or column rename is not supported"))
assert(ex3.getMessage.contains("alter table change datatype is not supported"))

val ex4 = intercept[MalformedCarbonCommandException] {
sql("alter table datamap_test7 drop columns(name)")
@@ -658,6 +658,11 @@ class LuceneFineGrainDataMapSuite extends QueryTest with BeforeAndAfterAll {
sql("delete from datamap_test7 where name = 'n10'").show()
}
assert(ex6.getMessage.contains("Delete operation is not supported"))

val ex7 = intercept[MalformedCarbonCommandException] {
sql("alter table datamap_test7 change id test int")
}
assert(ex7.getMessage.contains("alter table column rename is not supported"))
}

ignore("test lucene fine grain multiple data map on table") {
@@ -45,7 +45,7 @@ case class AlterTableDropColumnPreEvent(
case class AlterTableColRenameAndDataTypeChangePreEvent(
sparkSession: SparkSession,
carbonTable: CarbonTable,
alterTableDataTypeChangeModel: AlterTableColRenameAndDataTypeChangeModel)
alterTableDataTypeChangeModel: AlterTableDataTypeChangeModel)
extends Event with AlterTableDataTypeChangeEventInfo

/**
@@ -57,7 +57,7 @@ case class AlterTableColRenameAndDataTypeChangePreEvent(
case class AlterTableColRenameAndDataTypeChangePostEvent(
sparkSession: SparkSession,
carbonTable: CarbonTable,
alterTableDataTypeChangeModel: AlterTableColRenameAndDataTypeChangeModel)
alterTableDataTypeChangeModel: AlterTableDataTypeChangeModel)
extends Event with AlterTableDataTypeChangeEventInfo

/**
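A minimal sketch of how these pre/post events are typically dispatched with the retyped model, assuming CarbonData's usual OperationListenerBus pattern (the wiring below is an assumption for illustration, not part of this commit):

// Assumed usage: fire the pre event before the schema change and the post
// event after it succeeds; listeners now receive an AlterTableDataTypeChangeModel.
val operationContext = new OperationContext
OperationListenerBus.getInstance().fireEvent(
  AlterTableColRenameAndDataTypeChangePreEvent(sparkSession, carbonTable, alterTableDataTypeChangeModel),
  operationContext)
// ... apply the datatype change / column rename to the table schema ...
OperationListenerBus.getInstance().fireEvent(
  AlterTableColRenameAndDataTypeChangePostEvent(sparkSession, carbonTable, alterTableDataTypeChangeModel),
  operationContext)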
@@ -19,7 +19,7 @@ package org.apache.carbondata.events

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.catalog.CatalogTypes.TablePartitionSpec
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsModel, AlterTableColRenameAndDataTypeChangeModel, AlterTableDropColumnModel, AlterTableRenameModel, CarbonMergerMapping}
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsModel, AlterTableDataTypeChangeModel, AlterTableDropColumnModel, AlterTableRenameModel, CarbonMergerMapping}

import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
@@ -80,7 +80,7 @@ trait AlterTableDropPartitionEventInfo {

trait AlterTableDataTypeChangeEventInfo {
val carbonTable: CarbonTable
val alterTableDataTypeChangeModel: AlterTableColRenameAndDataTypeChangeModel
val alterTableDataTypeChangeModel: AlterTableDataTypeChangeModel
}

/**
@@ -1491,36 +1491,32 @@ abstract class CarbonDDLSqlParser extends AbstractCarbonSparkSQLParser {
dataType: String,
values: Option[List[(Int, Int)]],
isColumnRename: Boolean): DataTypeInfo = {
def validateAndGetDecimalDatatype: DataTypeInfo = {
var precision: Int = 0
var scale: Int = 0
if (values.isDefined) {
precision = values.get(0)._1
scale = values.get(0)._2
} else {
throw new MalformedCarbonCommandException("Decimal format provided is invalid")
}
// precision should be > 0 and <= 38 and scale should be >= 0 and <= 38
if (precision < 1 || precision > 38) {
throw new MalformedCarbonCommandException("Invalid value for precision")
} else if (scale < 0 || scale > 38) {
throw new MalformedCarbonCommandException("Invalid value for scale")
}
DataTypeInfo("decimal", precision, scale)
}

var precision: Int = 0
var scale: Int = 0
dataType match {
case "bigint" | "long" =>
if (values.isDefined) {
throw new MalformedCarbonCommandException("Invalid data type")
}
DataTypeInfo(dataType)
case "decimal" =>
validateAndGetDecimalDatatype
if (values.isDefined) {
precision = values.get(0)._1
scale = values.get(0)._2
} else {
throw new MalformedCarbonCommandException("Decimal format provided is invalid")
}
// precision should be > 0 and <= 38 and scale should be >= 0 and <= 38
if (precision < 1 || precision > 38) {
throw new MalformedCarbonCommandException("Invalid value for precision")
} else if (scale < 0 || scale > 38) {
throw new MalformedCarbonCommandException("Invalid value for scale")
}
DataTypeInfo("decimal", precision, scale)
case _ =>
if (isColumnRename) {
if (dataType.equalsIgnoreCase("decimal")) {
return validateAndGetDecimalDatatype
DataTypeInfo("decimal", precision, scale)
} else {
return DataTypeInfo(DataTypeConverterUtil.convertToCarbonType(dataType).getName)
}
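Worked examples of the inlined decimal validation above, exercised through parseDataType the same way the SqlAstBuilderHelper change further down calls it (the concrete values are illustrative):

val parser = new CarbonSpark2SqlParser()

// precision 10 is within [1, 38] and scale 2 within [0, 38],
// so this returns DataTypeInfo("decimal", 10, 2)
parser.parseDataType("decimal", Some(List((10, 2))), isColumnRename = false)

// no precision/scale supplied -> "Decimal format provided is invalid"
// parser.parseDataType("decimal", None, isColumnRename = false)

// precision 39 is out of range -> "Invalid value for precision"
// parser.parseDataType("decimal", Some(List((39, 2))), isColumnRename = false)

// "bigint" with precision/scale values -> "Invalid data type"
// parser.parseDataType("bigint", Some(List((10, 2))), isColumnRename = false)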
@@ -171,12 +171,17 @@ case class DropPartitionCallableModel(carbonLoadModel: CarbonLoadModel,

case class DataTypeInfo(dataType: String, precision: Int = 0, scale: Int = 0)

case class AlterTableColRenameAndDataTypeChangeModel(dataTypeInfo: DataTypeInfo,
class AlterTableColumnRenameModel(columnName: String,
newColumnName: String,
isColumnRename: Boolean)

case class AlterTableDataTypeChangeModel(dataTypeInfo: DataTypeInfo,
databaseName: Option[String],
tableName: String,
columnName: String,
newColumnName: String,
isColumnRename: Boolean)
extends AlterTableColumnRenameModel(columnName, newColumnName, isColumnRename)

case class AlterTableRenameModel(
oldTableIdentifier: TableIdentifier,
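A small construction sketch for the reshaped models (all values are illustrative): AlterTableDataTypeChangeModel now extends the plain AlterTableColumnRenameModel, so the rename fields are reachable through either type.

// A pure datatype change: old and new column names match, so isColumnRename is false.
val changeModel = AlterTableDataTypeChangeModel(
  DataTypeInfo("decimal", precision = 12, scale = 3),
  databaseName = Some("default"),
  tableName = "sample",
  columnName = "price",
  newColumnName = "price",
  isColumnRename = false)

// The rename fields are also visible through the superclass view.
val renameView: AlterTableColumnRenameModel = changeModel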
@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.parser.SqlBaseParser
import org.apache.spark.sql.catalyst.parser.SqlBaseParser.{AddTableColumnsContext, ChangeColumnContext, CreateTableContext, ShowTablesContext}
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkSqlAstBuilder
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsModel, AlterTableColRenameAndDataTypeChangeModel}
import org.apache.spark.sql.execution.command.{AlterTableAddColumnsModel, AlterTableDataTypeChangeModel}
import org.apache.spark.sql.execution.command.schema.{CarbonAlterTableAddColumnCommand, CarbonAlterTableColRenameDataTypeChangeCommand}
import org.apache.spark.sql.execution.command.table.{CarbonExplainCommand, CarbonShowTablesCommand}
import org.apache.spark.sql.parser.CarbonSpark2SqlParser
@@ -37,9 +37,10 @@ trait SqlAstBuilderHelper extends SparkSqlAstBuilder {
override def visitChangeColumn(ctx: ChangeColumnContext): LogicalPlan = {

val newColumn = visitColType(ctx.colType)
var isColumnRename = false
if (!ctx.identifier.getText.equalsIgnoreCase(newColumn.name)) {
isColumnRename = true
val isColumnRename = if (!ctx.identifier.getText.equalsIgnoreCase(newColumn.name)) {
true
} else {
false
}

val (typeString, values): (String, Option[List[(Int, Int)]]) = newColumn.dataType match {
@@ -48,7 +49,7 @@ trait SqlAstBuilderHelper extends SparkSqlAstBuilder {
}

val alterTableColRenameAndDataTypeChangeModel =
AlterTableColRenameAndDataTypeChangeModel(new CarbonSpark2SqlParser()
AlterTableDataTypeChangeModel(new CarbonSpark2SqlParser()
.parseDataType(typeString, values, isColumnRename),
new CarbonSpark2SqlParser()
.convertDbNameToLowerCase(Option(ctx.tableIdentifier().db).map(_.getText)),
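For clarity, the two shapes of ALTER TABLE ... CHANGE that this visitor now distinguishes, mirroring the Lucene test earlier in this diff (the table name comes from that test):

// Old and new column names match, so isColumnRename = false:
// the statement is treated as a pure datatype change.
sql("alter table datamap_test7 change id id BIGINT")

// Names differ, so isColumnRename = true: the column rename path is taken.
sql("alter table datamap_test7 change id test int")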
