Skip to content

Commit

Permalink
[CARBONDATA-3023] Alter add column issue with reading a row
Browse files Browse the repository at this point in the history
  • Loading branch information
dhatchayani committed Oct 17, 2018
1 parent 15d3826 commit 8269397
Show file tree
Hide file tree
Showing 3 changed files with 53 additions and 10 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -634,8 +634,9 @@ case class CarbonLoadDataCommand(
CarbonSession.threadSet("partition.operationcontext", operationContext)
// input data from csv files. Convert to logical plan
val allCols = new ArrayBuffer[String]()
allCols ++= table.getAllDimensions.asScala.map(_.getColName)
allCols ++= table.getAllMeasures.asScala.map(_.getColName)
// get only the visible dimensions from table
allCols ++= table.getDimensionByTableName(table.getTableName).asScala.map(_.getColName)
allCols ++= table.getMeasureByTableName(table.getTableName).asScala.map(_.getColName)
var attributes =
StructType(
allCols.filterNot(_.equals(CarbonCommonConstants.DEFAULT_INVISIBLE_DUMMY_MEASURE)).map(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,8 @@ class AlterTableValidationTestCase extends Spark2QueryTest with BeforeAndAfterAl
sql("drop table if exists test")
sql("drop table if exists retructure_iud")
sql("drop table if exists restructure_random_select")
sql("drop table if exists alterTable")
sql("drop table if exists alterPartitionTable")

// clean data folder
CarbonProperties.getInstance()
Expand Down Expand Up @@ -738,6 +740,42 @@ test("test alter command for boolean data type with correct default measure valu
}
}

test("load table after alter drop column scenario") {
  // Recreate the table, cycle the `empno` column through drop/add (which changes the
  // column schema ordinal), then load data and verify a row can still be read back.
  Seq(
    "drop table if exists alterTable",
    "create table alterTable(empno string, salary string) stored by 'carbondata' tblproperties" +
      "('sort_columns'='')",
    "alter table alterTable drop columns(empno)",
    "alter table alterTable add columns(empno string)",
    s"load data local inpath '$resourcesPath/double.csv' into table alterTable options" +
      s"('header'='true')"
  ).foreach(sql)
  // The loaded value must come back intact despite the altered column layout.
  checkAnswer(sql("select salary from alterTable limit 1"), Row(" 775678765456789098765432.789"))
}

test("load partition table after alter drop column scenario") {
  // Remember the configured timestamp format so it can be restored when the test ends.
  val carbonProps = CarbonProperties.getInstance()
  val savedTimestampFormat =
    carbonProps.getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT)
  carbonProps.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
  sql("drop table if exists alterPartitionTable")
  // Partitioned table whose sort columns include `projectenddate`, which is then
  // dropped and re-added so its schema ordinal no longer matches creation order.
  sql(
    """
      | CREATE TABLE alterPartitionTable (empname String, designation String, doj Timestamp,
      | workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
      | projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
      | utilization int,salary int)
      | PARTITIONED BY (empno int)
      | STORED BY 'org.apache.carbondata.format'
      | TBLPROPERTIES('SORT_COLUMNS'='empname,deptno,projectcode,projectjoindate,
      | projectenddate,attendance')
    """.stripMargin)
  sql("alter table alterPartitionTable drop columns(projectenddate)")
  sql("alter table alterPartitionTable add columns(projectenddate timestamp)")
  // Load must succeed and the table must remain queryable after the drop/add cycle.
  sql(s"LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE alterPartitionTable OPTIONS('DELIMITER'= ',', " +
    "'QUOTECHAR'= '\"')")
  sql("select * from alterPartitionTable where empname='bill'").show(false)
  // Restore the previous timestamp format so later tests are unaffected.
  carbonProps.addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, savedTimestampFormat)
}

override def afterAll {
sql("DROP TABLE IF EXISTS restructure")
sql("drop table if exists table1")
Expand All @@ -756,5 +794,7 @@ test("test alter command for boolean data type with correct default measure valu
sql("drop table if exists test")
sql("drop table if exists retructure_iud")
sql("drop table if exists restructure_random_select")
sql("drop table if exists alterTable")
sql("drop table if exists alterPartitionTable")
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -211,7 +211,7 @@ public IntermediateSortTempRow readWithoutNoSortFieldConvert(
// read no-dict & sort data
for (int idx = 0; idx < this.noDictSortDimCnt; idx++) {
// for no dict measure column get the original data
noDictSortDims[idx] = getDataForNoDictSortColumn(inputStream, idx);
noDictSortDims[this.noDictSortDimIdx[idx]] = getDataForNoDictSortColumn(inputStream, idx);
}

// read no-dict dims & measures
Expand Down Expand Up @@ -246,7 +246,7 @@ public IntermediateSortTempRow readWithNoSortFieldConvert(
// read no-dict & sort data
for (int idx = 0; idx < this.noDictSortDimCnt; idx++) {
// for no dict measure column get the original data
noDictSortDims[idx] = getDataForNoDictSortColumn(inputStream, idx);
noDictSortDims[this.noDictSortDimIdx[idx]] = getDataForNoDictSortColumn(inputStream, idx);
}

// read no-dict dims & measures
Expand Down Expand Up @@ -327,13 +327,15 @@ private void unpackNoSortFromBytes(byte[] noSortDimsAndMeasures, int[] dictDims,
for (int i = 0; i < noDictNoSortDimCnt; i++) {
// for no dict measure column get the original data
if (this.noDictNoSortColMapping[i]) {
noDictDims[noDictIndex++] = getDataFromRowBuffer(noDictNoSortDataTypes[i], rowBuffer);
noDictDims[this.noDictNoSortDimIdx[i]] =
getDataFromRowBuffer(noDictNoSortDataTypes[i], rowBuffer);
} else {
short len = rowBuffer.getShort();
byte[] bytes = new byte[len];
rowBuffer.get(bytes);
noDictDims[noDictIndex++] = bytes;
noDictDims[this.noDictNoSortDimIdx[i]] = bytes;
}
noDictIndex++;
}

// read varchar dims
Expand Down Expand Up @@ -547,7 +549,7 @@ public IntermediateSortTempRow readFromMemoryWithoutNoSortFieldConvert(Object ba
CarbonUnsafe.getUnsafe().copyMemory(baseObject, address + size,
bytes, CarbonUnsafe.BYTE_ARRAY_OFFSET, length);
size += length;
noDictSortDims[idx] = bytes;
noDictSortDims[this.noDictSortDimIdx[idx]] = bytes;
}

// read no-sort dims & measures
Expand Down Expand Up @@ -591,19 +593,19 @@ public IntermediateSortTempRow readRowFromMemoryWithNoSortFieldConvert(Object ba
// get the original data from the unsafe memory
if (0 == length) {
// if the length is 0, the the data is null
noDictSortDims[idx] = null;
noDictSortDims[this.noDictSortDimIdx[idx]] = null;
} else {
Object data = CarbonUnsafeUtil
.getDataFromUnsafe(noDictSortDataTypes[idx], baseObject, address, size, length);
size += length;
noDictSortDims[idx] = data;
noDictSortDims[this.noDictSortDimIdx[idx]] = data;
}
} else {
byte[] bytes = new byte[length];
CarbonUnsafe.getUnsafe()
.copyMemory(baseObject, address + size, bytes, CarbonUnsafe.BYTE_ARRAY_OFFSET, length);
size += length;
noDictSortDims[idx] = bytes;
noDictSortDims[this.noDictSortDimIdx[idx]] = bytes;
}
}

Expand Down

0 comments on commit 8269397

Please sign in to comment.