Skip to content

Commit

Permalink
[HOTFIX] Old stores cannot be read with a new table inferred through the SDK.
Browse files Browse the repository at this point in the history
Problem: An old store's column schema may be written in a different case, so the fileformat reader cannot read the data, because the SDK's schema inference always produces lower-case column names.
Solution: Do a case-insensitive check when comparing column names and IDs.
It also disables prefetch, as it is redundant for fileformat reads, and input metrics are not reported correctly when a separate prefetch thread is used.

This closes #2704
  • Loading branch information
ravipesala authored and kumarvishal09 committed Sep 12, 2018
1 parent daa91c8 commit 4c692d1
Show file tree
Hide file tree
Showing 11 changed files with 30 additions and 13 deletions.
Expand Up @@ -143,7 +143,7 @@ public CarbonDimension getCarbonDimensionBasedOnColIdentifier(CarbonTable carbon
List<CarbonDimension> listOfCarbonDims =
carbonTable.getDimensionByTableName(carbonTable.getTableName());
for (CarbonDimension dimension : listOfCarbonDims) {
if (dimension.getColumnId().equals(columnIdentifier)) {
if (dimension.getColumnId().equalsIgnoreCase(columnIdentifier)) {
return dimension;
}
if (dimension.getNumberOfChild() > 0) {
Expand All @@ -168,7 +168,8 @@ public CarbonDimension getCarbonDimensionBasedOnColIdentifier(CarbonTable carbon
private CarbonDimension getCarbonChildDimsBasedOnColIdentifier(String columnIdentifier,
CarbonDimension dimension) {
for (int i = 0; i < dimension.getNumberOfChild(); i++) {
if (dimension.getListOfChildDimensions().get(i).getColumnId().equals(columnIdentifier)) {
if (dimension.getListOfChildDimensions().get(i).getColumnId()
.equalsIgnoreCase(columnIdentifier)) {
return dimension.getListOfChildDimensions().get(i);
} else if (dimension.getListOfChildDimensions().get(i).getNumberOfChild() > 0) {
CarbonDimension childDim = getCarbonChildDimsBasedOnColIdentifier(columnIdentifier,
Expand Down
Expand Up @@ -152,7 +152,7 @@ public ColumnSchema getChildColByParentColName(String columName) {
List<ParentColumnTableRelation> parentColumnTableRelations =
columnSchema.getParentColumnTableRelations();
if (null != parentColumnTableRelations && parentColumnTableRelations.size() == 1
&& parentColumnTableRelations.get(0).getColumnName().equals(columName) &&
&& parentColumnTableRelations.get(0).getColumnName().equalsIgnoreCase(columName) &&
columnSchema.getColumnName().endsWith(columName)) {
return columnSchema;
}
Expand Down Expand Up @@ -198,7 +198,7 @@ public ColumnSchema getTimeseriesChildColByParent(String columName, String times
List<ParentColumnTableRelation> parentColumnTableRelations =
columnSchema.getParentColumnTableRelations();
if (null != parentColumnTableRelations && parentColumnTableRelations.size() == 1
&& parentColumnTableRelations.get(0).getColumnName().equals(columName)
&& parentColumnTableRelations.get(0).getColumnName().equalsIgnoreCase(columName)
&& timeseriesFunction.equalsIgnoreCase(columnSchema.getTimeSeriesFunction())) {
return columnSchema;
}
Expand Down
Expand Up @@ -315,7 +315,7 @@ public void setParentColumnTableRelations(
if (other.columnName != null) {
return false;
}
} else if (!columnName.equals(other.columnName)) {
} else if (!columnName.equalsIgnoreCase(other.columnName)) {
return false;
}
if (dataType == null) {
Expand Down
Expand Up @@ -476,7 +476,11 @@ private BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel,
blockExecutionInfo
.setTotalNumberDimensionToRead(
segmentProperties.getDimensionOrdinalToChunkMapping().size());
blockExecutionInfo.setPrefetchBlocklet(!queryModel.isReadPageByPage());
if (queryModel.isReadPageByPage()) {
blockExecutionInfo.setPrefetchBlocklet(false);
} else {
blockExecutionInfo.setPrefetchBlocklet(queryModel.isPreFetchData());
}
blockExecutionInfo
.setTotalNumberOfMeasureToRead(segmentProperties.getMeasuresOrdinalToChunkMapping().size());
blockExecutionInfo.setComplexDimensionInfoMap(QueryUtil
Expand Down
Expand Up @@ -165,14 +165,15 @@ private static boolean isColumnMatches(boolean isTransactionalTable,
// column ID but can have same column name
if (tableColumn.getDataType().isComplexType() && !(tableColumn.getDataType().getId()
== DataTypes.ARRAY_TYPE_ID)) {
if (tableColumn.getColumnId().equals(queryColumn.getColumnId())) {
if (tableColumn.getColumnId().equalsIgnoreCase(queryColumn.getColumnId())) {
return true;
} else {
return isColumnMatchesStruct(tableColumn, queryColumn);
}
} else {
return (tableColumn.getColumnId().equals(queryColumn.getColumnId()) || (!isTransactionalTable
&& tableColumn.getColName().equals(queryColumn.getColName())));
return (tableColumn.getColumnId().equalsIgnoreCase(queryColumn.getColumnId()) || (
!isTransactionalTable && tableColumn.getColName()
.equalsIgnoreCase(queryColumn.getColName())));
}
}

Expand Down
Expand Up @@ -64,7 +64,7 @@ private void getColumnList(Expression expression, List<ColumnExpression> lst) {
boolean found = false;

for (ColumnExpression currentColExp : lst) {
if (currentColExp.getColumnName().equals(colExp.getColumnName())) {
if (currentColExp.getColumnName().equalsIgnoreCase(colExp.getColumnName())) {
found = true;
colExp.setColIndex(currentColExp.getColIndex());
break;
Expand Down
Expand Up @@ -397,7 +397,7 @@ private static int getFilterColumnIndexInCachedColumns(
int columnIndexInMinMaxByteArray = -1;
int columnCounter = 0;
for (CarbonColumn cachedColumn : carbonDimensionsToBeCached) {
if (cachedColumn.getColumnId().equals(filterColumn.getColumnId())) {
if (cachedColumn.getColumnId().equalsIgnoreCase(filterColumn.getColumnId())) {
columnIndexInMinMaxByteArray = columnCounter;
break;
}
Expand Down
Expand Up @@ -122,6 +122,8 @@ public class QueryModel {
// whether to clear/free unsafe memory or not
private boolean freeUnsafeMemory = true;

private boolean preFetchData = true;

private QueryModel(CarbonTable carbonTable) {
tableBlockInfos = new ArrayList<TableBlockInfo>();
invalidSegmentIds = new ArrayList<>();
Expand Down Expand Up @@ -396,6 +398,14 @@ public void setFG(boolean FG) {
isFG = FG;
}

/**
 * Returns whether blocklet prefetching is enabled for this query.
 * Defaults to {@code true}; callers (e.g. the Spark fileformat reader)
 * may disable it via {@link #setPreFetchData(boolean)} when prefetch
 * is redundant or interferes with input-metrics reporting.
 */
public boolean isPreFetchData() {
return preFetchData;
}

/**
 * Enables or disables blocklet prefetching for this query.
 *
 * @param preFetchData {@code false} to disable prefetch (e.g. for
 *        fileformat reads where it is redundant); {@code true} otherwise
 */
public void setPreFetchData(boolean preFetchData) {
this.preFetchData = preFetchData;
}

@Override
public String toString() {
return String.format("scan on table %s.%s, %d projection columns with filter (%s)",
Expand Down
Expand Up @@ -479,7 +479,7 @@ public static boolean useMinMaxForBlockletPruning(FilterResolverIntf filterResol
private static boolean filterColumnExistsInMinMaxColumnList(List<CarbonColumn> minMaxCacheColumns,
CarbonColumn filterColumn) {
for (CarbonColumn column : minMaxCacheColumns) {
if (filterColumn.getColumnId().equals(column.getColumnId())) {
if (filterColumn.getColumnId().equalsIgnoreCase(column.getColumnId())) {
return true;
}
}
Expand Down
Expand Up @@ -1193,7 +1193,7 @@ public static CarbonDimension getDimensionFromCurrentBlock(
List<CarbonDimension> blockDimensions, CarbonDimension dimensionToBeSearched) {
CarbonDimension currentBlockDimension = null;
for (CarbonDimension blockDimension : blockDimensions) {
if (dimensionToBeSearched.getColumnId().equals(blockDimension.getColumnId())) {
if (dimensionToBeSearched.getColumnId().equalsIgnoreCase(blockDimension.getColumnId())) {
currentBlockDimension = blockDimension;
break;
}
Expand Down
Expand Up @@ -387,6 +387,7 @@ class SparkCarbonFileFormat extends FileFormat
new TaskAttemptContextImpl(broadcastedHadoopConf.value.value, attemptId)
val model = format.createQueryModel(split, hadoopAttemptContext)
model.setConverter(new SparkDataTypeConverterImpl)
model.setPreFetchData(false)
val carbonReader = if (readVector) {
val vectorizedReader = new VectorizedCarbonRecordReader(model,
null,
Expand Down

0 comments on commit 4c692d1

Please sign in to comment.