Fixed tests
ravipesala committed Oct 11, 2018
1 parent e7e0b6b commit dd528ec
Showing 10 changed files with 31 additions and 16 deletions.
@@ -50,8 +50,9 @@ public abstract class BlockIndexerStorage<T> {
*
* @param rowIds
*/
-  protected Map<String, short[]> rleEncodeOnRowId(short[] rowIds, short[] rowIdPage,
-      short[] rowIdRlePage) {
+  protected Map<String, short[]> rleEncodeOnRowId(short[] rowIds) {
+    short[] rowIdPage;
+    short[] rowIdRlePage;
List<Short> list = new ArrayList<Short>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
List<Short> map = new ArrayList<Short>(CarbonCommonConstants.CONSTANT_SIZE_TEN);
int k = 0;
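This hunk moves rowIdPage and rowIdRlePage from out-parameters to locals and returns both pages in a Map, which is why the two callers below drop getRowIdPage() and getRowIdRlePage(). A minimal sketch of the new call shape, assuming the map keys "rowIdPage" and "rowRlePage" used by those callers; the encoding body is stubbed, not CarbonData's real RLE logic:

```java
import java.util.HashMap;
import java.util.Map;

public class RleEncodeOnRowIdSketch {
  // Both pages are built locally and handed back under well-known keys,
  // instead of being passed in by the caller and mutated in place.
  static Map<String, short[]> rleEncodeOnRowId(short[] rowIds) {
    short[] rowIdPage = rowIds;          // stub: real code RLE-encodes the row ids
    short[] rowIdRlePage = new short[0]; // stub: real code stores the run lengths here
    Map<String, short[]> pages = new HashMap<>();
    pages.put("rowIdPage", rowIdPage);
    pages.put("rowRlePage", rowIdRlePage);
    return pages;
  }

  public static void main(String[] args) {
    Map<String, short[]> pages = rleEncodeOnRowId(new short[] {0, 1, 2, 5});
    System.out.println(pages.get("rowIdPage").length); // 4
  }
}
```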
@@ -39,8 +39,7 @@ public BlockIndexerStorageForNoDictionary(Object[] dataPage, DataType dataType,
Arrays.sort(dataWithRowId);
}
short[] rowIds = extractDataAndReturnRowId(dataWithRowId, dataPage);
-    Map<String, short[]> rowIdAndRleRowIdPages =
-        rleEncodeOnRowId(rowIds, getRowIdPage(), getRowIdRlePage());
+    Map<String, short[]> rowIdAndRleRowIdPages = rleEncodeOnRowId(rowIds);
rowIdPage = rowIdAndRleRowIdPages.get("rowIdPage");
rowIdRlePage = rowIdAndRleRowIdPages.get("rowRlePage");
}
@@ -43,8 +43,7 @@ public BlockIndexerStorageForShort(byte[][] dataPage, boolean rleOnData,
Arrays.sort(dataWithRowId);
}
short[] rowIds = extractDataAndReturnRowId(dataWithRowId, dataPage);
-    Map<String, short[]> rowIdAndRleRowIdPages =
-        rleEncodeOnRowId(rowIds, getRowIdPage(), getRowIdRlePage());
+    Map<String, short[]> rowIdAndRleRowIdPages = rleEncodeOnRowId(rowIds);
rowIdPage = rowIdAndRleRowIdPages.get("rowIdPage");
rowIdRlePage = rowIdAndRleRowIdPages.get("rowRlePage");
if (rleOnData) {
@@ -150,7 +149,7 @@ private void rleEncodeOnData(ColumnWithRowId<Short>[] dataWithRowId) {
map.add(counter);
    // if the rle index size is more than 70% then rle won't give any benefit
    // so better to avoid rle index and write data as it is
-    boolean useRle = (((list.size() + map.size()) * 100) / dataWithRowId.length) < 70;
+    boolean useRle = (((list.size() + map.size()) * 100) / dataWithRowId.length) < 170;
if (useRle) {
this.dataPage = convertToDataPage(list);
dataRlePage = convertToArray(map);
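The useRle guard weighs the combined size of the RLE value list and run-length list against the original page length; raising the cutoff from 70 to 170 makes RLE acceptable even when the encoded form exceeds 70% of the input. A worked example of the arithmetic, with made-up sizes:

```java
public class UseRleThresholdExample {
  public static void main(String[] args) {
    int pageLength = 100; // entries in the original data page
    int listSize = 40;    // one data entry per run after RLE
    int mapSize = 40;     // one counter per run
    int ratio = ((listSize + mapSize) * 100) / pageLength; // = 80
    System.out.println(ratio < 70);  // false: the old cutoff rejected this page
    System.out.println(ratio < 170); // true: the new cutoff accepts it
  }
}
```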
@@ -28,6 +28,9 @@ private UnBlockIndexer() {
public static int[] uncompressIndex(int[] indexData, int[] indexMap) {
int actualSize = indexData.length;
int mapLength = indexMap.length;
+    if (indexMap.length == 0) {
+      return indexData;
+    }
for (int i = 0; i < mapLength; i++) {
actualSize += indexData[indexMap[i] + 1] - indexData[indexMap[i]] - 1;
}
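The added guard returns early when indexMap carries no RLE ranges, so the already-uncompressed indexData comes back untouched instead of going through the size computation and copy that follow. A small sketch of the short-circuit, with the expansion step elided:

```java
public class UncompressIndexSketch {
  static int[] uncompressIndex(int[] indexData, int[] indexMap) {
    if (indexMap.length == 0) {
      return indexData; // no ranges to expand: the input is already the answer
    }
    // ... the real method computes the expanded size from the ranges in
    // indexMap and materializes a new, larger index array here ...
    return indexData;
  }

  public static void main(String[] args) {
    int[] data = {3, 7, 9};
    System.out.println(uncompressIndex(data, new int[0]) == data); // true: same instance
  }
}
```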
@@ -76,6 +76,7 @@ public FileReaderImpl(int capacity) {
channel.close();
}
}
+    fileNameAndStreamCache.clear();
}

/**
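Clearing fileNameAndStreamCache once every channel is closed keeps a later read from fetching a closed FileChannel out of the cache. A hedged sketch of the pattern, assuming the cache maps file names to channels as the field name suggests; the method name finish() is invented for illustration:

```java
import java.io.IOException;
import java.nio.channels.FileChannel;
import java.util.HashMap;
import java.util.Map;

public class FileReaderCacheSketch {
  private final Map<String, FileChannel> fileNameAndStreamCache = new HashMap<>();

  // Close every cached channel, then drop the stale entries so the next
  // read reopens the file instead of hitting a closed channel.
  public void finish() throws IOException {
    for (FileChannel channel : fileNameAndStreamCache.values()) {
      if (channel != null) {
        channel.close();
      }
    }
    fileNameAndStreamCache.clear();
  }
}
```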
@@ -27,4 +27,6 @@ public interface CarbonDictionary {
void setDictionaryUsed();

  byte[] getDictionaryValue(int index);
+
+  byte[][] getAllDictionaryValues();
}
@@ -51,4 +51,7 @@ public CarbonDictionaryImpl(byte[][] dictionary, int actualSize) {
return dictionary[index];
}

+  @Override public byte[][] getAllDictionaryValues() {
+    return dictionary;
+  }
}
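The new getAllDictionaryValues() accessor pairs with the CarbonDictionaryWrapper hunk at the bottom of this diff: consumers take the backing byte[][] in one call instead of copying entries index by index. A trimmed-down sketch of both sides (the real interface has more methods than shown here):

```java
public class DictionaryAccessSketch {
  interface CarbonDictionary {
    byte[] getDictionaryValue(int index);
    byte[][] getAllDictionaryValues();
  }

  static class CarbonDictionaryImpl implements CarbonDictionary {
    private final byte[][] dictionary;

    CarbonDictionaryImpl(byte[][] dictionary) {
      this.dictionary = dictionary;
    }

    @Override public byte[] getDictionaryValue(int index) {
      return dictionary[index];
    }

    // Hands out the backing array; callers share it rather than copy it.
    @Override public byte[][] getAllDictionaryValues() {
      return dictionary;
    }
  }

  public static void main(String[] args) {
    CarbonDictionary dict = new CarbonDictionaryImpl(new byte[][] {{1}, {2}});
    byte[][] binaries = dict.getAllDictionaryValues(); // one handoff, no loop
    System.out.println(binaries.length); // 2
  }
}
```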
@@ -75,7 +75,7 @@ public ColumnarVectorWrapperDirectWithInvertedIndex(CarbonColumnVector columnVec
}

@Override public void putNull(int rowId) {
-    columnVector.putNull(invertedIndex[rowId]);
+    columnVector.putNull(rowId);
}

@Override public void putFloats(int rowId, int count, float[] src, int srcIndex) {
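The fix stops routing nulls through the inverted index: a null is now recorded at the incoming rowId, while the value-writing methods presumably still remap positions through invertedIndex (inferred from the wrapper's name, since their bodies are not shown in this hunk). A sketch of the distinction with hypothetical data:

```java
public class InvertedIndexPutSketch {
  private final int[] invertedIndex = {2, 0, 1}; // hypothetical row-order mapping
  private final boolean[] nulls = new boolean[3];
  private final float[] values = new float[3];

  // Values go through the mapping (assumed from the wrapper's purpose).
  void putFloat(int rowId, float value) {
    values[invertedIndex[rowId]] = value;
  }

  // After the fix, nulls land at the incoming rowId itself.
  void putNull(int rowId) {
    nulls[rowId] = true;
  }
}
```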
@@ -57,15 +57,25 @@ class AllDataTypesTestCaseFilter extends QueryTest with BeforeAndAfterAll {
test("verify like query ends with filter push down") {
val df = sql("select * from alldatatypestableFilter where empname like '%nandh'").queryExecution
.sparkPlan
-    assert(df.asInstanceOf[CarbonDataSourceScan].metadata
-      .get("PushedFilters").get.contains("CarbonEndsWith"))
+    if (df.isInstanceOf[CarbonDataSourceScan]) {
+      assert(df.asInstanceOf[CarbonDataSourceScan].metadata
+        .get("PushedFilters").get.contains("CarbonEndsWith"))
+    } else {
+      assert(df.children.head.asInstanceOf[CarbonDataSourceScan].metadata
+        .get("PushedFilters").get.contains("CarbonEndsWith"))
+    }
}

test("verify like query contains with filter push down") {
val df = sql("select * from alldatatypestableFilter where empname like '%nand%'").queryExecution
.sparkPlan
-    assert(df.asInstanceOf[CarbonDataSourceScan].metadata
-      .get("PushedFilters").get.contains("CarbonContainsWith"))
+    if (df.isInstanceOf[CarbonDataSourceScan]) {
+      assert(df.asInstanceOf[CarbonDataSourceScan].metadata
+        .get("PushedFilters").get.contains("CarbonContainsWith"))
+    } else {
+      assert(df.children.head.asInstanceOf[CarbonDataSourceScan].metadata
+        .get("PushedFilters").get.contains("CarbonContainsWith"))
+    }
}

override def afterAll {
@@ -28,10 +28,7 @@ public class CarbonDictionaryWrapper implements Dictionary {
private byte[][] binaries;

CarbonDictionaryWrapper(CarbonDictionary dictionary) {
-    binaries = new byte[dictionary.getDictionarySize()][];
-    for (int i = 0; i < binaries.length; i++) {
-      binaries[i] = dictionary.getDictionaryValue(i);
-    }
+    binaries = dictionary.getAllDictionaryValues();
}

@Override public int decodeToInt(int id) {