Skip to content

Commit

Permalink
Fixed "failed to uncompress" exception for the index server
Browse files Browse the repository at this point in the history
  • Loading branch information
kunal642 committed Jun 27, 2019
1 parent 18842b7 commit ac8e0b7
Show file tree
Hide file tree
Showing 2 changed files with 3 additions and 3 deletions.
Expand Up @@ -211,7 +211,7 @@ public List<ExtendedBlocklet> readBlocklet(String tablePath, String queryId) thr
}

final byte[] unCompressByte =
new SnappyCompressor().unCompressByte(data, data.length - actualDataLen, actualDataLen);
new SnappyCompressor().unCompressByte(data, this.dataSize - actualDataLen, actualDataLen);
ExtendedByteArrayInputStream ebis = new ExtendedByteArrayInputStream(unCompressByte);
ExtendedDataInputStream eDIS = new ExtendedDataInputStream(ebis);
List<ExtendedBlocklet> extendedBlockletList = new ArrayList<>();
Expand Down
Expand Up @@ -130,8 +130,8 @@ class EmbeddedDataMapJob extends AbstractDataMapJob {
val splits = IndexServer.getSplits(dataMapFormat).getExtendedBlockets(dataMapFormat
.getCarbonTable.getTablePath, dataMapFormat.getQueryId)
// Fire a job to clear the cache from executors as Embedded mode does not maintain the cache.
IndexServer.invalidateSegmentCache(dataMapFormat.getCarbonTable.getDatabaseName,
dataMapFormat.getCarbonTable.getTableName, dataMapFormat.getValidSegmentIds.asScala.toArray)
IndexServer.invalidateSegmentCache(dataMapFormat.getCarbonTable, dataMapFormat
.getValidSegmentIds.asScala.toArray)
spark.sparkContext.setLocalProperty("spark.job.description", originalJobDesc)
splits
}
Expand Down

0 comments on commit ac8e0b7

Please sign in to comment.