From ed5f2251b6c0f10c3166c4a5480734c6741df4a4 Mon Sep 17 00:00:00 2001
From: Marcell Ortutay
Date: Sat, 15 Sep 2018 21:05:12 -0700
Subject: [PATCH] PHOENIX-4902 Use only compressed portion of hash cache memory buffer

---
 .../src/main/java/org/apache/phoenix/join/HashCacheClient.java | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/phoenix-core/src/main/java/org/apache/phoenix/join/HashCacheClient.java b/phoenix-core/src/main/java/org/apache/phoenix/join/HashCacheClient.java
index 315c51568cf..b864fc2de05 100644
--- a/phoenix-core/src/main/java/org/apache/phoenix/join/HashCacheClient.java
+++ b/phoenix-core/src/main/java/org/apache/phoenix/join/HashCacheClient.java
@@ -20,6 +20,7 @@
 import java.io.DataOutputStream;
 import java.io.IOException;
 import java.sql.SQLException;
+import java.util.Arrays;
 import java.util.List;
 
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -162,7 +163,7 @@ private void serialize(ImmutableBytesWritable ptr, ResultIterator iterator, long
             byte[] compressed = new byte[maxCompressedSize]; // size for worst case
             int compressedSize = Snappy.compress(baOut.getBuffer(), 0, baOut.size(), compressed, 0);
             // Last realloc to size of compressed buffer.
-            ptr.set(compressed,0,compressedSize);
+            ptr.set(Arrays.copyOfRange(compressed, 0, compressedSize));
         } finally {
             dataOut.close();
         }
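
Why the copy matters: the worst-case output buffer allocated for compression
(maxCompressedSize) is usually much larger than the bytes actually written,
and pointing the ImmutableBytesWritable at (compressed, 0, compressedSize)
keeps the whole worst-case array referenced by the hash cache entry. Copying
out only the compressed portion lets the oversized buffer be garbage
collected, at the cost of one extra copy of compressedSize bytes during
serialization. A minimal standalone sketch of the difference (plain Java, not
Phoenix's actual classes; the sizes are made up for illustration):

    import java.util.Arrays;

    public class TrimCompressedBuffer {
        public static void main(String[] args) {
            int maxCompressedSize = 1024;   // hypothetical worst-case allocation
            byte[] compressed = new byte[maxCompressedSize];
            int compressedSize = 300;       // hypothetical actual compressed length

            // Before the patch: an (array, offset, length) view still references
            // the full 1024-byte array, so the unused tail stays resident for as
            // long as the cache entry lives.
            byte[] retainedBefore = compressed;

            // After the patch: copy out only the compressed portion; the
            // worst-case buffer becomes garbage once serialization finishes.
            byte[] retainedAfter = Arrays.copyOfRange(compressed, 0, compressedSize);

            System.out.println("retained before: " + retainedBefore.length + " bytes");
            System.out.println("retained after:  " + retainedAfter.length + " bytes");
        }
    }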