HBASE-26777 BufferedDataBlockEncoder$OffheapDecodedExtendedCell.deepC… (#4139)

Signed-off-by: Andrew Purtell <apurtell@apache.org>
stoty committed Mar 4, 2022
1 parent f4b05a8 commit 7d2457e0759d3498e547ac5fc072ca5d319dc8eb
Showing 3 changed files with 24 additions and 5 deletions.
hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValueUtil.java
@@ -131,6 +131,8 @@ public static byte[] copyToNewByteArray(final Cell cell) {
     //Cell#getSerializedSize returns the serialized size of the Source cell, which may
     //not serialize all fields. We are constructing a KeyValue backing array here,
     //which does include all fields, and must allocate accordingly.
+    //TODO we could probably use Cell#getSerializedSize safely, the errors were
+    //caused by cells corrupted by use-after-free bugs
     int v1Length = length(cell.getRowLength(), cell.getFamilyLength(),
       cell.getQualifierLength(), cell.getValueLength(), cell.getTagsLength(), true);
     byte[] backingBytes = new byte[v1Length];
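The length(...) call sizes a full v1 KeyValue serialization from the individual field lengths instead of trusting Cell#getSerializedSize. A minimal sketch of that arithmetic, assuming the standard KeyValue v1 layout; the names here are illustrative, not the actual KeyValueUtil internals:

// Sketch of v1 KeyValue sizing, assuming the layout:
// 4-byte key length + 4-byte value length + key + value [+ 2-byte tags length + tags]
static int keyValueV1Length(int rlen, int flen, int qlen, int vlen, int tlen,
    boolean withTags) {
  // key = 2-byte row length + row + 1-byte family length + family + qualifier
  //       + 8-byte timestamp + 1-byte type
  int keyLength = 2 + rlen + 1 + flen + qlen + 8 + 1;
  int length = 4 + 4 + keyLength + vlen;
  if (withTags && tlen > 0) {
    length += 2 + tlen; // the tags block is optional in the v1 layout
  }
  return length;
}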
hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -94,6 +94,7 @@
 import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
 import org.apache.hadoop.hbase.HDFSBlocksDistribution;
 import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.KeyValueUtil;
 import org.apache.hadoop.hbase.MetaCellComparator;
 import org.apache.hadoop.hbase.NamespaceDescriptor;
 import org.apache.hadoop.hbase.NotServingRegionException;
@@ -7871,7 +7872,7 @@ private List<Cell> getInternal(Get get, boolean withCoprocessor, long nonceGroup
       // See more details in HBASE-26036.
       for (Cell cell : tmp) {
         results.add(cell instanceof ByteBufferExtendedCell ?
-          ((ByteBufferExtendedCell) cell).deepClone(): cell);
+          KeyValueUtil.copyToNewKeyValue(cell) : cell);
       }
     }

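The replacement, KeyValueUtil.copyToNewKeyValue, reads the source cell through the Cell getters and rebuilds it as an on-heap KeyValue, so the copy cannot alias an off-heap block buffer that is later recycled, and it does not depend on the encoder cell's own deepClone(). A rough sketch of its shape, illustrative rather than the verbatim HBase source:

// Sketch: serialize every field of the source cell into a fresh heap array
// (sized as in the copyToNewByteArray hunk above) and wrap it in a KeyValue.
public static KeyValue copyToNewKeyValue(final Cell cell) {
  byte[] bytes = copyToNewByteArray(cell); // full KeyValue-format, on-heap copy
  KeyValue kv = new KeyValue(bytes, 0, bytes.length);
  kv.setSequenceId(cell.getSequenceId());
  return kv;
}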
hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestCheckAndMutateWithByteBuff.java
@@ -32,6 +32,7 @@
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.io.ByteBuffAllocator;
 import org.apache.hadoop.hbase.io.DeallocateRewriteByteBuffAllocator;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.BlockCacheFactory;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionFileSystem;
@@ -87,8 +88,20 @@ public static void tearDownAfterClass() throws Exception {
   }
 
   @Test
-  public void testCheckAndMutateWithByteBuff() throws Exception {
-    Table testTable = createTable(TableName.valueOf(name.getMethodName()));
+  public void testCheckAndMutateWithByteBuffNoEncode() throws Exception {
+    testCheckAndMutateWithByteBuff(TableName.valueOf(name.getMethodName()), DataBlockEncoding.NONE);
+  }
+
+  @Test
+  public void testCheckAndMutateWithByteBuffEncode() throws Exception {
+    // Tests for HBASE-26777.
+    // As most HBase.getRegion() calls have been factored out from HBase, you'd need to revert
+    // both HBASE-26777, and the HBase.get() replacements from HBASE-26036 for this test to fail
+    testCheckAndMutateWithByteBuff(TableName.valueOf(name.getMethodName()), DataBlockEncoding.FAST_DIFF);
+  }
+
+  private void testCheckAndMutateWithByteBuff(TableName tableName, DataBlockEncoding dbe) throws Exception {
+    Table testTable = createTable(tableName, dbe);
     byte[] checkRow = Bytes.toBytes("checkRow");
     byte[] checkQualifier = Bytes.toBytes("cq");
     byte[] checkValue = Bytes.toBytes("checkValue");
@@ -104,10 +117,13 @@ public void testCheckAndMutateWithByteBuff() throws Exception {
       Bytes.toBytes("testValue"))));
   }
 
-  private Table createTable(TableName tableName)
+  private Table createTable(TableName tableName, DataBlockEncoding dbe)
     throws IOException {
     TableDescriptor td = TableDescriptorBuilder.newBuilder(tableName)
-      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF).setBlocksize(100).build())
+      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(CF)
+        .setBlocksize(100)
+        .setDataBlockEncoding(dbe)
+        .build())
       .build();
     return TEST_UTIL.createTable(td, null);
   }

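With FAST_DIFF set on the column family, reads decode through BufferedDataBlockEncoder, which is the code path HBASE-26777 hardens. A hypothetical sketch of the kind of call the parameterized test drives, assuming the CF, checkRow, checkQualifier, and checkValue fixtures shown above:

// Hypothetical usage, not part of the commit: checkAndMutate against a table
// whose column family uses FAST_DIFF data block encoding.
Table testTable = createTable(TableName.valueOf("t"), DataBlockEncoding.FAST_DIFF);
testTable.put(new Put(checkRow).addColumn(CF, checkQualifier, checkValue));
// The check read decodes cells via BufferedDataBlockEncoder; before this fix,
// copies of those decoded cells could reference an already-recycled ByteBuff.
boolean ok = testTable.checkAndMutate(checkRow, CF)
  .qualifier(checkQualifier)
  .ifEquals(checkValue)
  .thenPut(new Put(checkRow).addColumn(CF, checkQualifier, Bytes.toBytes("testValue")));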