From ccbff0b9cc464227ae9cef3a720eb81fd83d4e77 Mon Sep 17 00:00:00 2001
From: niuyulin
Date: Sun, 12 Jul 2020 15:29:24 +0800
Subject: [PATCH] HBASE-24659 Calculate FIXED_OVERHEAD automatically

---
 .../hadoop/hbase/io/hfile/HFileContext.java   |  8 +----
 .../hadoop/hbase/io/hfile/BlockCacheKey.java  | 11 ++----
 .../hadoop/hbase/io/hfile/HFileBlock.java     |  9 +----
 .../hadoop/hbase/io/hfile/LruBlockCache.java  | 34 ++++++++++++-------
 .../hadoop/hbase/regionserver/HRegion.java    |  7 +---
 .../hadoop/hbase/regionserver/HStore.java     |  4 +--
 .../apache/hadoop/hbase/io/TestHeapSize.java  | 16 ++++++++-
 7 files changed, 43 insertions(+), 46 deletions(-)

diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
index ea4782d0351f..cfadb6cfd337 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileContext.java
@@ -37,13 +37,7 @@
  */
 @InterfaceAudience.Private
 public class HFileContext implements HeapSize, Cloneable {
-  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
-      // Algorithm, checksumType, encoding, Encryption.Context, hfileName reference,
-      5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
-      // usesHBaseChecksum, includesMvcc, includesTags and compressTags
-      4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG +
-      //byte[] headers for column family and table name
-      2 * ClassSize.ARRAY + 2 * ClassSize.REFERENCE);
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileContext.class, false);
 
   private static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java
index 58d5c00d2c50..4683c3520c1b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCacheKey.java
@@ -19,7 +19,6 @@
 
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.io.HeapSize;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 
 /**
@@ -42,7 +41,8 @@ public BlockCacheKey(String hfileName, long offset) {
     this(hfileName, offset, true, BlockType.DATA);
   }
 
-  public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica, BlockType blockType) {
+  public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica,
+      BlockType blockType) {
     this.isPrimaryReplicaBlock = isPrimaryReplica;
     this.hfileName = hfileName;
     this.offset = offset;
@@ -71,12 +71,7 @@ public String toString() {
     return this.hfileName + '_' + this.offset;
   }
 
-  public static final long FIXED_OVERHEAD = ClassSize.align(
-      ClassSize.OBJECT +
-      Bytes.SIZEOF_BOOLEAN +
-      ClassSize.REFERENCE + // this.hfileName
-      ClassSize.REFERENCE + // this.blockType
-      Bytes.SIZEOF_LONG); // this.offset
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(BlockCacheKey.class, false);
 
   /**
    * Strings have two bytes per character due to default Java Unicode encoding
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
index 6b145718f27e..f4fdb9b27326 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java
@@ -113,14 +113,7 @@
 @InterfaceAudience.Private
 public class HFileBlock implements Cacheable {
   private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
-  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
-      // BlockType, ByteBuff, MemoryType, HFileContext, ByteBuffAllocator
-      5 * ClassSize.REFERENCE +
-      // On-disk size, uncompressed size, and next block's on-disk size
-      // bytePerChecksum and onDiskDataSize
-      4 * Bytes.SIZEOF_INT +
-      // This and previous block offset
-      2 * Bytes.SIZEOF_LONG);
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileBlock.class, false);
 
   // Block Header fields.
 
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
index 5587c4033bcf..d15098972a7c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java
@@ -37,7 +37,6 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.io.HeapSize;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.yetus.audience.InterfaceAudience;
@@ -440,7 +439,7 @@ public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory)
     map.put(cacheKey, cb);
     long val = elements.incrementAndGet();
     if (buf.getBlockType().isData()) {
-       dataBlockElements.increment();
+      dataBlockElements.increment();
     }
     if (LOG.isTraceEnabled()) {
       long size = map.size();
@@ -497,7 +496,7 @@ private long updateSizeMetrics(LruCachedBlock cb, boolean evict) {
       heapsize *= -1;
     }
     if (bt != null && bt.isData()) {
-       dataBlockSize.add(heapsize);
+      dataBlockSize.add(heapsize);
     }
     return size.addAndGet(heapsize);
   }
@@ -583,8 +582,9 @@ public int evictBlocksByHfileName(String hfileName) {
     int numEvicted = 0;
     for (BlockCacheKey key : map.keySet()) {
       if (key.getHfileName().equals(hfileName)) {
-        if (evictBlock(key))
+        if (evictBlock(key)) {
           ++numEvicted;
+        }
       }
     }
     if (victimHandler != null) {
@@ -657,7 +657,9 @@ long getOverhead() {
   void evict() {
 
     // Ensure only one eviction at a time
-    if(!evictionLock.tryLock()) return;
+    if (!evictionLock.tryLock()) {
+      return;
+    }
 
     try {
       evictionInProgress = true;
@@ -670,7 +672,9 @@ void evict() {
           StringUtils.byteDesc(currentSize));
       }
 
-      if (bytesToFree <= 0) return;
+      if (bytesToFree <= 0) {
+        return;
+      }
 
       // Instantiate priority buckets
       BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize, singleSize());
@@ -945,7 +949,9 @@ public void run() {
           }
         }
         LruBlockCache cache = this.cache.get();
-        if (cache == null) break;
+        if (cache == null) {
+          break;
+        }
         cache.evict();
       }
     }
@@ -1022,10 +1028,8 @@ public CacheStats getStats() {
     return this.stats;
   }
 
-  public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
-      (4 * Bytes.SIZEOF_LONG) + (11 * ClassSize.REFERENCE) +
-      (6 * Bytes.SIZEOF_FLOAT) + (2 * Bytes.SIZEOF_BOOLEAN)
-      + ClassSize.OBJECT);
+  public final static long CACHE_FIXED_OVERHEAD =
+      ClassSize.estimateBase(LruBlockCache.class, false);
 
   @Override
   public long heapSize() {
@@ -1093,9 +1097,13 @@ public String getFilename() {
       @Override
       public int compareTo(CachedBlock other) {
         int diff = this.getFilename().compareTo(other.getFilename());
-        if (diff != 0) return diff;
+        if (diff != 0) {
+          return diff;
+        }
         diff = Long.compare(this.getOffset(), other.getOffset());
-        if (diff != 0) return diff;
+        if (diff != 0) {
+          return diff;
+        }
         if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
           throw new IllegalStateException(this.getCachedTime() + ", " + other.getCachedTime());
         }
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index 40a009c2c7c0..5645bf1e3c39 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -8405,12 +8405,7 @@ private static List<Cell> sort(List<Cell> cells, final CellComparator comparator
     return cells;
   }
 
-  public static final long FIXED_OVERHEAD = ClassSize.align(
-      ClassSize.OBJECT +
-      56 * ClassSize.REFERENCE +
-      3 * Bytes.SIZEOF_INT +
-      14 * Bytes.SIZEOF_LONG +
-      3 * Bytes.SIZEOF_BOOLEAN);
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HRegion.class, false);
 
   // woefully out of date - currently missing:
   // 1 x HashMap - coprocessorServiceHandlers
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
index 8116507230c2..a05d4a6d31ef 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java
@@ -2566,9 +2566,7 @@ public CacheConfig getCacheConfig() {
     return this.cacheConf;
   }
 
-  public static final long FIXED_OVERHEAD =
-      ClassSize.align(ClassSize.OBJECT + (29 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)
-          + (6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HStore.class, false);
 
   public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD +
       ClassSize.OBJECT + ClassSize.REENTRANT_LOCK
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
index 108de7037d32..3d713052559e 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/TestHeapSize.java
@@ -602,5 +602,19 @@ public void testObjectSize() throws IOException {
       assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 8);
     }
   }
 
-}
+  @Test
+  public void testAutoCalcFixedOverHead() {
+    Class[] classList = new Class[] { HFileContext.class, HRegion.class, BlockCacheKey.class,
+      HFileBlock.class, HStore.class, LruBlockCache.class };
+    for (Class cl : classList) {
+      // do estimate in advance to ensure class is loaded
+      ClassSize.estimateBase(cl, false);
+
+      long startTime = System.currentTimeMillis();
+      ClassSize.estimateBase(cl, false);
+      long endTime = System.currentTimeMillis();
+      assertTrue(endTime - startTime < 5);
+    }
+  }
+}
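
Reviewer note: a minimal standalone sketch of what the change does, not part of the patch. ClassSize.estimateBase(cl, false) builds an aligned header-plus-declared-fields estimate by reflection, so the FIXED_OVERHEAD constants no longer have to be counted by hand. The class name FixedOverheadSketch and the printout are illustrative only; the hand-counted expression mirrors the constant this patch removes from BlockCacheKey.

    import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
    import org.apache.hadoop.hbase.util.Bytes;
    import org.apache.hadoop.hbase.util.ClassSize;

    public class FixedOverheadSketch {
      public static void main(String[] args) {
        // Reflection-based estimate, as now assigned to BlockCacheKey.FIXED_OVERHEAD:
        // object header plus one slot per declared instance field, aligned to 8 bytes.
        long estimated = ClassSize.estimateBase(BlockCacheKey.class, false);

        // The hand-counted layout the patch removes (a boolean, two references and a
        // long on top of the object header), kept here only for comparison.
        long handCounted = ClassSize.align(ClassSize.OBJECT + Bytes.SIZEOF_BOOLEAN
            + 2 * ClassSize.REFERENCE + Bytes.SIZEOF_LONG);

        if (estimated != handCounted) {
          // Passing true asks estimateBase to log details of its calculation, the same
          // trick TestHeapSize uses just before an assertion is about to fail.
          ClassSize.estimateBase(BlockCacheKey.class, true);
        }
        System.out.println("estimated=" + estimated + " handCounted=" + handCounted);
      }
    }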