HBASE-24659 Calculate FIXED_OVERHEAD automatically #2018

Merged · 1 commit · Aug 6, 2020
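The motivation, in miniature: a hand-maintained FIXED_OVERHEAD constant silently goes stale when fields are added or removed, while ClassSize.estimateBase recomputes the shallow size reflectively. A hedged sketch (the CacheEntry classes are invented for illustration; ClassSize and its align/estimateBase methods are the real HBase utilities):

```java
import org.apache.hadoop.hbase.util.ClassSize;

public class OverheadDriftDemo {
  // How a cached class looked when its manual constant was written.
  static class CacheEntry {
    long offset;
    Object payload;
  }

  // The same class after a field was added without updating the constant.
  static class CacheEntryV2 {
    long offset;
    Object payload;
    boolean isPrimary; // the manual constant below misses this field
  }

  public static void main(String[] args) {
    // Manual accounting, the style this PR removes: header + one reference + one long.
    long manual = ClassSize.align(ClassSize.OBJECT + ClassSize.REFERENCE + 8);
    // Reflective accounting, the style this PR adopts.
    long v1 = ClassSize.estimateBase(CacheEntry.class, false);
    long v2 = ClassSize.estimateBase(CacheEntryV2.class, false);
    System.out.println("manual=" + manual + " v1=" + v1 + " v2=" + v2);
    // manual tracks v1 but silently undercounts v2.
  }
}
```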
@@ -37,13 +37,7 @@
*/
@InterfaceAudience.Private
public class HFileContext implements HeapSize, Cloneable {
-  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
-      // Algorithm, checksumType, encoding, Encryption.Context, hfileName reference,
-      5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
-      // usesHBaseChecksum, includesMvcc, includesTags and compressTags
-      4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG +
-      // byte[] headers for column family and table name
-      2 * ClassSize.ARRAY + 2 * ClassSize.REFERENCE);
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileContext.class, false);
Contributor:

Or, sorry, yeah, FIXED_OVERHEAD is 'this' class's size; i.e. the shallow size (I think).

Contributor:

I went back through the history of ClassSize... it's so old.


private static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;

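For context on the shallow-vs-deep exchange above: a shallow estimate counts each reference field as a pointer-sized slot without following it into the referenced object. A minimal sketch of that idea, assuming a 64-bit JVM; this is not the actual ClassSize implementation:

```java
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;

// Simplified shallow-size estimation: walk the instance fields,
// count references as pointer-sized slots without following them,
// and align the total to an 8-byte boundary.
public final class ShallowSizeSketch {
  private static final int OBJECT_HEADER = 16; // assumed 64-bit JVM header
  private static final int REFERENCE = 8;      // assumed uncompressed oops

  public static long estimateShallow(Class<?> cl) {
    long size = OBJECT_HEADER;
    for (Class<?> c = cl; c != null; c = c.getSuperclass()) {
      for (Field f : c.getDeclaredFields()) {
        if (Modifier.isStatic(f.getModifiers())) {
          continue; // statics are per-class, not per-instance
        }
        Class<?> t = f.getType();
        if (!t.isPrimitive()) {
          size += REFERENCE; // shallow: the pointer, not the referent
        } else if (t == long.class || t == double.class) {
          size += 8;
        } else if (t == int.class || t == float.class) {
          size += 4;
        } else if (t == short.class || t == char.class) {
          size += 2;
        } else { // boolean, byte
          size += 1;
        }
      }
    }
    return (size + 7) & ~7L; // 8-byte alignment
  }
}
```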
@@ -19,7 +19,6 @@

import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.io.HeapSize;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;

/**
@@ -42,7 +41,8 @@ public BlockCacheKey(String hfileName, long offset) {
this(hfileName, offset, true, BlockType.DATA);
}

-  public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica, BlockType blockType) {
+  public BlockCacheKey(String hfileName, long offset, boolean isPrimaryReplica,
+      BlockType blockType) {
this.isPrimaryReplicaBlock = isPrimaryReplica;
this.hfileName = hfileName;
this.offset = offset;
@@ -71,12 +71,7 @@ public String toString() {
return this.hfileName + '_' + this.offset;
}

-  public static final long FIXED_OVERHEAD = ClassSize.align(
-      ClassSize.OBJECT +
-      Bytes.SIZEOF_BOOLEAN +
-      ClassSize.REFERENCE + // this.hfileName
-      ClassSize.REFERENCE + // this.blockType
-      Bytes.SIZEOF_LONG); // this.offset
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(BlockCacheKey.class, false);

/**
* Strings have two bytes per character due to default Java Unicode encoding
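The diff truncates the heapSize() method that this comment documents; in BlockCacheKey it plausibly adds the deep cost of the hfile name on top of the fixed overhead, along these lines (a sketch under that assumption, not the verbatim method):

```java
@Override
public long heapSize() {
  // Shallow overhead of the key itself, plus the String object, plus
  // two bytes per UTF-16 character of the hfile name.
  return ClassSize.align(FIXED_OVERHEAD + ClassSize.STRING + 2 * hfileName.length());
}
```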
@@ -113,14 +113,7 @@
@InterfaceAudience.Private
public class HFileBlock implements Cacheable {
private static final Logger LOG = LoggerFactory.getLogger(HFileBlock.class);
-  public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
-      // BlockType, ByteBuff, MemoryType, HFileContext, ByteBuffAllocator
-      5 * ClassSize.REFERENCE +
-      // On-disk size, uncompressed size, and next block's on-disk size
-      // bytePerChecksum and onDiskDataSize
-      4 * Bytes.SIZEOF_INT +
-      // This and previous block offset
-      2 * Bytes.SIZEOF_LONG);
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HFileBlock.class, false);

// Block Header fields.

@@ -37,7 +37,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.io.HeapSize;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
-import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;
import org.apache.hadoop.util.StringUtils;
import org.apache.yetus.audience.InterfaceAudience;
@@ -440,7 +439,7 @@ public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory)
map.put(cacheKey, cb);
long val = elements.incrementAndGet();
if (buf.getBlockType().isData()) {
-        dataBlockElements.increment();
+      dataBlockElements.increment();
}
if (LOG.isTraceEnabled()) {
long size = map.size();
@@ -497,7 +496,7 @@ private long updateSizeMetrics(LruCachedBlock cb, boolean evict) {
heapsize *= -1;
}
if (bt != null && bt.isData()) {
-        dataBlockSize.add(heapsize);
+      dataBlockSize.add(heapsize);
}
return size.addAndGet(heapsize);
}
@@ -583,8 +582,9 @@ public int evictBlocksByHfileName(String hfileName) {
int numEvicted = 0;
for (BlockCacheKey key : map.keySet()) {
if (key.getHfileName().equals(hfileName)) {
-        if (evictBlock(key))
+        if (evictBlock(key)) {
          ++numEvicted;
+        }
}
}
if (victimHandler != null) {
@@ -657,7 +657,9 @@ long getOverhead() {
void evict() {

// Ensure only one eviction at a time
-    if(!evictionLock.tryLock()) return;
+    if (!evictionLock.tryLock()) {
+      return;
+    }

try {
evictionInProgress = true;
@@ -670,7 +672,9 @@ void evict() {
StringUtils.byteDesc(currentSize));
}

-      if (bytesToFree <= 0) return;
+      if (bytesToFree <= 0) {
+        return;
+      }

// Instantiate priority buckets
BlockBucket bucketSingle = new BlockBucket("single", bytesToFree, blockSize, singleSize());
@@ -945,7 +949,9 @@ public void run() {
}
}
LruBlockCache cache = this.cache.get();
-        if (cache == null) break;
+        if (cache == null) {
+          break;
+        }
cache.evict();
}
}
@@ -1022,10 +1028,8 @@ public CacheStats getStats() {
return this.stats;
}

-  public final static long CACHE_FIXED_OVERHEAD = ClassSize.align(
-      (4 * Bytes.SIZEOF_LONG) + (11 * ClassSize.REFERENCE) +
-      (6 * Bytes.SIZEOF_FLOAT) + (2 * Bytes.SIZEOF_BOOLEAN)
-      + ClassSize.OBJECT);
+  public final static long CACHE_FIXED_OVERHEAD =
+      ClassSize.estimateBase(LruBlockCache.class, false);

@Override
public long heapSize() {
@@ -1093,9 +1097,13 @@ public String getFilename() {
@Override
public int compareTo(CachedBlock other) {
int diff = this.getFilename().compareTo(other.getFilename());
-      if (diff != 0) return diff;
+      if (diff != 0) {
+        return diff;
+      }
diff = Long.compare(this.getOffset(), other.getOffset());
-      if (diff != 0) return diff;
+      if (diff != 0) {
+        return diff;
+      }
if (other.getCachedTime() < 0 || this.getCachedTime() < 0) {
throw new IllegalStateException(this.getCachedTime() + ", " + other.getCachedTime());
}
@@ -8405,12 +8405,7 @@ private static List<Cell> sort(List<Cell> cells, final CellComparator comparator
return cells;
}

-  public static final long FIXED_OVERHEAD = ClassSize.align(
-      ClassSize.OBJECT +
-      56 * ClassSize.REFERENCE +
-      3 * Bytes.SIZEOF_INT +
-      14 * Bytes.SIZEOF_LONG +
-      3 * Bytes.SIZEOF_BOOLEAN);
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HRegion.class, false);
Contributor:

Does ClassSize come up with the same general numbers as the old manual technique? Does it do deep size rather than shallow?

Contributor (Author):

I think ClassSize.estimateBase only calculates the shallow size.

Contributor:

I thought it was critical that it did deep size? Do you get roughly the same numbers?


// woefully out of date - currently missing:
// 1 x HashMap - coprocessorServiceHandlers
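One hedged way to answer the reviewer's parity question: recompute the old manual constant and print it next to the reflective estimate. The harness below is illustrative and not part of the PR; the second argument to estimateBase enables a debug printout of the per-field accounting.

```java
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ClassSize;

public class FixedOverheadParity {
  public static void main(String[] args) {
    // The constant as it was hand-computed before this PR.
    long manual = ClassSize.align(
        ClassSize.OBJECT +
        56 * ClassSize.REFERENCE +
        3 * Bytes.SIZEOF_INT +
        14 * Bytes.SIZEOF_LONG +
        3 * Bytes.SIZEOF_BOOLEAN);
    // The reflective estimate this PR switches to; 'true' logs field details.
    long estimated = ClassSize.estimateBase(HRegion.class, true);
    System.out.println("manual=" + manual + ", estimated=" + estimated);
  }
}
```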
@@ -2566,9 +2566,7 @@ public CacheConfig getCacheConfig() {
return this.cacheConf;
}

-  public static final long FIXED_OVERHEAD =
-      ClassSize.align(ClassSize.OBJECT + (29 * ClassSize.REFERENCE) + (2 * Bytes.SIZEOF_LONG)
-          + (6 * Bytes.SIZEOF_INT) + (2 * Bytes.SIZEOF_BOOLEAN));
+  public static final long FIXED_OVERHEAD = ClassSize.estimateBase(HStore.class, false);

public static final long DEEP_OVERHEAD = ClassSize.align(FIXED_OVERHEAD
+ ClassSize.OBJECT + ClassSize.REENTRANT_LOCK
@@ -602,5 +602,19 @@ public void testObjectSize() throws IOException {
assertEquals(ClassSize.ARRAY, ClassSize.OBJECT + 8);
}
}
-}
+
+  @Test
+  public void testAutoCalcFixedOverHead() {
+    Class[] classList = new Class[] { HFileContext.class, HRegion.class, BlockCacheKey.class,
+      HFileBlock.class, HStore.class, LruBlockCache.class };
+    for (Class cl : classList) {
+      // do estimate in advance to ensure class is loaded
+      ClassSize.estimateBase(cl, false);
+
+      long startTime = System.currentTimeMillis();
+      ClassSize.estimateBase(cl, false);
+      long endTime = System.currentTimeMillis();
+      assertTrue(endTime - startTime < 5);
+    }
+  }
}