Skip to content

Commit

Permalink
Reduce MemoryPoolAddressOrderedList casting in Usage
Browse files Browse the repository at this point in the history
Use virtual method callbacks in the base class (MemoryPool) to reduce
casting to derived classes.
Add empty virtual methods to the base class.
Rename recycleHeapChunk(env, ...) to recycleHeapChunkForFreeList(env, ...).

Signed-off-by: Lin Hu <linhu@ca.ibm.com>
  • Loading branch information
LinHu2016 committed May 18, 2021
1 parent 1c6dead commit 2a61988
Show file tree
Hide file tree
Showing 9 changed files with 93 additions and 93 deletions.
76 changes: 72 additions & 4 deletions gc/base/MemoryPool.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -103,6 +103,9 @@ class MM_MemoryPool : public MM_BaseVirtual

uintptr_t _darkMatterBytes; /**< estimate of the dark matter in this pool (in bytes) */
uintptr_t _darkMatterSamples;

uintptr_t _scannableBytes; /**< estimate of scannable bytes in the pool (only out of sampled objects) */
uintptr_t _nonScannableBytes; /**< estimate of non-scannable bytes in the pool (only out of sampled objects) */
/*
* Function members
*/
Expand Down Expand Up @@ -346,6 +349,67 @@ class MM_MemoryPool : public MM_BaseVirtual

MMINLINE virtual uintptr_t getDarkMatterSamples() { return _darkMatterSamples; }

/**
 * @return the pool's actual free memory plus its dark matter estimate, in bytes
 */
MMINLINE virtual uintptr_t getFreeMemoryAndDarkMatterBytes() {
	uintptr_t freeBytes = getActualFreeMemorySize();
	uintptr_t darkMatterBytes = getDarkMatterBytes();
	return freeBytes + darkMatterBytes;
}

/**
 * Record a fresh snapshot of the pool's free-memory statistics.
 *
 * @param env the calling thread's environment
 * @param freeBytes total free bytes now in the pool
 * @param freeEntryCount number of free memory elements now in the pool
 * @param largestFreeEntry size in bytes of the largest free memory element
 */
MMINLINE void updateMemoryPoolStatistics(MM_EnvironmentBase *env, uintptr_t freeBytes, uintptr_t freeEntryCount, uintptr_t largestFreeEntry)
{
	/* The three statistics are independent; record each in turn. */
	setLargestFreeEntry(largestFreeEntry);
	setFreeEntryCount(freeEntryCount);
	setFreeMemorySize(freeBytes);
}

/**
 * Recompute the receiver's free-memory statistics from its current state.
 * Base-class default is unreachable: subclasses that support this operation
 * must override it.
 */
virtual void recalculateMemoryPoolStatistics(MM_EnvironmentBase* env)
{
Assert_MM_unreachable();
}

/**
 * Return the chunk [chunkBase, chunkTop) to the pool's free storage.
 * Base-class default is unreachable: subclasses that support recycling
 * must override it.
 * @return true if the chunk was accepted (default returns false; never reached)
 */
virtual bool recycleHeapChunk(void* chunkBase, void* chunkTop)
{
Assert_MM_unreachable();
return false;
}

/**
 * Fill the address range [addrBase, addrTop) with holes (presumably
 * filler/dark-matter objects -- see the overriding implementations).
 * Base-class default is unreachable: subclasses that support this
 * operation must override it.
 */
virtual void fillWithHoles(void *addrBase, void *addrTop)
{
Assert_MM_unreachable();
}

/**
 * Increase the scannable/non-scannable byte estimates for the receiver by the specified amounts.
 *
 * @param scannableBytes the number of bytes to add to the scannable-object estimate
 * @param nonScannableBytes the number of bytes to add to the non-scannable-object estimate
 */
MMINLINE void incrementScannableBytes(uintptr_t scannableBytes, uintptr_t nonScannableBytes)
{
	_scannableBytes += scannableBytes;
	_nonScannableBytes += nonScannableBytes;
}

/**
 * @return the recorded estimate of scannable bytes in the receiver
 * (accumulated only from sampled objects)
 */
MMINLINE uintptr_t getScannableBytes()
{
return _scannableBytes;
}

/**
 * @return the recorded estimate of non-scannable bytes in the receiver
 * (accumulated only from sampled objects)
 */
MMINLINE uintptr_t getNonScannableBytes()
{
return _nonScannableBytes;
}

#if defined(OMR_GC_IDLE_HEAP_MANAGER)
/**
* @return bytes of free memory in the pool released/decommited back to OS
Expand Down Expand Up @@ -378,8 +442,10 @@ class MM_MemoryPool : public MM_BaseVirtual
_allocSearchCount(0),
_extensions(env->getExtensions()),
_largeObjectAllocateStats(NULL),
_darkMatterBytes(0)
, _darkMatterSamples(0)
_darkMatterBytes(0),
_darkMatterSamples(0),
_scannableBytes(0),
_nonScannableBytes(0)
{
_typeId = __FUNCTION__;
}
Expand Down Expand Up @@ -410,8 +476,10 @@ class MM_MemoryPool : public MM_BaseVirtual
_allocSearchCount(0),
_extensions(env->getExtensions()),
_largeObjectAllocateStats(NULL),
_darkMatterBytes(0)
, _darkMatterSamples(0)
_darkMatterBytes(0),
_darkMatterSamples(0),
_scannableBytes(0),
_nonScannableBytes(0)
{
_typeId = __FUNCTION__;
}
Expand Down
30 changes: 0 additions & 30 deletions gc/base/MemoryPoolAddressOrderedList.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -63,9 +63,6 @@ class MM_MemoryPoolAddressOrderedList : public MM_MemoryPoolAddressOrderedListBa

MM_LargeObjectAllocateStats *_largeObjectCollectorAllocateStats; /**< Same as _largeObjectAllocateStats except specifically for collector allocates */

uintptr_t _scannableBytes; /**< estimate of scannable bytes in the pool (only out of sampled objects) */
uintptr_t _nonScannableBytes; /**< estimate of non-scannable bytes in the pool (only out of sampled objects) */

MM_HeapLinkedFreeHeader *_firstUnalignedFreeEntry; /**< it is only for Balanced GC copyforward and non empty survivor region */
MM_HeapLinkedFreeHeader *_prevFirstUnalignedFreeEntry;
protected:
Expand Down Expand Up @@ -170,25 +167,6 @@ class MM_MemoryPoolAddressOrderedList : public MM_MemoryPoolAddressOrderedListBa
virtual uintptr_t releaseFreeMemoryPages(MM_EnvironmentBase* env);
#endif

/**
* Increase the scannable/non-scannable estimate for the receiver by the specified amount
* @param scannableBytes the number of bytes to increase for scannable objects
* @param non-scannableBytes the number of bytes to increase for scannable objects
*/
MMINLINE virtual void incrementScannableBytes(uintptr_t scannableBytes, uintptr_t nonScannableBytes)
{
_scannableBytes += scannableBytes;
_nonScannableBytes += nonScannableBytes;
}
/**
* @return the recorded estimate of scannable in the receiver
*/
MMINLINE uintptr_t getScannableBytes() { return _scannableBytes; }
/**
* @return the recorded estimate of non-scannable in the receiver
*/
MMINLINE uintptr_t getNonScannableBytes() { return _nonScannableBytes; }

/**
* remove a free entry from freelist
*/
Expand Down Expand Up @@ -246,19 +224,13 @@ class MM_MemoryPoolAddressOrderedList : public MM_MemoryPoolAddressOrderedListBa
return _freeMemorySize - _adjustedBytesForCardAlignment;
}

MMINLINE uintptr_t getFreeMemoryAndDarkMatterBytes() {
return getActualFreeMemorySize() + getDarkMatterBytes();
}

/**
* Create a MemoryPoolAddressOrderedList object.
*/
MM_MemoryPoolAddressOrderedList(MM_EnvironmentBase *env, uintptr_t minimumFreeEntrySize) :
MM_MemoryPoolAddressOrderedListBase(env, minimumFreeEntrySize)
,_heapFreeList(NULL)
,_largeObjectCollectorAllocateStats(NULL)
,_scannableBytes(0)
,_nonScannableBytes(0)
,_firstUnalignedFreeEntry(FREE_ENTRY_END)
,_prevFirstUnalignedFreeEntry(FREE_ENTRY_END)
{
Expand All @@ -269,8 +241,6 @@ class MM_MemoryPoolAddressOrderedList : public MM_MemoryPoolAddressOrderedListBa
MM_MemoryPoolAddressOrderedListBase(env, minimumFreeEntrySize, name)
,_heapFreeList(NULL)
,_largeObjectCollectorAllocateStats(NULL)
,_scannableBytes(0)
,_nonScannableBytes(0)
,_firstUnalignedFreeEntry(FREE_ENTRY_END)
,_prevFirstUnalignedFreeEntry(FREE_ENTRY_END)
{
Expand Down
43 changes: 12 additions & 31 deletions gc/base/MemoryPoolAddressOrderedListBase.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -174,34 +174,7 @@ class MM_MemoryPoolAddressOrderedListBase : public MM_MemoryPool
void connectFinalMemoryToPool(MM_EnvironmentBase *env, void *address, uintptr_t size);
void abandonMemoryInPool(MM_EnvironmentBase *env, void *address, uintptr_t size);


/**
* Check, can free memory element be connected to memory pool
*
* @param address free memory start address
* @param size free memory size in bytes
* @return true if free memory element would be accepted
*/
MMINLINE bool canMemoryBeConnectedToPool(MM_EnvironmentBase* env, void* address, uintptr_t size)
{
return size >= getMinimumFreeEntrySize();
}

public:
/**
* Update memory pool statistical data
*
* @param freeBytes free bytes added
* @param freeEntryCount free memory elements added
* @param largestFreeEntry largest free memory element size
*/
void updateMemoryPoolStatistics(MM_EnvironmentBase *env, uintptr_t freeBytes, uintptr_t freeEntryCount, uintptr_t largestFreeEntry)
{
setFreeMemorySize(freeBytes);
setFreeEntryCount(freeEntryCount);
setLargestFreeEntry(largestFreeEntry);
}

virtual void acquireResetLock(MM_EnvironmentBase* env);
virtual void releaseResetLock(MM_EnvironmentBase* env);

Expand All @@ -218,8 +191,6 @@ class MM_MemoryPoolAddressOrderedListBase : public MM_MemoryPool
return internalRecycleHeapChunk(addrBase, addrTop, NULL);
}

MMINLINE virtual void incrementScannableBytes(uintptr_t scannableBytes, uintptr_t nonScannableBytes) {}

MMINLINE MM_SweepPoolState * getSweepPoolState()
{
Assert_MM_true(NULL != _sweepPoolState);
Expand All @@ -244,13 +215,23 @@ class MM_MemoryPoolAddressOrderedListBase : public MM_MemoryPool

virtual void printCurrentFreeList(MM_EnvironmentBase* env, const char* area)=0;

virtual void recalculateMemoryPoolStatistics(MM_EnvironmentBase* env)=0;

void resetAdjustedBytesForCardAlignment()
{
_adjustedBytesForCardAlignment = 0;
}

/**
* Check, can free memory element be connected to memory pool
*
* @param address free memory start address
* @param size free memory size in bytes
* @return true if free memory element would be accepted
*/
MMINLINE bool canMemoryBeConnectedToPool(MM_EnvironmentBase* env, void* address, uintptr_t size)
{
return size >= getMinimumFreeEntrySize();
}

#if defined(OMR_GC_IDLE_HEAP_MANAGER)
uintptr_t releaseFreeEntryMemoryPages(MM_EnvironmentBase* env, MM_HeapLinkedFreeHeader* freeEntry);
#endif
Expand Down
22 changes: 0 additions & 22 deletions gc/base/MemoryPoolBumpPointer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -50,9 +50,6 @@ class MM_MemoryPoolBumpPointer : public MM_MemoryPool
void *_allocatePointer; /**< The base address of the unused portion of the receiver and the next pointer which could be returned by an allocation request */
void *_topPointer; /**< The top of the memory area managed by the receiver */

uintptr_t _scannableBytes; /**< estimate of scannable bytes in the pool (only out of sampled objects) */
uintptr_t _nonScannableBytes; /**< estimate of non-scannable bytes in the pool (only out of sampled objects) */

MM_SweepPoolState *_sweepPoolState; /**< GC Sweep Pool State */
MM_SweepPoolManager *_sweepPoolManager; /**< pointer to SweepPoolManager class */

Expand Down Expand Up @@ -137,13 +134,6 @@ class MM_MemoryPoolBumpPointer : public MM_MemoryPool
*/
void recalculateMemoryPoolStatistics(MM_EnvironmentBase *env);

/**
* Increase the scannable/non-scannable estimate for the receiver by the specified amount
* @param scannableBytes the number of bytes to increase for scannable objects
* @param non-scannableBytes the number of bytes to increase for scannable objects
*/
MMINLINE void incrementScannableBytes(uintptr_t scannableBytes, uintptr_t nonScannableBytes) { _scannableBytes += scannableBytes; _nonScannableBytes += nonScannableBytes; }

MMINLINE uintptr_t getFreeMemoryAndDarkMatterBytes() {
uintptr_t actualFreeMemory = getActualFreeMemorySize();
uintptr_t darkMatter = getDarkMatterBytes();
Expand All @@ -155,16 +145,6 @@ class MM_MemoryPoolBumpPointer : public MM_MemoryPool
return OMR_MAX(actualFreeMemory + darkMatter, allocatableMemory);
}

/**
* @return the recorded estimate of scannable in the receiver
*/
MMINLINE uintptr_t getScannableBytes() { return _scannableBytes; }
/**
* @return the recorded estimate of non-scannable in the receiver
*/
MMINLINE uintptr_t getNonScannableBytes() { return _nonScannableBytes; }


/**
* Used when a caller wishes to determine the end of the allocated space in the receiver. Note that the space between this
* valud and _topPointer has undefined contents.
Expand Down Expand Up @@ -228,8 +208,6 @@ class MM_MemoryPoolBumpPointer : public MM_MemoryPool
MM_MemoryPool(env, minimumFreeEntrySize)
,_allocatePointer(NULL)
,_topPointer(NULL)
,_scannableBytes(0)
,_nonScannableBytes(0)
,_sweepPoolState(NULL)
,_sweepPoolManager(NULL)
,_heapFreeList(NULL)
Expand Down
4 changes: 2 additions & 2 deletions gc/base/MemoryPoolHybrid.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -221,7 +221,7 @@ MM_MemoryPoolHybrid::internalAllocate(MM_EnvironmentBase* env, uintptr_t sizeInB
addrBase = (void*)currentFreeEntry;
recycleEntry = (MM_HeapLinkedFreeHeader*)(((uint8_t*)currentFreeEntry) + sizeInBytesRequired);

if (recycleHeapChunk(env, recycleEntry, ((uint8_t*)recycleEntry) + recycleEntrySize, previousFreeEntry, currentFreeEntry->getNext(compressed), curFreeList)) {
if (recycleHeapChunkForFreeList(env, recycleEntry, ((uint8_t*)recycleEntry) + recycleEntrySize, previousFreeEntry, currentFreeEntry->getNext(compressed), curFreeList)) {
_heapFreeLists[curFreeList].updateHint(currentFreeEntry, recycleEntry);
_largeObjectAllocateStatsForFreeList[curFreeList].incrementFreeEntrySizeClassStats(recycleEntrySize);
} else {
Expand Down Expand Up @@ -370,7 +370,7 @@ MM_MemoryPoolHybrid::internalAllocateTLH(MM_EnvironmentBase* env, uintptr_t maxi
entryNext = freeEntry->getNext(compressed);

/* Recycle the remaining entry back onto the free list (if applicable) */
if (!recycleHeapChunk(env, addrTop, topOfRecycledChunk, previousFreeEntry, entryNext, curFreeList)) {
if (!recycleHeapChunkForFreeList(env, addrTop, topOfRecycledChunk, previousFreeEntry, entryNext, curFreeList)) {
/* Adjust the free memory size and count */
Assert_MM_true(_heapFreeLists[curFreeList]._freeSize >= recycleEntrySize);
Assert_MM_true(_heapFreeLists[curFreeList]._freeCount > 0);
Expand Down
4 changes: 2 additions & 2 deletions gc/base/MemoryPoolSplitAddressOrderedList.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -249,7 +249,7 @@ MM_MemoryPoolSplitAddressOrderedList::internalAllocate(MM_EnvironmentBase* env,
addrBase = (void*)currentFreeEntry;
recycleEntry = (MM_HeapLinkedFreeHeader*)(((uint8_t*)currentFreeEntry) + sizeInBytesRequired);

if (recycleHeapChunk(env, recycleEntry, ((uint8_t*)recycleEntry) + recycleEntrySize, previousFreeEntry, currentFreeEntry->getNext(compressed), curFreeList)) {
if (recycleHeapChunkForFreeList(env, recycleEntry, ((uint8_t*)recycleEntry) + recycleEntrySize, previousFreeEntry, currentFreeEntry->getNext(compressed), curFreeList)) {
if (!skipReserved && isPreviousReservedFreeEntry(previousFreeEntry, curFreeList)) {
_reservedFreeEntrySize = recycleEntrySize;
} else if (currentFreeEntry == _previousReservedFreeEntry) {
Expand Down Expand Up @@ -434,7 +434,7 @@ MM_MemoryPoolSplitAddressOrderedList::internalAllocateTLH(MM_EnvironmentBase* en
entryNext = freeEntry->getNext(compressed);

/* Recycle the remaining entry back onto the free list (if applicable) */
if (!recycleHeapChunk(env, addrTop, topOfRecycledChunk, previousFreeEntry, entryNext, curFreeList)) {
if (!recycleHeapChunkForFreeList(env, addrTop, topOfRecycledChunk, previousFreeEntry, entryNext, curFreeList)) {
/* Adjust the free memory size and count */
Assert_MM_true(_heapFreeLists[curFreeList]._freeSize >= recycleEntrySize);
Assert_MM_true(_heapFreeLists[curFreeList]._freeCount > 0);
Expand Down
2 changes: 1 addition & 1 deletion gc/base/MemoryPoolSplitAddressOrderedListBase.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -491,7 +491,7 @@ MM_MemoryPoolSplitAddressOrderedListBase::findAddressAfterFreeSize(MM_Environmen
}

bool
MM_MemoryPoolSplitAddressOrderedListBase::recycleHeapChunk(MM_EnvironmentBase* env, void* addrBase, void* addrTop,
MM_MemoryPoolSplitAddressOrderedListBase::recycleHeapChunkForFreeList(MM_EnvironmentBase* env, void* addrBase, void* addrTop,
MM_HeapLinkedFreeHeader* previousFreeEntry, MM_HeapLinkedFreeHeader* nextFreeEntry, uintptr_t curFreeList)
{
bool const compressed = compressObjectReferences();
Expand Down
2 changes: 1 addition & 1 deletion gc/base/MemoryPoolSplitAddressOrderedListBase.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -277,7 +277,7 @@ class MM_MemoryPoolSplitAddressOrderedListBase : public MM_MemoryPoolAddressOrde
virtual void *internalAllocate(MM_EnvironmentBase *env, uintptr_t sizeInBytesRequired, bool lockingRequired, MM_LargeObjectAllocateStats *largeObjectAllocateStats) = 0;
virtual bool internalAllocateTLH(MM_EnvironmentBase *env, uintptr_t maximumSizeInBytesRequired, void * &addrBase, void * &addrTop, bool lockingRequired, MM_LargeObjectAllocateStats *largeObjectAllocateStats) = 0;

bool recycleHeapChunk(MM_EnvironmentBase* env, void* addrBase, void* addrTop, MM_HeapLinkedFreeHeader* previousFreeEntry, MM_HeapLinkedFreeHeader* nextFreeEntry, uintptr_t curFreeList);
bool recycleHeapChunkForFreeList(MM_EnvironmentBase* env, void* addrBase, void* addrTop, MM_HeapLinkedFreeHeader* previousFreeEntry, MM_HeapLinkedFreeHeader* nextFreeEntry, uintptr_t curFreeList);

MMINLINE uintptr_t findGoodStartFreeList()
{
Expand Down
3 changes: 3 additions & 0 deletions gc/base/j9mm.tdf
Original file line number Diff line number Diff line change
Expand Up @@ -938,3 +938,6 @@ TraceEvent=Trc_MM_ParallelDispatcher_recomputeActiveThreadCountForTask_useCollec

TraceEntry=Trc_MM_CopyForwardScheme_convertFreeMemoryCandidateToSurvivorRegion_Entry Overhead=1 Level=1 Group=copyforwardscheme Template="MM_CopyForwardScheme_convertCandidateToSurvivorRegion region=%p"
TraceExit=Trc_MM_CopyForwardScheme_convertFreeMemoryCandidateToSurvivorRegion_Exit Overhead=1 Level=1 Group=copyforwardscheme Template="MM_CopyForwardScheme_convertCandidateToSurvivorRegion"

TraceEntry=Trc_MM_AllocationContextBalanced_acquireMPAOLRegionFromNode_Entry Overhead=1 Level=1 Group=tarok Template="MM_AllocationContextBalanced::acquireMPAOLRegionFromNode thisContext=%p requestingContext=%p"
TraceExit=Trc_MM_AllocationContextBalanced_acquireMPAOLRegionFromNode_Exit Overhead=1 Level=1 Group=tarok Template="MM_AllocationContextBalanced::acquireMPAOLRegionFromNode result=%p"

0 comments on commit 2a61988

Please sign in to comment.