From 8348e5fa131586391ae4ada32abe53cbbf8b1bd4 Mon Sep 17 00:00:00 2001 From: Bryan Bernhart Date: Tue, 5 Jul 2022 09:56:43 -0700 Subject: [PATCH] Eliminate block malloc/free from slab allocator. Removes the need to wrap the block in a slab as a new type in favor of expanding the existing block type which is assigned the slab. --- src/gpgmm/common/SlabBlockAllocator.h | 13 ++++-- src/gpgmm/common/SlabMemoryAllocator.cpp | 56 +++++++++++++++++------- src/gpgmm/common/SlabMemoryAllocator.h | 38 ---------------- 3 files changed, 50 insertions(+), 57 deletions(-) diff --git a/src/gpgmm/common/SlabBlockAllocator.h b/src/gpgmm/common/SlabBlockAllocator.h index 81fbaa66b..b0a33481c 100644 --- a/src/gpgmm/common/SlabBlockAllocator.h +++ b/src/gpgmm/common/SlabBlockAllocator.h @@ -19,6 +19,15 @@ namespace gpgmm { + struct Slab; + + // SlabBlock keeps a reference back to the slab to avoid creating a copy of the block with the + // slab being allocated from. + struct SlabBlock : public MemoryBlock { + SlabBlock* pNext = nullptr; + Slab* pSlab = nullptr; + }; + // SlabBlockAllocator uses the slab allocation technique to satisfy an // a block-allocation request. A slab consists of contiguious memory carved up into // fixed-size blocks (also called "pages" or "chunks"). The slab allocator @@ -40,10 +49,6 @@ namespace gpgmm { const char* GetTypename() const override; private: - struct SlabBlock : public MemoryBlock { - SlabBlock* pNext = nullptr; - }; - struct BlockList { SlabBlock* pHead = nullptr; // First free block in slab. }; diff --git a/src/gpgmm/common/SlabMemoryAllocator.cpp b/src/gpgmm/common/SlabMemoryAllocator.cpp index 75e3d1f30..2fa09949e 100644 --- a/src/gpgmm/common/SlabMemoryAllocator.cpp +++ b/src/gpgmm/common/SlabMemoryAllocator.cpp @@ -42,6 +42,36 @@ namespace gpgmm { // emitted.
constexpr static double kPrefetchCoverageWarnMinThreshold = 0.50; + // Slab is a node in a doubly-linked list that contains a free-list of blocks + // and a reference to underlying memory. + struct Slab : public LinkNode, public RefCounted { + Slab(uint64_t blockCount, uint64_t blockSize) + : RefCounted(0), Allocator(blockCount, blockSize) { + } + + ~Slab() { + if (IsInList()) { + RemoveFromList(); + } + } + + uint64_t GetBlockCount() const { + return Allocator.GetBlockCount(); + } + + bool IsFull() const { + return static_cast(GetRefCount()) == Allocator.GetBlockCount(); + } + + double GetUsedPercent() const { + return static_cast(GetRefCount()) / + static_cast(Allocator.GetBlockCount()); + } + + SlabBlockAllocator Allocator; + std::unique_ptr SlabMemory; + }; + // SlabMemoryAllocator SlabMemoryAllocator::SlabMemoryAllocator(uint64_t blockSize, @@ -311,22 +341,20 @@ namespace gpgmm { pCache->FullList.push_front(pFreeSlab); } - // Wrap the block in the containing slab. Since the slab's block could reside in another - // allocated block, the slab's allocation offset must be made relative to slab's underlying - // memory and not the slab. - BlockInSlab* blockInSlab = new BlockInSlab(); - blockInSlab->pBlock = subAllocation->GetBlock(); + // Assign the containing slab to the block so DeallocateMemory() knows how to release it. + SlabBlock* blockInSlab = static_cast(subAllocation->GetBlock()); blockInSlab->pSlab = pFreeSlab; - blockInSlab->Size = subAllocation->GetBlock()->Size; - blockInSlab->Offset = - pFreeSlab->SlabMemory->GetOffset() + subAllocation->GetBlock()->Offset; + + // Since the slab's block could reside in another allocated block, the allocation + // offset must be made relative to the slab's underlying memory and not the slab itself. 
+ const uint64_t offsetFromMemory = pFreeSlab->SlabMemory->GetOffset() + blockInSlab->Offset; mInfo.UsedBlockCount++; mInfo.UsedBlockUsage += blockInSlab->Size; - return std::make_unique( - this, subAllocation->GetMemory(), blockInSlab->Offset, AllocationMethod::kSubAllocated, - blockInSlab, request.SizeInBytes); + return std::make_unique(this, subAllocation->GetMemory(), + offsetFromMemory, AllocationMethod::kSubAllocated, + blockInSlab, request.SizeInBytes); } void SlabMemoryAllocator::DeallocateMemory(std::unique_ptr subAllocation) { @@ -334,7 +362,7 @@ namespace gpgmm { std::lock_guard lock(mMutex); - const BlockInSlab* blockInSlab = static_cast(subAllocation->GetBlock()); + SlabBlock* blockInSlab = static_cast(subAllocation->GetBlock()); ASSERT(blockInSlab != nullptr); Slab* slab = blockInSlab->pSlab; @@ -350,9 +378,7 @@ namespace gpgmm { mInfo.UsedBlockCount--; mInfo.UsedBlockUsage -= blockInSlab->Size; - MemoryBlock* block = blockInSlab->pBlock; - slab->Allocator.DeallocateBlock(block); - SafeDelete(blockInSlab); + slab->Allocator.DeallocateBlock(blockInSlab); MemoryBase* slabMemory = subAllocation->GetMemory(); ASSERT(slabMemory != nullptr); diff --git a/src/gpgmm/common/SlabMemoryAllocator.h b/src/gpgmm/common/SlabMemoryAllocator.h index 9bdcfa0f6..67f10be44 100644 --- a/src/gpgmm/common/SlabMemoryAllocator.h +++ b/src/gpgmm/common/SlabMemoryAllocator.h @@ -72,44 +72,6 @@ namespace gpgmm { bool IsPrefetchCoverageBelowThreshold() const; - // Slab is a node in a doubly-linked list that contains a free-list of blocks - // and a reference to underlying memory. 
- struct Slab : public LinkNode, public RefCounted { - Slab(uint64_t blockCount, uint64_t blockSize) - : RefCounted(0), Allocator(blockCount, blockSize) { - } - - ~Slab() { - ASSERT(SlabMemory == nullptr); - if (IsInList()) { - RemoveFromList(); - } - } - - uint64_t GetBlockCount() const { - return Allocator.GetBlockCount(); - } - - bool IsFull() const { - return static_cast(GetRefCount()) == Allocator.GetBlockCount(); - } - - double GetUsedPercent() const { - return static_cast(GetRefCount()) / - static_cast(Allocator.GetBlockCount()); - } - - SlabBlockAllocator Allocator; - std::unique_ptr SlabMemory; - }; - - // Stores a reference back to the slab containing the block so DeallocateMemory - // knows which slab (and block allocator) to use. - struct BlockInSlab : public MemoryBlock { - MemoryBlock* pBlock = nullptr; - Slab* pSlab = nullptr; - }; - // Group of one or more slabs of the same size. struct SlabCache { SizedLinkedList FreeList; // Slabs that contain partial or empty