Use bump ranges instead of free list in JSC allocator
https://bugs.webkit.org/show_bug.cgi?id=255798
rdar://108377867

Reviewed by Yusuke Suzuki and Mark Lam.

Replaces the JSC free list allocator with a list of free intervals.
Each interval represents a contiguous run of dead cells, and within
each interval we can bump-allocate. This should let us bump-allocate
more often, and potentially speed up sweeping because we install
fewer free list cells, especially for workloads where we currently
rarely manage to empty blocks completely.
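
As a rough illustration of the scheme (a minimal standalone sketch, not the
WebKit code; the Interval struct, the BumpRangeAllocator class, and every name
in it are invented here), bump allocation over a list of free intervals looks
roughly like this:

#include <cstddef>

struct Interval {
    char* start;      // first free byte of a contiguous run of dead cells
    char* end;        // one past the last free byte of the run
    Interval* next;   // next free interval, or nullptr if this is the last one
};

class BumpRangeAllocator {
public:
    BumpRangeAllocator(Interval* first, std::size_t cellSize)
        : m_cursor(first ? first->start : nullptr)
        , m_limit(first ? first->end : nullptr)
        , m_next(first ? first->next : nullptr)
        , m_cellSize(cellSize)
    { }

    // Returns nullptr when every interval is exhausted; a real allocator
    // would fall back to sweeping or allocating a new block here.
    void* allocate()
    {
        if (m_cursor < m_limit) {      // fast path: bump within the current interval
            void* result = m_cursor;   // (interval lengths are multiples of m_cellSize)
            m_cursor += m_cellSize;
            return result;
        }
        if (!m_next)
            return nullptr;
        m_cursor = m_next->start;      // slow path: move on to the next interval
        m_limit = m_next->end;
        m_next = m_next->next;
        return allocate();             // intervals are never empty, so this recurses at most once
    }

private:
    char* m_cursor { nullptr };
    char* m_limit { nullptr };
    Interval* m_next { nullptr };
    std::size_t m_cellSize { 0 };
};

In the actual allocator the interval metadata lives inside the first dead cell
of each run and is scrambled with a per-sweep secret, as the FreeList.h and
FreeListInlines.h hunks below show.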

* Source/JavaScriptCore/heap/FreeList.cpp:
(JSC::FreeList::clear):
(JSC::FreeList::initialize):
(JSC::FreeList::contains const):
(JSC::FreeList::dump const):
(JSC::FreeList::initializeList): Deleted.
(JSC::FreeList::initializeBump): Deleted.
* Source/JavaScriptCore/heap/FreeList.h:
(JSC::FreeCell::scramble):
(JSC::FreeCell::descramble):
(JSC::FreeCell::makeLast):
(JSC::FreeCell::setNext):
(JSC::FreeCell::decode):
(JSC::FreeCell::offsetOfScrambledBits):
(JSC::FreeList::allocationWillFail const):
(JSC::FreeList::isSentinel):
(JSC::FreeList::offsetOfHead):
(JSC::FreeList::offsetOfPayloadStart):
(JSC::FreeList::offsetOfPayloadEnd):
(JSC::FreeList::head const):
(JSC::FreeCell::next const): Deleted.
(JSC::FreeCell::offsetOfScrambledNext): Deleted.
(JSC::FreeList::offsetOfScrambledHead): Deleted.
(JSC::FreeList::offsetOfRemaining): Deleted.
* Source/JavaScriptCore/heap/FreeListInlines.h:
(JSC::FreeList::allocate):
(JSC::FreeList::forEach const):
* Source/JavaScriptCore/heap/MarkedBlockInlines.h:
(JSC::MarkedBlock::Handle::specializedSweep):
* Source/JavaScriptCore/jit/AssemblyHelpers.cpp:
(JSC::AssemblyHelpers::jitAssertTagsInPlace):
(JSC::AssemblyHelpers::emitExceptionCheck):
(JSC::AssemblyHelpers::emitNonPatchableExceptionCheck):
(JSC::AssemblyHelpers::loadProperty):
(JSC::AssemblyHelpers::storeProperty):
(JSC::AssemblyHelpers::emitAllocateWithNonNullAllocator):
(JSC::AssemblyHelpers::emitAllocateVariableSized):
(JSC::AssemblyHelpers::restoreCalleeSavesFromEntryFrameCalleeSavesBuffer):
(JSC::AssemblyHelpers::emitRestoreCalleeSavesFor):

Canonical link: https://commits.webkit.org/263313@main
ddegazio committed Apr 24, 2023
1 parent 0eef1c8 commit 2fbb315
Showing 6 changed files with 259 additions and 131 deletions.
7 changes: 7 additions & 0 deletions Source/JavaScriptCore/assembler/MacroAssemblerARM64.h
@@ -360,6 +360,13 @@ class MacroAssemblerARM64 : public AbstractMacroAssembler<Assembler> {
m_assembler.add<64>(dest, dest, dataTempRegister);
}

void add64(RegisterID src, Address dest)
{
load64(dest, getCachedDataTempRegisterIDAndInvalidate());
m_assembler.add<64>(src, dataTempRegister, dataTempRegister);
store64(dataTempRegister, dest);
}

void add64(AbsoluteAddress src, RegisterID dest)
{
load64(src.m_ptr, getCachedDataTempRegisterIDAndInvalidate());
47 changes: 20 additions & 27 deletions Source/JavaScriptCore/heap/FreeList.cpp
@@ -39,53 +39,46 @@ FreeList::~FreeList()

void FreeList::clear()
{
m_scrambledHead = 0;
m_intervalStart = nullptr;
m_intervalEnd = nullptr;
m_nextInterval = bitwise_cast<FreeCell*>(static_cast<uintptr_t>(1));
m_secret = 0;
m_payloadEnd = nullptr;
m_remaining = 0;
m_originalSize = 0;
}

void FreeList::initializeList(FreeCell* head, uintptr_t secret, unsigned bytes)
void FreeList::initialize(FreeCell* start, uint64_t secret, unsigned bytes)
{
// It's *slightly* more optimal to use a scrambled head. It saves a register on the fast path.
m_scrambledHead = FreeCell::scramble(head, secret);
if (UNLIKELY(!start)) {
clear();
return;
}
m_secret = secret;
m_payloadEnd = nullptr;
m_remaining = 0;
m_nextInterval = start;
FreeCell::advance(m_secret, m_nextInterval, m_intervalStart, m_intervalEnd);
m_originalSize = bytes;
}

void FreeList::initializeBump(char* payloadEnd, unsigned remaining)
{
m_scrambledHead = 0;
m_secret = 0;
m_payloadEnd = payloadEnd;
m_remaining = remaining;
m_originalSize = remaining;
}

bool FreeList::contains(HeapCell* target) const
{
if (m_remaining) {
const void* start = (m_payloadEnd - m_remaining);
const void* end = m_payloadEnd;
return (start <= target) && (target < end);
}
char* targetPtr = bitwise_cast<char*>(target);
if (m_intervalStart <= targetPtr && targetPtr < m_intervalEnd)
return true;

FreeCell* candidate = head();
while (candidate) {
if (bitwise_cast<HeapCell*>(candidate) == target)
FreeCell* candidate = nextInterval();
while (!isSentinel(candidate)) {
char* start;
char* end;
FreeCell::advance(m_secret, candidate, start, end);
if (start <= targetPtr && targetPtr < end)
return true;
candidate = candidate->next(m_secret);
}

return false;
}

void FreeList::dump(PrintStream& out) const
{
out.print("{head = ", RawPointer(head()), ", secret = ", m_secret, ", payloadEnd = ", RawPointer(m_payloadEnd), ", remaining = ", m_remaining, ", originalSize = ", m_originalSize, "}");
out.print("{nextInterval = ", RawPointer(nextInterval()), ", secret = ", m_secret, ", intervalStart = ", RawPointer(m_intervalStart), ", intervalEnd = ", RawPointer(m_intervalEnd), ", originalSize = ", m_originalSize, "}");
}

} // namespace JSC
67 changes: 42 additions & 25 deletions Source/JavaScriptCore/heap/FreeList.h
@@ -27,36 +27,53 @@

#include <wtf/Noncopyable.h>
#include <wtf/PrintStream.h>
#include <wtf/StdLibExtras.h>

namespace JSC {

class HeapCell;

struct FreeCell {
static uintptr_t scramble(FreeCell* cell, uintptr_t secret)
static ALWAYS_INLINE uint64_t scramble(int32_t offsetToNext, uint32_t lengthInBytes, uint64_t secret)
{
return bitwise_cast<uintptr_t>(cell) ^ secret;
ASSERT(static_cast<uint64_t>(lengthInBytes) << 32 | offsetToNext);
return (static_cast<uint64_t>(lengthInBytes) << 32 | offsetToNext) ^ secret;
}
static FreeCell* descramble(uintptr_t cell, uintptr_t secret)

static ALWAYS_INLINE std::tuple<int32_t, uint32_t> descramble(uint64_t scrambledBits, uint64_t secret)
{
return bitwise_cast<FreeCell*>(cell ^ secret);
static_assert(WTF::isPowerOfTwo(sizeof(FreeCell))); // Make sure this division isn't super costly.
uint64_t descrambledBits = scrambledBits ^ secret;
return { static_cast<int32_t>(static_cast<uint32_t>(descrambledBits)), static_cast<uint32_t>(descrambledBits >> 32u) };
}
void setNext(FreeCell* next, uintptr_t secret)

ALWAYS_INLINE void makeLast(uint32_t lengthInBytes, uint64_t secret)
{
scrambledNext = scramble(next, secret);
scrambledBits = scramble(1, lengthInBytes, secret); // We use a set LSB to indicate a sentinel pointer.
}
FreeCell* next(uintptr_t secret) const

ALWAYS_INLINE void setNext(FreeCell* next, uint32_t lengthInBytes, uint64_t secret)
{
return descramble(scrambledNext, secret);
scrambledBits = scramble((next - this) * sizeof(FreeCell), lengthInBytes, secret);
}

static ptrdiff_t offsetOfScrambledNext() { return OBJECT_OFFSETOF(FreeCell, scrambledNext); }

ALWAYS_INLINE std::tuple<int32_t, uint32_t> decode(uint64_t secret)
{
return descramble(scrambledBits, secret);
}

static ALWAYS_INLINE void advance(uint64_t secret, FreeCell*& interval, char*& intervalStart, char*& intervalEnd)
{
auto [offsetToNext, lengthInBytes] = interval->decode(secret);
intervalStart = bitwise_cast<char*>(interval);
intervalEnd = intervalStart + lengthInBytes;
interval = bitwise_cast<FreeCell*>(intervalStart + offsetToNext);
}

static ALWAYS_INLINE ptrdiff_t offsetOfScrambledBits() { return OBJECT_OFFSETOF(FreeCell, scrambledBits); }

uint64_t preservedBitsForCrashAnalysis;
uintptr_t scrambledNext;
uint64_t scrambledBits;
};

class FreeList {
@@ -66,10 +83,9 @@ class FreeList {

void clear();

JS_EXPORT_PRIVATE void initializeList(FreeCell* head, uintptr_t secret, unsigned bytes);
JS_EXPORT_PRIVATE void initializeBump(char* payloadEnd, unsigned remaining);
JS_EXPORT_PRIVATE void initialize(FreeCell* head, uint64_t secret, unsigned bytes);

bool allocationWillFail() const { return !head() && !m_remaining; }
bool allocationWillFail() const { return m_intervalStart >= m_intervalEnd && isSentinel(nextInterval()); }
bool allocationWillSucceed() const { return !allocationWillFail(); }

template<typename Func>
@@ -82,10 +98,11 @@

unsigned originalSize() const { return m_originalSize; }

static ptrdiff_t offsetOfScrambledHead() { return OBJECT_OFFSETOF(FreeList, m_scrambledHead); }
static bool isSentinel(FreeCell* cell) { return bitwise_cast<uintptr_t>(cell) & 1; }
static ptrdiff_t offsetOfNextInterval() { return OBJECT_OFFSETOF(FreeList, m_nextInterval); }
static ptrdiff_t offsetOfSecret() { return OBJECT_OFFSETOF(FreeList, m_secret); }
static ptrdiff_t offsetOfPayloadEnd() { return OBJECT_OFFSETOF(FreeList, m_payloadEnd); }
static ptrdiff_t offsetOfRemaining() { return OBJECT_OFFSETOF(FreeList, m_remaining); }
static ptrdiff_t offsetOfIntervalStart() { return OBJECT_OFFSETOF(FreeList, m_intervalStart); }
static ptrdiff_t offsetOfIntervalEnd() { return OBJECT_OFFSETOF(FreeList, m_intervalEnd); }
static ptrdiff_t offsetOfOriginalSize() { return OBJECT_OFFSETOF(FreeList, m_originalSize); }
static ptrdiff_t offsetOfCellSize() { return OBJECT_OFFSETOF(FreeList, m_cellSize); }

@@ -94,12 +111,12 @@
unsigned cellSize() const { return m_cellSize; }

private:
FreeCell* head() const { return FreeCell::descramble(m_scrambledHead, m_secret); }
FreeCell* nextInterval() const { return m_nextInterval; }

uintptr_t m_scrambledHead { 0 };
uintptr_t m_secret { 0 };
char* m_payloadEnd { nullptr };
unsigned m_remaining { 0 };
char* m_intervalStart { nullptr };
char* m_intervalEnd { nullptr };
FreeCell* m_nextInterval { bitwise_cast<FreeCell*>(static_cast<uintptr_t>(1)) };
uint64_t m_secret { 0 };
unsigned m_originalSize { 0 };
unsigned m_cellSize { 0 };
};
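
To make the encoding in the FreeCell hunk above concrete, here is a standalone
sketch of the packing it uses (the packInterval/unpackInterval helpers and the
main() driver are invented here, not WebKit code): each interval header holds
the interval's byte length in the high 32 bits and the offset to the next
interval in the low 32 bits, XORed with a per-sweep secret, and a set low bit
in the offset marks the last interval.

#include <cassert>
#include <cstdint>
#include <tuple>

static uint64_t packInterval(int32_t offsetToNext, uint32_t lengthInBytes, uint64_t secret)
{
    // Length in the high 32 bits, offset in the low 32 bits, scrambled with the secret.
    return ((static_cast<uint64_t>(lengthInBytes) << 32) | static_cast<uint32_t>(offsetToNext)) ^ secret;
}

static std::tuple<int32_t, uint32_t> unpackInterval(uint64_t scrambledBits, uint64_t secret)
{
    uint64_t bits = scrambledBits ^ secret;
    return { static_cast<int32_t>(static_cast<uint32_t>(bits)), static_cast<uint32_t>(bits >> 32) };
}

int main()
{
    const uint64_t secret = 0x5bd1e995cafef00dULL;

    // A 4 KB interval whose successor lives 512 bytes further into the block.
    auto [offset, length] = unpackInterval(packInterval(512, 4096, secret), secret);
    assert(offset == 512 && length == 4096);

    // Offset 1 (low bit set) plays the role of the sentinel marking the last interval.
    auto [lastOffset, lastLength] = unpackInterval(packInterval(1, 4096, secret), secret);
    assert((lastOffset & 1) && lastLength == 4096);
    return 0;
}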
48 changes: 28 additions & 20 deletions Source/JavaScriptCore/heap/FreeListInlines.h
@@ -33,36 +33,44 @@ namespace JSC {
template<typename Func>
ALWAYS_INLINE HeapCell* FreeList::allocate(const Func& slowPath)
{
unsigned remaining = m_remaining;
if (remaining) {
unsigned cellSize = m_cellSize;
remaining -= cellSize;
m_remaining = remaining;
return bitwise_cast<HeapCell*>(m_payloadEnd - remaining - cellSize);
unsigned cellSize = m_cellSize;
if (LIKELY(m_intervalStart < m_intervalEnd)) {
char* result = m_intervalStart;
m_intervalStart += cellSize;
return bitwise_cast<HeapCell*>(result);
}

FreeCell* result = head();
if (UNLIKELY(!result))
FreeCell* cell = nextInterval();
if (UNLIKELY(isSentinel(cell)))
return slowPath();

FreeCell::advance(m_secret, m_nextInterval, m_intervalStart, m_intervalEnd);

m_scrambledHead = result->scrambledNext;
// It's an invariant of our allocator that we don't create empty intervals, so there
// should always be enough space remaining to allocate a cell.
char* result = m_intervalStart;
m_intervalStart += cellSize;
return bitwise_cast<HeapCell*>(result);
}

template<typename Func>
void FreeList::forEach(const Func& func) const
{
if (m_remaining) {
for (unsigned remaining = m_remaining; remaining; remaining -= m_cellSize)
func(bitwise_cast<HeapCell*>(m_payloadEnd - remaining));
} else {
for (FreeCell* cell = head(); cell;) {
// We can use this to overwrite free objects before destroying the free list. So, we need
// to get next before proceeding further.
FreeCell* next = cell->next(m_secret);
func(bitwise_cast<HeapCell*>(cell));
cell = next;
}
FreeCell* cell = nextInterval();
char* intervalStart = m_intervalStart;
char* intervalEnd = m_intervalEnd;
ASSERT(intervalEnd - intervalStart < (ptrdiff_t)(16 * KB));

while (true) {
for (; intervalStart < intervalEnd; intervalStart += m_cellSize)
func(bitwise_cast<HeapCell*>(intervalStart));

// If we explore the whole interval and the cell is the sentinel value, though, we should
// immediately exit so we don't decode anything out of bounds.
if (isSentinel(cell))
break;

FreeCell::advance(m_secret, cell, intervalStart, intervalEnd);
}
}

62 changes: 47 additions & 15 deletions Source/JavaScriptCore/heap/MarkedBlockInlines.h
@@ -254,6 +254,7 @@ void MarkedBlock::Handle::specializedSweep(FreeList* freeList, MarkedBlock::Hand
unsigned cellSize = this->cellSize();

VM& vm = this->vm();
uint64_t secret = vm.heapRandom().getUint64();
auto destroy = [&] (void* cell) {
JSCell* jsCell = static_cast<JSCell*>(cell);
if (!jsCell->isZapped()) {
@@ -293,9 +294,11 @@
destroy(cell);
}
if (sweepMode == SweepToFreeList) {
if (scribbleMode == Scribble)
if (UNLIKELY(scribbleMode == Scribble))
scribble(payloadBegin, payloadEnd - payloadBegin);
freeList->initializeBump(payloadEnd, payloadEnd - payloadBegin);
FreeCell* interval = reinterpret_cast_ptr<FreeCell*>(payloadBegin);
interval->makeLast(payloadEnd - payloadBegin, secret);
freeList->initialize(interval, secret, payloadEnd - payloadBegin);
}
if (false)
dataLog("Quickly swept block ", RawPointer(this), " with cell size ", cellSize, " and attributes ", m_attributes, ": ", pointerDump(freeList), "\n");
@@ -305,10 +308,11 @@ void MarkedBlock::Handle::specializedSweep(FreeList* freeList, MarkedBlock::Hand
// This produces a free list that is ordered in reverse through the block.
// This is fine, since the allocation code makes no assumptions about the
// order of the free list.
FreeCell* head = nullptr;
size_t count = 0;
uintptr_t secret = static_cast<uintptr_t>(vm.heapRandom().getUint64());
size_t freedBytes = 0;
bool isEmpty = true;
FreeCell* head = nullptr;
size_t currentInterval = 0;
size_t previousDeadCell = 0;

// We try to allocate the deadCells vector entirely on the stack if possible.
// Otherwise, we use the maximum permitted space (currently 8kB) to store as
@@ -323,21 +327,46 @@ void MarkedBlock::Handle::specializedSweep(FreeList* freeList, MarkedBlock::Hand

auto handleDeadCell = [&] (size_t i) {
HeapCell* cell = reinterpret_cast_ptr<HeapCell*>(&block.atoms()[i]);

if (destructionMode != BlockHasNoDestructors)
destroy(cell);

if (sweepMode == SweepToFreeList) {
FreeCell* freeCell = reinterpret_cast_ptr<FreeCell*>(cell);
if (scribbleMode == Scribble)
scribble(freeCell, cellSize);
freeCell->setNext(head, secret);
head = freeCell;
++count;
if (UNLIKELY(scribbleMode == Scribble))
scribble(cell, cellSize);

// The following check passing implies there was at least one live cell
// between us and the last dead cell, meaning that the previous dead
// cell is the start of its interval.
if (i + m_atomsPerCell < previousDeadCell) {
size_t intervalLength = currentInterval * atomSize;
FreeCell* cell = reinterpret_cast_ptr<FreeCell*>(&block.atoms()[previousDeadCell]);
if (LIKELY(head))
cell->setNext(head, intervalLength, secret);
else
cell->makeLast(intervalLength, secret);
freedBytes += intervalLength;
head = cell;
currentInterval = 0;
}
currentInterval += m_atomsPerCell;
previousDeadCell = i;
}
};

for (size_t i = m_startAtom; i < endAtom; i += m_atomsPerCell) {
auto checkForFinalInterval = [&] () {
if (sweepMode == SweepToFreeList && currentInterval) {
size_t intervalLength = currentInterval * atomSize;
FreeCell* cell = reinterpret_cast_ptr<FreeCell*>(&block.atoms()[previousDeadCell]);

if (LIKELY(head))
cell->setNext(head, intervalLength, secret);
else
cell->makeLast(intervalLength, secret);
freedBytes += intervalLength;
head = cell;
}
};

for (int i = endAtom - m_atomsPerCell; i >= static_cast<int>(m_startAtom); i -= m_atomsPerCell) {
if (emptyMode == NotEmpty
&& ((marksMode == MarksNotStale && header.m_marks.get(i))
|| (newlyAllocatedMode == HasNewlyAllocated && header.m_newlyAllocated.get(i)))) {
Expand All @@ -353,6 +382,8 @@ void MarkedBlock::Handle::specializedSweep(FreeList* freeList, MarkedBlock::Hand
} else
handleDeadCell(i);
}
if (destructionMode != BlockHasDestructorsAndCollectorIsRunning)
checkForFinalInterval(); // We need this to handle the first interval in the block, since it has no dead cells before it.

// We only want to discard the newlyAllocated bits if we're creating a FreeList,
// otherwise we would lose information on what's currently alive.
@@ -365,10 +396,11 @@ void MarkedBlock::Handle::specializedSweep(FreeList* freeList, MarkedBlock::Hand
if (destructionMode == BlockHasDestructorsAndCollectorIsRunning) {
for (size_t i : deadCells)
handleDeadCell(i);
checkForFinalInterval();
}

if (sweepMode == SweepToFreeList) {
freeList->initializeList(head, secret, count * cellSize);
freeList->initialize(head, secret, freedBytes);
setIsFreeListed();
} else if (isEmpty)
m_directory->setIsEmpty(NoLockingNecessary, this, true);
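
For readers following the specializedSweep hunk above: the core change is that
the sweep coalesces each maximal run of adjacent dead cells into one interval
instead of pushing every dead cell onto a free list, so a fully dead block
yields a single interval. Below is a simplified, forward-walking sketch of that
coalescing only (the real sweep walks the block in reverse and links intervals
through scrambled in-cell headers; the DeadRun struct here is invented for
illustration):

#include <cstddef>
#include <vector>

struct DeadRun {
    std::size_t startCell;   // index of the first dead cell in the run
    std::size_t cellCount;   // number of contiguous dead cells
};

// Walk the block's liveness bits once and emit one DeadRun per maximal run of dead cells.
std::vector<DeadRun> buildFreeIntervals(const std::vector<bool>& isLive)
{
    std::vector<DeadRun> intervals;
    std::size_t runStart = 0;
    std::size_t runLength = 0;
    for (std::size_t i = 0; i < isLive.size(); ++i) {
        if (isLive[i]) {
            if (runLength)                 // a live cell terminates the current run
                intervals.push_back({ runStart, runLength });
            runLength = 0;
            continue;
        }
        if (!runLength)
            runStart = i;                  // first dead cell of a new run
        ++runLength;
    }
    if (runLength)                         // handle a run that reaches the end of the block
        intervals.push_back({ runStart, runLength });
    return intervals;
}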
