Skip to content

Commit

Permalink
[JSC] Use more m_vmValue in FTL
Browse files Browse the repository at this point in the history
https://bugs.webkit.org/show_bug.cgi?id=245409
rdar://problem/100155777

Reviewed by Mark Lam.

This patch makes greater use of m_vmValue in FTL, which potentially allows FTL to further reduce
VM pointer materialization. We also move some of the frequently accessed fields to the top of VM
so that we can load them with an immediate offset on ARM64 (thus reducing machine code size).

* Source/JavaScriptCore/ftl/FTLAbstractHeapRepository.h:
* Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp:
(JSC::FTL::DFG::LowerDFGToB3::lower):
(JSC::FTL::DFG::LowerDFGToB3::compileCreateActivation):
(JSC::FTL::DFG::LowerDFGToB3::compileNewFunction):
(JSC::FTL::DFG::LowerDFGToB3::compileCreateDirectArguments):
(JSC::FTL::DFG::LowerDFGToB3::compileNewStringObject):
(JSC::FTL::DFG::LowerDFGToB3::compileNotifyWrite):
(JSC::FTL::DFG::LowerDFGToB3::compileCompareStrictEq):
* Source/JavaScriptCore/runtime/VM.h:
(JSC::VM::offsetOfSoftStackLimit):

Canonical link: https://commits.webkit.org/254690@main
  • Loading branch information
Constellation committed Sep 20, 2022
1 parent af69aeb commit 9c06eb3
Show file tree
Hide file tree
Showing 3 changed files with 52 additions and 43 deletions.
3 changes: 3 additions & 0 deletions Source/JavaScriptCore/ftl/FTLAbstractHeapRepository.h
Original file line number Diff line number Diff line change
Expand Up @@ -156,6 +156,9 @@ namespace JSC { namespace FTL {
macro(HashMapBucket_value, HashMapBucket<HashMapBucketDataKeyValue>::offsetOfValue()) \
macro(HashMapBucket_key, HashMapBucket<HashMapBucketDataKeyValue>::offsetOfKey()) \
macro(HashMapBucket_next, HashMapBucket<HashMapBucketDataKeyValue>::offsetOfNext()) \
macro(VM_heap_barrierThreshold, VM::offsetOfHeapBarrierThreshold()) \
macro(VM_heap_mutatorShouldBeFenced, VM::offsetOfHeapMutatorShouldBeFenced()) \
macro(VM_exception, VM::exceptionOffset()) \
macro(WeakMapImpl_capacity, WeakMapImpl<WeakMapBucket<WeakMapBucketDataKey>>::offsetOfCapacity()) \
macro(WeakMapImpl_buffer, WeakMapImpl<WeakMapBucket<WeakMapBucketDataKey>>::offsetOfBuffer()) \
macro(WeakMapBucket_value, WeakMapBucket<WeakMapBucketDataKeyValue>::offsetOfValue()) \
Expand Down
77 changes: 38 additions & 39 deletions Source/JavaScriptCore/ftl/FTLLowerDFGToB3.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -275,16 +275,17 @@ class LowerDFGToB3 {

// Stack Overflow Check.
unsigned exitFrameSize = m_graph.requiredRegisterCountForExit() * sizeof(Register);
MacroAssembler::AbsoluteAddress addressOfStackLimit(vm->addressOfSoftStackLimit());
PatchpointValue* stackOverflowHandler = m_out.patchpoint(Void);
CallSiteIndex callSiteIndex = callSiteIndexForCodeOrigin(m_ftlState, CodeOrigin(BytecodeIndex(0)));
stackOverflowHandler->appendSomeRegister(m_callFrame);
stackOverflowHandler->appendSomeRegister(m_vmValue);
stackOverflowHandler->clobber(RegisterSet::macroScratchRegisters());
stackOverflowHandler->numGPScratchRegisters = 1;
stackOverflowHandler->setGenerator(
[=] (CCallHelpers& jit, const StackmapGenerationParams& params) {
AllowMacroScratchRegisterUsage allowScratch(jit);
GPRReg fp = params[0].gpr();
GPRReg vmGPR = params[1].gpr();
GPRReg scratch = params.gpScratch(0);

unsigned ftlFrameSize = params.proc().frameSize();
Expand All @@ -296,7 +297,7 @@ class LowerDFGToB3 {
MacroAssembler::JumpList stackOverflow;
if (UNLIKELY(maxFrameSize > Options::reservedZoneSize()))
stackOverflow.append(jit.branchPtr(MacroAssembler::Above, scratch, fp));
stackOverflow.append(jit.branchPtr(MacroAssembler::Above, addressOfStackLimit, scratch));
stackOverflow.append(jit.branchPtr(MacroAssembler::Above, CCallHelpers::Address(vmGPR, VM::offsetOfSoftStackLimit()), scratch));

params.addLatePath([=] (CCallHelpers& jit) {
AllowMacroScratchRegisterUsage allowScratch(jit);
Expand Down Expand Up @@ -7097,12 +7098,12 @@ IGNORE_CLANG_WARNINGS_END
LValue callResult = lazySlowPath(
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operationCreateActivationDirect, locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm),
CCallHelpers::TrustedImmPtr(structure.get()), locations[1].directGPR(),
operationCreateActivationDirect, locations[0].directGPR(), locations[1].directGPR(),
CCallHelpers::TrustedImmPtr(structure.get()), locations[2].directGPR(),
CCallHelpers::TrustedImmPtr(table),
CCallHelpers::TrustedImm64(JSValue::encode(initializationValue)));
},
scope);
m_vmValue, scope);
ValueFromBlock slowResult = m_out.anchor(callResult);
m_out.jump(continuation);

Expand Down Expand Up @@ -7170,8 +7171,6 @@ IGNORE_CLANG_WARNINGS_END

m_out.appendTo(slowPath, continuation);

Vector<LValue> slowPathArguments;
slowPathArguments.append(scope);
VM& vm = this->vm();
LValue callResult = lazySlowPath(
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
Expand All @@ -7184,10 +7183,10 @@ IGNORE_CLANG_WARNINGS_END
operation = operationNewAsyncGeneratorFunctionWithInvalidatedReallocationWatchpoint;

return createLazyCallGenerator(vm, operation,
locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm), locations[1].directGPR(),
locations[0].directGPR(), locations[1].directGPR(), locations[2].directGPR(),
CCallHelpers::TrustedImmPtr(executable));
},
slowPathArguments);
m_vmValue, scope);
ValueFromBlock slowResult = m_out.anchor(callResult);
m_out.jump(continuation);

Expand Down Expand Up @@ -7244,10 +7243,10 @@ IGNORE_CLANG_WARNINGS_END
LValue callResult = lazySlowPath(
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operationCreateDirectArguments, locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm),
CCallHelpers::TrustedImmPtr(structure.get()), locations[1].directGPR(),
operationCreateDirectArguments, locations[0].directGPR(), locations[1].directGPR(),
CCallHelpers::TrustedImmPtr(structure.get()), locations[2].directGPR(),
CCallHelpers::TrustedImm32(minCapacity));
}, length.value);
}, m_vmValue, length.value);
ValueFromBlock slowResult = m_out.anchor(callResult);
m_out.jump(continuation);

Expand Down Expand Up @@ -7581,10 +7580,10 @@ IGNORE_CLANG_WARNINGS_END
LValue slowResultValue = lazySlowPath(
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operationNewStringObject, locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm), locations[1].directGPR(),
operationNewStringObject, locations[0].directGPR(), locations[1].directGPR(), locations[2].directGPR(),
CCallHelpers::TrustedImmPtr(structure.get()));
},
string);
m_vmValue, string);
ValueFromBlock slowResult = m_out.anchor(slowResultValue);
m_out.jump(continuation);

Expand Down Expand Up @@ -9595,10 +9594,10 @@ IGNORE_CLANG_WARNINGS_END

VM& vm = this->vm();
lazySlowPath(
[=, &vm] (const Vector<Location>&) -> RefPtr<LazySlowPath::Generator> {
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operationNotifyWrite, InvalidGPRReg, CCallHelpers::TrustedImmPtr(&vm), CCallHelpers::TrustedImmPtr(set));
});
operationNotifyWrite, InvalidGPRReg, locations[1].directGPR(), CCallHelpers::TrustedImmPtr(set));
}, m_vmValue);
m_out.jump(continuation);

m_out.appendTo(continuation, lastNext);
Expand Down Expand Up @@ -14116,18 +14115,18 @@ IGNORE_CLANG_WARNINGS_END
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operationNewObjectWithButterflyWithIndexingHeaderAndVectorLength,
locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm), CCallHelpers::TrustedImmPtr(structure.get()),
locations[1].directGPR(), locations[2].directGPR());
locations[0].directGPR(), locations[1].directGPR(), CCallHelpers::TrustedImmPtr(structure.get()),
locations[2].directGPR(), locations[3].directGPR());
},
vectorLength, butterflyValue);
m_vmValue, vectorLength, butterflyValue);
} else {
slowObjectValue = lazySlowPath(
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operationNewObjectWithButterfly, locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm),
CCallHelpers::TrustedImmPtr(structure.get()), locations[1].directGPR());
operationNewObjectWithButterfly, locations[0].directGPR(), locations[1].directGPR(),
CCallHelpers::TrustedImmPtr(structure.get()), locations[2].directGPR());
},
butterflyValue);
m_vmValue, butterflyValue);
}
ValueFromBlock slowObject = m_out.anchor(slowObjectValue);
ValueFromBlock slowButterfly = m_out.anchor(
Expand Down Expand Up @@ -14324,11 +14323,11 @@ IGNORE_CLANG_WARNINGS_END
LValue callResult = lazySlowPath(
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operationCreateActivationDirect, locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm),
CCallHelpers::TrustedImmPtr(structure.get()), locations[1].directGPR(),
operationCreateActivationDirect, locations[0].directGPR(), locations[1].directGPR(),
CCallHelpers::TrustedImmPtr(structure.get()), locations[2].directGPR(),
CCallHelpers::TrustedImmPtr(table),
CCallHelpers::TrustedImm64(JSValue::encode(jsUndefined())));
}, scope);
}, m_vmValue, scope);
ValueFromBlock slowResult = m_out.anchor(callResult);
m_out.jump(continuation);

Expand Down Expand Up @@ -14391,9 +14390,9 @@ IGNORE_CLANG_WARNINGS_END
LValue callResult = lazySlowPath(
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operation, locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm),
operation, locations[0].directGPR(), locations[1].directGPR(),
CCallHelpers::TrustedImmPtr(structure.get()));
});
}, m_vmValue);
ValueFromBlock slowResult = m_out.anchor(callResult);
m_out.jump(continuation);

Expand Down Expand Up @@ -15233,15 +15232,15 @@ IGNORE_CLANG_WARNINGS_END
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operationAllocateSimplePropertyStorageWithInitialCapacity,
locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm));
});
locations[0].directGPR(), locations[1].directGPR());
}, m_vmValue);
} else {
slowButterflyValue = lazySlowPath(
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operationAllocateSimplePropertyStorage, locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm),
operationAllocateSimplePropertyStorage, locations[0].directGPR(), locations[1].directGPR(),
CCallHelpers::TrustedImmPtr(sizeInValues));
});
}, m_vmValue);
}
ValueFromBlock slowButterfly = m_out.anchor(slowButterflyValue);

Expand Down Expand Up @@ -17184,9 +17183,9 @@ IGNORE_CLANG_WARNINGS_END
LValue slowResultValue = lazySlowPath(
[=, &vm] (const Vector<Location>& locations) -> RefPtr<LazySlowPath::Generator> {
return createLazyCallGenerator(vm,
operationNewObject, locations[0].directGPR(), CCallHelpers::TrustedImmPtr(&vm),
operationNewObject, locations[0].directGPR(), locations[1].directGPR(),
CCallHelpers::TrustedImmPtr(structure.get()));
});
}, m_vmValue);
ValueFromBlock slowResult = m_out.anchor(slowResultValue);
m_out.jump(continuation);

Expand Down Expand Up @@ -20559,7 +20558,7 @@ IGNORE_CLANG_WARNINGS_END

LValue threshold;
if (isFenced)
threshold = m_out.load32(m_out.absolute(vm().heap.addressOfBarrierThreshold()));
threshold = m_out.load32(m_vmValue, m_heaps.VM_heap_barrierThreshold);
else
threshold = m_out.constInt32(blackThreshold);

Expand Down Expand Up @@ -20601,7 +20600,7 @@ IGNORE_CLANG_WARNINGS_END
LBasicBlock lastNext = m_out.insertNewBlocksBefore(slowPath);

m_out.branch(
m_out.load8ZeroExt32(m_out.absolute(vm().heap.addressOfMutatorShouldBeFenced())),
m_out.load8ZeroExt32(m_vmValue, m_heaps.VM_heap_mutatorShouldBeFenced),
rarely(slowPath), usually(continuation));

m_out.appendTo(slowPath, continuation);
Expand Down Expand Up @@ -20633,7 +20632,7 @@ IGNORE_CLANG_WARNINGS_END
LBasicBlock lastNext = m_out.insertNewBlocksBefore(fastPath);

m_out.branch(
m_out.load8ZeroExt32(m_out.absolute(vm().heap.addressOfMutatorShouldBeFenced())),
m_out.load8ZeroExt32(m_vmValue, m_heaps.VM_heap_mutatorShouldBeFenced),
rarely(slowPath), usually(fastPath));

m_out.appendTo(fastPath, slowPath);
Expand Down Expand Up @@ -20672,7 +20671,7 @@ IGNORE_CLANG_WARNINGS_END
LBasicBlock crash = m_out.newBlock();
LBasicBlock continuation = m_out.newBlock();

LValue exception = m_out.load64(m_out.absolute(vm().addressOfException()));
LValue exception = m_out.load64(m_vmValue, m_heaps.VM_exception);
LValue hadException = m_out.notZero64(exception);

m_out.branch(
Expand Down Expand Up @@ -20731,7 +20730,7 @@ IGNORE_CLANG_WARNINGS_END
m_out.call(Void, m_out.operation(operationExceptionFuzz), weakPointer(globalObject));
}

LValue exception = m_out.load64(m_out.absolute(vm().addressOfException()));
LValue exception = m_out.load64(m_vmValue, m_heaps.VM_exception);
LValue hadException = m_out.notZero64(exception);

CodeOrigin opCatchOrigin;
Expand Down
15 changes: 11 additions & 4 deletions Source/JavaScriptCore/runtime/VM.h
Original file line number Diff line number Diff line change
Expand Up @@ -383,6 +383,12 @@ class VM : public ThreadSafeRefCounted<VM>, public DoublyLinkedListNode<VM> {
RefPtr<JSLock> m_apiLock;
Ref<WTF::RunLoop> m_runLoop;

// Keep the most frequently accessed fields at the top of VM.
void* m_softStackLimit { nullptr };
Exception* m_exception { nullptr };
Exception* m_terminationException { nullptr };
Exception* m_lastException { nullptr };

WeakRandom m_random;
WeakRandom m_heapRandom;
Integrity::Random m_integrityRandom;
Expand Down Expand Up @@ -654,6 +660,11 @@ class VM : public ThreadSafeRefCounted<VM>, public DoublyLinkedListNode<VM> {
return OBJECT_OFFSETOF(VM, heap) + OBJECT_OFFSETOF(Heap, m_mutatorShouldBeFenced);
}

static ptrdiff_t offsetOfSoftStackLimit()
{
return OBJECT_OFFSETOF(VM, m_softStackLimit);
}

void clearLastException() { m_lastException = nullptr; }

CallFrame** addressOfCallFrameForCatch() { return &callFrameForCatch; }
Expand Down Expand Up @@ -961,15 +972,11 @@ class VM : public ThreadSafeRefCounted<VM>, public DoublyLinkedListNode<VM> {
void* m_stackPointerAtVMEntry { nullptr };
size_t m_currentSoftReservedZoneSize;
void* m_stackLimit { nullptr };
void* m_softStackLimit { nullptr };
#if ENABLE(C_LOOP)
void* m_cloopStackLimit { nullptr };
#endif
void* m_lastStackTop { nullptr };

Exception* m_exception { nullptr };
Exception* m_terminationException { nullptr };
Exception* m_lastException { nullptr };
#if ENABLE(EXCEPTION_SCOPE_VERIFICATION)
ExceptionScope* m_topExceptionScope { nullptr };
ExceptionEventLocation m_simulatedThrowPointLocation;
Expand Down

0 comments on commit 9c06eb3

Please sign in to comment.