8252990: Intrinsify Unsafe.storeStoreFence #6136
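Context for the change: a store-store fence only keeps earlier stores from reordering with later stores, which is the cheap ordering needed for racy publication. A minimal sketch of that pattern, written against the public `VarHandle.storeStoreFence()` that is expected to bottom out in `Unsafe.storeStoreFence` (the class and field names below are illustrative, not part of this patch):

```java
import java.lang.invoke.VarHandle;

final class Publication {
    static final class Box {
        int a, b;
    }

    // Plain (non-volatile) field: readers may observe it racily.
    static Box published;

    static void publish(int x, int y) {
        Box box = new Box();
        box.a = x;
        box.b = y;
        // Keep the two field stores above from reordering with the
        // publishing store below; no ordering of loads is requested.
        VarHandle.storeStoreFence();
        published = box;
    }
}
```

Note this shows only the writer half that the fence is about; a reader needs its own ordering (an acquire read or a load-load fence) for the guarantee to be usable.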

@@ -8567,6 +8567,7 @@ instruct membar_release() %{

instruct membar_storestore() %{
match(MemBarStoreStore);
match(StoreStoreFence);
ins_cost(VOLATILE_REF_COST);

format %{ "MEMBAR-store-store" %}
@@ -4517,6 +4517,7 @@ instruct storeF( memoryF mem, regF src) %{
// pattern-match out unnecessary membars
instruct membar_storestore() %{
match(MemBarStoreStore);
match(StoreStoreFence);
ins_cost(4*MEMORY_REF_COST);

size(4);
@@ -7153,6 +7153,7 @@ instruct membar_release() %{

instruct membar_storestore() %{
match(MemBarStoreStore);
match(StoreStoreFence);
ins_cost(4*MEMORY_REF_COST);

format %{ "MEMBAR-store-store" %}
@@ -5055,6 +5055,7 @@ instruct membar_CPUOrder() %{

instruct membar_storestore() %{
match(MemBarStoreStore);
match(StoreStoreFence);
ins_cost(0);
size(0);
format %{ "MEMBAR-storestore (empty)" %}
@@ -6659,6 +6659,7 @@ instruct unnecessary_membar_volatile() %{

instruct membar_storestore() %{
match(MemBarStoreStore);
match(StoreStoreFence);
ins_cost(0);

size(0);
@@ -6787,6 +6787,7 @@ instruct unnecessary_membar_volatile()

instruct membar_storestore() %{
match(MemBarStoreStore);
match(StoreStoreFence);
ins_cost(0);

size(0);
@@ -4117,6 +4117,7 @@ bool MatchRule::is_ideal_membar() const {
!strcmp(_opType,"MemBarReleaseLock") ||
!strcmp(_opType,"LoadFence" ) ||
!strcmp(_opType,"StoreFence") ||
!strcmp(_opType,"StoreStoreFence") ||
!strcmp(_opType,"MemBarVolatile") ||
!strcmp(_opType,"MemBarCPUOrder") ||
!strcmp(_opType,"MemBarStoreStore") ||
@@ -142,6 +142,7 @@ bool Compiler::is_intrinsic_supported(const methodHandle& method) {
// since GC can change its value.
case vmIntrinsics::_loadFence:
case vmIntrinsics::_storeFence:
case vmIntrinsics::_storeStoreFence:
case vmIntrinsics::_fullFence:
case vmIntrinsics::_floatToRawIntBits:
case vmIntrinsics::_intBitsToFloat:
@@ -2984,6 +2984,9 @@ void LIRGenerator::do_Intrinsic(Intrinsic* x) {
case vmIntrinsics::_storeFence:
__ membar_release();
break;
case vmIntrinsics::_storeStoreFence:
__ membar_storestore();
break;
case vmIntrinsics::_fullFence :
__ membar();
break;
@@ -523,6 +523,9 @@ class methodHandle;
do_intrinsic(_storeFence, jdk_internal_misc_Unsafe, storeFence_name, storeFence_signature, F_RN) \
do_name( storeFence_name, "storeFence") \
do_alias( storeFence_signature, void_method_signature) \
do_intrinsic(_storeStoreFence, jdk_internal_misc_Unsafe, storeStoreFence_name, storeStoreFence_signature, F_R) \
do_name( storeStoreFence_name, "storeStoreFence") \
do_alias( storeStoreFence_signature, void_method_signature) \
do_intrinsic(_fullFence, jdk_internal_misc_Unsafe, fullFence_name, fullFence_signature, F_RN) \
do_name( fullFence_name, "fullFence") \
do_alias( fullFence_signature, void_method_signature) \
@@ -604,6 +604,7 @@ bool C2Compiler::is_intrinsic_supported(const methodHandle& method, bool is_virt
case vmIntrinsics::_putLongUnaligned:
case vmIntrinsics::_loadFence:
case vmIntrinsics::_storeFence:
case vmIntrinsics::_storeStoreFence:
case vmIntrinsics::_fullFence:
case vmIntrinsics::_currentThread:
#ifdef JFR_HAVE_INTRINSICS
@@ -219,6 +219,7 @@ macro(MemBarAcquireLock)
macro(MemBarCPUOrder)
macro(MemBarRelease)
macro(StoreFence)
macro(StoreStoreFence)
macro(MemBarReleaseLock)
macro(MemBarVolatile)
macro(MemBarStoreStore)
@@ -467,6 +467,7 @@ bool LibraryCallKit::try_to_inline(int predicate) {

case vmIntrinsics::_loadFence:
case vmIntrinsics::_storeFence:
case vmIntrinsics::_storeStoreFence:
case vmIntrinsics::_fullFence: return inline_unsafe_fence(intrinsic_id());

case vmIntrinsics::_onSpinWait: return inline_onspinwait();
@@ -2695,6 +2696,9 @@ bool LibraryCallKit::inline_unsafe_fence(vmIntrinsics::ID id) {
case vmIntrinsics::_storeFence:
insert_mem_bar(Op_StoreFence);
return true;
case vmIntrinsics::_storeStoreFence:
insert_mem_bar(Op_StoreStoreFence);
return true;
case vmIntrinsics::_fullFence:
insert_mem_bar(Op_MemBarVolatile);
return true;
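Once C2 maps `_storeStoreFence` to its own `Op_StoreStoreFence` barrier (empty on x86 per the .ad rules above, a plain store-store barrier elsewhere), the payoff could be compared against the stronger fences with a JMH sketch along these lines; the benchmark class is illustrative and not part of this change:

```java
import java.lang.invoke.VarHandle;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.State;

@State(Scope.Thread)
public class FenceBench {
    int a, b;

    @Benchmark
    public void storeStore() {
        a = 1;
        VarHandle.storeStoreFence(); // candidate for the new, cheaper barrier
        b = 1;
    }

    @Benchmark
    public void release() {
        a = 1;
        VarHandle.releaseFence();    // also orders earlier loads against later stores
        b = 1;
    }

    @Benchmark
    public void full() {
        a = 1;
        VarHandle.fullFence();       // full two-way fence
        b = 1;
    }
}
```

On x86 the diff above makes the store-store barrier empty (`ins_cost(0)`, `size(0)`), while a full fence still has to emit a real instruction, so the first variant is where the intrinsic should show up.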
@@ -1080,7 +1080,8 @@ Node* MemNode::can_see_stored_value(Node* st, PhaseTransform* phase) const {
opc == Op_MemBarRelease ||
opc == Op_StoreFence ||
opc == Op_MemBarReleaseLock ||
- opc == Op_MemBarStoreStore) {
+ opc == Op_MemBarStoreStore ||
+ opc == Op_StoreStoreFence) {
Node* mem = current->in(0)->in(TypeFunc::Memory);
if (mem->is_MergeMem()) {
MergeMemNode* merge = mem->as_MergeMem();
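Why `can_see_stored_value` is allowed to look through the new node, like the other release-style barriers in this list: a store-store fence does not order stores against later loads in the same thread, so a following load may still be fed directly from the earlier store. A hedged Java illustration (not part of the patch):

```java
import java.lang.invoke.VarHandle;

final class LookThrough {
    int x;

    int demo() {
        x = 42;
        // The fence orders this store only against *later stores*; it says
        // nothing about the load below, so the JIT may still forward 42.
        VarHandle.storeStoreFence();
        return x;
    }
}
```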
@@ -3300,13 +3301,14 @@ MemBarNode* MemBarNode::make(Compile* C, int opcode, int atp, Node* pn) {
case Op_LoadFence: return new LoadFenceNode(C, atp, pn);
case Op_MemBarRelease: return new MemBarReleaseNode(C, atp, pn);
case Op_StoreFence: return new StoreFenceNode(C, atp, pn);
+ case Op_MemBarStoreStore: return new MemBarStoreStoreNode(C, atp, pn);
+ case Op_StoreStoreFence: return new StoreStoreFenceNode(C, atp, pn);
case Op_MemBarAcquireLock: return new MemBarAcquireLockNode(C, atp, pn);
case Op_MemBarReleaseLock: return new MemBarReleaseLockNode(C, atp, pn);
case Op_MemBarVolatile: return new MemBarVolatileNode(C, atp, pn);
case Op_MemBarCPUOrder: return new MemBarCPUOrderNode(C, atp, pn);
case Op_OnSpinWait: return new OnSpinWaitNode(C, atp, pn);
case Op_Initialize: return new InitializeNode(C, atp, pn);
- case Op_MemBarStoreStore: return new MemBarStoreStoreNode(C, atp, pn);
case Op_Blackhole: return new BlackholeNode(C, atp, pn);
default: ShouldNotReachHere(); return NULL;
}
@@ -1309,6 +1309,13 @@ class MemBarStoreStoreNode: public MemBarNode {
virtual int Opcode() const;
};

class StoreStoreFenceNode: public MemBarNode {
public:
StoreStoreFenceNode(Compile* C, int alias_idx, Node* precedent)
: MemBarNode(C, alias_idx, precedent) {}
virtual int Opcode() const;
};

// Ordering between a volatile store and a following volatile load.
// Requires multi-CPU visibility?
class MemBarVolatileNode: public MemBarNode {
@@ -3441,16 +3441,15 @@ public final void loadLoadFence() {
* Ensures that stores before the fence will not be reordered with
* stores after the fence.
*
* @implNote
* This method is operationally equivalent to {@link #storeFence()}.
*
* @since 9
*/
@IntrinsicCandidate
public final void storeStoreFence() {
// Without the special intrinsic, default to a stronger storeFence,
// which is already intrinsified.
storeFence();
}
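The pure-Java fallback above is conservative but always correct: `storeFence()` forbids a strict superset of the reorderings forbidden by `storeStoreFence()`, since it additionally orders earlier loads against later stores. An illustrative comparison using the public VarHandle fences, where `releaseFence()` documents the same ordering as `Unsafe.storeFence()` (the class is not JDK code):

```java
import java.lang.invoke.VarHandle;

final class FenceStrength {
    int a, b;

    void storeStoreOnly() {
        a = 1;
        VarHandle.storeStoreFence(); // earlier stores ordered before later stores
        b = 1;
    }

    void fallbackStrength() {
        a = 1;
        VarHandle.releaseFence();    // earlier loads AND stores ordered before later stores
        b = 1;
    }
}
```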


/**
* Throws IllegalAccessError; for use by the VM for access control
* error support.