lanai (public archive): this repository was archived by the owner on Aug 27, 2022 and is now read-only.

Automatic merge of jdk:master into master
duke committed Nov 30, 2020
2 parents add5943 + fdee70d commit 7be48b0
Showing 29 changed files with 401 additions and 249 deletions.
53 changes: 14 additions & 39 deletions src/hotspot/cpu/aarch64/gc/z/z_aarch64.ad
@@ -30,16 +30,19 @@ source_hpp %{

source %{

-static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) {
-ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, weak);
+static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data) {
+if (barrier_data == ZLoadBarrierElided) {
+return;
+}
+ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, barrier_data);
__ ldr(tmp, Address(rthread, ZThreadLocalData::address_bad_mask_offset()));
__ andr(tmp, tmp, ref);
__ cbnz(tmp, *stub->entry());
__ bind(*stub->continuation());
}

static void z_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp) {
-ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, false /* weak */);
+ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, ZLoadBarrierStrong);
__ b(*stub->entry());
__ bind(*stub->continuation());
}
@@ -50,7 +53,7 @@ static void z_load_barrier_slow_path(MacroAssembler& _masm, const MachNode* node
instruct zLoadP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
%{
match(Set dst (LoadP mem));
-predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() == ZLoadBarrierStrong));
+predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() != 0));
effect(TEMP dst, KILL cr);

ins_cost(4 * INSN_COST);
@@ -60,29 +63,7 @@ instruct zLoadP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
ins_encode %{
const Address ref_addr = mem2address($mem->opcode(), as_Register($mem$$base), $mem$$index, $mem$$scale, $mem$$disp);
__ ldr($dst$$Register, ref_addr);
-if (barrier_data() != ZLoadBarrierElided) {
-z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, false /* weak */);
-}
-%}
-
-ins_pipe(iload_reg_mem);
-%}
-
-// Load Weak Pointer
-instruct zLoadWeakP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
-%{
-match(Set dst (LoadP mem));
-predicate(UseZGC && !needs_acquiring_load(n) && (n->as_Load()->barrier_data() == ZLoadBarrierWeak));
-effect(TEMP dst, KILL cr);
-
-ins_cost(4 * INSN_COST);
-
-format %{ "ldr $dst, $mem" %}
-
-ins_encode %{
-const Address ref_addr = mem2address($mem->opcode(), as_Register($mem$$base), $mem$$index, $mem$$scale, $mem$$disp);
-__ ldr($dst$$Register, ref_addr);
-z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, true /* weak */);
+z_load_barrier(_masm, this, ref_addr, $dst$$Register, rscratch2 /* tmp */, barrier_data());
%}

ins_pipe(iload_reg_mem);
@@ -92,7 +73,7 @@ instruct zLoadWeakP(iRegPNoSp dst, memory8 mem, rFlagsReg cr)
instruct zLoadPVolatile(iRegPNoSp dst, indirect mem /* sync_memory */, rFlagsReg cr)
%{
match(Set dst (LoadP mem));
-predicate(UseZGC && needs_acquiring_load(n) && n->as_Load()->barrier_data() == ZLoadBarrierStrong);
+predicate(UseZGC && needs_acquiring_load(n) && n->as_Load()->barrier_data() != 0);
effect(TEMP dst, KILL cr);

ins_cost(VOLATILE_REF_COST);
@@ -101,9 +82,7 @@ instruct zLoadPVolatile(iRegPNoSp dst, indirect mem /* sync_memory */, rFlagsReg

ins_encode %{
__ ldar($dst$$Register, $mem$$Register);
-if (barrier_data() != ZLoadBarrierElided) {
-z_load_barrier(_masm, this, Address($mem$$Register), $dst$$Register, rscratch2 /* tmp */, false /* weak */);
-}
+z_load_barrier(_masm, this, Address($mem$$Register), $dst$$Register, rscratch2 /* tmp */, barrier_data());
%}

ins_pipe(pipe_serial);
@@ -231,7 +210,7 @@ instruct zCompareAndExchangePAcq(iRegPNoSp res, indirect mem, iRegP oldval, iReg

instruct zGetAndSetP(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
match(Set prev (GetAndSetP mem newv));
-predicate(UseZGC && !needs_acquiring_load_exclusive(n) && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
+predicate(UseZGC && !needs_acquiring_load_exclusive(n) && n->as_LoadStore()->barrier_data() != 0);
effect(TEMP_DEF prev, KILL cr);

ins_cost(2 * VOLATILE_REF_COST);
@@ -240,17 +219,15 @@ instruct zGetAndSetP(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{

ins_encode %{
__ atomic_xchg($prev$$Register, $newv$$Register, $mem$$Register);
-if (barrier_data() != ZLoadBarrierElided) {
-z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, rscratch2 /* tmp */, false /* weak */);
-}
+z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, rscratch2 /* tmp */, barrier_data());
%}

ins_pipe(pipe_serial);
%}

instruct zGetAndSetPAcq(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr) %{
match(Set prev (GetAndSetP mem newv));
-predicate(UseZGC && needs_acquiring_load_exclusive(n) && (n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong));
+predicate(UseZGC && needs_acquiring_load_exclusive(n) && (n->as_LoadStore()->barrier_data() != 0));
effect(TEMP_DEF prev, KILL cr);

ins_cost(VOLATILE_REF_COST);
@@ -259,9 +236,7 @@ instruct zGetAndSetPAcq(indirect mem, iRegP newv, iRegPNoSp prev, rFlagsReg cr)

ins_encode %{
__ atomic_xchgal($prev$$Register, $newv$$Register, $mem$$Register);
-if (barrier_data() != ZLoadBarrierElided) {
-z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, rscratch2 /* tmp */, false /* weak */);
-}
+z_load_barrier(_masm, this, Address(noreg, 0), $prev$$Register, rscratch2 /* tmp */, barrier_data());
%}
ins_pipe(pipe_serial);
%}
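
Note: the aarch64 rules above stop special-casing weak loads. The old bool weak parameter becomes the node's full barrier_data() value, the elision check moves out of every ins_encode block into z_load_barrier itself, and the dedicated zLoadWeakP rule disappears because one generic rule now covers every barrier kind. Below is a minimal, self-contained sketch of that dispatch pattern; the names mirror the diff, but the constant values and the printf stand-in for code emission are illustrative only, not HotSpot code.

#include <cstdio>

// Illustrative barrier-kind bits; the real values are defined by ZGC.
enum ZBarrierBits : int {
  ZLoadBarrierElided  = 0,
  ZLoadBarrierStrong  = 1 << 0,
  ZLoadBarrierWeak    = 1 << 1,
  ZLoadBarrierPhantom = 1 << 2
};

// The emit helper now receives the whole bit set and decides on its own
// whether anything needs to be emitted.
static void z_load_barrier(int barrier_data) {
  if (barrier_data == ZLoadBarrierElided) {
    return;  // barrier proven redundant; emit nothing
  }
  std::printf("emit bad-mask test + branch to slow-path stub (data=0x%x)\n", barrier_data);
}

int main() {
  z_load_barrier(ZLoadBarrierElided);  // prints nothing
  z_load_barrier(ZLoadBarrierWeak);    // emits the check and the stub branch
  return 0;
}

With the check centralized, each matcher rule can simply pass barrier_data() through unconditionally.
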
46 changes: 14 additions & 32 deletions src/hotspot/cpu/x86/gc/z/z_x86_64.ad
@@ -32,8 +32,11 @@ source %{

#include "c2_intelJccErratum_x86.hpp"

-static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) {
-ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, weak);
+static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data) {
+if (barrier_data == ZLoadBarrierElided) {
+return; // Elided.
+}
+ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, barrier_data);
{
IntelJccErratumAlignment intel_alignment(_masm, 10 /* jcc_size */);
__ testptr(ref, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
@@ -43,7 +46,7 @@ static void z_load_barrier(MacroAssembler& _masm, const MachNode* node, Address
}

static void z_load_barrier_cmpxchg(MacroAssembler& _masm, const MachNode* node, Address ref_addr, Register ref, Register tmp, Label& good) {
-ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, false /* weak */);
+ZLoadBarrierStubC2* const stub = ZLoadBarrierStubC2::create(node, ref_addr, ref, tmp, ZLoadBarrierStrong);
{
IntelJccErratumAlignment intel_alignment(_masm, 10 /* jcc_size */);
__ testptr(ref, Address(r15_thread, ZThreadLocalData::address_bad_mask_offset()));
@@ -61,7 +64,7 @@ static void z_load_barrier_cmpxchg(MacroAssembler& _masm, const MachNode* node,
// Load Pointer
instruct zLoadP(rRegP dst, memory mem, rFlagsReg cr)
%{
-predicate(UseZGC && n->as_Load()->barrier_data() == ZLoadBarrierStrong);
+predicate(UseZGC && n->as_Load()->barrier_data() != 0);
match(Set dst (LoadP mem));
effect(KILL cr, TEMP dst);

@@ -71,28 +74,7 @@ instruct zLoadP(rRegP dst, memory mem, rFlagsReg cr)

ins_encode %{
__ movptr($dst$$Register, $mem$$Address);
-if (barrier_data() != ZLoadBarrierElided) {
-z_load_barrier(_masm, this, $mem$$Address, $dst$$Register, noreg /* tmp */, false /* weak */);
-}
-%}
-
-ins_pipe(ialu_reg_mem);
-%}
-
-// Load Weak Pointer
-instruct zLoadWeakP(rRegP dst, memory mem, rFlagsReg cr)
-%{
-predicate(UseZGC && n->as_Load()->barrier_data() == ZLoadBarrierWeak);
-match(Set dst (LoadP mem));
-effect(KILL cr, TEMP dst);
-
-ins_cost(125);
-
-format %{ "movq $dst, $mem" %}
-
-ins_encode %{
-__ movptr($dst$$Register, $mem$$Address);
-z_load_barrier(_masm, this, $mem$$Address, $dst$$Register, noreg /* tmp */, true /* weak */);
+z_load_barrier(_masm, this, $mem$$Address, $dst$$Register, noreg /* tmp */, barrier_data());
%}

ins_pipe(ialu_reg_mem);
@@ -107,11 +89,12 @@ instruct zCompareAndExchangeP(memory mem, rax_RegP oldval, rRegP newval, rRegP t
"cmpxchgq $newval, $mem" %}

ins_encode %{
-if (barrier_data() != ZLoadBarrierElided) {
+if (barrier_data() != ZLoadBarrierElided) { // barrier could be elided by ZBarrierSetC2::analyze_dominating_barriers()
__ movptr($tmp$$Register, $oldval$$Register);
}
__ lock();
__ cmpxchgptr($newval$$Register, $mem$$Address);

if (barrier_data() != ZLoadBarrierElided) {
Label good;
z_load_barrier_cmpxchg(_masm, this, $mem$$Address, $oldval$$Register, $tmp$$Register, good);
@@ -137,11 +120,12 @@ instruct zCompareAndSwapP(rRegI res, memory mem, rRegP newval, rRegP tmp, rFlags
"movzbl $res, $res" %}

ins_encode %{
-if (barrier_data() != ZLoadBarrierElided) {
+if (barrier_data() != ZLoadBarrierElided) { // barrier could be elided by ZBarrierSetC2::analyze_dominating_barriers()
__ movptr($tmp$$Register, $oldval$$Register);
}
__ lock();
__ cmpxchgptr($newval$$Register, $mem$$Address);

if (barrier_data() != ZLoadBarrierElided) {
Label good;
z_load_barrier_cmpxchg(_masm, this, $mem$$Address, $oldval$$Register, $tmp$$Register, good);
@@ -160,16 +144,14 @@ instruct zCompareAndSwapP(rRegI res, memory mem, rRegP newval, rRegP tmp, rFlags

instruct zXChgP(memory mem, rRegP newval, rFlagsReg cr) %{
match(Set newval (GetAndSetP mem newval));
-predicate(UseZGC && n->as_LoadStore()->barrier_data() == ZLoadBarrierStrong);
+predicate(UseZGC && n->as_LoadStore()->barrier_data() != 0);
effect(KILL cr);

format %{ "xchgq $newval, $mem" %}

ins_encode %{
__ xchgptr($newval$$Register, $mem$$Address);
-if (barrier_data() != ZLoadBarrierElided) {
-z_load_barrier(_masm, this, Address(noreg, 0), $newval$$Register, noreg /* tmp */, false /* weak */);
-}
+z_load_barrier(_masm, this, Address(noreg, 0), $newval$$Register, noreg /* tmp */, barrier_data());
%}

ins_pipe(pipe_cmpxchg);
2 changes: 2 additions & 0 deletions src/hotspot/share/classfile/vmIntrinsics.hpp
@@ -390,6 +390,8 @@ class methodHandle;
\
/* java/lang/ref/Reference */ \
do_intrinsic(_Reference_get, java_lang_ref_Reference, get_name, void_object_signature, F_R) \
+do_intrinsic(_Reference_refersTo0, java_lang_ref_Reference, refersTo0_name, object_boolean_signature, F_R) \
+do_intrinsic(_PhantomReference_refersTo0, java_lang_ref_PhantomReference, refersTo0_name, object_boolean_signature, F_R) \
\
/* support for com.sun.crypto.provider.AESCrypt and some of its callers */ \
do_class(com_sun_crypto_provider_aescrypt, "com/sun/crypto/provider/AESCrypt") \
1 change: 1 addition & 0 deletions src/hotspot/share/classfile/vmSymbols.hpp
@@ -394,6 +394,7 @@
template(dispatchUncaughtException_name, "dispatchUncaughtException") \
template(loadClass_name, "loadClass") \
template(get_name, "get") \
+template(refersTo0_name, "refersTo0") \
template(put_name, "put") \
template(type_name, "type") \
template(findNative_name, "findNative") \
8 changes: 5 additions & 3 deletions src/hotspot/share/gc/g1/c2/g1BarrierSetC2.cpp
@@ -604,7 +604,9 @@ Node* G1BarrierSetC2::load_at_resolved(C2Access& access, const Type* val_type) c
bool in_heap = (decorators & IN_HEAP) != 0;
bool in_native = (decorators & IN_NATIVE) != 0;
bool on_weak = (decorators & ON_WEAK_OOP_REF) != 0;
+bool on_phantom = (decorators & ON_PHANTOM_OOP_REF) != 0;
bool is_unordered = (decorators & MO_UNORDERED) != 0;
+bool no_keepalive = (decorators & AS_NO_KEEPALIVE) != 0;
bool is_mixed = !in_heap && !in_native;
bool need_cpu_mem_bar = !is_unordered || mismatched || is_mixed;

@@ -618,8 +620,8 @@ Node* G1BarrierSetC2::load_at_resolved(C2Access& access, const Type* val_type) c
// SATB log buffer using the pre-barrier mechanism.
// Also we need to add memory barrier to prevent commoning reads
// from this field across safepoint since GC can change its value.
-bool need_read_barrier = in_heap && (on_weak ||
-(unknown && offset != top && obj != top));
+bool need_read_barrier = (((on_weak || on_phantom) && !no_keepalive) ||
+(in_heap && unknown && offset != top && obj != top));

if (!access.is_oop() || !need_read_barrier) {
return load;
@@ -629,7 +631,7 @@ Node* G1BarrierSetC2::load_at_resolved(C2Access& access, const Type* val_type) c
C2ParseAccess& parse_access = static_cast<C2ParseAccess&>(access);
GraphKit* kit = parse_access.kit();

-if (on_weak) {
+if (on_weak || on_phantom) {
// Use the pre-barrier to record the value in the referent field
pre_barrier(kit, false /* do_load */,
kit->control(),
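
Note: in the G1 hunks above, weak and phantom reference loads now both take the SATB keep-alive pre-barrier unless the access is explicitly marked AS_NO_KEEPALIVE, presumably so the new refersTo0 intrinsics can read a referent without keeping it alive. A stand-alone sketch of the reworked need_read_barrier decision follows; the decorator bit values are illustrative, not the HotSpot definitions, and the in-heap unknown-offset case is folded into one flag for brevity.

#include <cstdio>

enum Decorators : unsigned {
  ON_WEAK_OOP_REF    = 1u << 0,
  ON_PHANTOM_OOP_REF = 1u << 1,
  AS_NO_KEEPALIVE    = 1u << 2
};

// Weak and phantom loads get the pre-barrier unless marked no-keepalive;
// in-heap loads at an unknown offset are handled as before.
static bool needs_pre_barrier(unsigned decorators, bool unknown_in_heap_access) {
  const bool on_weak    = (decorators & ON_WEAK_OOP_REF) != 0;
  const bool on_phantom = (decorators & ON_PHANTOM_OOP_REF) != 0;
  const bool no_keep    = (decorators & AS_NO_KEEPALIVE) != 0;
  return ((on_weak || on_phantom) && !no_keep) || unknown_in_heap_access;
}

int main() {
  std::printf("%d\n", needs_pre_barrier(ON_PHANTOM_OOP_REF, false));                    // 1
  std::printf("%d\n", needs_pre_barrier(ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE, false));  // 0
  return 0;
}
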
10 changes: 7 additions & 3 deletions src/hotspot/share/gc/shenandoah/c2/shenandoahSupport.cpp
@@ -996,9 +996,13 @@ void ShenandoahBarrierC2Support::call_lrb_stub(Node*& ctrl, Node*& val, Node* lo
}
} else {
assert(is_phantom, "only remaining strength");
-assert(!is_narrow, "phantom access cannot be narrow");
-calladdr = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom);
-name = "load_reference_barrier_phantom";
+if (is_narrow) {
+calladdr = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom_narrow);
+name = "load_reference_barrier_phantom_narrow";
+} else {
+calladdr = CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom);
+name = "load_reference_barrier_phantom";
+}
}
Node* call = new CallLeafNode(ShenandoahBarrierSetC2::shenandoah_load_reference_barrier_Type(), calladdr, name, TypeRawPtr::BOTTOM);

4 changes: 4 additions & 0 deletions src/hotspot/share/gc/shenandoah/shenandoahRuntime.cpp
@@ -77,3 +77,7 @@ JRT_END
JRT_LEAF(oopDesc*, ShenandoahRuntime::load_reference_barrier_phantom(oopDesc * src, oop* load_addr))
return (oopDesc*) ShenandoahBarrierSet::barrier_set()->load_reference_barrier<ON_PHANTOM_OOP_REF, oop>(oop(src), load_addr);
JRT_END

+JRT_LEAF(oopDesc*, ShenandoahRuntime::load_reference_barrier_phantom_narrow(oopDesc * src, narrowOop* load_addr))
+return (oopDesc*) ShenandoahBarrierSet::barrier_set()->load_reference_barrier<ON_PHANTOM_OOP_REF, narrowOop>(oop(src), load_addr);
+JRT_END
1 change: 1 addition & 0 deletions src/hotspot/share/gc/shenandoah/shenandoahRuntime.hpp
@@ -45,6 +45,7 @@ class ShenandoahRuntime : public AllStatic {
static oopDesc* load_reference_barrier_weak_narrow(oopDesc* src, narrowOop* load_addr);

static oopDesc* load_reference_barrier_phantom(oopDesc* src, oop* load_addr);
+static oopDesc* load_reference_barrier_phantom_narrow(oopDesc* src, narrowOop* load_addr);

static void shenandoah_clone_barrier(oopDesc* src);
};
35 changes: 27 additions & 8 deletions src/hotspot/share/gc/z/c2/zBarrierSetC2.cpp
@@ -64,8 +64,7 @@ class ZBarrierSetC2State : public ResourceObj {
}

const MachNode* const mach = node->as_Mach();
-if (mach->barrier_data() != ZLoadBarrierStrong &&
-mach->barrier_data() != ZLoadBarrierWeak) {
+if (mach->barrier_data() == ZLoadBarrierElided) {
// Don't need liveness data for nodes without barriers
return NULL;
}
@@ -84,21 +83,21 @@ static ZBarrierSetC2State* barrier_set_state() {
return reinterpret_cast<ZBarrierSetC2State*>(Compile::current()->barrier_set_state());
}

-ZLoadBarrierStubC2* ZLoadBarrierStubC2::create(const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) {
-ZLoadBarrierStubC2* const stub = new (Compile::current()->comp_arena()) ZLoadBarrierStubC2(node, ref_addr, ref, tmp, weak);
+ZLoadBarrierStubC2* ZLoadBarrierStubC2::create(const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data) {
+ZLoadBarrierStubC2* const stub = new (Compile::current()->comp_arena()) ZLoadBarrierStubC2(node, ref_addr, ref, tmp, barrier_data);
if (!Compile::current()->output()->in_scratch_emit_size()) {
barrier_set_state()->stubs()->append(stub);
}

return stub;
}

-ZLoadBarrierStubC2::ZLoadBarrierStubC2(const MachNode* node, Address ref_addr, Register ref, Register tmp, bool weak) :
+ZLoadBarrierStubC2::ZLoadBarrierStubC2(const MachNode* node, Address ref_addr, Register ref, Register tmp, int barrier_data) :
_node(node),
_ref_addr(ref_addr),
_ref(ref),
_tmp(tmp),
-_weak(weak),
+_barrier_data(barrier_data),
_entry(),
_continuation() {
assert_different_registers(ref, ref_addr.base());
@@ -118,7 +117,19 @@ Register ZLoadBarrierStubC2::tmp() const {
}

address ZLoadBarrierStubC2::slow_path() const {
-const DecoratorSet decorators = _weak ? ON_WEAK_OOP_REF : ON_STRONG_OOP_REF;
+DecoratorSet decorators = DECORATORS_NONE;
+if (_barrier_data & ZLoadBarrierStrong) {
+decorators |= ON_STRONG_OOP_REF;
+}
+if (_barrier_data & ZLoadBarrierWeak) {
+decorators |= ON_WEAK_OOP_REF;
+}
+if (_barrier_data & ZLoadBarrierPhantom) {
+decorators |= ON_PHANTOM_OOP_REF;
+}
+if (_barrier_data & ZLoadBarrierNoKeepalive) {
+decorators |= AS_NO_KEEPALIVE;
+}
return ZBarrierSetRuntime::load_barrier_on_oop_field_preloaded_addr(decorators);
}

@@ -318,10 +329,18 @@ void ZBarrierSetC2::analyze_dominating_barriers() const {
MachNode* const mach = node->as_Mach();
switch (mach->ideal_Opcode()) {
case Op_LoadP:
+if ((mach->barrier_data() & ZLoadBarrierStrong) != 0) {
+barrier_loads.push(mach);
+}
+if ((mach->barrier_data() & (ZLoadBarrierStrong | ZLoadBarrierNoKeepalive)) ==
+ZLoadBarrierStrong) {
+mem_ops.push(mach);
+}
+break;
case Op_CompareAndExchangeP:
case Op_CompareAndSwapP:
case Op_GetAndSetP:
-if (mach->barrier_data() == ZLoadBarrierStrong) {
+if ((mach->barrier_data() & ZLoadBarrierStrong) != 0) {
barrier_loads.push(mach);
}
case Op_StoreP:
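
Note: the zBarrierSetC2.cpp changes turn the per-node barrier information into a small bit set. slow_path() ORs together one decorator per bit, so a single stub implementation serves strong, weak, and phantom loads, and analyze_dominating_barriers() appears to let only strong loads without the no-keepalive bit serve as accesses that can elide later strong barriers. A stand-alone sketch of the bit-to-decorator mapping, with illustrative flag values rather than the HotSpot definitions:

#include <cstdio>

enum ZBarrierBits : int {
  ZLoadBarrierStrong      = 1 << 0,
  ZLoadBarrierWeak        = 1 << 1,
  ZLoadBarrierPhantom     = 1 << 2,
  ZLoadBarrierNoKeepalive = 1 << 3
};

enum Decorators : unsigned {
  ON_STRONG_OOP_REF  = 1u << 0,
  ON_WEAK_OOP_REF    = 1u << 1,
  ON_PHANTOM_OOP_REF = 1u << 2,
  AS_NO_KEEPALIVE    = 1u << 3
};

// Mirrors ZLoadBarrierStubC2::slow_path(): each barrier bit contributes the
// matching decorator when selecting the runtime slow-path entry point.
static unsigned decorators_for(int barrier_data) {
  unsigned d = 0;
  if (barrier_data & ZLoadBarrierStrong)      d |= ON_STRONG_OOP_REF;
  if (barrier_data & ZLoadBarrierWeak)        d |= ON_WEAK_OOP_REF;
  if (barrier_data & ZLoadBarrierPhantom)     d |= ON_PHANTOM_OOP_REF;
  if (barrier_data & ZLoadBarrierNoKeepalive) d |= AS_NO_KEEPALIVE;
  return d;
}

int main() {
  // A phantom, no-keepalive load (what a refersTo-style read would request)
  // maps to ON_PHANTOM_OOP_REF | AS_NO_KEEPALIVE.
  std::printf("0x%x\n", decorators_for(ZLoadBarrierPhantom | ZLoadBarrierNoKeepalive));
  return 0;
}
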
