
Commit

Merge branch 'master' into 8255384-SSBlock
pchilano committed Nov 16, 2020
2 parents f55d066 + b8de239 commit bddb258
Showing 74 changed files with 2,827 additions and 615 deletions.
579 changes: 574 additions & 5 deletions .github/workflows/submit.yml

Large diffs are not rendered by default.

@@ -109,7 +109,7 @@ LIR_Opr ShenandoahBarrierSetC1::atomic_xchg_at_resolved(LIRAccess& access, LIRIt
__ xchg(access.resolved_addr(), value_opr, result, tmp);

if (access.is_oop()) {
-    result = load_reference_barrier(access.gen(), result, LIR_OprFact::addressConst(0), ShenandoahBarrierSet::AccessKind::NORMAL);
+    result = load_reference_barrier(access.gen(), result, LIR_OprFact::addressConst(0), access.decorators());
LIR_Opr tmp = gen->new_register(type);
__ move(result, tmp);
result = tmp;
@@ -225,11 +225,17 @@ void ShenandoahBarrierSetAssembler::resolve_forward_pointer_not_null(MacroAssemb
}
}

-void ShenandoahBarrierSetAssembler::load_reference_barrier(MacroAssembler* masm, Register dst, Address load_addr, ShenandoahBarrierSet::AccessKind kind) {
+void ShenandoahBarrierSetAssembler::load_reference_barrier(MacroAssembler* masm, Register dst, Address load_addr, DecoratorSet decorators) {
assert(ShenandoahLoadRefBarrier, "Should be enabled");
assert(dst != rscratch2, "need rscratch2");
assert_different_registers(load_addr.base(), load_addr.index(), rscratch1, rscratch2);

+  bool is_strong = ShenandoahBarrierSet::is_strong_access(decorators);
+  bool is_weak = ShenandoahBarrierSet::is_weak_access(decorators);
+  bool is_phantom = ShenandoahBarrierSet::is_phantom_access(decorators);
+  bool is_native = ShenandoahBarrierSet::is_native_access(decorators);
+  bool is_narrow = UseCompressedOops && !is_native;

Label heap_stable, not_cset;
__ enter();
Address gc_state(rthread, in_bytes(ShenandoahThreadLocalData::gc_state_offset()));
@@ -252,34 +258,30 @@ void ShenandoahBarrierSetAssembler::load_reference_barrier(MacroAssembler* masm,
__ mov(r0, dst);

// Test for in-cset
-  if (kind == ShenandoahBarrierSet::AccessKind::NORMAL) {
+  if (is_strong) {
__ mov(rscratch2, ShenandoahHeap::in_cset_fast_test_addr());
__ lsr(rscratch1, r0, ShenandoahHeapRegion::region_size_bytes_shift_jint());
__ ldrb(rscratch2, Address(rscratch2, rscratch1));
__ tbz(rscratch2, 0, not_cset);
}

__ push_call_clobbered_registers();
-  switch (kind) {
-    case ShenandoahBarrierSet::AccessKind::NORMAL:
-      if (UseCompressedOops) {
-        __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_narrow));
-      } else {
-        __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier));
-      }
-      break;
-    case ShenandoahBarrierSet::AccessKind::WEAK:
-      if (UseCompressedOops) {
-        __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak_narrow));
-      } else {
-        __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak));
-      }
-      break;
-    case ShenandoahBarrierSet::AccessKind::NATIVE:
+  if (is_strong) {
+    if (is_narrow) {
+      __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_strong_narrow));
+    } else {
+      __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_strong));
+    }
+  } else if (is_weak) {
+    if (is_narrow) {
+      __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak_narrow));
+    } else {
      __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak));
-      break;
-    default:
-      ShouldNotReachHere();
+    }
+  } else {
+    assert(is_phantom, "only remaining strength");
+    assert(!is_narrow, "phantom access cannot be narrow");
+    __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak));
  }
__ blr(lr);
__ mov(rscratch1, r0);
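
Note: the barrier strength above is now derived from the access DecoratorSet instead of the retired ShenandoahBarrierSet::AccessKind enum. As a rough, self-contained illustration of how such decorator bits can be classified (the bit names mirror HotSpot's ON_STRONG_OOP_REF, ON_WEAK_OOP_REF, ON_PHANTOM_OOP_REF and IN_NATIVE decorators from oops/accessDecorators.hpp, but the typedef, bit values and helpers below are a sketch, not the ShenandoahBarrierSet implementation):

#include <cstdint>

// Sketch only: placeholder decorator bits standing in for HotSpot's real values.
typedef uint64_t DecoratorSet;
const DecoratorSet ON_STRONG_OOP_REF  = DecoratorSet(1) << 0;
const DecoratorSet ON_WEAK_OOP_REF    = DecoratorSet(1) << 1;
const DecoratorSet ON_PHANTOM_OOP_REF = DecoratorSet(1) << 2;
const DecoratorSet IN_NATIVE          = DecoratorSet(1) << 3;

// Mirrors the predicates used in the diff: one reference strength per access,
// and a native (off-heap) access never uses the narrow (compressed-oop) entry.
inline bool is_strong_access (DecoratorSet d) { return (d & ON_STRONG_OOP_REF)  != 0; }
inline bool is_weak_access   (DecoratorSet d) { return (d & ON_WEAK_OOP_REF)    != 0; }
inline bool is_phantom_access(DecoratorSet d) { return (d & ON_PHANTOM_OOP_REF) != 0; }
inline bool is_native_access (DecoratorSet d) { return (d & IN_NATIVE)          != 0; }
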
@@ -338,8 +340,7 @@ void ShenandoahBarrierSetAssembler::load_at(MacroAssembler* masm, DecoratorSet d

BarrierSetAssembler::load_at(masm, decorators, type, dst, src, tmp1, tmp_thread);

-  ShenandoahBarrierSet::AccessKind kind = ShenandoahBarrierSet::access_kind(decorators, type);
-  load_reference_barrier(masm, dst, src, kind);
+  load_reference_barrier(masm, dst, src, decorators);

if (dst != result_dst) {
__ mov(result_dst, dst);
@@ -617,6 +618,12 @@ void ShenandoahBarrierSetAssembler::gen_load_reference_barrier_stub(LIR_Assemble
ShenandoahBarrierSetC1* bs = (ShenandoahBarrierSetC1*)BarrierSet::barrier_set()->barrier_set_c1();
__ bind(*stub->entry());

+  DecoratorSet decorators = stub->decorators();
+  bool is_strong = ShenandoahBarrierSet::is_strong_access(decorators);
+  bool is_weak = ShenandoahBarrierSet::is_weak_access(decorators);
+  bool is_phantom = ShenandoahBarrierSet::is_phantom_access(decorators);
+  bool is_native = ShenandoahBarrierSet::is_native_access(decorators);

Register obj = stub->obj()->as_register();
Register res = stub->result()->as_register();
Register addr = stub->addr()->as_pointer_register();
@@ -629,42 +636,27 @@ void ShenandoahBarrierSetAssembler::gen_load_reference_barrier_stub(LIR_Assemble
__ mov(res, obj);
}

-  // Check for null.
-  __ cbz(res, *stub->continuation());
-
-  // Check for object in cset.
-  __ mov(tmp2, ShenandoahHeap::in_cset_fast_test_addr());
-  __ lsr(tmp1, res, ShenandoahHeapRegion::region_size_bytes_shift_jint());
-  __ ldrb(tmp2, Address(tmp2, tmp1));
-  __ cbz(tmp2, *stub->continuation());
-
-  // Check if object is already forwarded.
-  Label slow_path;
-  __ ldr(tmp1, Address(res, oopDesc::mark_offset_in_bytes()));
-  __ eon(tmp1, tmp1, zr);
-  __ ands(zr, tmp1, markWord::lock_mask_in_place);
-  __ br(Assembler::NE, slow_path);
-
-  // Decode forwarded object.
-  __ orr(tmp1, tmp1, markWord::marked_value);
-  __ eon(res, tmp1, zr);
-  __ b(*stub->continuation());
+  if (is_strong) {
+    // Check for object in cset.
+    __ mov(tmp2, ShenandoahHeap::in_cset_fast_test_addr());
+    __ lsr(tmp1, res, ShenandoahHeapRegion::region_size_bytes_shift_jint());
+    __ ldrb(tmp2, Address(tmp2, tmp1));
+    __ cbz(tmp2, *stub->continuation());
+  }

-  __ bind(slow_path);
  ce->store_parameter(res, 0);
  ce->store_parameter(addr, 1);
-  switch (stub->kind()) {
-    case ShenandoahBarrierSet::AccessKind::NORMAL:
-      __ far_call(RuntimeAddress(bs->load_reference_barrier_normal_rt_code_blob()->code_begin()));
-      break;
-    case ShenandoahBarrierSet::AccessKind::WEAK:
-      __ far_call(RuntimeAddress(bs->load_reference_barrier_weak_rt_code_blob()->code_begin()));
-      break;
-    case ShenandoahBarrierSet::AccessKind::NATIVE:
-      __ far_call(RuntimeAddress(bs->load_reference_barrier_native_rt_code_blob()->code_begin()));
-      break;
-    default:
-      ShouldNotReachHere();
+  if (is_strong) {
+    if (is_native) {
+      __ far_call(RuntimeAddress(bs->load_reference_barrier_strong_native_rt_code_blob()->code_begin()));
+    } else {
+      __ far_call(RuntimeAddress(bs->load_reference_barrier_strong_rt_code_blob()->code_begin()));
+    }
+  } else if (is_weak) {
+    __ far_call(RuntimeAddress(bs->load_reference_barrier_weak_rt_code_blob()->code_begin()));
+  } else {
+    assert(is_phantom, "only remaining strength");
+    __ far_call(RuntimeAddress(bs->load_reference_barrier_phantom_rt_code_blob()->code_begin()));
}

__ b(*stub->continuation());
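
In the updated C1 stub above, only strong accesses keep the in-cset fast path; weak and phantom accesses always fall through to the runtime call, and the old mark-word forwarding check is gone. A minimal standalone model of that decision (hypothetical helper, not HotSpot code):

// Hypothetical model of the stub's fast-path decision: strong accesses may skip
// the runtime call when the object is not in the collection set, while weak and
// phantom accesses always call into the runtime.
bool lrb_stub_needs_runtime_call(bool is_strong, bool obj_in_cset) {
  if (!is_strong) return true;   // weak/phantom: no fast path
  return obj_in_cset;            // strong: slow path only for in-cset objects
}
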
@@ -720,33 +712,39 @@ void ShenandoahBarrierSetAssembler::generate_c1_pre_barrier_runtime_stub(StubAss
__ epilogue();
}

-void ShenandoahBarrierSetAssembler::generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm, ShenandoahBarrierSet::AccessKind kind) {
+void ShenandoahBarrierSetAssembler::generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm, DecoratorSet decorators) {
__ prologue("shenandoah_load_reference_barrier", false);
// arg0 : object to be resolved

__ push_call_clobbered_registers();
__ load_parameter(0, r0);
__ load_parameter(1, r1);
-  switch (kind) {
-    case ShenandoahBarrierSet::AccessKind::NORMAL:
-      if (UseCompressedOops) {
-        __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_narrow));
-      } else {
-        __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier));
-      }
-      break;
-    case ShenandoahBarrierSet::AccessKind::WEAK:
+
+  bool is_strong = ShenandoahBarrierSet::is_strong_access(decorators);
+  bool is_weak = ShenandoahBarrierSet::is_weak_access(decorators);
+  bool is_phantom = ShenandoahBarrierSet::is_phantom_access(decorators);
+  bool is_native = ShenandoahBarrierSet::is_native_access(decorators);
+  if (is_strong) {
+    if (is_native) {
+      __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_strong));
+    } else {
      if (UseCompressedOops) {
-        __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak_narrow));
+        __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_strong_narrow));
      } else {
-        __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak));
+        __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_strong));
      }
-      break;
-    case ShenandoahBarrierSet::AccessKind::NATIVE:
+    }
+  } else if (is_weak) {
+    assert(!is_native, "weak must not be called off-heap");
+    if (UseCompressedOops) {
+      __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak_narrow));
+    } else {
      __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_weak));
-      break;
-    default:
-      ShouldNotReachHere();
+    }
+  } else {
+    assert(is_phantom, "only remaining strength");
+    assert(is_native, "phantom must only be called off-heap");
+    __ mov(lr, CAST_FROM_FN_PTR(address, ShenandoahRuntime::load_reference_barrier_phantom));
}
__ blr(lr);
__ mov(rscratch1, r0);
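
The C1 runtime stub above receives the object as parameter 0 (in r0) and the load address as parameter 1 (in r1), then calls one ShenandoahRuntime entry chosen by strength and compressed-oops mode. The entry names below are the ones visible in this hunk; the string-returning helper is only an illustrative sketch (HotSpot selects a function pointer, not a string):

#include <string>

// Illustrative mapping from (strength, narrow) to the runtime entry used above.
std::string lrb_runtime_entry(bool is_strong, bool is_weak, bool is_narrow) {
  if (is_strong) {
    return is_narrow ? "load_reference_barrier_strong_narrow"
                     : "load_reference_barrier_strong";
  }
  if (is_weak) {
    return is_narrow ? "load_reference_barrier_weak_narrow"
                     : "load_reference_barrier_weak";
  }
  // Phantom accesses are asserted to be off-heap (native) and never narrow.
  return "load_reference_barrier_phantom";
}
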
@@ -56,7 +56,7 @@ class ShenandoahBarrierSetAssembler: public BarrierSetAssembler {

void resolve_forward_pointer(MacroAssembler* masm, Register dst, Register tmp = noreg);
void resolve_forward_pointer_not_null(MacroAssembler* masm, Register dst, Register tmp = noreg);
-  void load_reference_barrier(MacroAssembler* masm, Register dst, Address load_addr, ShenandoahBarrierSet::AccessKind kind);
+  void load_reference_barrier(MacroAssembler* masm, Register dst, Address load_addr, DecoratorSet decorators);

public:

@@ -66,7 +66,7 @@ class ShenandoahBarrierSetAssembler: public BarrierSetAssembler {
void gen_pre_barrier_stub(LIR_Assembler* ce, ShenandoahPreBarrierStub* stub);
void gen_load_reference_barrier_stub(LIR_Assembler* ce, ShenandoahLoadReferenceBarrierStub* stub);
void generate_c1_pre_barrier_runtime_stub(StubAssembler* sasm);
-  void generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm, ShenandoahBarrierSet::AccessKind kind);
+  void generate_c1_load_reference_barrier_runtime_stub(StubAssembler* sasm, DecoratorSet decorators);
#endif

virtual void arraycopy_prologue(MacroAssembler* masm, DecoratorSet decorators, bool is_oop,
18 changes: 2 additions & 16 deletions src/hotspot/cpu/x86/c1_LinearScan_x86.cpp
@@ -550,21 +550,6 @@ void FpuStackAllocator::handle_op1(LIR_Op1* op1) {
break;
}

-    case lir_neg: {
-      if (in->is_fpu_register() && !in->is_xmm_register()) {
-        assert(res->is_fpu_register() && !res->is_xmm_register(), "must be");
-        assert(in->is_last_use(), "old value gets destroyed");
-
-        insert_free_if_dead(res, in);
-        insert_exchange(in);
-        new_in = to_fpu_stack_top(in);
-
-        do_rename(in, res);
-        new_res = to_fpu_stack_top(res);
-      }
-      break;
-    }

case lir_convert: {
Bytecodes::Code bc = op1->as_OpConvert()->bytecode();
switch (bc) {
@@ -772,7 +757,8 @@ void FpuStackAllocator::handle_op2(LIR_Op2* op2) {
}

case lir_abs:
-    case lir_sqrt: {
+    case lir_sqrt:
+    case lir_neg: {
// Right argument appears to be unused
assert(right->is_illegal(), "must be");
assert(left->is_fpu_register(), "must be");
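
With lir_neg handled here, negation is treated like abs and sqrt: a unary x87 operation whose LIR node carries an unused (illegal) right operand and operates on the left input in an FPU register. A tiny hypothetical model of the invariant the asserts above express (standalone sketch, not HotSpot code):

// Hypothetical model: abs, sqrt and neg share the same well-formedness rule.
struct OperandModel {
  bool is_illegal;        // true when the LIR operand slot is unused
  bool is_fpu_register;   // true when the operand lives in an x87 register
};

bool unary_fpu_op_is_well_formed(const OperandModel& left, const OperandModel& right) {
  // Mirrors the asserts in the diff: no right argument, left is an FPU register.
  return right.is_illegal && left.is_fpu_register;
}
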
@@ -111,8 +111,7 @@ LIR_Opr ShenandoahBarrierSetC1::atomic_xchg_at_resolved(LIRAccess& access, LIRIt
__ xchg(access.resolved_addr(), result, result, LIR_OprFact::illegalOpr);

if (access.is_oop()) {
-    ShenandoahBarrierSet::AccessKind kind = ShenandoahBarrierSet::access_kind(access.decorators(), access.type());
-    result = load_reference_barrier(access.gen(), result, LIR_OprFact::addressConst(0), kind);
+    result = load_reference_barrier(access.gen(), result, LIR_OprFact::addressConst(0), access.decorators());
LIR_Opr tmp = gen->new_register(type);
__ move(result, tmp);
result = tmp;

