8276901: Implement UseHeavyMonitors consistently #6320

Status: Closed. Wants to merge 18 commits. Changes shown are from 14 commits.
@@ -2563,7 +2563,7 @@ void LIR_Assembler::emit_lock(LIR_OpLock* op) {
   Register obj = op->obj_opr()->as_register();  // may not be an oop
   Register hdr = op->hdr_opr()->as_register();
   Register lock = op->lock_opr()->as_register();
-  if (!UseFastLocking) {
+  if (UseHeavyMonitors) {
     __ b(*op->stub()->entry());
   } else if (op->code() == lir_lock) {
     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
@@ -2425,7 +2425,7 @@ void LIR_Assembler::emit_lock(LIR_OpLock* op) {
   Register hdr = op->hdr_opr()->as_pointer_register();
   Register lock = op->lock_opr()->as_pointer_register();
 
-  if (!UseFastLocking) {
+  if (UseHeavyMonitors) {
     __ b(*op->stub()->entry());
   } else if (op->code() == lir_lock) {
     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
@@ -2689,7 +2689,7 @@ void LIR_Assembler::emit_lock(LIR_OpLock* op) {
   // Obj may not be an oop.
   if (op->code() == lir_lock) {
     MonitorEnterStub* stub = (MonitorEnterStub*)op->stub();
-    if (UseFastLocking) {
+    if (!UseHeavyMonitors) {
       assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
       // Add debug info for NullPointerException only if one is possible.
       if (op->info() != NULL) {
@@ -2711,7 +2711,7 @@ void LIR_Assembler::emit_lock(LIR_OpLock* op) {
     }
   } else {
     assert (op->code() == lir_unlock, "Invalid code, expected lir_unlock");
-    if (UseFastLocking) {
+    if (!UseHeavyMonitors) {
       assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
       __ unlock_object(hdr, obj, lock, *op->stub()->entry());
     } else {
@@ -2730,7 +2730,7 @@ void LIR_Assembler::emit_lock(LIR_OpLock* op) {
   Register obj = op->obj_opr()->as_register();  // May not be an oop.
   Register hdr = op->hdr_opr()->as_register();
   Register lock = op->lock_opr()->as_register();
-  if (!UseFastLocking) {
+  if (UseHeavyMonitors) {
     __ branch_optimized(Assembler::bcondAlways, *op->stub()->entry());
   } else if (op->code() == lir_lock) {
     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
@@ -461,7 +461,11 @@ int LIR_Assembler::emit_unwind_handler() {
   if (method()->is_synchronized()) {
     monitor_address(0, FrameMap::rax_opr);
     stub = new MonitorExitStub(FrameMap::rax_opr, true, 0);
-    __ unlock_object(rdi, rsi, rax, *stub->entry());
+    if (UseHeavyMonitors) {
+      __ jmp(*stub->entry());
+    } else {
+      __ unlock_object(rdi, rsi, rax, *stub->entry());
+    }
     __ bind(*stub->continuation());
   }
 
@@ -3498,7 +3502,7 @@ void LIR_Assembler::emit_lock(LIR_OpLock* op) {
   Register obj = op->obj_opr()->as_register();  // may not be an oop
   Register hdr = op->hdr_opr()->as_register();
   Register lock = op->lock_opr()->as_register();
-  if (!UseFastLocking) {
+  if (UseHeavyMonitors) {
     __ jmp(*op->stub()->entry());
   } else if (op->code() == lir_lock) {
     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
@@ -485,6 +485,7 @@ void C2_MacroAssembler::fast_lock(Register objReg, Register boxReg, Register tmp
 
 #if INCLUDE_RTM_OPT
   if (UseRTMForStackLocks && use_rtm) {
+    assert(!UseHeavyMonitors, "+UseHeavyMonitors and +UseRTMForStackLocks are mutually exclusive");
     rtm_stack_locking(objReg, tmpReg, scrReg, cx2Reg,
                       stack_rtm_counters, method_data, profile_rtm,
                       DONE_LABEL, IsInflated);
@@ -495,20 +496,25 @@ void C2_MacroAssembler::fast_lock(Register objReg, Register boxReg, Register tmp
   testptr(tmpReg, markWord::monitor_value); // inflated vs stack-locked|neutral
   jccb(Assembler::notZero, IsInflated);
 
-  // Attempt stack-locking ...
-  orptr (tmpReg, markWord::unlocked_value);
-  movptr(Address(boxReg, 0), tmpReg);  // Anticipate successful CAS
-  lock();
-  cmpxchgptr(boxReg, Address(objReg, oopDesc::mark_offset_in_bytes()));  // Updates tmpReg
-  jcc(Assembler::equal, DONE_LABEL);   // Success
-
-  // Recursive locking.
-  // The object is stack-locked: markword contains stack pointer to BasicLock.
-  // Locked by current thread if difference with current SP is less than one page.
-  subptr(tmpReg, rsp);
-  // Next instruction sets ZFlag == 1 (Success) if difference is less than one page.
-  andptr(tmpReg, (int32_t) (NOT_LP64(0xFFFFF003) LP64_ONLY(7 - os::vm_page_size())) );
-  movptr(Address(boxReg, 0), tmpReg);
+  if (!UseHeavyMonitors) {
+    // Attempt stack-locking ...
+    orptr (tmpReg, markWord::unlocked_value);
+    movptr(Address(boxReg, 0), tmpReg);  // Anticipate successful CAS
+    lock();
+    cmpxchgptr(boxReg, Address(objReg, oopDesc::mark_offset_in_bytes()));  // Updates tmpReg
+    jcc(Assembler::equal, DONE_LABEL);   // Success
+
+    // Recursive locking.
+    // The object is stack-locked: markword contains stack pointer to BasicLock.
+    // Locked by current thread if difference with current SP is less than one page.
+    subptr(tmpReg, rsp);
+    // Next instruction sets ZFlag == 1 (Success) if difference is less than one page.
+    andptr(tmpReg, (int32_t) (NOT_LP64(0xFFFFF003) LP64_ONLY(7 - os::vm_page_size())) );
+    movptr(Address(boxReg, 0), tmpReg);
+  } else {
+    // Clear ZF so that we take the slow path at the DONE label. objReg is known to be not 0.
+    testptr(objReg, objReg);
+  }
   jmp(DONE_LABEL);
 
   bind(IsInflated);
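
A note on the block being guarded above: stack-locking publishes a pointer to the on-stack BasicLock in the object's mark word, and a failed CAS can still count as success when the lock is already held by the current frame. The page-distance trick behind the subptr/andptr pair can be sketched in plain C++ as follows (a minimal illustration under assumed 64-bit constants; is_recursive_stack_lock and kPageSize are invented names, not HotSpot code):

#include <cstdint>
#include <cstdio>

// Assumed 64-bit layout: the low bits of the mark word encode the lock
// state, and a stack-locked mark holds an aligned BasicLock* on some stack.
constexpr uintptr_t kPageSize = 4096;  // stand-in for os::vm_page_size()

// Mirrors subptr(tmpReg, rsp); andptr(tmpReg, 7 - page_size): the result is
// zero iff the low 3 bits are clear (a clean pointer, not a lock-bit pattern)
// and the mark lies less than one page above rsp (i.e. in our own frame).
bool is_recursive_stack_lock(uintptr_t mark, uintptr_t rsp) {
  return ((mark - rsp) & (7 - kPageSize)) == 0;
}

int main() {
  uintptr_t rsp = 0x00007f0000010000;
  printf("%d\n", is_recursive_stack_lock(rsp + 0x40, rsp));      // 1: same frame
  printf("%d\n", is_recursive_stack_lock(rsp + kPageSize, rsp)); // 0: too far away
  printf("%d\n", is_recursive_stack_lock(rsp + 0x41, rsp));      // 0: lock bits set
  return 0;
}

Under +UseHeavyMonitors none of this runs; testptr(objReg, objReg) merely forces ZF to 0 so that the code following DONE_LABEL takes the inflated slow path.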
@@ -638,6 +644,7 @@ void C2_MacroAssembler::fast_unlock(Register objReg, Register boxReg, Register t
 
 #if INCLUDE_RTM_OPT
   if (UseRTMForStackLocks && use_rtm) {
+    assert(!UseHeavyMonitors, "+UseHeavyMonitors and +UseRTMForStackLocks are mutually exclusive");
    Label L_regular_unlock;
    movptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // fetch markword
    andptr(tmpReg, markWord::lock_mask_in_place);                     // look at 2 lock bits
@@ -649,11 +656,15 @@ void C2_MacroAssembler::fast_unlock(Register objReg, Register boxReg, Register t
   }
 #endif
 
-  cmpptr(Address(boxReg, 0), (int32_t)NULL_WORD);                   // Examine the displaced header
-  jcc   (Assembler::zero, DONE_LABEL);                              // 0 indicates recursive stack-lock
+  if (!UseHeavyMonitors) {
+    cmpptr(Address(boxReg, 0), (int32_t)NULL_WORD);                 // Examine the displaced header
+    jcc   (Assembler::zero, DONE_LABEL);                            // 0 indicates recursive stack-lock
+  }
   movptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // Examine the object's markword
-  testptr(tmpReg, markWord::monitor_value);                         // Inflated?
-  jccb  (Assembler::zero, Stacked);
+  if (!UseHeavyMonitors) {
+    testptr(tmpReg, markWord::monitor_value);                       // Inflated?
+    jccb  (Assembler::zero, Stacked);
+  }
 
   // It's inflated.
 #if INCLUDE_RTM_OPT
@@ -795,11 +806,12 @@ void C2_MacroAssembler::fast_unlock(Register objReg, Register boxReg, Register t
   testl (boxReg, 0);  // set ICC.ZF=1 to indicate success
   jmpb  (DONE_LABEL);
 
-  bind  (Stacked);
-  movptr(tmpReg, Address (boxReg, 0));  // re-fetch
-  lock();
-  cmpxchgptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // Uses RAX which is box
-
+  if (!UseHeavyMonitors) {
+    bind  (Stacked);
+    movptr(tmpReg, Address (boxReg, 0));  // re-fetch
+    lock();
+    cmpxchgptr(tmpReg, Address(objReg, oopDesc::mark_offset_in_bytes())); // Uses RAX which is box
+  }
 #endif
   bind(DONE_LABEL);
 }
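
Taken together, the fast_unlock hunks reduce to a three-way dispatch that collapses to a single path when UseHeavyMonitors is set. A compilable sketch of the resulting control flow (illustrative only; the enum and function names are invented, and kMonitorValue is assumed to match markWord::monitor_value):

#include <cstdint>

constexpr uintptr_t kMonitorValue = 0b10;  // markWord::monitor_value assumed

enum class UnlockPath { RecursiveDone, InflatedExit, StackedCas };

UnlockPath classify(uintptr_t displaced_header, uintptr_t mark,
                    bool use_heavy_monitors) {
  if (use_heavy_monitors) {
    // Stack-locking never ran, so the mark can only be neutral or inflated:
    // both the recursive-stack-lock check and the Stacked CAS are dead code,
    // which is exactly what the guarded hunks above skip.
    return UnlockPath::InflatedExit;
  }
  if (displaced_header == 0)       return UnlockPath::RecursiveDone; // recursive stack-lock
  if ((mark & kMonitorValue) != 0) return UnlockPath::InflatedExit;  // ObjectMonitor*
  return UnlockPath::StackedCas;  // restore displaced header via lock cmpxchg
}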
@@ -1705,36 +1705,41 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
     // Load the oop from the handle
     __ movptr(obj_reg, Address(oop_handle_reg, 0));
 
-    // Load immediate 1 into swap_reg %rax,
-    __ movptr(swap_reg, 1);
-
-    // Load (object->mark() | 1) into swap_reg %rax,
-    __ orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
-
-    // Save (object->mark() | 1) into BasicLock's displaced header
-    __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
-
-    // src -> dest iff dest == rax, else rax, <- dest
-    // *obj_reg = lock_reg iff *obj_reg == rax, else rax, = *(obj_reg)
-    __ lock();
-    __ cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
-    __ jcc(Assembler::equal, lock_done);
-
-    // Test if the oopMark is an obvious stack pointer, i.e.,
-    //  1) (mark & 3) == 0, and
-    //  2) rsp <= mark < mark + os::pagesize()
-    // These 3 tests can be done by evaluating the following
-    // expression: ((mark - rsp) & (3 - os::vm_page_size())),
-    // assuming both stack pointer and pagesize have their
-    // least significant 2 bits clear.
-    // NOTE: the oopMark is in swap_reg %rax, as the result of cmpxchg
-
-    __ subptr(swap_reg, rsp);
-    __ andptr(swap_reg, 3 - os::vm_page_size());
-
-    // Save the test result, for recursive case, the result is zero
-    __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
-    __ jcc(Assembler::notEqual, slow_path_lock);
+    if (!UseHeavyMonitors) {
+      // Load immediate 1 into swap_reg %rax,
+      __ movptr(swap_reg, 1);
+
+      // Load (object->mark() | 1) into swap_reg %rax,
+      __ orptr(swap_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
+
+      // Save (object->mark() | 1) into BasicLock's displaced header
+      __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
+
+      // src -> dest iff dest == rax, else rax, <- dest
+      // *obj_reg = lock_reg iff *obj_reg == rax, else rax, = *(obj_reg)
+      __ lock();
+      __ cmpxchgptr(lock_reg, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
+      __ jcc(Assembler::equal, lock_done);
+
+      // Test if the oopMark is an obvious stack pointer, i.e.,
+      //  1) (mark & 3) == 0, and
+      //  2) rsp <= mark < mark + os::pagesize()
+      // These 3 tests can be done by evaluating the following
+      // expression: ((mark - rsp) & (3 - os::vm_page_size())),
+      // assuming both stack pointer and pagesize have their
+      // least significant 2 bits clear.
+      // NOTE: the oopMark is in swap_reg %rax, as the result of cmpxchg
+
+      __ subptr(swap_reg, rsp);
+      __ andptr(swap_reg, 3 - os::vm_page_size());
+
+      // Save the test result, for recursive case, the result is zero
+      __ movptr(Address(lock_reg, mark_word_offset), swap_reg);
+      __ jcc(Assembler::notEqual, slow_path_lock);
+    } else {
+      __ jmp(slow_path_lock);
+    }
+
     // Slow path will re-enter here
     __ bind(lock_done);
   }
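
The 32-bit native wrapper emits the same displaced-header protocol inline; under +UseHeavyMonitors it is replaced wholesale by a jump to slow_path_lock, which enters the inflated monitor in the runtime. A compilable sketch of what the guarded fast path computes (names are invented and the page size is fixed at 4096; this is not the HotSpot implementation):

#include <atomic>
#include <cstdint>

struct BasicLock { uintptr_t displaced_header; };
constexpr uintptr_t kUnlockedValue = 1;  // markWord::unlocked_value assumed
constexpr uintptr_t kPageSize = 4096;    // os::vm_page_size() assumed

// Returns true when the fast path succeeded; false means slow_path_lock.
// This is the 32-bit wrapper, hence the 3 - page_size mask rather than
// the 7 - page_size mask used by the 64-bit fast_lock above.
bool try_stack_lock(std::atomic<uintptr_t>& mark, BasicLock* box, uintptr_t sp) {
  uintptr_t expected = mark.load() | kUnlockedValue;  // (object->mark() | 1)
  box->displaced_header = expected;                   // anticipate a successful CAS
  if (mark.compare_exchange_strong(expected, reinterpret_cast<uintptr_t>(box))) {
    return true;                                      // stack-lock installed
  }
  // CAS failed: 'expected' now holds the live mark, just as cmpxchg leaves
  // the old value in rax. Recursive iff the mark is a clean pointer within
  // one page above sp: ((mark - rsp) & (3 - os::vm_page_size())) == 0.
  uintptr_t test = (expected - sp) & (3 - kPageSize);
  box->displaced_header = test;                       // zero flags the recursive case
  return test == 0;
}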
@@ -1852,28 +1857,32 @@ nmethod* SharedRuntime::generate_native_wrapper(MacroAssembler* masm,
     // Get locked oop from the handle we passed to jni
     __ movptr(obj_reg, Address(oop_handle_reg, 0));
 
-    // Simple recursive lock?
+    if (!UseHeavyMonitors) {
+      // Simple recursive lock?
 
-    __ cmpptr(Address(rbp, lock_slot_rbp_offset), (int32_t)NULL_WORD);
-    __ jcc(Assembler::equal, done);
+      __ cmpptr(Address(rbp, lock_slot_rbp_offset), (int32_t)NULL_WORD);
+      __ jcc(Assembler::equal, done);
 
-    // Must save rax, if it is live now because cmpxchg must use it
-    if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
-      save_native_result(masm, ret_type, stack_slots);
-    }
+      // Must save rax, if it is live now because cmpxchg must use it
+      if (ret_type != T_FLOAT && ret_type != T_DOUBLE && ret_type != T_VOID) {
+        save_native_result(masm, ret_type, stack_slots);
+      }
 
-    // get old displaced header
-    __ movptr(rbx, Address(rbp, lock_slot_rbp_offset));
+      // get old displaced header
+      __ movptr(rbx, Address(rbp, lock_slot_rbp_offset));
 
-    // get address of the stack lock
-    __ lea(rax, Address(rbp, lock_slot_rbp_offset));
+      // get address of the stack lock
+      __ lea(rax, Address(rbp, lock_slot_rbp_offset));
 
-    // Atomic swap old header if oop still contains the stack lock
-    // src -> dest iff dest == rax, else rax, <- dest
-    // *obj_reg = rbx, iff *obj_reg == rax, else rax, = *(obj_reg)
-    __ lock();
-    __ cmpxchgptr(rbx, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
-    __ jcc(Assembler::notEqual, slow_path_unlock);
+      // Atomic swap old header if oop still contains the stack lock
+      // src -> dest iff dest == rax, else rax, <- dest
+      // *obj_reg = rbx, iff *obj_reg == rax, else rax, = *(obj_reg)
+      __ lock();
+      __ cmpxchgptr(rbx, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
+      __ jcc(Assembler::notEqual, slow_path_unlock);
+    } else {
+      __ jmp(slow_path_unlock);
+    }
 
     // slow path re-enters here
     __ bind(unlock_done);
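
And the matching unlock side, again as a self-contained sketch with the same invented names: a zero displaced header means a recursive enter with nothing to undo; otherwise the saved header is swapped back only if the mark still points at our BasicLock. Any failure falls to slow_path_unlock, which is the only path taken under +UseHeavyMonitors.

#include <atomic>
#include <cstdint>

struct BasicLock { uintptr_t displaced_header; };

// Returns true when the inline fast path completed; false means slow_path_unlock.
bool try_stack_unlock(std::atomic<uintptr_t>& mark, BasicLock* box) {
  if (box->displaced_header == 0) {
    return true;  // recursive enter: nothing to undo
  }
  // Swap the saved header back only if the mark still points at our BasicLock;
  // if it changed (e.g. the monitor was inflated meanwhile), take the slow path.
  uintptr_t expected = reinterpret_cast<uintptr_t>(box);
  return mark.compare_exchange_strong(expected, box->displaced_header);
}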