Commit 2fd7943

8256425: Obsolete Biased Locking in JDK 18
Reviewed-by: kvn, dholmes, dcubed, rrich
Parent: 595446b

165 files changed (+294, -5262 lines)


make/hotspot/lib/JvmFeatures.gmk  (-1)

@@ -183,7 +183,6 @@ ifeq ($(call check-jvm-feature, opt-size), true)
       assembler.cpp \
       barrierSet.cpp \
       basicLock.cpp \
-      biasedLocking.cpp \
       bytecode.cpp \
       bytecodeInterpreter.cpp \
       c1_Compilation.cpp \

src/hotspot/cpu/aarch64/aarch64.ad  (-13)

@@ -3788,10 +3788,6 @@ encode %{
       __ br(Assembler::NE, cont);
     }

-    if (UseBiasedLocking && !UseOptoBiasInlining) {
-      __ biased_locking_enter(box, oop, disp_hdr, tmp, true, cont);
-    }
-
     // Check for existing monitor
     __ tbnz(disp_hdr, exact_log2(markWord::monitor_value), object_has_monitor);

@@ -3862,10 +3858,6 @@ encode %{

     assert_different_registers(oop, box, tmp, disp_hdr);

-    if (UseBiasedLocking && !UseOptoBiasInlining) {
-      __ biased_locking_exit(oop, tmp, cont);
-    }
-
     // Find the lock address and load the displaced header from the stack.
     __ ldr(disp_hdr, Address(box, BasicLock::displaced_header_offset_in_bytes()));

@@ -8887,11 +8879,6 @@ instruct storePConditional(memory8 heap_top_ptr, iRegP oldval, iRegP newval, rFl
   ins_pipe(pipe_serial);
 %}

-
-// storeLConditional is used by PhaseMacroExpand::expand_lock_node
-// when attempting to rebias a lock towards the current thread. We
-// must use the acquire form of cmpxchg in order to guarantee acquire
-// semantics in this case.
 instruct storeLConditional(indirect mem, iRegLNoSp oldval, iRegLNoSp newval, rFlagsReg cr)
 %{
   match(Set cr (StoreLConditional mem (Binary oldval newval)));
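The comment deleted above explained that C2's PhaseMacroExpand::expand_lock_node used storeLConditional with the acquire form of cmpxchg when attempting to rebias a lock; with rebiasing gone, only the comment is removed and the instruct itself remains. As a general illustration of why a lock-acquiring compare-exchange wants acquire semantics, here is a minimal generic C++ sketch (not HotSpot code; SpinLock is a hypothetical stand-in), pairing an acquire CAS with a release store on unlock.

// Generic illustration (not HotSpot code) of why a lock-acquiring CAS uses the
// acquire form: reads inside the critical section must not be reordered ahead
// of the successful compare-exchange, and they must see everything the previous
// owner published before its release store.
#include <atomic>

class SpinLock {                          // hypothetical name, for this sketch only
  std::atomic<bool> locked_{false};
 public:
  void lock() {
    bool expected = false;
    // Acquire on success pairs with the release in unlock().
    while (!locked_.compare_exchange_weak(expected, true,
                                          std::memory_order_acquire,
                                          std::memory_order_relaxed)) {
      expected = false;                   // reset: compare_exchange wrote back the observed value
    }
  }
  void unlock() { locked_.store(false, std::memory_order_release); }
};

int main() {
  SpinLock l;
  l.lock();                               // acquire: later reads stay inside the critical section
  l.unlock();                             // release: publishes writes to the next owner
  return 0;
}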

src/hotspot/cpu/aarch64/assembler_aarch64.hpp  (-2)

@@ -3275,8 +3275,6 @@ inline const Assembler::Condition operator~(const Assembler::Condition cond) {
   return Assembler::Condition(int(cond) ^ 1);
 }

-class BiasedLockingCounters;
-
 extern "C" void das(uint64_t start, int len);

 #endif // CPU_AARCH64_ASSEMBLER_AARCH64_HPP

src/hotspot/cpu/aarch64/c1_LIRAssembler_aarch64.cpp  (+1, -5)

@@ -2577,13 +2577,9 @@ void LIR_Assembler::emit_lock(LIR_OpLock* op) {
   if (!UseFastLocking) {
     __ b(*op->stub()->entry());
   } else if (op->code() == lir_lock) {
-    Register scratch = noreg;
-    if (UseBiasedLocking) {
-      scratch = op->scratch_opr()->as_register();
-    }
     assert(BasicLock::displaced_header_offset_in_bytes() == 0, "lock_reg must point to the displaced header");
     // add debug info for NullPointerException only if one is possible
-    int null_check_offset = __ lock_object(hdr, obj, lock, scratch, *op->stub()->entry());
+    int null_check_offset = __ lock_object(hdr, obj, lock, *op->stub()->entry());
     if (op->info() != NULL) {
       add_debug_info_for_null_check(null_check_offset, op->info());
     }

src/hotspot/cpu/aarch64/c1_LIRGenerator_aarch64.cpp  (+1, -6)

@@ -331,11 +331,6 @@ void LIRGenerator::do_MonitorEnter(MonitorEnter* x) {

   // "lock" stores the address of the monitor stack slot, so this is not an oop
   LIR_Opr lock = new_register(T_INT);
-  // Need a scratch register for biased locking
-  LIR_Opr scratch = LIR_OprFact::illegalOpr;
-  if (UseBiasedLocking) {
-    scratch = new_register(T_INT);
-  }

   CodeEmitInfo* info_for_exception = NULL;
   if (x->needs_null_check()) {
@@ -344,7 +339,7 @@
   // this CodeEmitInfo must not have the xhandlers because here the
   // object is already locked (xhandlers expect object to be unlocked)
   CodeEmitInfo* info = state_for(x, x->state(), true);
-  monitor_enter(obj.result(), lock, syncTempOpr(), scratch,
+  monitor_enter(obj.result(), lock, syncTempOpr(), LIR_OprFact::illegalOpr,
                 x->monitor_no(), info_for_exception, info);
 }

src/hotspot/cpu/aarch64/c1_MacroAssembler_aarch64.cpp  (+5, -28)

@@ -33,7 +33,6 @@
 #include "oops/arrayOop.hpp"
 #include "oops/markWord.hpp"
 #include "runtime/basicLock.hpp"
-#include "runtime/biasedLocking.hpp"
 #include "runtime/os.hpp"
 #include "runtime/sharedRuntime.hpp"
 #include "runtime/stubRoutines.hpp"
@@ -61,7 +60,7 @@ void C1_MacroAssembler::float_cmp(bool is_float, int unordered_result,
   }
 }

-int C1_MacroAssembler::lock_object(Register hdr, Register obj, Register disp_hdr, Register scratch, Label& slow_case) {
+int C1_MacroAssembler::lock_object(Register hdr, Register obj, Register disp_hdr, Label& slow_case) {
   const int aligned_mask = BytesPerWord -1;
   const int hdr_offset = oopDesc::mark_offset_in_bytes();
   assert(hdr != obj && hdr != disp_hdr && obj != disp_hdr, "registers must be different");
@@ -82,11 +81,6 @@ int C1_MacroAssembler::lock_object(Register hdr, Register obj, Register disp_hdr
     br(Assembler::NE, slow_case);
   }

-  if (UseBiasedLocking) {
-    assert(scratch != noreg, "should have scratch register at this point");
-    biased_locking_enter(disp_hdr, obj, hdr, scratch, false, done, &slow_case);
-  }
-
   // Load object header
   ldr(hdr, Address(obj, hdr_offset));
   // and mark it as unlocked
@@ -122,10 +116,6 @@ int C1_MacroAssembler::lock_object(Register hdr, Register obj, Register disp_hdr
   cbnz(hdr, slow_case);
   // done
   bind(done);
-  if (PrintBiasedLockingStatistics) {
-    lea(rscratch2, ExternalAddress((address)BiasedLocking::fast_path_entry_count_addr()));
-    addmw(Address(rscratch2, 0), 1, rscratch1);
-  }
   return null_check_offset;
 }

@@ -136,21 +126,13 @@ void C1_MacroAssembler::unlock_object(Register hdr, Register obj, Register disp_
   assert(hdr != obj && hdr != disp_hdr && obj != disp_hdr, "registers must be different");
   Label done;

-  if (UseBiasedLocking) {
-    // load object
-    ldr(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));
-    biased_locking_exit(obj, hdr, done);
-  }
-
   // load displaced header
   ldr(hdr, Address(disp_hdr, 0));
   // if the loaded hdr is NULL we had recursive locking
   // if we had recursive locking, we are done
   cbz(hdr, done);
-  if (!UseBiasedLocking) {
-    // load object
-    ldr(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));
-  }
+  // load object
+  ldr(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));
   verify_oop(obj);
   // test if object header is pointing to the displaced header, and if so, restore
   // the displaced header in the object - if the object header is not pointing to
@@ -179,13 +161,8 @@ void C1_MacroAssembler::try_allocate(Register obj, Register var_size_in_bytes, i

 void C1_MacroAssembler::initialize_header(Register obj, Register klass, Register len, Register t1, Register t2) {
   assert_different_registers(obj, klass, len);
-  if (UseBiasedLocking && !len->is_valid()) {
-    assert_different_registers(obj, klass, len, t1, t2);
-    ldr(t1, Address(klass, Klass::prototype_header_offset()));
-  } else {
-    // This assumes that all prototype bits fit in an int32_t
-    mov(t1, (int32_t)(intptr_t)markWord::prototype().value());
-  }
+  // This assumes that all prototype bits fit in an int32_t
+  mov(t1, (int32_t)(intptr_t)markWord::prototype().value());
   str(t1, Address(obj, oopDesc::mark_offset_in_bytes()));

   if (UseCompressedClassPointers) { // Take care not to kill klass
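With the biased-locking branches removed, C1's lock_object goes straight to the stack-locking fast path visible in the diff: load the mark word, force the unlocked bit, save it as the displaced header in the on-stack BasicLock, and CAS a pointer to that slot into the object header, going to the runtime when the CAS does not succeed (after a recursion check); initialize_header now always installs markWord::prototype(). The following is a minimal, self-contained C++ model of that fast path, under hypothetical names (MarkWord, Object, BasicLockSlot, try_stack_lock); it is a sketch of the idea, not HotSpot code.

// Illustrative model of the stack-locking fast path kept by this change.
// Not HotSpot code: all names here are hypothetical stand-ins.
#include <atomic>
#include <cstdint>
#include <cstdio>

using MarkWord = std::uintptr_t;
constexpr MarkWord kUnlockedBit = 0x1;    // plays the role of markWord::unlocked_value

struct Object        { std::atomic<MarkWord> mark; };   // object header (mark word)
struct BasicLockSlot { MarkWord displaced_header; };    // lives in the caller's frame

// Returns true if the fast path took the lock; false means "go to the slow path".
bool try_stack_lock(Object& obj, BasicLockSlot& slot) {
  // Load the mark and force the unlocked bit (the assembler uses orr for this).
  MarkWord expected = obj.mark.load(std::memory_order_relaxed) | kUnlockedBit;
  slot.displaced_header = expected;                      // save the displaced header
  MarkWord locked = reinterpret_cast<MarkWord>(&slot);   // header will point at the slot
  // The CAS succeeds only if the object is still unlocked.
  return obj.mark.compare_exchange_strong(expected, locked,
                                          std::memory_order_acquire,
                                          std::memory_order_relaxed);
}

int main() {
  Object obj;
  obj.mark.store(kUnlockedBit, std::memory_order_relaxed);   // freshly allocated, unlocked
  BasicLockSlot slot{};
  std::printf("fast path taken: %s\n", try_stack_lock(obj, slot) ? "yes" : "no");
  return 0;
}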

src/hotspot/cpu/aarch64/c1_MacroAssembler_aarch64.hpp  (+1, -2)

@@ -58,9 +58,8 @@ using MacroAssembler::null_check;
   // hdr : must be r0, contents destroyed
   // obj : must point to the object to lock, contents preserved
   // disp_hdr: must point to the displaced header location, contents preserved
-  // scratch : scratch register, contents destroyed
   // returns code offset at which to add null check debug information
-  int lock_object (Register swap, Register obj, Register disp_hdr, Register scratch, Label& slow_case);
+  int lock_object (Register swap, Register obj, Register disp_hdr, Label& slow_case);

   // unlocking
   // hdr : contents destroyed

src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp  (+1, -26)

@@ -39,7 +39,6 @@
 #include "prims/jvmtiExport.hpp"
 #include "prims/jvmtiThreadState.hpp"
 #include "runtime/basicLock.hpp"
-#include "runtime/biasedLocking.hpp"
 #include "runtime/frame.inline.hpp"
 #include "runtime/safepointMechanism.hpp"
 #include "runtime/sharedRuntime.hpp"
@@ -754,10 +753,6 @@ void InterpreterMacroAssembler::lock_object(Register lock_reg)
       br(Assembler::NE, slow_case);
     }

-    if (UseBiasedLocking) {
-      biased_locking_enter(lock_reg, obj_reg, swap_reg, tmp, false, done, &slow_case);
-    }
-
     // Load (object->mark() | 1) into swap_reg
     ldr(rscratch1, Address(obj_reg, oopDesc::mark_offset_in_bytes()));
     orr(swap_reg, rscratch1, 1);
@@ -769,17 +764,7 @@
           "displached header must be first word in BasicObjectLock");

     Label fail;
-    if (PrintBiasedLockingStatistics) {
-      Label fast;
-      cmpxchg_obj_header(swap_reg, lock_reg, obj_reg, rscratch1, fast, &fail);
-      bind(fast);
-      atomic_incw(Address((address)BiasedLocking::fast_path_entry_count_addr()),
-                  rscratch2, rscratch1, tmp);
-      b(done);
-      bind(fail);
-    } else {
-      cmpxchg_obj_header(swap_reg, lock_reg, obj_reg, rscratch1, done, /*fallthrough*/NULL);
-    }
+    cmpxchg_obj_header(swap_reg, lock_reg, obj_reg, rscratch1, done, /*fallthrough*/NULL);

     // Fast check for recursive lock.
     //
@@ -816,12 +801,6 @@

     // Save the test result, for recursive case, the result is zero
     str(swap_reg, Address(lock_reg, mark_offset));
-
-    if (PrintBiasedLockingStatistics) {
-      br(Assembler::NE, slow_case);
-      atomic_incw(Address((address)BiasedLocking::fast_path_entry_count_addr()),
-                  rscratch2, rscratch1, tmp);
-    }
     br(Assembler::EQ, done);

     bind(slow_case);
@@ -872,10 +851,6 @@ void InterpreterMacroAssembler::unlock_object(Register lock_reg)
     // Free entry
     str(zr, Address(lock_reg, BasicObjectLock::obj_offset_in_bytes()));

-    if (UseBiasedLocking) {
-      biased_locking_exit(obj_reg, header_reg, done);
-    }
-
     // Load the old header from BasicLock structure
     ldr(header_reg, Address(swap_reg,
                             BasicLock::displaced_header_offset_in_bytes()));
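The interpreter's unlock_object keeps the matching release path shown above and in the C1 diff: read the displaced header saved in the BasicLock, treat a NULL value as a recursive unlock, and otherwise swap the displaced header back into the object, falling into the runtime if the header no longer points at this lock record (for example because the lock was inflated to a full monitor). A minimal C++ model of that path, continuing the hypothetical names from the sketch above (again not HotSpot code):

// Illustrative model of the unlock fast path; names are hypothetical stand-ins.
#include <atomic>
#include <cstdint>

using MarkWord = std::uintptr_t;

struct Object        { std::atomic<MarkWord> mark; };
struct BasicLockSlot { MarkWord displaced_header; };

// Returns true if the fast path finished (including the recursive case);
// false means the runtime has to complete the unlock.
bool try_stack_unlock(Object& obj, BasicLockSlot& slot) {
  if (slot.displaced_header == 0) {
    return true;                                         // recursive lock: nothing to restore
  }
  MarkWord expected = reinterpret_cast<MarkWord>(&slot); // header should still point at this slot
  return obj.mark.compare_exchange_strong(expected, slot.displaced_header,
                                          std::memory_order_release,
                                          std::memory_order_relaxed);
}

int main() {
  Object obj;
  BasicLockSlot slot{ 0x1 };                             // pretend the saved mark was "unlocked"
  obj.mark.store(reinterpret_cast<MarkWord>(&slot), std::memory_order_relaxed);
  return try_stack_unlock(obj, slot) ? 0 : 1;            // 0: header restored on the fast path
}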
