Automatic merge of client:master into master
duke committed Aug 22, 2020
2 parents 63a4356 + ea02307, commit 91ff0903410a503459ede649b9918a4eee2fb75f
Showing with 1,147 additions and 439 deletions.
  1. +7 −0 src/hotspot/cpu/aarch64/aarch64.ad
  2. +11 −4 src/hotspot/cpu/aarch64/c1_MacroAssembler_aarch64.cpp
  3. +7 −0 src/hotspot/cpu/aarch64/interp_masm_aarch64.cpp
  4. +8 −12 src/hotspot/cpu/aarch64/macroAssembler_aarch64.cpp
  5. +5 −9 src/hotspot/cpu/aarch64/macroAssembler_aarch64.hpp
  6. +13 −10 src/hotspot/cpu/arm/c1_MacroAssembler_arm.cpp
  7. +7 −0 src/hotspot/cpu/arm/c2_MacroAssembler_arm.cpp
  8. +7 −0 src/hotspot/cpu/arm/interp_masm_arm.cpp
  9. +5 −11 src/hotspot/cpu/arm/macroAssembler_arm.cpp
  10. +5 −9 src/hotspot/cpu/arm/macroAssembler_arm.hpp
  11. +7 −0 src/hotspot/cpu/ppc/c1_MacroAssembler_ppc.cpp
  12. +7 −0 src/hotspot/cpu/ppc/interp_masm_ppc_64.cpp
  13. +6 −0 src/hotspot/cpu/ppc/macroAssembler_ppc.cpp
  14. +6 −0 src/hotspot/cpu/s390/c1_MacroAssembler_s390.cpp
  15. +6 −0 src/hotspot/cpu/s390/interp_masm_s390.cpp
  16. +8 −0 src/hotspot/cpu/s390/macroAssembler_s390.cpp
  17. +12 −5 src/hotspot/cpu/x86/c1_MacroAssembler_x86.cpp
  18. +7 −0 src/hotspot/cpu/x86/c2_MacroAssembler_x86.cpp
  19. +8 −1 src/hotspot/cpu/x86/interp_masm_x86.cpp
  20. +9 −16 src/hotspot/cpu/x86/macroAssembler_x86.cpp
  21. +5 −9 src/hotspot/cpu/x86/macroAssembler_x86.hpp
  22. +25 −4 src/hotspot/share/classfile/classLoaderDataGraph.cpp
  23. +4 −2 src/hotspot/share/classfile/classLoaderDataGraph.hpp
  24. +8 −0 src/hotspot/share/classfile/systemDictionary.cpp
  25. +19 −8 src/hotspot/share/classfile/systemDictionaryShared.cpp
  26. +13 −11 src/hotspot/share/gc/g1/g1CollectedHeap.cpp
  27. +0 −12 src/hotspot/share/gc/g1/g1CollectedHeap.hpp
  28. +4 −4 src/hotspot/share/gc/g1/g1CollectedHeap.inline.hpp
  29. +8 −9 src/hotspot/share/gc/g1/g1ConcurrentMark.cpp
  30. +1 −2 src/hotspot/share/gc/g1/g1ConcurrentMark.hpp
  31. +1 −1 src/hotspot/share/gc/g1/g1ConcurrentMark.inline.hpp
  32. +21 −16 src/hotspot/share/gc/g1/g1IHOPControl.cpp
  33. +15 −9 src/hotspot/share/gc/g1/g1IHOPControl.hpp
  34. +31 −11 src/hotspot/share/gc/g1/g1OldGenAllocationTracker.cpp
  35. +26 −18 src/hotspot/share/gc/g1/g1OldGenAllocationTracker.hpp
  36. +2 −2 src/hotspot/share/gc/g1/g1ParScanThreadState.cpp
  37. +10 −10 src/hotspot/share/gc/g1/g1Policy.cpp
  38. +6 −6 src/hotspot/share/gc/g1/g1Policy.hpp
  39. +1 −2 src/hotspot/share/gc/g1/g1RegionMarkStatsCache.cpp
  40. +1 −3 src/hotspot/share/gc/g1/g1RegionMarkStatsCache.hpp
  41. +1 −1 src/hotspot/share/gc/parallel/psParallelCompact.cpp
  42. +1 −1 src/hotspot/share/gc/shared/genCollectedHeap.cpp
  43. +1 −1 src/hotspot/share/gc/shenandoah/shenandoahHeap.cpp
  44. +1 −1 src/hotspot/share/gc/shenandoah/shenandoahUnload.cpp
  45. +2 −2 src/hotspot/share/gc/z/zUnload.cpp
  46. +4 −0 src/hotspot/share/jfr/metadata/metadata.xml
  47. +1 −0 src/hotspot/share/logging/logTag.hpp
  48. +3 −2 src/hotspot/share/oops/instanceKlass.cpp
  49. +3 −0 src/hotspot/share/oops/klass.hpp
  50. +4 −3 src/hotspot/share/oops/markWord.hpp
  51. +1 −2 src/hotspot/share/opto/callnode.cpp
  52. +5 −0 src/hotspot/share/prims/whitebox.cpp
  53. +5 −0 src/hotspot/share/runtime/arguments.cpp
  54. +10 −0 src/hotspot/share/runtime/globals.hpp
  55. +1 −10 src/hotspot/share/runtime/safepoint.cpp
  56. +7 −0 src/hotspot/share/runtime/serviceThread.cpp
  57. +60 −0 src/hotspot/share/runtime/synchronizer.cpp
  58. +8 −0 src/hotspot/share/runtime/synchronizer.hpp
  59. +5 −2 src/hotspot/share/runtime/vmOperations.cpp
  60. +8 −0 src/hotspot/share/runtime/vmOperations.hpp
  61. +4 −1 src/hotspot/share/utilities/accessFlags.hpp
  62. +1 −3 src/java.base/share/classes/sun/invoke/util/BytecodeDescriptor.java
  63. +5 −0 src/jdk.jfr/share/conf/jfr/default.jfc
  64. +5 −0 src/jdk.jfr/share/conf/jfr/profile.jfc
  65. +112 −15 test/hotspot/gtest/gc/g1/test_g1IHOPControl.cpp
  66. +1 −0 test/hotspot/jtreg/TEST.ROOT
  67. +145 −0 test/hotspot/jtreg/gc/shenandoah/compiler/TestLoadPinnedAfterCall.java
  68. +167 −0 test/hotspot/jtreg/runtime/Monitor/SyncOnPrimitiveWrapperTest.java
  69. +1 −0 test/hotspot/jtreg/serviceability/jvmti/8036666/GetObjectLockCount.java
  70. +1 −0 ...t/jtreg/serviceability/jvmti/AddModuleExportsAndOpens/MyPackage/AddModuleExportsAndOpensTest.java
  71. +1 −0 test/hotspot/jtreg/serviceability/jvmti/AddModuleReads/MyPackage/AddModuleReadsTest.java
  72. +1 −0 ...t/jtreg/serviceability/jvmti/AddModuleUsesAndProvides/MyPackage/AddModuleUsesAndProvidesTest.java
  73. +1 −0 test/hotspot/jtreg/serviceability/jvmti/CompiledMethodLoad/Zombie.java
  74. +1 −0 test/hotspot/jtreg/serviceability/jvmti/FieldAccessWatch/FieldAccessWatch.java
  75. +1 −0 test/hotspot/jtreg/serviceability/jvmti/GenerateEvents/MyPackage/GenerateEventsTest.java
  76. +1 −0 test/hotspot/jtreg/serviceability/jvmti/GetClassMethods/OverpassMethods.java
  77. +1 −1 test/hotspot/jtreg/serviceability/jvmti/GetLocalVariable/GetLocalVars.java
  78. +1 −0 test/hotspot/jtreg/serviceability/jvmti/GetModulesInfo/JvmtiGetAllModulesTest.java
  79. +1 −0 test/hotspot/jtreg/serviceability/jvmti/GetNamedModule/MyPackage/GetNamedModuleTest.java
  80. +1 −0 test/hotspot/jtreg/serviceability/jvmti/GetObjectSizeClass.java
  81. +1 −0 test/hotspot/jtreg/serviceability/jvmti/GetObjectSizeOverflow.java
  82. +1 −0 test/hotspot/jtreg/serviceability/jvmti/GetOwnedMonitorInfo/GetOwnedMonitorInfoTest.java
  83. +1 −0 test/hotspot/jtreg/serviceability/jvmti/GetOwnedMonitorInfo/GetOwnedMonitorInfoWithEATest.java
  84. +1 −0 ...t/jtreg/serviceability/jvmti/GetOwnedMonitorStackDepthInfo/GetOwnedMonitorStackDepthInfoTest.java
  85. +1 −0 ...g/serviceability/jvmti/GetOwnedMonitorStackDepthInfo/GetOwnedMonitorStackDepthInfoWithEATest.java
  86. +1 −0 test/hotspot/jtreg/serviceability/jvmti/GetSystemProperty/JvmtiGetSystemPropertyTest.java
  87. +1 −0 test/hotspot/jtreg/serviceability/jvmti/GetThreadListStackTraces/OneGetThreadListStackTraces.java
  88. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorArrayAllSampledTest.java
  89. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorEventOnOffTest.java
  90. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorGCParallelTest.java
  91. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorGCSerialTest.java
  92. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorGCTest.java
  93. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorIllegalArgumentTest.java
  94. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorInitialAllocationTest.java
  95. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorInterpreterArrayTest.java
  96. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorInterpreterObjectTest.java
  97. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorMultiArrayTest.java
  98. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorNoCapabilityTest.java
  99. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorRecursiveTest.java
  100. +1 −0 ...hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorStatArrayCorrectnessTest.java
  101. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorStatIntervalTest.java
  102. +1 −0 ...otspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorStatObjectCorrectnessTest.java
  103. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorStatSimpleTest.java
  104. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorTest.java
  105. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorThreadDisabledTest.java
  106. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorThreadOnOffTest.java
  107. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorThreadTest.java
  108. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorTwoAgentsTest.java
  109. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HeapMonitor/MyPackage/HeapMonitorVMEventsTest.java
  110. +1 −0 test/hotspot/jtreg/serviceability/jvmti/HiddenClass/P/Q/HiddenClassSigTest.java
  111. +1 −0 test/hotspot/jtreg/serviceability/jvmti/IsModifiableModule/MyPackage/IsModifiableModuleTest.java
  112. +1 −0 .../hotspot/jtreg/serviceability/jvmti/ModuleAwareAgents/ClassFileLoadHook/MAAClassFileLoadHook.java
  113. +1 −0 test/hotspot/jtreg/serviceability/jvmti/ModuleAwareAgents/ClassLoadPrepare/MAAClassLoadPrepare.java
  114. +1 −0 test/hotspot/jtreg/serviceability/jvmti/ModuleAwareAgents/ThreadStart/MAAThreadStart.java
  115. +1 −0 test/hotspot/jtreg/serviceability/jvmti/NotifyFramePop/NotifyFramePopTest.java
  116. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/ModifyAnonymous.java
  117. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineAddLambdaExpression.java
  118. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineAnnotations.java
  119. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineDoubleDelete.java
  120. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineFinalizer.java
  121. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineInterfaceCall.java
  122. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineInterfaceMethods.java
  123. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineLeak.java
  124. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineObject.java
  125. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefinePreviousVersions.java
  126. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineRunningMethods.java
  127. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineRunningMethodsWithBacktrace.java
  128. +1 −0 ...otspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineRunningMethodsWithResolutionErrors.java
  129. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RedefineSubtractLambdaExpression.java
  130. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/RetransformClassesZeroLength.java
  131. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/TestAddDeleteMethods.java
  132. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/TestLambdaFormRetransformation.java
  133. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/TestMultipleClasses.java
  134. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/TestRedefineCondy.java
  135. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/TestRedefineObject.java
  136. +1 −0 test/hotspot/jtreg/serviceability/jvmti/RedefineClasses/TestRedefineWithUnresolvedClass.java
  137. +1 −0 test/hotspot/jtreg/serviceability/jvmti/StartPhase/AllowedFunctions/AllowedFunctions.java
  138. +1 −0 test/hotspot/jtreg/serviceability/jvmti/SuspendWithCurrentThread/SuspendWithCurrentThread.java
  139. +1 −0 test/hotspot/jtreg/serviceability/jvmti/VMEvent/MyPackage/VMEventRecursionTest.java
  140. +0 −2 test/hotspot/jtreg/vmTestbase/jit/t/TEST.properties
  141. +22 −30 test/hotspot/jtreg/vmTestbase/jit/t/t087/t087.java
  142. +23 −33 test/hotspot/jtreg/vmTestbase/jit/t/t088/t088.java
  143. +3 −3 test/jdk/javax/transaction/xa/testng/test/transaction/XAExceptionTests.java
  144. +1 −109 test/jdk/javax/transaction/xa/testng/util/SerializedTransactionExceptions.java
  145. +1 −1 test/jdk/jdk/jfr/event/metadata/TestLookForUntestedEvents.java
  146. +92 −0 test/jdk/jdk/jfr/event/runtime/TestSyncOnPrimitiveWrapperEvent.java
  147. +8 −0 test/jtreg-ext/requires/VMProps.java
  148. +1 −0 test/lib/jdk/test/lib/jfr/EventNames.java
  149. +2 −0 test/lib/sun/hotspot/WhiteBox.java
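
Every monitor-enter fast path touched below (template interpreter, C1 and C2, on aarch64, arm, ppc, s390 and x86) gains the same guard: when the new DiagnoseSyncOnPrimitiveWrappers flag (runtime/globals.hpp) is non-zero, load the object's Klass, read its access flags, test JVM_ACC_IS_BOX_CLASS, and leave the fast path. In the interpreter and C1 the branch goes to the existing slow_case; in the compiled C2 fast paths it goes to the cont/done/DONE_LABEL exit, so the fast lock simply fails and the generic runtime path takes over. A minimal C++ sketch of that per-object check, with simplified stand-in types and a placeholder flag bit (none of this is HotSpot source; the real layouts live in oops/klass.hpp and the flag bit in accessFlags.hpp):

#include <cstdint>

// Simplified stand-ins for the real Klass and oopDesc layouts.
struct Klass   { uint32_t access_flags; };
struct oopDesc { Klass* klass; };

const uint32_t JVM_ACC_IS_BOX_CLASS = 1u << 24;  // placeholder bit; the actual value is defined in accessFlags.hpp
int DiagnoseSyncOnPrimitiveWrappers = 1;         // stand-in for the new diagnostic flag in runtime/globals.hpp

// True when a lock fast path must branch to its slow path so the runtime
// (synchronizer.cpp) can diagnose synchronization on a primitive-wrapper class.
bool take_slow_path_for_box_class(const oopDesc* obj) {
  return DiagnoseSyncOnPrimitiveWrappers != 0 &&
         (obj->klass->access_flags & JVM_ACC_IS_BOX_CLASS) != 0;
}
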
@@ -3511,6 +3511,13 @@ encode %{
// Load markWord from object into displaced_header.
__ ldr(disp_hdr, Address(oop, oopDesc::mark_offset_in_bytes()));

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
__ load_klass(tmp, oop);
__ ldrw(tmp, Address(tmp, Klass::access_flags_offset()));
__ tstw(tmp, JVM_ACC_IS_BOX_CLASS);
__ br(Assembler::NE, cont);
}

if (UseBiasedLocking && !UseOptoBiasInlining) {
__ biased_locking_enter(box, oop, disp_hdr, tmp, true, cont);
}
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1999, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1999, 2020, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
@@ -73,11 +73,18 @@ int C1_MacroAssembler::lock_object(Register hdr, Register obj, Register disp_hdr
// save object being locked into the BasicObjectLock
str(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));

+ null_check_offset = offset();
+
+ if (DiagnoseSyncOnPrimitiveWrappers != 0) {
+ load_klass(hdr, obj);
+ ldrw(hdr, Address(hdr, Klass::access_flags_offset()));
+ tstw(hdr, JVM_ACC_IS_BOX_CLASS);
+ br(Assembler::NE, slow_case);
+ }
+
if (UseBiasedLocking) {
assert(scratch != noreg, "should have scratch register at this point");
- null_check_offset = biased_locking_enter(disp_hdr, obj, hdr, scratch, false, done, &slow_case);
- } else {
- null_check_offset = offset();
+ biased_locking_enter(disp_hdr, obj, hdr, scratch, false, done, &slow_case);
}

// Load object header
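
This hunk also shows why biased_locking_enter loses its int return value in the hunks that follow: the C1 call site now records null_check_offset = offset() itself, immediately before the first instruction that can take the implicit null check on obj, so the helper no longer has to compute and return that offset. A compile-only sketch of the new call-site shape, using hypothetical stand-ins rather than HotSpot source:

// Hypothetical stand-ins that only mirror the shape of the C1 call site above.
static int  g_emit_pos = 0;
static int  offset()               { return g_emit_pos; }
static void emit_insn()            { g_emit_pos += 4; }
static void biased_locking_enter() { emit_insn(); }   // models the helper, which now returns void
static int  DiagnoseSyncOnPrimitiveWrappers = 1;
static bool UseBiasedLocking = true;

static int emit_lock_prologue() {
  int null_check_offset = offset();  // recorded once, unconditionally
  if (DiagnoseSyncOnPrimitiveWrappers != 0) {
    emit_insn();                     // box-class guard; its klass load is the first possible fault
  }
  if (UseBiasedLocking) {
    biased_locking_enter();          // no longer returns the null-check offset
  }
  return null_check_offset;
}
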
@@ -725,6 +725,13 @@ void InterpreterMacroAssembler::lock_object(Register lock_reg)
// Load object pointer into obj_reg %c_rarg3
ldr(obj_reg, Address(lock_reg, obj_offset));

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
load_klass(tmp, obj_reg);
ldrw(tmp, Address(tmp, Klass::access_flags_offset()));
tstw(tmp, JVM_ACC_IS_BOX_CLASS);
br(Assembler::NE, slow_case);
}

if (UseBiasedLocking) {
biased_locking_enter(lock_reg, obj_reg, swap_reg, tmp, false, done, &slow_case);
}
@@ -444,14 +444,14 @@ void MacroAssembler::reserved_stack_check() {
bind(no_reserved_zone_enabling);
}

- int MacroAssembler::biased_locking_enter(Register lock_reg,
- Register obj_reg,
- Register swap_reg,
- Register tmp_reg,
- bool swap_reg_contains_mark,
- Label& done,
- Label* slow_case,
- BiasedLockingCounters* counters) {
+ void MacroAssembler::biased_locking_enter(Register lock_reg,
+ Register obj_reg,
+ Register swap_reg,
+ Register tmp_reg,
+ bool swap_reg_contains_mark,
+ Label& done,
+ Label* slow_case,
+ BiasedLockingCounters* counters) {
assert(UseBiasedLocking, "why call this otherwise?");
assert_different_registers(lock_reg, obj_reg, swap_reg);

@@ -471,9 +471,7 @@ int MacroAssembler::biased_locking_enter(Register lock_reg,
// pointers to allow age to be placed into low bits
// First check to see whether biasing is even enabled for this object
Label cas_label;
- int null_check_offset = -1;
if (!swap_reg_contains_mark) {
- null_check_offset = offset();
ldr(swap_reg, mark_addr);
}
andr(tmp_reg, swap_reg, markWord::biased_lock_mask_in_place);
@@ -601,8 +599,6 @@ int MacroAssembler::biased_locking_enter(Register lock_reg,
}

bind(cas_label);
-
- return null_check_offset;
}

void MacroAssembler::biased_locking_exit(Register obj_reg, Register temp_reg, Label& done) {
@@ -111,15 +111,11 @@ class MacroAssembler: public Assembler {
// tmp_reg must be supplied and must not be rscratch1 or rscratch2
// Optional slow case is for implementations (interpreter and C1) which branch to
// slow case directly. Leaves condition codes set for C2's Fast_Lock node.
- // Returns offset of first potentially-faulting instruction for null
- // check info (currently consumed only by C1). If
- // swap_reg_contains_mark is true then returns -1 as it is assumed
- // the calling code has already passed any potential faults.
- int biased_locking_enter(Register lock_reg, Register obj_reg,
- Register swap_reg, Register tmp_reg,
- bool swap_reg_contains_mark,
- Label& done, Label* slow_case = NULL,
- BiasedLockingCounters* counters = NULL);
+ void biased_locking_enter(Register lock_reg, Register obj_reg,
+ Register swap_reg, Register tmp_reg,
+ bool swap_reg_contains_mark,
+ Label& done, Label* slow_case = NULL,
+ BiasedLockingCounters* counters = NULL);
void biased_locking_exit (Register obj_reg, Register temp_reg, Label& done);


@@ -1,5 +1,5 @@
/*
- * Copyright (c) 2008, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 2008, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -200,26 +200,29 @@ int C1_MacroAssembler::lock_object(Register hdr, Register obj,
const int obj_offset = BasicObjectLock::obj_offset_in_bytes();
const int mark_offset = BasicLock::displaced_header_offset_in_bytes();

- if (UseBiasedLocking) {
- // load object
- str(obj, Address(disp_hdr, obj_offset));
- null_check_offset = biased_locking_enter(obj, hdr/*scratched*/, tmp1, false, tmp2, done, slow_case);
- }
+ str(obj, Address(disp_hdr, obj_offset));

- assert(oopDesc::mark_offset_in_bytes() == 0, "Required by atomic instructions");
+ null_check_offset = offset();

+ if (DiagnoseSyncOnPrimitiveWrappers != 0) {
+ load_klass(tmp1, obj);
+ ldr_u32(tmp1, Address(tmp1, Klass::access_flags_offset()));
+ tst(tmp1, JVM_ACC_IS_BOX_CLASS);
+ b(slow_case, ne);
+ }
+
- if (!UseBiasedLocking) {
- null_check_offset = offset();
+ if (UseBiasedLocking) {
+ biased_locking_enter(obj, hdr/*scratched*/, tmp1, false, tmp2, done, slow_case);
}

+ assert(oopDesc::mark_offset_in_bytes() == 0, "Required by atomic instructions");
+
// On MP platforms the next load could return a 'stale' value if the memory location has been modified by another thread.
// That would be acceptable as ether CAS or slow case path is taken in that case.

// Must be the first instruction here, because implicit null check relies on it
ldr(hdr, Address(obj, oopDesc::mark_offset_in_bytes()));

- str(obj, Address(disp_hdr, obj_offset));
tst(hdr, markWord::unlocked_value);
b(fast_lock, ne);

@@ -90,6 +90,13 @@ void C2_MacroAssembler::fast_lock(Register Roop, Register Rbox, Register Rscratc

Label fast_lock, done;

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
load_klass(Rscratch, Roop);
ldr_u32(Rscratch, Address(Rscratch, Klass::access_flags_offset()));
tst(Rscratch, JVM_ACC_IS_BOX_CLASS);
b(done, ne);
}

if (UseBiasedLocking && !UseOptoBiasInlining) {
assert(scratch3 != noreg, "need extra temporary for -XX:-UseOptoBiasInlining");
biased_locking_enter(Roop, Rmark, Rscratch, false, scratch3, done, done);
@@ -883,6 +883,13 @@ void InterpreterMacroAssembler::lock_object(Register Rlock) {
// Load object pointer
ldr(Robj, Address(Rlock, obj_offset));

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
load_klass(R0, Robj);
ldr_u32(R0, Address(R0, Klass::access_flags_offset()));
tst(R0, JVM_ACC_IS_BOX_CLASS);
b(slow_case, ne);
}

if (UseBiasedLocking) {
biased_locking_enter(Robj, Rmark/*scratched*/, R0, false, Rtemp, done, slow_case);
}
@@ -1322,11 +1322,11 @@ void MacroAssembler::biased_locking_enter_with_cas(Register obj_reg, Register ol
#endif // !PRODUCT
}

- int MacroAssembler::biased_locking_enter(Register obj_reg, Register swap_reg, Register tmp_reg,
- bool swap_reg_contains_mark,
- Register tmp2,
- Label& done, Label& slow_case,
- BiasedLockingCounters* counters) {
+ void MacroAssembler::biased_locking_enter(Register obj_reg, Register swap_reg, Register tmp_reg,
+ bool swap_reg_contains_mark,
+ Register tmp2,
+ Label& done, Label& slow_case,
+ BiasedLockingCounters* counters) {
// obj_reg must be preserved (at least) if the bias locking fails
// tmp_reg is a temporary register
// swap_reg was used as a temporary but contained a value
@@ -1357,10 +1357,6 @@ int MacroAssembler::biased_locking_enter(Register obj_reg, Register swap_reg, Re
// First check to see whether biasing is even enabled for this object
Label cas_label;

- // The null check applies to the mark loading, if we need to load it.
- // If the mark has already been loaded in swap_reg then it has already
- // been performed and the offset is irrelevant.
- int null_check_offset = offset();
if (!swap_reg_contains_mark) {
ldr(swap_reg, mark_addr);
}
@@ -1504,8 +1500,6 @@ int MacroAssembler::biased_locking_enter(Register obj_reg, Register swap_reg, Re
// removing the bias bit from the object's header.

bind(cas_label);
-
- return null_check_offset;
}


@@ -375,18 +375,14 @@ class MacroAssembler: public Assembler {
// biased and we acquired it. Slow case label is branched to with
// condition code NE set if the lock is biased but we failed to acquire
// it. Otherwise fall through.
- // Returns offset of first potentially-faulting instruction for null
- // check info (currently consumed only by C1). If
- // swap_reg_contains_mark is true then returns -1 as it is assumed
- // the calling code has already passed any potential faults.
// Notes:
// - swap_reg and tmp_reg are scratched
// - Rtemp was (implicitly) scratched and can now be specified as the tmp2
- int biased_locking_enter(Register obj_reg, Register swap_reg, Register tmp_reg,
- bool swap_reg_contains_mark,
- Register tmp2,
- Label& done, Label& slow_case,
- BiasedLockingCounters* counters = NULL);
+ void biased_locking_enter(Register obj_reg, Register swap_reg, Register tmp_reg,
+ bool swap_reg_contains_mark,
+ Register tmp2,
+ Label& done, Label& slow_case,
+ BiasedLockingCounters* counters = NULL);
void biased_locking_exit(Register obj_reg, Register temp_reg, Label& done);

// Building block for CAS cases of biased locking: makes CAS and records statistics.
@@ -105,6 +105,13 @@ void C1_MacroAssembler::lock_object(Register Rmark, Register Roop, Register Rbox
// Save object being locked into the BasicObjectLock...
std(Roop, BasicObjectLock::obj_offset_in_bytes(), Rbox);

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
load_klass(Rscratch, Roop);
lwz(Rscratch, in_bytes(Klass::access_flags_offset()), Rscratch);
testbitdi(CCR0, R0, Rscratch, exact_log2(JVM_ACC_IS_BOX_CLASS));
bne(CCR0, slow_int);
}

if (UseBiasedLocking) {
biased_locking_enter(CCR0, Roop, Rmark, Rscratch, R0, done, &slow_int);
}
@@ -910,6 +910,13 @@ void InterpreterMacroAssembler::lock_object(Register monitor, Register object) {
// Load markWord from object into displaced_header.
ld(displaced_header, oopDesc::mark_offset_in_bytes(), object);

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
load_klass(tmp, object);
lwz(tmp, in_bytes(Klass::access_flags_offset()), tmp);
testbitdi(CCR0, R0, tmp, exact_log2(JVM_ACC_IS_BOX_CLASS));
bne(CCR0, slow_case);
}

if (UseBiasedLocking) {
biased_locking_enter(CCR0, object, displaced_header, tmp, current_header, done, &slow_case);
}
@@ -2836,6 +2836,12 @@ void MacroAssembler::compiler_fast_lock_object(ConditionRegister flag, Register
// Load markWord from object into displaced_header.
ld(displaced_header, oopDesc::mark_offset_in_bytes(), oop);

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
load_klass(temp, oop);
lwz(temp, in_bytes(Klass::access_flags_offset()), temp);
testbitdi(flag, R0, temp, exact_log2(JVM_ACC_IS_BOX_CLASS));
bne(flag, cont);
}

if (try_bias) {
biased_locking_enter(flag, oop, displaced_header, temp, current_header, cont);
@@ -91,6 +91,12 @@ void C1_MacroAssembler::lock_object(Register hdr, Register obj, Register disp_hd
// Save object being locked into the BasicObjectLock...
z_stg(obj, Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()));

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
load_klass(Z_R1_scratch, obj);
testbit(Address(Z_R1_scratch, Klass::access_flags_offset()), exact_log2(JVM_ACC_IS_BOX_CLASS));
z_btrue(slow_case);
}

if (UseBiasedLocking) {
biased_locking_enter(obj, hdr, Z_R1_scratch, Z_R0_scratch, done, &slow_case);
}
@@ -1000,6 +1000,12 @@ void InterpreterMacroAssembler::lock_object(Register monitor, Register object) {
// Load markWord from object into displaced_header.
z_lg(displaced_header, oopDesc::mark_offset_in_bytes(), object);

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
load_klass(Z_R1_scratch, object);
testbit(Address(Z_R1_scratch, Klass::access_flags_offset()), exact_log2(JVM_ACC_IS_BOX_CLASS));
z_btrue(slow_case);
}

if (UseBiasedLocking) {
biased_locking_enter(object, displaced_header, Z_R1, Z_R0, done, &slow_case);
}
@@ -3358,6 +3358,14 @@ void MacroAssembler::compiler_fast_lock_object(Register oop, Register box, Regis
// Load markWord from oop into mark.
z_lg(displacedHeader, 0, oop);

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
load_klass(Z_R1_scratch, oop);
z_l(Z_R1_scratch, Address(Z_R1_scratch, Klass::access_flags_offset()));
assert((JVM_ACC_IS_BOX_CLASS & 0xFFFF) == 0, "or change following instruction");
z_nilh(Z_R1_scratch, JVM_ACC_IS_BOX_CLASS >> 16);
z_brne(done);
}

if (try_bias) {
biased_locking_enter(oop, displacedHeader, temp, Z_R0, done);
}
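
The s390 variant above cannot test an arbitrary 32-bit mask in one instruction: z_nilh ANDs a 16-bit immediate against the upper halfword of the low word of the register and sets the condition code from that 16-bit result, so the box-class bit has to sit entirely in bits 16..31 of the access-flags word. That is what the run-time assert in the hunk checks, and why the immediate it passes is the flag shifted right by 16. A compile-time restatement of the constraint, with a placeholder bit value (the real one lives in accessFlags.hpp):

#include <cstdint>

const uint32_t JVM_ACC_IS_BOX_CLASS = 1u << 24;  // placeholder; the actual value is defined in accessFlags.hpp

// z_nilh only sees bits 16..31 of the loaded access-flags word, so the
// box-class bit must not fall into the low halfword:
static_assert((JVM_ACC_IS_BOX_CLASS & 0xFFFFu) == 0,
              "JVM_ACC_IS_BOX_CLASS must be confined to the upper halfword for z_nilh");

// The 16-bit immediate actually encoded into the instruction:
const uint16_t nilh_immediate = uint16_t(JVM_ACC_IS_BOX_CLASS >> 16);
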
@@ -1,5 +1,5 @@
/*
- * Copyright (c) 1999, 2018, Oracle and/or its affiliates. All rights reserved.
+ * Copyright (c) 1999, 2020, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
@@ -39,6 +39,7 @@
#include "runtime/stubRoutines.hpp"

int C1_MacroAssembler::lock_object(Register hdr, Register obj, Register disp_hdr, Register scratch, Label& slow_case) {
+ const Register rklass_decode_tmp = LP64_ONLY(rscratch1) NOT_LP64(noreg);
const int aligned_mask = BytesPerWord -1;
const int hdr_offset = oopDesc::mark_offset_in_bytes();
assert(hdr == rax, "hdr must be rax, for the cmpxchg instruction");
@@ -51,12 +52,18 @@ int C1_MacroAssembler::lock_object(Register hdr, Register obj, Register disp_hdr
// save object being locked into the BasicObjectLock
movptr(Address(disp_hdr, BasicObjectLock::obj_offset_in_bytes()), obj);

+ null_check_offset = offset();
+
+ if (DiagnoseSyncOnPrimitiveWrappers != 0) {
+ load_klass(hdr, obj, rklass_decode_tmp);
+ movl(hdr, Address(hdr, Klass::access_flags_offset()));
+ testl(hdr, JVM_ACC_IS_BOX_CLASS);
+ jcc(Assembler::notZero, slow_case);
+ }
+
if (UseBiasedLocking) {
assert(scratch != noreg, "should have scratch register at this point");
- Register rklass_decode_tmp = LP64_ONLY(rscratch1) NOT_LP64(noreg);
- null_check_offset = biased_locking_enter(disp_hdr, obj, hdr, scratch, rklass_decode_tmp, false, done, &slow_case);
- } else {
- null_check_offset = offset();
+ biased_locking_enter(disp_hdr, obj, hdr, scratch, rklass_decode_tmp, false, done, &slow_case);
}

// Load object header
@@ -470,6 +470,13 @@ void C2_MacroAssembler::fast_lock(Register objReg, Register boxReg, Register tmp

Label IsInflated, DONE_LABEL;

if (DiagnoseSyncOnPrimitiveWrappers != 0) {
load_klass(tmpReg, objReg, cx1Reg);
movl(tmpReg, Address(tmpReg, Klass::access_flags_offset()));
testl(tmpReg, JVM_ACC_IS_BOX_CLASS);
jcc(Assembler::notZero, DONE_LABEL);
}

// it's stack-locked, biased or neutral
// TODO: optimize away redundant LDs of obj->mark and improve the markword triage
// order to reduce the number of conditional branches in the most common cases.
