8320310: CompiledMethod::has_monitors flag can be incorrect
Reviewed-by: vlivanov, thartmann
JornVernee committed Jan 8, 2024
1 parent 57a65fe commit c8fa3e2
Showing 5 changed files with 31 additions and 39 deletions.
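
In outline, the fix changes when the has_monitors flag is computed. Rather than being set lazily while the compilers parse monitorenter bytecodes, the flag is now derived up front from method metadata, is_synchronized() || has_monitor_bytecodes(), for the root method, and ORed in for every callee that gets inlined (see the new set_flags_for_inlined_callee helper in c1_GraphBuilder.cpp). The sketch below illustrates that policy only; MethodInfo and CompilationState are invented stand-ins for ciMethod and Compilation, not code from this commit.

// Illustrative sketch only: MethodInfo and CompilationState are invented
// stand-ins for HotSpot's ciMethod and Compilation; the real changes are in
// the diffs below.
#include <cstdio>

struct MethodInfo {
  bool is_synchronized;        // declared synchronized (implicit monitor on entry/exit)
  bool has_monitor_bytecodes;  // contains explicit monitorenter/monitorexit bytecodes
};

static bool may_use_monitors(const MethodInfo& m) {
  return m.is_synchronized || m.has_monitor_bytecodes;
}

struct CompilationState {
  bool has_monitors;

  // Derive the flag from the root method's metadata before any parsing happens.
  explicit CompilationState(const MethodInfo& root)
    : has_monitors(may_use_monitors(root)) {}

  // OR the flag in for every inlined callee, whether or not its locking code
  // is actually reached or kept by the optimizer.
  void note_inlined_callee(const MethodInfo& callee) {
    if (may_use_monitors(callee)) {
      has_monitors = true;
    }
  }
};

int main() {
  MethodInfo root   = {false, false};  // caller does no locking of its own
  MethodInfo callee = {true,  false};  // inlined callee is a synchronized method
  CompilationState c(root);
  c.note_inlined_callee(callee);
  std::printf("has_monitors = %s\n", c.has_monitors ? "true" : "false");  // prints true
  return 0;
}

Computed this way the flag is conservative: it may be set even though no lock is ever taken at runtime, but it is not missed for a method that could lock, which appears to be what allows the monitors_on_stack assert in continuationFreezeThaw.cpp to be re-enabled below.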
6 changes: 1 addition & 5 deletions src/hotspot/share/c1/c1_Compilation.cpp
@@ -390,10 +390,6 @@ int Compilation::compile_java_method() {
     BAILOUT_("mdo allocation failed", no_frame_size);
   }
 
-  if (method()->is_synchronized()) {
-    set_has_monitors(true);
-  }
-
   {
     PhaseTraceTime timeit(_t_buildIR);
     build_hir();
@@ -581,7 +577,7 @@ Compilation::Compilation(AbstractCompiler* compiler, ciEnv* env, ciMethod* metho
 , _would_profile(false)
 , _has_method_handle_invokes(false)
 , _has_reserved_stack_access(method->has_reserved_stack_access())
-, _has_monitors(false)
+, _has_monitors(method->is_synchronized() || method->has_monitor_bytecodes())
 , _install_code(install_code)
 , _bailout_msg(nullptr)
 , _first_failure_details(nullptr)
22 changes: 12 additions & 10 deletions src/hotspot/share/c1/c1_GraphBuilder.cpp
@@ -2316,7 +2316,6 @@ void GraphBuilder::instance_of(int klass_index) {
 void GraphBuilder::monitorenter(Value x, int bci) {
   // save state before locking in case of deoptimization after a NullPointerException
   ValueStack* state_before = copy_state_for_exception_with_bci(bci);
-  compilation()->set_has_monitors(true);
   append_with_bci(new MonitorEnter(x, state()->lock(x), state_before), bci);
   kill_all();
 }
@@ -3510,6 +3509,15 @@ int GraphBuilder::recursive_inline_level(ciMethod* cur_callee) const {
   return recur_level;
 }
 
+static void set_flags_for_inlined_callee(Compilation* compilation, ciMethod* callee) {
+  if (callee->has_reserved_stack_access()) {
+    compilation->set_has_reserved_stack_access(true);
+  }
+  if (callee->is_synchronized() || callee->has_monitor_bytecodes()) {
+    compilation->set_has_monitors(true);
+  }
+}
+
 bool GraphBuilder::try_inline(ciMethod* callee, bool holder_known, bool ignore_return, Bytecodes::Code bc, Value receiver) {
   const char* msg = nullptr;
 
@@ -3526,9 +3534,7 @@ bool GraphBuilder::try_inline(ciMethod* callee, bool holder_known, bool ignore_r
   // method handle invokes
   if (callee->is_method_handle_intrinsic()) {
     if (try_method_handle_inline(callee, ignore_return)) {
-      if (callee->has_reserved_stack_access()) {
-        compilation()->set_has_reserved_stack_access(true);
-      }
+      set_flags_for_inlined_callee(compilation(), callee);
       return true;
     }
     return false;
@@ -3539,9 +3545,7 @@ bool GraphBuilder::try_inline(ciMethod* callee, bool holder_known, bool ignore_r
       callee->check_intrinsic_candidate()) {
     if (try_inline_intrinsics(callee, ignore_return)) {
       print_inlining(callee, "intrinsic");
-      if (callee->has_reserved_stack_access()) {
-        compilation()->set_has_reserved_stack_access(true);
-      }
+      set_flags_for_inlined_callee(compilation(), callee);
       return true;
     }
     // try normal inlining
@@ -3559,9 +3563,7 @@ bool GraphBuilder::try_inline(ciMethod* callee, bool holder_known, bool ignore_r
     bc = code();
   }
   if (try_inline_full(callee, holder_known, ignore_return, bc, receiver)) {
-    if (callee->has_reserved_stack_access()) {
-      compilation()->set_has_reserved_stack_access(true);
-    }
+    set_flags_for_inlined_callee(compilation(), callee);
     return true;
   }
 
6 changes: 0 additions & 6 deletions src/hotspot/share/opto/locknode.cpp
@@ -179,8 +179,6 @@ void FastLockNode::create_rtm_lock_counter(JVMState* state) {
 void Parse::do_monitor_enter() {
   kill_dead_locals();
 
-  C->set_has_monitors(true);
-
   // Null check; get casted pointer.
   Node* obj = null_check(peek());
   // Check for locking null object
@@ -198,10 +196,6 @@ void Parse::do_monitor_enter() {
 void Parse::do_monitor_exit() {
   kill_dead_locals();
 
-  // need to set it for monitor exit as well.
-  // OSR compiled methods can start with lock taken
-  C->set_has_monitors(true);
-
   pop(); // Pop oop to unlock
   // Because monitors are guaranteed paired (else we bail out), we know
   // the matching Lock for this Unlock. Hence we know there is no need
2 changes: 1 addition & 1 deletion src/hotspot/share/opto/parse1.cpp
@@ -423,7 +423,7 @@ Parse::Parse(JVMState* caller, ciMethod* parse_method, float expected_uses)
     C->set_has_reserved_stack_access(true);
   }
 
-  if (parse_method->is_synchronized()) {
+  if (parse_method->is_synchronized() || parse_method->has_monitor_bytecodes()) {
     C->set_has_monitors(true);
   }
 
34 changes: 17 additions & 17 deletions src/hotspot/share/runtime/continuationFreezeThaw.cpp
@@ -1498,21 +1498,21 @@ static void jvmti_yield_cleanup(JavaThread* thread, ContinuationWrapper& cont) {
 #endif // INCLUDE_JVMTI
 
 #ifdef ASSERT
-// static bool monitors_on_stack(JavaThread* thread) {
-//   ContinuationEntry* ce = thread->last_continuation();
-//   RegisterMap map(thread,
-//                   RegisterMap::UpdateMap::include,
-//                   RegisterMap::ProcessFrames::include,
-//                   RegisterMap::WalkContinuation::skip);
-//   map.set_include_argument_oops(false);
-//   for (frame f = thread->last_frame(); Continuation::is_frame_in_continuation(ce, f); f = f.sender(&map)) {
-//     if ((f.is_interpreted_frame() && ContinuationHelper::InterpretedFrame::is_owning_locks(f)) ||
-//         (f.is_compiled_frame() && ContinuationHelper::CompiledFrame::is_owning_locks(map.thread(), &map, f))) {
-//       return true;
-//     }
-//   }
-//   return false;
-// }
+static bool monitors_on_stack(JavaThread* thread) {
+  ContinuationEntry* ce = thread->last_continuation();
+  RegisterMap map(thread,
+                  RegisterMap::UpdateMap::include,
+                  RegisterMap::ProcessFrames::include,
+                  RegisterMap::WalkContinuation::skip);
+  map.set_include_argument_oops(false);
+  for (frame f = thread->last_frame(); Continuation::is_frame_in_continuation(ce, f); f = f.sender(&map)) {
+    if ((f.is_interpreted_frame() && ContinuationHelper::InterpretedFrame::is_owning_locks(f)) ||
+        (f.is_compiled_frame() && ContinuationHelper::CompiledFrame::is_owning_locks(map.thread(), &map, f))) {
+      return true;
+    }
+  }
+  return false;
+}
 
 bool FreezeBase::interpreted_native_or_deoptimized_on_stack() {
   ContinuationEntry* ce = _thread->last_continuation();
@@ -1575,8 +1575,8 @@ static inline int freeze_internal(JavaThread* current, intptr_t* const sp) {
 
   assert(entry->is_virtual_thread() == (entry->scope(current) == java_lang_VirtualThread::vthread_scope()), "");
 
-  // assert(monitors_on_stack(current) == ((current->held_monitor_count() - current->jni_monitor_count()) > 0),
-  //        "Held monitor count and locks on stack invariant: " INT64_FORMAT " JNI: " INT64_FORMAT, (int64_t)current->held_monitor_count(), (int64_t)current->jni_monitor_count());
+  assert(monitors_on_stack(current) == ((current->held_monitor_count() - current->jni_monitor_count()) > 0),
+         "Held monitor count and locks on stack invariant: " INT64_FORMAT " JNI: " INT64_FORMAT, (int64_t)current->held_monitor_count(), (int64_t)current->jni_monitor_count());
 
   if (entry->is_pinned() || current->held_monitor_count() > 0) {
     log_develop_debug(continuations)("PINNED due to critical section/hold monitor");
