Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

8255522: [lworld] References to biased pattern remain in markWord::is_unlocked() #245

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/hotspot/share/memory/heapShared.cpp
Expand Up @@ -117,7 +117,7 @@ void HeapShared::fixup_mapped_heap_regions() {
}

unsigned HeapShared::oop_hash(oop const& p) {
assert(!p->mark().has_bias_pattern(),
assert(!UseBiasedLocking || !p->mark().has_bias_pattern(),
"this object should never have been locked"); // so identity_hash won't safepoin
unsigned hash = (unsigned)p->identity_hash();
return hash;
Expand Down
2 changes: 1 addition & 1 deletion src/hotspot/share/oops/klass.inline.hpp
Expand Up @@ -31,7 +31,7 @@

inline void Klass::set_prototype_header(markWord header) {
assert(!is_inline_klass() || header.is_inline_type(), "Unexpected prototype");
assert(!header.has_bias_pattern() || is_instance_klass(), "biased locking currently only supported for Java instances");
assert(!UseBiasedLocking || !header.has_bias_pattern() || is_instance_klass(), "biased locking currently only supported for Java instances");
_prototype_header = header;
}

Expand Down
2 changes: 1 addition & 1 deletion src/hotspot/share/oops/markWord.cpp
Expand Up @@ -53,7 +53,7 @@ void markWord::print_on(outputStream* st) const {
} else {
st->print(" hash=" INTPTR_FORMAT, hash());
}
} else if (has_bias_pattern()) { // last bits = 101
} else if (UseBiasedLocking && has_bias_pattern()) { // last bits = 101
st->print("is_biased");
JavaThread* jt = biased_locker();
st->print(" biased_locker=" INTPTR_FORMAT " epoch=%d", p2i(jt), bias_epoch());
Expand Down
14 changes: 12 additions & 2 deletions src/hotspot/share/oops/markWord.hpp
Expand Up @@ -290,34 +290,41 @@ class markWord {
// fixes up biased locks to be compatible with it when a bias is
// revoked.
// Reports whether the lock bits carry the biased-locking bit pattern.
// Valhalla removes biased locking entirely, so any call here is a bug;
// the guard fires before the (now dead) pattern test below.
bool has_bias_pattern() const {
ShouldNotReachHere(); // Valhalla: unused
return (mask_bits(value(), biased_lock_mask_in_place) == biased_lock_pattern);
}
// Extracts the JavaThread* toward which this mark word is biased
// (stored in the bits outside the lock/age/epoch fields).
// Valhalla: biased locking is disabled, so this must never be called.
JavaThread* biased_locker() const {
ShouldNotReachHere(); // Valhalla: unused
assert(has_bias_pattern(), "should not call this otherwise");
return (JavaThread*) mask_bits(value(), ~(biased_lock_mask_in_place | age_mask_in_place | epoch_mask_in_place));
}
// Indicates that the mark has the bias bit set but that it has not
// yet been biased toward a particular thread (anonymous bias:
// biased_locker() is NULL).
// Valhalla: biased locking is disabled, so this must never be called.
bool is_biased_anonymously() const {
ShouldNotReachHere(); // Valhalla: unused
return (has_bias_pattern() && (biased_locker() == NULL));
}
// Indicates epoch in which this bias was acquired. If the epoch
// changes due to too many bias revocations occurring, the biases
// from the previous epochs are all considered invalid.
// Valhalla: biased locking is disabled, so this must never be called.
int bias_epoch() const {
ShouldNotReachHere(); // Valhalla: unused
assert(has_bias_pattern(), "should not call this otherwise");
return (mask_bits(value(), epoch_mask_in_place) >> epoch_shift);
}
// Returns a copy of this mark word with the bias epoch field replaced
// by 'epoch' (which must fit in epoch_mask).
// Valhalla: biased locking is disabled, so this must never be called.
markWord set_bias_epoch(int epoch) {
ShouldNotReachHere(); // Valhalla: unused
assert(has_bias_pattern(), "should not call this otherwise");
assert((epoch & (~epoch_mask)) == 0, "epoch overflow");
return markWord(mask_bits(value(), ~epoch_mask_in_place) | (epoch << epoch_shift));
}
// Returns a copy of this mark word with the bias epoch advanced by
// one, wrapping modulo epoch_mask.
// Valhalla: biased locking is disabled, so this must never be called.
markWord incr_bias_epoch() {
ShouldNotReachHere(); // Valhalla: unused
return set_bias_epoch((1 + bias_epoch()) & epoch_mask);
}
// Prototype mark for initialization: a mark word carrying only the
// biased-lock bit pattern, installed as a class's prototype header
// when its instances may be biased.
// Valhalla: biased locking is disabled, so this must never be called.
static markWord biased_locking_prototype() {
ShouldNotReachHere(); // Valhalla: unused
return markWord( biased_lock_pattern );
}

Expand All @@ -326,12 +333,15 @@ class markWord {
return (mask_bits(value(), lock_mask_in_place) != unlocked_value);
}
bool is_unlocked() const {
return (mask_bits(value(), biased_lock_mask_in_place) == unlocked_value);
return (mask_bits(value(), lock_mask_in_place) == unlocked_value);
}
// Reports whether the lock bits carry the marked_value pattern.
bool is_marked() const {
return (mask_bits(value(), lock_mask_in_place) == marked_value);
}
bool is_neutral() const { return (mask_bits(value(), biased_lock_mask_in_place) == unlocked_value); }

// is unlocked and not an inline type (which cannot be involved in locking, displacement or inflation)
// i.e. test both lock bits and the inline type bit together
bool is_neutral() const { return (mask_bits(value(), inline_type_mask_in_place) == unlocked_value); }

// Special temporary state of the markWord while being inflated.
// Code that looks at mark outside a lock need to take this into account.
Expand Down
2 changes: 1 addition & 1 deletion src/hotspot/share/oops/markWord.inline.hpp
Expand Up @@ -71,7 +71,7 @@ inline bool markWord::must_be_preserved_for_promotion_failure(KlassProxy klass)
inline markWord markWord::prototype_for_klass(const Klass* klass) {
markWord prototype_header = klass->prototype_header();
assert(prototype_header == prototype() ||
prototype_header.has_bias_pattern() ||
(UseBiasedLocking && prototype_header.has_bias_pattern()) ||
prototype_header.is_inline_type() ||
prototype_header.is_flat_array() ||
prototype_header.is_nullfree_array(), "corrupt prototype header");
Expand Down
6 changes: 4 additions & 2 deletions src/hotspot/share/prims/jvmtiEnvBase.cpp
Expand Up @@ -968,8 +968,10 @@ JvmtiEnvBase::get_object_monitor_usage(JavaThread* calling_thread, jobject objec
uint32_t debug_bits = 0;
// first derive the object's owner and entry_count (if any)
{
// Revoke any biases before querying the mark word
BiasedLocking::revoke_at_safepoint(hobj);
if (UseBiasedLocking) {
// Revoke any biases before querying the mark word
BiasedLocking::revoke_at_safepoint(hobj);
}

address owner = NULL;
{
Expand Down
4 changes: 4 additions & 0 deletions src/hotspot/share/runtime/biasedLocking.cpp
Expand Up @@ -732,6 +732,7 @@ void BiasedLocking::walk_stack_and_revoke(oop obj, JavaThread* biased_locker) {
}

void BiasedLocking::revoke_own_lock(Handle obj, TRAPS) {
assert(UseBiasedLocking, "biased locking not enabled");
JavaThread* thread = THREAD->as_Java_thread();

markWord mark = obj->mark();
Expand All @@ -755,6 +756,7 @@ void BiasedLocking::revoke_own_lock(Handle obj, TRAPS) {
}

void BiasedLocking::revoke(Handle obj, TRAPS) {
assert(UseBiasedLocking, "biased locking not enabled");
assert(!SafepointSynchronize::is_at_safepoint(), "must not be called while at safepoint");

while (true) {
Expand Down Expand Up @@ -858,6 +860,7 @@ void BiasedLocking::revoke(Handle obj, TRAPS) {

// All objects in objs should be locked by biaser
void BiasedLocking::revoke(GrowableArray<Handle>* objs, JavaThread *biaser) {
assert(UseBiasedLocking, "biased locking not enabled");
bool clean_my_cache = false;
for (int i = 0; i < objs->length(); i++) {
oop obj = (objs->at(i))();
Expand All @@ -874,6 +877,7 @@ void BiasedLocking::revoke(GrowableArray<Handle>* objs, JavaThread *biaser) {


void BiasedLocking::revoke_at_safepoint(Handle h_obj) {
assert(UseBiasedLocking, "biased locking not enabled");
assert(SafepointSynchronize::is_at_safepoint(), "must only be called while at safepoint");
oop obj = h_obj();
HeuristicsResult heuristics = update_heuristics(obj);
Expand Down
2 changes: 1 addition & 1 deletion src/hotspot/share/runtime/globals.hpp
Expand Up @@ -832,7 +832,7 @@ const intx ObjectAlignmentInBytes = 8;
"Restrict @Contended to trusted classes") \
\
product(bool, UseBiasedLocking, false, \
"(Deprecated) Enable biased locking in JVM") \
"(Deprecated) Enable biased locking in JVM (completely disabled by Valhalla)") \
\
product(intx, BiasedLockingStartupDelay, 0, \
"(Deprecated) Number of milliseconds to wait before enabling " \
Expand Down
9 changes: 5 additions & 4 deletions src/hotspot/share/runtime/synchronizer.cpp
Expand Up @@ -634,7 +634,7 @@ void ObjectSynchronizer::enter(Handle obj, BasicLock* lock, TRAPS) {
}

markWord mark = obj->mark();
assert(!mark.has_bias_pattern(), "should not see bias pattern here");
assert(!UseBiasedLocking || !mark.has_bias_pattern(), "should not see bias pattern here");

if (mark.is_neutral()) {
// Anticipate successful CAS -- the ST of the displaced mark must
Expand Down Expand Up @@ -676,6 +676,7 @@ void ObjectSynchronizer::exit(oop object, BasicLock* lock, TRAPS) {
assert(!EnableValhalla || !object->klass()->is_inline_klass(), "monitor op on inline type");
// We cannot check for Biased Locking if we are racing an inflation.
assert(mark == markWord::INFLATING() ||
!UseBiasedLocking ||
!mark.has_bias_pattern(), "should not see bias pattern here");

markWord dhw = lock->displaced_header();
Expand Down Expand Up @@ -804,8 +805,8 @@ void ObjectSynchronizer::jni_exit(oop obj, Thread* THREAD) {
Handle h_obj(THREAD, obj);
BiasedLocking::revoke(h_obj, THREAD);
obj = h_obj();
assert(!obj->mark().has_bias_pattern(), "biases should be revoked by now");
}
assert(!obj->mark().has_bias_pattern(), "biases should be revoked by now");

// The ObjectMonitor* can't be async deflated until ownership is
// dropped inside exit() and the ObjectMonitor* must be !is_busy().
Expand Down Expand Up @@ -1107,7 +1108,7 @@ intptr_t ObjectSynchronizer::FastHashCode(Thread* self, oop obj) {
markWord mark = read_stable_mark(obj);

// object should remain ineligible for biased locking
assert(!mark.has_bias_pattern(), "invariant");
assert(!UseBiasedLocking || !mark.has_bias_pattern(), "invariant");

if (mark.is_neutral()) { // if this is a normal header
hash = mark.hash();
Expand Down Expand Up @@ -1863,7 +1864,7 @@ ObjectMonitor* ObjectSynchronizer::inflate(Thread* self, oop object,

for (;;) {
const markWord mark = object->mark();
assert(!mark.has_bias_pattern(), "invariant");
assert(!UseBiasedLocking || !mark.has_bias_pattern(), "invariant");

// The mark can be in one of the following states:
// * Inflated - just return
Expand Down
2 changes: 1 addition & 1 deletion src/hotspot/share/runtime/vframeArray.cpp
Expand Up @@ -95,7 +95,7 @@ void vframeArrayElement::fill_in(compiledVFrame* vf, bool realloc_failures) {
if (monitor->owner_is_scalar_replaced()) {
dest->set_obj(NULL);
} else {
assert(monitor->owner() == NULL || (!monitor->owner()->is_unlocked() && !monitor->owner()->has_bias_pattern()), "object must be null or locked, and unbiased");
assert(monitor->owner() == NULL || (!monitor->owner()->is_unlocked() && (!UseBiasedLocking || !monitor->owner()->has_bias_pattern())), "object must be null or locked, and unbiased");
dest->set_obj(monitor->owner());
monitor->lock()->move_to(monitor->owner(), dest->lock());
}
Expand Down
117 changes: 117 additions & 0 deletions test/hotspot/gtest/oops/test_markWord.cpp
Expand Up @@ -139,4 +139,121 @@ TEST_VM(markWord, printing) {
done.wait_with_safepoint_check(THREAD); // wait till the thread is done.
}
}

// Verify that a mark word carries no locking-related state at all:
// it reports itself unlocked, and none of the stack-lock, monitor,
// displaced-header, or inflation-in-progress indicators are set.
static void assert_unlocked_state(markWord mw) {
  EXPECT_TRUE(mw.is_unlocked());
  EXPECT_FALSE(mw.is_locked());
  EXPECT_FALSE(mw.has_locker());
  EXPECT_FALSE(mw.has_monitor());
  EXPECT_FALSE(mw.has_displaced_mark_helper());
  EXPECT_FALSE(mw.is_being_inflated());
}

// Verify identity-hash installation: the incoming mark must start out
// hashless, and copy_set_hash() must yield a mark that reports exactly
// the installed hash value.
static void assert_copy_set_hash(markWord mw) {
  const intptr_t test_hash = 4711;
  EXPECT_TRUE(mw.has_no_hash());
  const markWord hashed = mw.copy_set_hash(test_hash);
  EXPECT_FALSE(hashed.has_no_hash());
  EXPECT_EQ(test_hash, hashed.hash());
}

// Verify a plain identity-object mark: none of the Valhalla type bits
// (inline type, larval state, flat array, null-free array) may be set.
static void assert_type(markWord mw) {
  EXPECT_FALSE(mw.is_inline_type());
  EXPECT_FALSE(mw.is_larval_state());
  EXPECT_FALSE(mw.is_flat_array());
  EXPECT_FALSE(mw.is_nullfree_array());
}

// The prototype header for fresh identity objects must be neutral and
// unlocked, carry no hash and no Valhalla type bits, decode to no
// pointer, and accept an identity hash without disturbing its type bits.
TEST_VM(markWord, prototype) {
  const markWord m = markWord::prototype();

  assert_unlocked_state(m);
  EXPECT_TRUE(m.is_neutral());
  assert_type(m);

  EXPECT_FALSE(m.is_marked());
  EXPECT_TRUE(m.has_no_hash());
  EXPECT_TRUE(m.decode_pointer() == NULL);

  // Installing a hash must leave the type bits untouched.
  assert_copy_set_hash(m);
  assert_type(m);
}

// Verify the mark of an inline-type instance: the inline-type bit is
// set while both array-related bits stay clear. (Larval state is
// asserted separately by callers.)
static void assert_inline_type(markWord mw) {
  EXPECT_TRUE(mw.is_inline_type());
  EXPECT_FALSE(mw.is_flat_array());
  EXPECT_FALSE(mw.is_nullfree_array());
}

// The inline-type prototype header: unlocked but NOT neutral, inline
// bit set, no hash, no decodable pointer. Entering and then leaving
// larval state must toggle only the larval bit and preserve the rest.
TEST_VM(markWord, inline_type_prototype) {
  markWord m = markWord::inline_type_prototype();

  assert_unlocked_state(m);
  EXPECT_FALSE(m.is_neutral()); // inline types are never neutral
  assert_inline_type(m);
  EXPECT_FALSE(m.is_larval_state());

  EXPECT_FALSE(m.is_marked());
  EXPECT_TRUE(m.has_no_hash());
  EXPECT_TRUE(m.decode_pointer() == NULL);

  // Round-trip through larval state.
  const markWord larval = m.enter_larval_state();
  assert_inline_type(larval);
  EXPECT_TRUE(larval.is_larval_state());

  m = larval.exit_larval_state();
  assert_inline_type(m);
  EXPECT_FALSE(m.is_larval_state());

  EXPECT_FALSE(m.is_marked());
  EXPECT_TRUE(m.has_no_hash());
  EXPECT_TRUE(m.decode_pointer() == NULL);
}

#if _LP64

// Verify the mark of a flat (inline-element) array: both the flat-array
// and null-free-array bits are set; the instance-level inline/larval
// bits stay clear.
static void assert_flat_array_type(markWord mw) {
  EXPECT_TRUE(mw.is_flat_array());
  EXPECT_TRUE(mw.is_nullfree_array());
  EXPECT_FALSE(mw.is_inline_type());
  EXPECT_FALSE(mw.is_larval_state());
}

// The flat-array prototype header: neutral, unlocked, unhashed, with
// the flat + null-free array bits set, and it must accept an identity
// hash without disturbing those bits.
TEST_VM(markWord, flat_array_prototype) {
  const markWord m = markWord::flat_array_prototype();

  assert_unlocked_state(m);
  EXPECT_TRUE(m.is_neutral());
  assert_flat_array_type(m);

  EXPECT_FALSE(m.is_marked());
  EXPECT_TRUE(m.has_no_hash());
  EXPECT_TRUE(m.decode_pointer() == NULL);

  // Installing a hash must leave the array type bits untouched.
  assert_copy_set_hash(m);
  assert_flat_array_type(m);
}

// Verify the mark of a null-free (but not flat) array: only the
// null-free-array bit is set; flat-array, inline-type, and larval
// bits all stay clear.
static void assert_nullfree_array_type(markWord mw) {
  EXPECT_TRUE(mw.is_nullfree_array());
  EXPECT_FALSE(mw.is_flat_array());
  EXPECT_FALSE(mw.is_inline_type());
  EXPECT_FALSE(mw.is_larval_state());
}

// The null-free-array prototype header: neutral, unlocked, unhashed,
// with only the null-free array bit set, and it must accept an
// identity hash without disturbing that bit.
TEST_VM(markWord, nullfree_array_prototype) {
  const markWord m = markWord::nullfree_array_prototype();

  assert_unlocked_state(m);
  EXPECT_TRUE(m.is_neutral());
  assert_nullfree_array_type(m);

  EXPECT_FALSE(m.is_marked());
  EXPECT_TRUE(m.has_no_hash());
  EXPECT_TRUE(m.decode_pointer() == NULL);

  // Installing a hash must leave the array type bit untouched.
  assert_copy_set_hash(m);
  assert_nullfree_array_type(m);
}
#endif // _LP64

#endif // PRODUCT