Skip to content
Permalink
Browse files
8255522: [lworld] References to biased pattern remain in markWord::is_unlocked()

Reviewed-by: fparain, skuksenko
  • Loading branch information
David Simms committed Nov 17, 2020
1 parent e9724e5 commit a3fb148558731c7d1ff734e20a9b9d4dc7c63220
@@ -118,7 +118,7 @@ void HeapShared::fixup_mapped_heap_regions() {
}

unsigned HeapShared::oop_hash(oop const& p) {
assert(!p->mark().has_bias_pattern(),
assert(!UseBiasedLocking || !p->mark().has_bias_pattern(),
"this object should never have been locked"); // so identity_hash won't safepoint
unsigned hash = (unsigned)p->identity_hash();
return hash;
@@ -31,7 +31,7 @@

// Installs the prototype mark word used to initialize new instances of this
// klass. Invariants checked:
//  - an inline klass prototype must carry the inline-type bit;
//  - a biased prototype is only legal for instance klasses, and only when
//    biased locking is enabled at all (Valhalla ships with it disabled).
inline void Klass::set_prototype_header(markWord header) {
assert(!is_inline_klass() || header.is_inline_type(), "Unexpected prototype");
// NOTE: the stricter pre-fix assert (without the UseBiasedLocking guard) was
// left in place above its replacement; it is removed here so the relaxed,
// guarded check actually takes effect.
assert(!UseBiasedLocking || !header.has_bias_pattern() || is_instance_klass(), "biased locking currently only supported for Java instances");
_prototype_header = header;
}

@@ -88,7 +88,7 @@ void markWord::print_on(outputStream* st, bool print_monitor_info) const {
} else {
st->print(" hash=" INTPTR_FORMAT, hash());
}
} else if (has_bias_pattern()) { // last bits = 101
} else if (UseBiasedLocking && has_bias_pattern()) { // last bits = 101
st->print("is_biased");
JavaThread* jt = biased_locker();
st->print(" biased_locker=" INTPTR_FORMAT " epoch=%d", p2i(jt), bias_epoch());
@@ -290,34 +290,41 @@ class markWord {
// fixes up biased locks to be compatible with it when a bias is
// revoked.
// Tests whether the low mark bits carry the biased-lock pattern.
// Valhalla never biases objects, so any call is a programming error:
// ShouldNotReachHere() reports a fatal VM error before the (now dead)
// pattern comparison below can run.
bool has_bias_pattern() const {
ShouldNotReachHere(); // Valhalla: unused
return (mask_bits(value(), biased_lock_mask_in_place) == biased_lock_pattern);
}
// Extracts the JavaThread* the mark is biased toward by masking off the
// biased-lock, age and epoch fields. Unreachable under Valhalla, where
// biased locking is disabled entirely.
JavaThread* biased_locker() const {
ShouldNotReachHere(); // Valhalla: unused
assert(has_bias_pattern(), "should not call this otherwise");
return (JavaThread*) mask_bits(value(), ~(biased_lock_mask_in_place | age_mask_in_place | epoch_mask_in_place));
}
// Indicates that the mark has the bias bit set but that it has not
// yet been biased toward a particular thread
// (i.e. the bias pattern is present but biased_locker() is NULL).
// Unreachable under Valhalla: biased locking is disabled there.
bool is_biased_anonymously() const {
ShouldNotReachHere(); // Valhalla: unused
return (has_bias_pattern() && (biased_locker() == NULL));
}
// Indicates epoch in which this bias was acquired. If the epoch
// changes due to too many bias revocations occurring, the biases
// from the previous epochs are all considered invalid.
// Unreachable under Valhalla: biased locking is disabled there.
int bias_epoch() const {
ShouldNotReachHere(); // Valhalla: unused
assert(has_bias_pattern(), "should not call this otherwise");
return (mask_bits(value(), epoch_mask_in_place) >> epoch_shift);
}
// Returns a copy of this mark with the epoch field replaced by 'epoch'
// (which must fit in the field, see the overflow assert).
// Unreachable under Valhalla: biased locking is disabled there.
markWord set_bias_epoch(int epoch) {
ShouldNotReachHere(); // Valhalla: unused
assert(has_bias_pattern(), "should not call this otherwise");
assert((epoch & (~epoch_mask)) == 0, "epoch overflow");
return markWord(mask_bits(value(), ~epoch_mask_in_place) | (epoch << epoch_shift));
}
// Returns a copy of this mark with the epoch bumped by one, wrapping
// modulo the width of the epoch field.
// Unreachable under Valhalla: biased locking is disabled there.
markWord incr_bias_epoch() {
ShouldNotReachHere(); // Valhalla: unused
return set_bias_epoch((1 + bias_epoch()) & epoch_mask);
}
// Prototype mark for initialization
// (the bias pattern with no owning thread recorded).
// Unreachable under Valhalla: biased locking is disabled there.
static markWord biased_locking_prototype() {
ShouldNotReachHere(); // Valhalla: unused
return markWord( biased_lock_pattern );
}

@@ -326,12 +333,15 @@ class markWord {
return (mask_bits(value(), lock_mask_in_place) != unlocked_value);
}
// True iff the two low lock bits encode the unlocked state.
// This is the JDK-8255522 fix: test lock_mask_in_place rather than the
// wider biased_lock_mask_in_place, whose extra bit is dead under Valhalla.
// The stale pre-fix return statement that sat above this one (and would
// have executed first, making the fix dead code) is removed.
bool is_unlocked() const {
return (mask_bits(value(), lock_mask_in_place) == unlocked_value);
}
bool is_marked() const {
return (mask_bits(value(), lock_mask_in_place) == marked_value);
}
// is unlocked and not an inline type (which cannot be involved in locking, displacement or inflation)
// i.e. test both lock bits and the inline type bit together
// NOTE: the old single-line definition (masking with biased_lock_mask_in_place)
// was left duplicated above its replacement, which is a redefinition of the
// same member; only the corrected definition is kept.
bool is_neutral() const { return (mask_bits(value(), inline_type_mask_in_place) == unlocked_value); }

// Special temporary state of the markWord while being inflated.
// Code that looks at mark outside a lock need to take this into account.
@@ -71,7 +71,7 @@ inline bool markWord::must_be_preserved_for_promotion_failure(KlassProxy klass)
inline markWord markWord::prototype_for_klass(const Klass* klass) {
markWord prototype_header = klass->prototype_header();
assert(prototype_header == prototype() ||
prototype_header.has_bias_pattern() ||
(UseBiasedLocking && prototype_header.has_bias_pattern()) ||
prototype_header.is_inline_type() ||
prototype_header.is_flat_array() ||
prototype_header.is_nullfree_array(), "corrupt prototype header");
@@ -968,8 +968,10 @@ JvmtiEnvBase::get_object_monitor_usage(JavaThread* calling_thread, jobject objec
uint32_t debug_bits = 0;
// first derive the object's owner and entry_count (if any)
{
// Revoke any biases before querying the mark word
BiasedLocking::revoke_at_safepoint(hobj);
if (UseBiasedLocking) {
// Revoke any biases before querying the mark word
BiasedLocking::revoke_at_safepoint(hobj);
}

address owner = NULL;
{
@@ -732,6 +732,7 @@ void BiasedLocking::walk_stack_and_revoke(oop obj, JavaThread* biased_locker) {
}

void BiasedLocking::revoke_own_lock(Handle obj, TRAPS) {
assert(UseBiasedLocking, "biased locking not enabled");
JavaThread* thread = THREAD->as_Java_thread();

markWord mark = obj->mark();
@@ -755,6 +756,7 @@ void BiasedLocking::revoke_own_lock(Handle obj, TRAPS) {
}

void BiasedLocking::revoke(Handle obj, TRAPS) {
assert(UseBiasedLocking, "biased locking not enabled");
assert(!SafepointSynchronize::is_at_safepoint(), "must not be called while at safepoint");

while (true) {
@@ -858,6 +860,7 @@ void BiasedLocking::revoke(Handle obj, TRAPS) {

// All objects in objs should be locked by biaser
void BiasedLocking::revoke(GrowableArray<Handle>* objs, JavaThread *biaser) {
assert(UseBiasedLocking, "biased locking not enabled");
bool clean_my_cache = false;
for (int i = 0; i < objs->length(); i++) {
oop obj = (objs->at(i))();
@@ -874,6 +877,7 @@ void BiasedLocking::revoke(GrowableArray<Handle>* objs, JavaThread *biaser) {


void BiasedLocking::revoke_at_safepoint(Handle h_obj) {
assert(UseBiasedLocking, "biased locking not enabled");
assert(SafepointSynchronize::is_at_safepoint(), "must only be called while at safepoint");
oop obj = h_obj();
HeuristicsResult heuristics = update_heuristics(obj);
@@ -843,7 +843,7 @@ const intx ObjectAlignmentInBytes = 8;
"Restrict @Contended to trusted classes") \
\
product(bool, UseBiasedLocking, false, \
"(Deprecated) Enable biased locking in JVM") \
"(Deprecated) Enable biased locking in JVM (completely disabled by Valhalla)") \
\
product(intx, BiasedLockingStartupDelay, 0, \
"(Deprecated) Number of milliseconds to wait before enabling " \
@@ -461,7 +461,7 @@ void ObjectSynchronizer::enter(Handle obj, BasicLock* lock, TRAPS) {
}

markWord mark = obj->mark();
assert(!mark.has_bias_pattern(), "should not see bias pattern here");
assert(!UseBiasedLocking || !mark.has_bias_pattern(), "should not see bias pattern here");

if (mark.is_neutral()) {
// Anticipate successful CAS -- the ST of the displaced mark must
@@ -503,6 +503,7 @@ void ObjectSynchronizer::exit(oop object, BasicLock* lock, TRAPS) {
assert(!EnableValhalla || !object->klass()->is_inline_klass(), "monitor op on inline type");
// We cannot check for Biased Locking if we are racing an inflation.
assert(mark == markWord::INFLATING() ||
!UseBiasedLocking ||
!mark.has_bias_pattern(), "should not see bias pattern here");

markWord dhw = lock->displaced_header();
@@ -631,8 +632,8 @@ void ObjectSynchronizer::jni_exit(oop obj, Thread* THREAD) {
Handle h_obj(THREAD, obj);
BiasedLocking::revoke(h_obj, THREAD);
obj = h_obj();
assert(!obj->mark().has_bias_pattern(), "biases should be revoked by now");
}
assert(!obj->mark().has_bias_pattern(), "biases should be revoked by now");

// The ObjectMonitor* can't be async deflated until ownership is
// dropped inside exit() and the ObjectMonitor* must be !is_busy().
@@ -934,7 +935,7 @@ intptr_t ObjectSynchronizer::FastHashCode(Thread* self, oop obj) {
markWord mark = read_stable_mark(obj);

// object should remain ineligible for biased locking
assert(!mark.has_bias_pattern(), "invariant");
assert(!UseBiasedLocking || !mark.has_bias_pattern(), "invariant");

if (mark.is_neutral()) { // if this is a normal header
hash = mark.hash();
@@ -1302,7 +1303,7 @@ ObjectMonitor* ObjectSynchronizer::inflate(Thread* self, oop object,

for (;;) {
const markWord mark = object->mark();
assert(!mark.has_bias_pattern(), "invariant");
assert(!UseBiasedLocking || !mark.has_bias_pattern(), "invariant");

// The mark can be in one of the following states:
// * Inflated - just return
@@ -95,7 +95,7 @@ void vframeArrayElement::fill_in(compiledVFrame* vf, bool realloc_failures) {
if (monitor->owner_is_scalar_replaced()) {
dest->set_obj(NULL);
} else {
assert(monitor->owner() == NULL || (!monitor->owner()->is_unlocked() && !monitor->owner()->has_bias_pattern()), "object must be null or locked, and unbiased");
assert(monitor->owner() == NULL || (!monitor->owner()->is_unlocked() && (!UseBiasedLocking || !monitor->owner()->has_bias_pattern())), "object must be null or locked, and unbiased");
dest->set_obj(monitor->owner());
monitor->lock()->move_to(monitor->owner(), dest->lock());
}
@@ -139,4 +139,121 @@ TEST_VM(markWord, printing) {
done.wait_with_safepoint_check(THREAD); // wait till the thread is done.
}
}

// A mark in any unlocked flavor must report no locking-related state at all.
static void assert_unlocked_state(markWord m) {
EXPECT_FALSE(m.has_displaced_mark_helper());
EXPECT_FALSE(m.has_locker());
EXPECT_FALSE(m.has_monitor());
EXPECT_FALSE(m.is_being_inflated());
EXPECT_FALSE(m.is_locked());
EXPECT_TRUE(m.is_unlocked());
}

// Installing an identity hash must round-trip exactly and clear "no hash".
static void assert_copy_set_hash(markWord m) {
const intptr_t test_hash = 4711;
EXPECT_TRUE(m.has_no_hash());
markWord hashed = m.copy_set_hash(test_hash);
EXPECT_EQ(test_hash, hashed.hash());
EXPECT_FALSE(hashed.has_no_hash());
}

// A plain-object mark must have none of the Valhalla type bits set.
static void assert_type(markWord m) {
EXPECT_FALSE(m.is_flat_array());
EXPECT_FALSE(m.is_inline_type());
EXPECT_FALSE(m.is_larval_state());
EXPECT_FALSE(m.is_nullfree_array());
}

// The default object prototype: unlocked, neutral, unhashed, no type bits.
TEST_VM(markWord, prototype) {
markWord proto = markWord::prototype();
assert_unlocked_state(proto);
EXPECT_TRUE(proto.is_neutral());

assert_type(proto);

EXPECT_TRUE(proto.has_no_hash());
EXPECT_FALSE(proto.is_marked());
EXPECT_TRUE(proto.decode_pointer() == NULL);

// Installing a hash must leave the type bits untouched.
assert_copy_set_hash(proto);
assert_type(proto);
}

// An inline-type mark carries only the inline-type bit of the type bits.
static void assert_inline_type(markWord m) {
EXPECT_FALSE(m.is_flat_array());
EXPECT_TRUE(m.is_inline_type());
EXPECT_FALSE(m.is_nullfree_array());
}

// The inline-type prototype: unlocked but NOT neutral (inline types cannot
// participate in locking), and the larval state must round-trip cleanly.
TEST_VM(markWord, inline_type_prototype) {
markWord proto = markWord::inline_type_prototype();
assert_unlocked_state(proto);
EXPECT_FALSE(proto.is_neutral());

assert_inline_type(proto);
EXPECT_FALSE(proto.is_larval_state());

EXPECT_TRUE(proto.has_no_hash());
EXPECT_FALSE(proto.is_marked());
EXPECT_TRUE(proto.decode_pointer() == NULL);

// Enter and exit the larval (under-construction) state; the inline-type
// bits must survive the round trip.
markWord larval_mark = proto.enter_larval_state();
EXPECT_TRUE(larval_mark.is_larval_state());
assert_inline_type(larval_mark);
markWord restored = larval_mark.exit_larval_state();
EXPECT_FALSE(restored.is_larval_state());
assert_inline_type(restored);

EXPECT_TRUE(restored.has_no_hash());
EXPECT_FALSE(restored.is_marked());
EXPECT_TRUE(restored.decode_pointer() == NULL);
}

#if _LP64

// A flat-array mark sets the flat-array and null-free bits (a flat array
// is necessarily null-free), and no inline-type/larval bits.
static void assert_flat_array_type(markWord m) {
EXPECT_TRUE(m.is_flat_array());
EXPECT_FALSE(m.is_inline_type());
EXPECT_FALSE(m.is_larval_state());
EXPECT_TRUE(m.is_nullfree_array());
}

// The flat-array prototype: unlocked, neutral, unhashed, flat-array bits set.
TEST_VM(markWord, flat_array_prototype) {
markWord proto = markWord::flat_array_prototype();
assert_unlocked_state(proto);
EXPECT_TRUE(proto.is_neutral());

assert_flat_array_type(proto);

EXPECT_TRUE(proto.has_no_hash());
EXPECT_FALSE(proto.is_marked());
EXPECT_TRUE(proto.decode_pointer() == NULL);

// Installing a hash must leave the array type bits untouched.
assert_copy_set_hash(proto);
assert_flat_array_type(proto);
}

// A null-free (but non-flat) array mark sets only the null-free bit.
static void assert_nullfree_array_type(markWord m) {
EXPECT_FALSE(m.is_flat_array());
EXPECT_FALSE(m.is_inline_type());
EXPECT_FALSE(m.is_larval_state());
EXPECT_TRUE(m.is_nullfree_array());
}

// The null-free-array prototype: unlocked, neutral, unhashed, null-free bit set.
TEST_VM(markWord, nullfree_array_prototype) {
markWord proto = markWord::nullfree_array_prototype();
assert_unlocked_state(proto);
EXPECT_TRUE(proto.is_neutral());

assert_nullfree_array_type(proto);

EXPECT_TRUE(proto.has_no_hash());
EXPECT_FALSE(proto.is_marked());
EXPECT_TRUE(proto.decode_pointer() == NULL);

// Installing a hash must leave the array type bits untouched.
assert_copy_set_hash(proto);
assert_nullfree_array_type(proto);
}
#endif // _LP64

#endif // PRODUCT

0 comments on commit a3fb148

Please sign in to comment.