Skip to content
Permalink
Browse files
8275583: [lworld] C2 fails to scalarize inline types in safepoint debug info in rare cases
  • Loading branch information
TobiHartmann committed Oct 20, 2021
1 parent c61edd5 commit 3354fc048ce0395c6ab512263313d8fb18583b22
@@ -1600,7 +1600,20 @@ SafePointNode* SafePointNode::next_exception() const {
// Skip over any collapsed Regions
// Returns 'this' if the node was changed in place, NULL otherwise.
Node* SafePointNode::Ideal(PhaseGVN* phase, bool can_reshape) {
  assert(_jvms == NULL || ((uintptr_t)_jvms->map() & 1) || _jvms->map() == this, "inconsistent JVMState");
  // NOTE(review): the previous unconditional
  //   return remove_dead_region(phase, can_reshape) ? this : NULL;
  // made everything below unreachable; keep only the guarded early return.
  if (remove_dead_region(phase, can_reshape)) {
    return this;
  }
  // Scalarize inline types in safepoint debug info.
  // Delay this until all inlining is over to avoid getting inconsistent debug info.
  if (phase->C->scalarize_in_safepoints() && can_reshape && jvms() != NULL) {
    for (uint i = jvms()->debug_start(); i < jvms()->debug_end(); i++) {
      // Look through casts to find the underlying inline type node.
      Node* n = in(i)->uncast();
      if (n->is_InlineTypeBase()) {
        n->as_InlineTypeBase()->make_scalar_in_safepoints(phase->is_IterGVN());
      }
    }
  }
  return NULL;
}

//------------------------------Identity---------------------------------------
@@ -2522,6 +2522,7 @@ Node *PhiNode::Ideal(PhaseGVN *phase, bool can_reshape) {
ciInlineKlass* vk = NULL;
// true if all IsInit inputs of all InlineType* nodes are true
bool is_init = true;
Node_List casts;

for (uint next = 0; next < worklist.size() && can_optimize; next++) {
Node* phi = worklist.at(next);
@@ -2532,10 +2533,7 @@ Node *PhiNode::Ideal(PhaseGVN *phase, bool can_reshape) {
break;
}
while (n->is_ConstraintCast()) {
if (phase->type(n->in(1))->filter_speculative(n->bottom_type()) == Type::TOP) {
can_optimize = false;
break;
}
casts.push(n);
n = n->in(1);
}
const Type* t = phase->type(n);
@@ -2554,6 +2552,14 @@ Node *PhiNode::Ideal(PhaseGVN *phase, bool can_reshape) {
}
}
}
// Check if cast nodes can be pushed through
const Type* t = Type::get_const_type(vk);
while (casts.size() != 0 && can_optimize && t != NULL) {
Node* cast = casts.pop();
if (t->filter(cast->bottom_type()) == Type::TOP) {
can_optimize = false;
}
}
if (can_optimize && vk != NULL) {
// TODO 8275400
// assert(!_type->isa_ptr() || _type->maybe_null() || is_init, "Phi not null but a possible null was seen");
@@ -1941,6 +1941,12 @@ void Compile::process_inline_types(PhaseIterGVN &igvn, bool remove) {
if (_inline_type_nodes.length() == 0) {
return;
}
// Scalarize inline types in safepoint debug info.
// Delay this until all inlining is over to avoid getting inconsistent debug info.
set_scalarize_in_safepoints(true);
for (int i = _inline_type_nodes.length()-1; i >= 0; i--) {
_inline_type_nodes.at(i)->as_InlineTypeBase()->make_scalar_in_safepoints(&igvn);
}
if (remove) {
// Remove inline type nodes
while (_inline_type_nodes.length() > 0) {
@@ -1986,13 +1992,6 @@ void Compile::process_inline_types(PhaseIterGVN &igvn, bool remove) {
igvn.replace_node(vt, igvn.C->top());
}
}
} else {
// Give inline types a chance to be scalarized in safepoints
// Delay this until all inlining is over to avoid getting inconsistent debug info
set_scalarize_in_safepoints(true);
for (int i = _inline_type_nodes.length()-1; i >= 0; i--) {
igvn._worklist.push(_inline_type_nodes.at(i));
}
}
igvn.optimize();
}
@@ -2740,9 +2739,6 @@ void Compile::Optimize() {
bs->verify_gc_barriers(this, BarrierSetC2::BeforeMacroExpand);
#endif

// Process inline type nodes again after loop opts
process_inline_types(igvn);

assert(_late_inlines.length() == 0 || IncrementalInlineMH || IncrementalInlineVirtual, "not empty");

if (_late_inlines.length() > 0) {
@@ -590,13 +590,6 @@ Node* InlineTypeBaseNode::allocate_fields(GraphKit* kit) {
}

Node* InlineTypeBaseNode::Ideal(PhaseGVN* phase, bool can_reshape) {
if (phase->C->scalarize_in_safepoints() && can_reshape) {
PhaseIterGVN* igvn = phase->is_IterGVN();
make_scalar_in_safepoints(igvn);
if (outcnt() == 0) {
return NULL;
}
}
Node* is_init = get_is_init();
if (is_init->isa_InlineTypePtr()) {
set_req(IsInit, is_init->as_InlineTypePtr()->get_is_init());
@@ -149,6 +149,14 @@ bool FastLockNode::cmp( const Node &n ) const {
return (&n == this); // Always fail except on self
}

// Compute the condition-code type for a fast-lock attempt.
// Locking on an inline type always fails, so the result is pinned to
// CC_GT in that case; otherwise the full condition-code range is possible.
const Type* FastLockNode::Value(PhaseGVN* phase) const {
  const Type* obj_type = phase->type(in(1));
  return obj_type->is_inlinetypeptr() ? TypeInt::CC_GT : TypeInt::CC;
}

//=============================================================================
//-----------------------------hash--------------------------------------------
uint FastUnlockNode::hash() const { return NO_HASH; }
@@ -93,7 +93,7 @@ class FastLockNode: public CmpNode {
virtual uint size_of() const;
virtual bool cmp( const Node &n ) const ; // Always fail, except on self
virtual int Opcode() const;
virtual const Type* Value(PhaseGVN* phase) const { return TypeInt::CC; }
virtual const Type* Value(PhaseGVN* phase) const;
const Type *sub(const Type *t1, const Type *t2) const { return TypeInt::CC;}

void create_rtm_lock_counter(JVMState* state);
@@ -3683,6 +3683,7 @@ public Object test130_inlinee() {
}

@Test
@IR(failOn = {ALLOC, LOAD, STORE})
public void test130() {
Object obj = test130_inlinee();
synchronized (obj) {
@@ -3727,6 +3728,7 @@ public void test131_verifier() {

// Test locking on object that is known to be an inline type only after CCP
@Test
@IR(failOn = {ALLOC, LOAD, STORE})
public void test132() {
MyValue2 vt = MyValue2.createWithFieldsInline(rI, rD);
Object obj = Integer.valueOf(42);

0 comments on commit 3354fc0

Please sign in to comment.