8251442: [lworld] C2 compilation fails with assert(value->bottom_type()->higher_equal(_type))
TobiHartmann committed Aug 18, 2020
1 parent 248fdec commit 5d1350ef32eba34258f5e700d2637c8a0a95dd99
Showing 18 changed files with 403 additions and 194 deletions.
@@ -146,7 +146,7 @@ bool Instruction::maybe_flattened_array() {
}
} else if (type->is_flat_array_klass()) {
ciKlass* element_klass = type->as_flat_array_klass()->element_klass();
- assert(!element_klass->is_loaded() || element_klass->as_inline_klass()->flatten_array(), "must be flattened");
+ assert(!element_klass->is_loaded() || element_klass->flatten_array(), "must be flattened");
return true;
} else if (type->is_klass() && type->as_klass()->is_java_lang_Object()) {
// This can happen as a parameter to System.arraycopy()
@@ -104,7 +104,7 @@ bool ciArrayKlass::is_leaf_type() {
ciArrayKlass* ciArrayKlass::make(ciType* element_type) {
if (element_type->is_primitive_type()) {
return ciTypeArrayKlass::make(element_type->basic_type());
- } else if (element_type->is_inlinetype() && element_type->as_inline_klass()->flatten_array()) {
+ } else if (element_type->flatten_array()) {
return ciFlatArrayKlass::make(element_type->as_klass());
} else {
return ciObjArrayKlass::make(element_type->as_klass());
@@ -313,6 +313,13 @@ const Type* CheckCastPPNode::Value(PhaseGVN* phase) const {
const TypePtr *my_type = _type->isa_ptr();
const Type *result = _type;
if( in_type != NULL && my_type != NULL ) {
+ if (my_type->isa_aryptr() && in_type->isa_aryptr()) {
+ // Propagate array properties (not flat/null-free)
+ my_type = my_type->is_aryptr()->update_properties(in_type->is_aryptr());
+ if (my_type == NULL) {
+ return Type::TOP; // Inconsistent properties
+ }
+ }
TypePtr::PTR in_ptr = in_type->ptr();
if (in_ptr == TypePtr::Null) {
result = in_type;
@@ -1313,7 +1313,7 @@ const TypePtr *Compile::flatten_alias_type( const TypePtr *tj ) const {
tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),tary,NULL,false,Type::Offset(offset), ta->field_offset());
}
// Initially all flattened array accesses share a single slice
- if (ta->elem()->isa_inlinetype() && ta->elem() != TypeInlineType::BOTTOM && _flattened_accesses_share_alias) {
+ if (ta->is_flat() && ta->elem() != TypeInlineType::BOTTOM && _flattened_accesses_share_alias) {
const TypeAry *tary = TypeAry::make(TypeInlineType::BOTTOM, ta->size());
tj = ta = TypeAryPtr::make(ptr,ta->const_oop(),tary,NULL,false,Type::Offset(offset), Type::Offset(Type::OffsetBot));
}
@@ -1342,15 +1342,15 @@ const TypePtr *Compile::flatten_alias_type( const TypePtr *tj ) const {
// No constant oop pointers (such as Strings); they alias with
// unknown strings.
assert(!is_known_inst, "not scalarizable allocation");
- tj = to = TypeInstPtr::make(TypePtr::BotPTR,to->klass(),false,0,Type::Offset(offset), to->klass()->flatten_array());
+ tj = to = TypeInstPtr::make(TypePtr::BotPTR,to->klass(),false,0,Type::Offset(offset));
}
} else if( is_known_inst ) {
tj = to; // Keep NotNull and klass_is_exact for instance type
} else if( ptr == TypePtr::NotNull || to->klass_is_exact() ) {
// During the 2nd round of IterGVN, NotNull castings are removed.
// Make sure the Bottom and NotNull variants alias the same.
// Also, make sure exact and non-exact variants alias the same.
- tj = to = TypeInstPtr::make(TypePtr::BotPTR,to->klass(),false,0,Type::Offset(offset), to->klass()->flatten_array());
+ tj = to = TypeInstPtr::make(TypePtr::BotPTR,to->klass(),false,0,Type::Offset(offset));
}
if (to->speculative() != NULL) {
tj = to = TypeInstPtr::make(to->ptr(),to->klass(),to->klass_is_exact(),to->const_oop(),Type::Offset(to->offset()), to->klass()->flatten_array(), to->instance_id());
@@ -1360,7 +1360,7 @@ const TypePtr *Compile::flatten_alias_type( const TypePtr *tj ) const {
// First handle header references such as a LoadKlassNode, even if the
// object's klass is unloaded at compile time (4965979).
if (!is_known_inst) { // Do it only for non-instance types
- tj = to = TypeInstPtr::make(TypePtr::BotPTR, env()->Object_klass(), false, NULL, Type::Offset(offset), false);
+ tj = to = TypeInstPtr::make(TypePtr::BotPTR, env()->Object_klass(), false, NULL, Type::Offset(offset));
}
} else if (offset < 0 || offset >= k->size_helper() * wordSize) {
// Static fields are in the space above the normal instance
@@ -1376,7 +1376,7 @@ const TypePtr *Compile::flatten_alias_type( const TypePtr *tj ) const {
if( is_known_inst ) {
tj = to = TypeInstPtr::make(to->ptr(), canonical_holder, true, NULL, Type::Offset(offset), canonical_holder->flatten_array(), to->instance_id());
} else {
- tj = to = TypeInstPtr::make(to->ptr(), canonical_holder, false, NULL, Type::Offset(offset), canonical_holder->flatten_array());
+ tj = to = TypeInstPtr::make(to->ptr(), canonical_holder, false, NULL, Type::Offset(offset));
}
}
}
@@ -1393,16 +1393,15 @@ const TypePtr *Compile::flatten_alias_type( const TypePtr *tj ) const {

tj = tk = TypeKlassPtr::make(TypePtr::NotNull,
TypeKlassPtr::OBJECT->klass(),
- Type::Offset(offset),
- false);
+ Type::Offset(offset));
}

ciKlass* klass = tk->klass();
if (klass != NULL && klass->is_obj_array_klass()) {
ciKlass* k = TypeAryPtr::OOPS->klass();
if( !k || !k->is_loaded() ) // Only fails for some -Xcomp runs
k = TypeInstPtr::BOTTOM->klass();
- tj = tk = TypeKlassPtr::make(TypePtr::NotNull, k, Type::Offset(offset), false);
+ tj = tk = TypeKlassPtr::make(TypePtr::NotNull, k, Type::Offset(offset));
}

// Check for precise loads from the primary supertype array and force them
@@ -1418,7 +1417,7 @@ const TypePtr *Compile::flatten_alias_type( const TypePtr *tj ) const {
offset < (int)(primary_supers_offset + Klass::primary_super_limit() * wordSize)) ||
offset == (int)in_bytes(Klass::secondary_super_cache_offset())) {
offset = in_bytes(Klass::secondary_super_cache_offset());
- tj = tk = TypeKlassPtr::make(TypePtr::NotNull, tk->klass(), Type::Offset(offset), tk->flat_array());
+ tj = tk = TypeKlassPtr::make(TypePtr::NotNull, tk->klass(), Type::Offset(offset));
}
}

@@ -1682,7 +1681,7 @@ Compile::AliasType* Compile::find_alias_type(const TypePtr* adr_type, bool no_cr
alias_type(idx)->set_field(field);
if (flat->isa_aryptr()) {
// Fields of flat arrays are rewritable although they are declared final
- assert(flat->is_aryptr()->elem()->isa_inlinetype(), "must be a flat array");
+ assert(flat->is_aryptr()->is_flat(), "must be a flat array");
alias_type(idx)->set_rewritable(true);
}
}
@@ -2001,7 +2000,7 @@ void Compile::adjust_flattened_array_access_aliases(PhaseIterGVN& igvn) {
AliasCacheEntry* ace = &_alias_cache[i];
if (ace->_adr_type != NULL &&
ace->_adr_type->isa_aryptr() &&
- ace->_adr_type->is_aryptr()->elem()->isa_inlinetype()) {
+ ace->_adr_type->is_aryptr()->is_flat()) {
ace->_adr_type = NULL;
ace->_index = (i != 0) ? 0 : AliasIdxTop; // Make sure the NULL adr_type resolves to AliasIdxTop
}
@@ -2124,7 +2123,7 @@ void Compile::adjust_flattened_array_access_aliases(PhaseIterGVN& igvn) {
Node* r = m->in(0);
for (uint j = (uint)start_alias; j <= (uint)stop_alias; j++) {
const Type* adr_type = get_adr_type(j);
- if (!adr_type->isa_aryptr() || !adr_type->is_aryptr()->elem()->isa_inlinetype()) {
+ if (!adr_type->isa_aryptr() || !adr_type->is_aryptr()->is_flat()) {
continue;
}
Node* phi = new PhiNode(r, Type::MEMORY, get_adr_type(j));
@@ -2154,7 +2153,7 @@ void Compile::adjust_flattened_array_access_aliases(PhaseIterGVN& igvn) {
igvn.replace_input_of(m->in(0), TypeFunc::Control, top());
for (uint j = (uint)start_alias; j <= (uint)stop_alias; j++) {
const Type* adr_type = get_adr_type(j);
- if (!adr_type->isa_aryptr() || !adr_type->is_aryptr()->elem()->isa_inlinetype()) {
+ if (!adr_type->isa_aryptr() || !adr_type->is_aryptr()->is_flat()) {
continue;
}
MemBarNode* mb = new MemBarCPUOrderNode(this, j, NULL);
@@ -2197,7 +2196,7 @@ void Compile::adjust_flattened_array_access_aliases(PhaseIterGVN& igvn) {
igvn.rehash_node_delayed(current);
for (uint j = (uint)start_alias; j <= (uint)stop_alias; j++) {
const Type* adr_type = get_adr_type(j);
- if (!adr_type->isa_aryptr() || !adr_type->is_aryptr()->elem()->isa_inlinetype()) {
+ if (!adr_type->isa_aryptr() || !adr_type->is_aryptr()->is_flat()) {
continue;
}
current->set_memory_at(j, mm);
@@ -994,7 +994,7 @@ void ConnectionGraph::process_call_arguments(CallNode *call) {
(aat->isa_oopptr()->klass() == NULL || aat->isa_instptr() ||
(aat->isa_aryptr() && aat->isa_aryptr()->klass()->is_obj_array_klass()) ||
(aat->isa_aryptr() && aat->isa_aryptr()->elem() != NULL &&
- aat->isa_aryptr()->elem()->isa_inlinetype() &&
+ aat->isa_aryptr()->is_flat() &&
aat->isa_aryptr()->elem()->inline_klass()->contains_oops()));
if (i == TypeFunc::Parms) {
src_has_oops = arg_has_oops;
@@ -2498,6 +2498,11 @@ bool ConnectionGraph::split_AddP(Node *addp, Node *base) {
// In the case of a flattened inline type array, each field has its
// own slice so we need to keep track of the field being accessed.
tinst = tinst->is_aryptr()->with_field_offset(t->is_aryptr()->field_offset().get());
+ // Keep array properties (not flat/null-free)
+ tinst = tinst->is_aryptr()->update_properties(t->is_aryptr());
+ if (tinst == NULL) {
+ return false; // Skip dead path with inconsistent properties
+ }
}

// Do NOT remove the next line: ensure a new alias index is allocated
@@ -3179,6 +3184,13 @@ void ConnectionGraph::split_unique_types(GrowableArray<Node *> &alloc_worklist,
tn_t = tn_type->isa_oopptr();
}
if (tn_t != NULL && tinst->klass()->is_subtype_of(tn_t->klass())) {
+ if (tn_t->isa_aryptr()) {
+ // Keep array properties (not flat/null-free)
+ tinst = tinst->is_aryptr()->update_properties(tn_t->is_aryptr());
+ if (tinst == NULL) {
+ continue; // Skip dead path with inconsistent properties
+ }
+ }
if (tn_type->isa_narrowoop()) {
tn_type = tinst->make_narrowoop();
} else {
@@ -3805,7 +3805,7 @@ Node* GraphKit::get_layout_helper(Node* klass_node, jint& constant_value) {
bool can_be_flattened = false;
if (UseFlatArray && klass->is_obj_array_klass()) {
ciKlass* elem = klass->as_obj_array_klass()->element_klass();
- can_be_flattened = elem->can_be_inline_klass() && (!elem->is_inlinetype() || elem->as_inline_klass()->flatten_array());
+ can_be_flattened = elem->can_be_inline_klass() && (!elem->is_inlinetype() || elem->flatten_array());
}
if (xklass || (klass->is_array_klass() && !can_be_flattened)) {
jint lhelper = klass->layout_helper();
@@ -4439,7 +4439,7 @@ Node* GraphKit::load_String_length(Node* str, bool set_ctrl) {
Node* GraphKit::load_String_value(Node* str, bool set_ctrl) {
int value_offset = java_lang_String::value_offset();
const TypeInstPtr* string_type = TypeInstPtr::make(TypePtr::NotNull, C->env()->String_klass(),
- false, NULL, Type::Offset(0), false);
+ false, NULL, Type::Offset(0));
const TypePtr* value_field_type = string_type->add_offset(value_offset);
const TypeAryPtr* value_type = TypeAryPtr::make(TypePtr::NotNull,
TypeAry::make(TypeInt::BYTE, TypeInt::POS, false, true, true),
@@ -4456,7 +4456,7 @@ Node* GraphKit::load_String_coder(Node* str, bool set_ctrl) {
}
int coder_offset = java_lang_String::coder_offset();
const TypeInstPtr* string_type = TypeInstPtr::make(TypePtr::NotNull, C->env()->String_klass(),
- false, NULL, Type::Offset(0), false);
+ false, NULL, Type::Offset(0));
const TypePtr* coder_field_type = string_type->add_offset(coder_offset);

Node* p = basic_plus_adr(str, str, coder_offset);
@@ -4468,7 +4468,7 @@ Node* GraphKit::load_String_coder(Node* str, bool set_ctrl) {
void GraphKit::store_String_value(Node* str, Node* value) {
int value_offset = java_lang_String::value_offset();
const TypeInstPtr* string_type = TypeInstPtr::make(TypePtr::NotNull, C->env()->String_klass(),
- false, NULL, Type::Offset(0), false);
+ false, NULL, Type::Offset(0));
const TypePtr* value_field_type = string_type->add_offset(value_offset);

access_store_at(str, basic_plus_adr(str, value_offset), value_field_type,
@@ -4478,7 +4478,7 @@ void GraphKit::store_String_value(Node* str, Node* value) {
void GraphKit::store_String_coder(Node* str, Node* value) {
int coder_offset = java_lang_String::coder_offset();
const TypeInstPtr* string_type = TypeInstPtr::make(TypePtr::NotNull, C->env()->String_klass(),
- false, NULL, Type::Offset(0), false);
+ false, NULL, Type::Offset(0));
const TypePtr* coder_field_type = string_type->add_offset(coder_offset);

access_store_at(str, basic_plus_adr(str, coder_offset), coder_field_type,
@@ -99,7 +99,6 @@ InlineTypeBaseNode* InlineTypeBaseNode::merge_with(PhaseGVN* gvn, const InlineTy
val1->as_InlineTypeBase()->merge_with(gvn, val2->as_InlineTypeBase(), pnum, transform);
} else {
assert(val1->is_Phi(), "must be a phi node");
- assert(!val2->is_InlineType(), "inconsistent merge values");
val1->set_req(pnum, val2);
}
if (transform) {
@@ -3893,19 +3893,6 @@ bool LibraryCallKit::inline_array_copyOf(bool is_copyOfRange) {
Node* end = is_copyOfRange? argument(2): argument(1);
Node* array_type_mirror = is_copyOfRange? argument(3): argument(2);

- const TypeAryPtr* original_t = _gvn.type(original)->isa_aryptr();
- const TypeInstPtr* mirror_t = _gvn.type(array_type_mirror)->isa_instptr();
- if (EnableValhalla && UseFlatArray &&
- (original_t == NULL || mirror_t == NULL ||
- (mirror_t->java_mirror_type() == NULL &&
- (original_t->elem()->isa_inlinetype() ||
- (original_t->elem()->make_oopptr() != NULL &&
- original_t->elem()->make_oopptr()->can_be_inline_type()))))) {
- // We need to know statically if the copy is to a flattened array
- // or not but can't tell.
- return false;
- }
-
Node* newcopy = NULL;

// Set the original stack and the reexecute bit for the interpreter to reexecute
@@ -3937,7 +3924,7 @@ bool LibraryCallKit::inline_array_copyOf(bool is_copyOfRange) {
if (not_objArray != NULL) {
// Improve the klass node's type from the new optimistic assumption:
ciKlass* ak = ciArrayKlass::make(env()->Object_klass());
- const Type* akls = TypeKlassPtr::make(TypePtr::NotNull, ak, Type::Offset(0), false);
+ const Type* akls = TypeKlassPtr::make(TypePtr::NotNull, ak, Type::Offset(0));
Node* cast = new CastPPNode(klass_node, akls);
cast->init_req(0, control());
klass_node = _gvn.transform(cast);
@@ -3968,16 +3955,17 @@ bool LibraryCallKit::inline_array_copyOf(bool is_copyOfRange) {
if (UseFlatArray) {
// Either both or neither new array klass and original array
// klass must be flattened
+ const TypeAryPtr* t_original = _gvn.type(original)->isa_aryptr();
Node* is_flat = generate_flatArray_guard(klass_node, NULL);
- if (!original_t->is_not_flat()) {
+ if (t_original == NULL || !t_original->is_not_flat()) {
generate_flatArray_guard(original_kls, bailout);
}
if (is_flat != NULL) {
RegionNode* r = new RegionNode(2);
record_for_igvn(r);
r->init_req(1, control());
set_control(is_flat);
- if (!original_t->is_not_flat()) {
+ if (t_original == NULL || !t_original->is_not_flat()) {
generate_flatArray_guard(original_kls, r);
}
bailout->add_req(control());
@@ -5065,9 +5053,13 @@ bool LibraryCallKit::inline_arraycopy() {
// If we can have both exact types, emit the missing guards
if (could_have_src && !src_spec) {
src = maybe_cast_profiled_obj(src, src_k, true);
+ src_type = _gvn.type(src);
+ top_src = src_type->isa_aryptr();
}
if (could_have_dest && !dest_spec) {
dest = maybe_cast_profiled_obj(dest, dest_k, true);
+ dest_type = _gvn.type(dest);
+ top_dest = dest_type->isa_aryptr();
}
}
}
@@ -5143,13 +5135,16 @@ bool LibraryCallKit::inline_arraycopy() {
src_type = _gvn.type(src);
top_src = src_type->isa_aryptr();

- if (top_dest != NULL && !top_dest->elem()->isa_inlinetype() && !top_dest->is_not_flat()) {
+ if (top_dest != NULL && !top_dest->is_flat() && !top_dest->is_not_flat()) {
generate_flatArray_guard(dest_klass, slow_region);
+ top_dest = top_dest->cast_to_not_flat();
+ dest = _gvn.transform(new CheckCastPPNode(control(), dest, top_dest));
}

- if (top_src != NULL && !top_src->elem()->isa_inlinetype() && !top_src->is_not_flat()) {
+ if (top_src != NULL && !top_src->is_flat() && !top_src->is_not_flat()) {
Node* src_klass = load_object_klass(src);
generate_flatArray_guard(src_klass, slow_region);
+ top_src = top_src->cast_to_not_flat();
+ src = _gvn.transform(new CheckCastPPNode(control(), src, top_src));
}

{
@@ -1414,11 +1414,11 @@ void PhaseMacroExpand::expand_arraycopy_node(ArrayCopyNode *ac) {
// (9) each element of an oop array must be assignable
// The generate_arraycopy subroutine checks this.

- if (dest_elem == T_OBJECT && !top_dest->elem()->isa_inlinetype() && !top_dest->is_not_flat()) {
+ if (dest_elem == T_OBJECT && !top_dest->is_flat() && !top_dest->is_not_flat()) {
generate_flattened_array_guard(&ctrl, merge_mem, dest, slow_region);
}

- if (src_elem == T_OBJECT && !top_src->elem()->isa_inlinetype() && !top_src->is_not_flat()) {
+ if (src_elem == T_OBJECT && !top_src->is_flat() && !top_src->is_not_flat()) {
generate_flattened_array_guard(&ctrl, merge_mem, src, slow_region);
}
}
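
For context, the array shape these intrinsic changes deal with looks roughly like the sketch below: a flattened inline-type array copied through Arrays.copyOf and System.arraycopy, the paths handled by LibraryCallKit::inline_array_copyOf and LibraryCallKit::inline_arraycopy above. This is a hypothetical illustration written against the lworld prototype's "inline class" syntax of the time, not the regression test added by this commit; the class name, field values, and iteration count are made up.

import java.util.Arrays;

public class FlatArrayCopySketch {
    // Hypothetical inline class; with flat arrays enabled (the UseFlatArray flag seen above),
    // Point[] may be laid out flattened, which is what the guards in the diff distinguish.
    static inline class Point {
        int x, y;
        Point(int x, int y) { this.x = x; this.y = y; }
    }

    public static void main(String[] args) {
        Point[] src = new Point[100];
        for (int i = 0; i < src.length; i++) {
            src[i] = new Point(i, -i);
        }
        // Warm up so C2 compiles the copy paths touched by this change.
        for (int iter = 0; iter < 20_000; iter++) {
            Point[] copy = Arrays.copyOf(src, src.length);   // exercises inline_array_copyOf
            Object[] dst = new Object[src.length];
            System.arraycopy(src, 0, dst, 0, src.length);    // exercises inline_arraycopy
            if (copy[1].x != 1 || ((Point) dst[1]).y != -1) {
                throw new RuntimeException("copy mismatch");
            }
        }
    }
}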
