Skip to content
Permalink
Browse files
8236522: NonTearable marker interface for inline classes
  • Loading branch information
John R Rose committed Mar 31, 2020
1 parent 78b0ffd commit cab7a5b928940dc5d0e616f15439ad6d546120ed
Show file tree
Hide file tree
Showing 23 changed files with 842 additions and 36 deletions.
@@ -83,6 +83,7 @@
#include "utilities/macros.hpp"
#include "utilities/ostream.hpp"
#include "utilities/resourceHash.hpp"
#include "utilities/stringUtils.hpp"
#include "utilities/utf8.hpp"

#if INCLUDE_CDS
@@ -943,10 +944,17 @@ static bool put_after_lookup(const Symbol* name, const Symbol* sig, NameSigHash*
}

// Side-effects: populates the _local_interfaces field
void ClassFileParser::parse_interfaces(const ClassFileStream* const stream,
const int itfs_len,
ConstantPool* const cp,
void ClassFileParser::parse_interfaces(const ClassFileStream* stream,
int itfs_len,
ConstantPool* cp,
bool* const has_nonstatic_concrete_methods,
// FIXME: lots of these functions
// declare their parameters as const,
// which adds only noise to the code.
// Remove the spurious const modifiers.
// Many are of the form "const int x"
// or "T* const x".
bool* const is_declared_atomic,
TRAPS) {
assert(stream != NULL, "invariant");
assert(cp != NULL, "invariant");
@@ -994,10 +1002,14 @@ void ClassFileParser::parse_interfaces(const ClassFileStream* const stream,
interf->class_in_module_of_loader()));
}

if (InstanceKlass::cast(interf)->has_nonstatic_concrete_methods()) {
InstanceKlass* ik = InstanceKlass::cast(interf);
if (ik->has_nonstatic_concrete_methods()) {
*has_nonstatic_concrete_methods = true;
}
_local_interfaces->at_put(index, InstanceKlass::cast(interf));
if (ik->is_declared_atomic()) {
*is_declared_atomic = true;
}
_local_interfaces->at_put(index, ik);
}

if (!_need_verify || itfs_len <= 1) {
@@ -4346,6 +4358,7 @@ void ClassFileParser::layout_fields(ConstantPool* cp,
Klass** nonstatic_value_type_klasses = NULL;
unsigned int value_type_oop_map_count = 0;
int not_flattened_value_types = 0;
int not_atomic_value_types = 0;

int max_nonstatic_value_type = fac->count[NONSTATIC_FLATTENABLE] + 1;

@@ -4380,7 +4393,16 @@ void ClassFileParser::layout_fields(ConstantPool* cp,
}
ValueKlass* vk = ValueKlass::cast(klass);
// Conditions to apply flattening or not should be defined in a single place
if ((ValueFieldMaxFlatSize < 0) || (vk->size_helper() * HeapWordSize) <= ValueFieldMaxFlatSize) {
bool too_big_to_flatten = (ValueFieldMaxFlatSize >= 0 &&
(vk->size_helper() * HeapWordSize) > ValueFieldMaxFlatSize);
bool too_atomic_to_flatten = vk->is_declared_atomic();
bool too_volatile_to_flatten = fs.access_flags().is_volatile();
if (vk->is_naturally_atomic()) {
too_atomic_to_flatten = false;
//too_volatile_to_flatten = false; //FIXME
// volatile fields are currently never flattened, this could change in the future
}
if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten)) {
nonstatic_value_type_indexes[nonstatic_value_type_count] = fs.index();
nonstatic_value_type_klasses[nonstatic_value_type_count] = klass;
nonstatic_value_type_count++;
@@ -4390,6 +4412,9 @@ void ClassFileParser::layout_fields(ConstantPool* cp,
value_type_oop_map_count += vklass->nonstatic_oop_map_count();
}
fs.set_flattened(true);
if (!vk->is_atomic()) { // flat and non-atomic: take note
not_atomic_value_types++;
}
} else {
not_flattened_value_types++;
fs.set_flattened(false);
@@ -4848,6 +4873,19 @@ void ClassFileParser::layout_fields(ConstantPool* cp,
info->_static_field_size = static_field_size;
info->_nonstatic_field_size = nonstatic_field_size;
info->_has_nonstatic_fields = has_nonstatic_fields;

// A value type is naturally atomic if it has just one field, and
// that field is simple enough.
info->_is_naturally_atomic = (is_value_type() &&
!super_has_nonstatic_fields &&
(nonstatic_fields_count <= 1) &&
(not_atomic_value_types == 0) &&
(nonstatic_contended_count == 0));
// This may be too restrictive, since if all the fields fit in 64
// bits we could make the decision to align instances of this class
// to 64-bit boundaries, and load and store them as single words.
// And on machines which supported larger atomics we could similarly
// allow larger values to be atomic, if properly aligned.
}

void ClassFileParser::set_precomputed_flags(InstanceKlass* ik) {
@@ -5983,6 +6021,7 @@ static void check_methods_for_intrinsics(const InstanceKlass* ik,
}
}

// Called from a factory method in KlassFactory, not from this file.
InstanceKlass* ClassFileParser::create_instance_klass(bool changed_by_loadhook, TRAPS) {
if (_klass != NULL) {
return _klass;
@@ -6052,6 +6091,9 @@ void ClassFileParser::fill_instance_klass(InstanceKlass* ik, bool changed_by_loa
// Not yet: supers are done below to support the new subtype-checking fields
ik->set_nonstatic_field_size(_field_info->_nonstatic_field_size);
ik->set_has_nonstatic_fields(_field_info->_has_nonstatic_fields);
if (_field_info->_is_naturally_atomic && ik->is_value()) {
ik->set_is_naturally_atomic();
}
if (_is_empty_value) {
ik->set_is_empty_value();
}
@@ -6101,6 +6143,9 @@ void ClassFileParser::fill_instance_klass(InstanceKlass* ik, bool changed_by_loa
ik->set_major_version(_major_version);
ik->set_has_nonstatic_concrete_methods(_has_nonstatic_concrete_methods);
ik->set_declares_nonstatic_concrete_methods(_declares_nonstatic_concrete_methods);
if (_is_declared_atomic) {
ik->set_is_declared_atomic();
}

if (_unsafe_anonymous_host != NULL) {
assert (ik->is_unsafe_anonymous(), "should be the same");
@@ -6433,6 +6478,8 @@ ClassFileParser::ClassFileParser(ClassFileStream* stream,
_has_contended_fields(false),
_has_flattenable_fields(false),
_is_empty_value(false),
_is_naturally_atomic(false),
_is_declared_atomic(false),
_has_finalizer(false),
_has_empty_finalizer(false),
_has_vanilla_constructor(false),
@@ -6772,15 +6819,16 @@ void ClassFileParser::parse_stream(const ClassFileStream* const stream,
_itfs_len,
cp,
&_has_nonstatic_concrete_methods,
&_is_declared_atomic,
CHECK);

assert(_local_interfaces != NULL, "invariant");

// Fields (offsets are filled in later)
_fac = new FieldAllocationCount();
parse_fields(stream,
_access_flags.is_interface(),
_access_flags.is_value_type(),
is_interface(),
is_value_type(),
_fac,
cp,
cp_size,
@@ -6792,8 +6840,8 @@ void ClassFileParser::parse_stream(const ClassFileStream* const stream,
// Methods
AccessFlags promoted_flags;
parse_methods(stream,
_access_flags.is_interface(),
_access_flags.is_value_type(),
is_interface(),
is_value_type(),
&promoted_flags,
&_has_final_method,
&_declares_nonstatic_concrete_methods,
@@ -6842,7 +6890,7 @@ void ClassFileParser::post_process_parsed_stream(const ClassFileStream* const st
// We check super class after class file is parsed and format is checked
if (_super_class_index > 0 && NULL ==_super_klass) {
Symbol* const super_class_name = cp->klass_name_at(_super_class_index);
if (_access_flags.is_interface()) {
if (is_interface()) {
// Before attempting to resolve the superclass, check for class format
// errors not checked yet.
guarantee_property(super_class_name == vmSymbols::java_lang_Object(),
@@ -6863,6 +6911,9 @@ void ClassFileParser::post_process_parsed_stream(const ClassFileStream* const st
if (_super_klass->has_nonstatic_concrete_methods()) {
_has_nonstatic_concrete_methods = true;
}
if (_super_klass->is_declared_atomic()) {
_is_declared_atomic = true;
}

if (_super_klass->is_interface()) {
ResourceMark rm(THREAD);
@@ -6889,6 +6940,18 @@ void ClassFileParser::post_process_parsed_stream(const ClassFileStream* const st
}
}

if (_class_name == vmSymbols::java_lang_NonTearable() && _loader_data->class_loader() == NULL) {
// This is the original source of this condition.
// It propagates by inheritance, as if testing "instanceof NonTearable".
_is_declared_atomic = true;
} else if (*ForceNonTearable != '\0') {
// Allow a command line switch to force the same atomicity property:
const char* class_name_str = _class_name->as_C_string();
if (StringUtils::class_list_match(ForceNonTearable, class_name_str)) {
_is_declared_atomic = true;
}
}

// Compute the transitive list of all unique interfaces implemented by this class
_transitive_interfaces =
compute_transitive_interfaces(_super_klass,
@@ -6917,7 +6980,7 @@ void ClassFileParser::post_process_parsed_stream(const ClassFileStream* const st
CHECK);

// Size of Java itable (in words)
_itable_size = _access_flags.is_interface() ? 0 :
_itable_size = is_interface() ? 0 :
klassItable::compute_itable_size(_transitive_interfaces);

assert(_fac != NULL, "invariant");
@@ -73,6 +73,7 @@ class FieldLayoutInfo : public ResourceObj {
int _nonstatic_field_size;
int _static_field_size;
bool _has_nonstatic_fields;
bool _is_naturally_atomic;
};

// Parser for .class files
@@ -199,6 +200,8 @@ class ClassFileParser {

bool _has_flattenable_fields;
bool _is_empty_value;
bool _is_naturally_atomic;
bool _is_declared_atomic;

// precomputed flags
bool _has_finalizer;
@@ -246,6 +249,7 @@ class ClassFileParser {
const int itfs_len,
ConstantPool* const cp,
bool* has_nonstatic_concrete_methods,
bool* is_declared_atomic,
TRAPS);

const InstanceKlass* parse_super_class(ConstantPool* const cp,
@@ -539,7 +539,10 @@ FieldLayoutBuilder::FieldLayoutBuilder(const Symbol* classname, const InstanceKl
_has_nonstatic_fields(false),
_is_contended(is_contended),
_is_value_type(is_value_type),
_has_flattening_information(is_value_type) {}
_has_flattening_information(is_value_type),
_has_nonatomic_values(false),
_atomic_field_count(0)
{}

FieldGroup* FieldLayoutBuilder::get_or_create_contended_group(int g) {
assert(g > 0, "must only be called for named contended groups");
@@ -579,6 +582,7 @@ void FieldLayoutBuilder::regular_field_sorting() {
group = _static_fields;
} else {
_has_nonstatic_fields = true;
_atomic_field_count++; // we might decrement this
if (fs.is_contended()) {
int g = fs.contended_group();
if (g == 0) {
@@ -626,14 +630,23 @@ void FieldLayoutBuilder::regular_field_sorting() {
_protection_domain, true, THREAD);
assert(klass != NULL, "Sanity check");
ValueKlass* vk = ValueKlass::cast(klass);
bool has_flattenable_size = (ValueFieldMaxFlatSize < 0)
|| (vk->size_helper() * HeapWordSize) <= ValueFieldMaxFlatSize;
// volatile fields are currently never flattened, this could change in the future
bool flattened = !fs.access_flags().is_volatile() && has_flattenable_size;
if (flattened) {
bool too_big_to_flatten = (ValueFieldMaxFlatSize >= 0 &&
(vk->size_helper() * HeapWordSize) > ValueFieldMaxFlatSize);
bool too_atomic_to_flatten = vk->is_declared_atomic();
bool too_volatile_to_flatten = fs.access_flags().is_volatile();
if (vk->is_naturally_atomic()) {
too_atomic_to_flatten = false;
//too_volatile_to_flatten = false; //FIXME
// volatile fields are currently never flattened, this could change in the future
}
if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten)) {
group->add_flattened_field(fs, vk);
_nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
fs.set_flattened(true);
if (!vk->is_atomic()) { // flat and non-atomic: take note
_has_nonatomic_values = true;
_atomic_field_count--; // every other field is atomic but this one
}
} else {
_nonstatic_oopmap_count++;
group->add_oop_field(fs);
@@ -674,6 +687,7 @@ void FieldLayoutBuilder::inline_class_field_sorting(TRAPS) {
group = _static_fields;
} else {
_has_nonstatic_fields = true;
_atomic_field_count++; // we might decrement this
group = _root_group;
}
assert(group != NULL, "invariant");
@@ -716,13 +730,24 @@ void FieldLayoutBuilder::inline_class_field_sorting(TRAPS) {
_protection_domain, true, CHECK);
assert(klass != NULL, "Sanity check");
ValueKlass* vk = ValueKlass::cast(klass);
bool flattened = (ValueFieldMaxFlatSize < 0)
|| (vk->size_helper() * HeapWordSize) <= ValueFieldMaxFlatSize;
if (flattened) {
bool too_big_to_flatten = (ValueFieldMaxFlatSize >= 0 &&
(vk->size_helper() * HeapWordSize) > ValueFieldMaxFlatSize);
bool too_atomic_to_flatten = vk->is_declared_atomic();
bool too_volatile_to_flatten = fs.access_flags().is_volatile();
if (vk->is_naturally_atomic()) {
too_atomic_to_flatten = false;
//too_volatile_to_flatten = false; //FIXME
// volatile fields are currently never flattened, this could change in the future
}
if (!(too_big_to_flatten | too_atomic_to_flatten | too_volatile_to_flatten)) {
group->add_flattened_field(fs, vk);
_nonstatic_oopmap_count += vk->nonstatic_oop_map_count();
field_alignment = vk->get_alignment();
fs.set_flattened(true);
if (!vk->is_atomic()) { // flat and non-atomic: take note
_has_nonatomic_values = true;
_atomic_field_count--; // every other field is atomic but this one
}
} else {
_nonstatic_oopmap_count++;
field_alignment = type2aelembytes(T_OBJECT);
@@ -983,6 +1008,19 @@ void FieldLayoutBuilder::epilogue() {
_info->_nonstatic_field_size = (nonstatic_field_end - instanceOopDesc::base_offset_in_bytes()) / heapOopSize;
_info->_has_nonstatic_fields = _has_nonstatic_fields;

// A value type is naturally atomic if it has just one field, and
// that field is simple enough.
_info->_is_naturally_atomic = (_is_value_type &&
(_atomic_field_count <= 1) &&
!_has_nonatomic_values &&
_contended_groups.is_empty());
// This may be too restrictive, since if all the fields fit in 64
// bits we could make the decision to align instances of this class
// to 64-bit boundaries, and load and store them as single words.
// And on machines which supported larger atomics we could similarly
// allow larger values to be atomic, if properly aligned.


if (PrintFieldLayout) {
ResourceMark rm;
tty->print_cr("Layout of class %s", _classname->as_C_string());
@@ -256,6 +256,8 @@ class FieldLayoutBuilder : public ResourceObj {
bool _is_contended;
bool _is_value_type;
bool _has_flattening_information;
bool _has_nonatomic_values;
int _atomic_field_count;

FieldGroup* get_or_create_contended_group(int g);

@@ -64,6 +64,7 @@
template(java_lang_Thread, "java/lang/Thread") \
template(java_lang_ThreadGroup, "java/lang/ThreadGroup") \
template(java_lang_Cloneable, "java/lang/Cloneable") \
template(java_lang_NonTearable, "java/lang/NonTearable") \
template(java_lang_Throwable, "java/lang/Throwable") \
template(java_lang_ClassLoader, "java/lang/ClassLoader") \
template(java_lang_ClassLoader_NativeLibrary, "java/lang/ClassLoader\x024NativeLibrary") \
@@ -69,6 +69,10 @@ class ArrayKlass: public Klass {
// Presented with an ArrayKlass, which storage_properties should be encoded into arrayOop
virtual ArrayStorageProperties storage_properties() { return ArrayStorageProperties::empty; }

// Are loads and stores to this concrete array type atomic?
// Note that Object[] is naturally atomic, but its subtypes may not be.
virtual bool element_access_is_atomic() { return true; }

// Testing operation
DEBUG_ONLY(bool is_array_klass_slow() const { return true; })

0 comments on commit cab7a5b

Please sign in to comment.