[vm/concurrency] Ensure instance size in class table is either 0 or the correct size

Until now it was possible to register a class with a default size
(16 bytes on 64-bit) and later change the size for that cid.

This CL ensures that the size information in the class table for a
given cid is either 0 or the final instance size, and adds an ASSERT
to enforce this.
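
To make the new contract concrete, here is a minimal standalone sketch of the invariant. The types and the explicit size parameter are simplified stand-ins for illustration only; the real ClassTable::SetAt derives the size via Class::instance_size(raw_cls):

```cpp
#include <cassert>
#include <cstdint>
#include <vector>

// Each entry is either unregistered (cls == nullptr, size == 0), registered
// with an unknown size (size == 0), or registered with its final size.
struct ClassAndSizeSketch {
  void* cls = nullptr;
  intptr_t size = 0;
};

class SketchTable {
 public:
  explicit SketchTable(size_t num_cids) : table_(num_cids) {}

  void SetAt(size_t index, void* cls, intptr_t new_size) {
    if (cls == nullptr) {
      table_[index] = ClassAndSizeSketch{nullptr, 0};
    } else {
      // The invariant this CL enforces: a cid's size may go from 0 to its
      // final value, but never from one non-zero value to another.
      const intptr_t old_size = table_[index].size;
      assert(old_size == 0 || old_size == new_size);
      table_[index] = ClassAndSizeSketch{cls, new_size};
    }
  }

 private:
  std::vector<ClassAndSizeSketch> table_;
};

int main() {
  int dummy_class;  // stand-in for a class object
  SketchTable table(10);
  table.SetAt(5, &dummy_class, 0);   // registered, size not yet known
  table.SetAt(5, &dummy_class, 32);  // 0 -> final size: allowed
  table.SetAt(5, &dummy_class, 32);  // re-setting the same size: allowed
  // table.SetAt(5, &dummy_class, 48);  // would trip the assert (32 -> 48)
  return 0;
}
```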

Issue dart-lang/sdk#36097

Change-Id: I94c61c6a1566c13dec7b9eb80c9ae0dadf0e6b6a
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/115861
Commit-Queue: Martin Kustermann <kustermann@google.com>
Reviewed-by: Ryan Macnak <rmacnak@google.com>
mkustermann authored and commit-bot@chromium.org committed Sep 6, 2019
1 parent eb60f05 commit f5c0e2e
Showing 6 changed files with 268 additions and 321 deletions.
7 changes: 6 additions & 1 deletion runtime/vm/class_table.cc
@@ -253,7 +253,12 @@ void ClassTable::SetAt(intptr_t index, RawClass* raw_cls) {
   if (raw_cls == NULL) {
     table_[index] = ClassAndSize(raw_cls, 0);
   } else {
-    table_[index] = ClassAndSize(raw_cls, Class::instance_size(raw_cls));
+    // Ensure we never change size for a given cid from one non-zero size to
+    // another non-zero size.
+    const intptr_t old_size = table_[index].size_;
+    const intptr_t new_size = Class::instance_size(raw_cls);
+    ASSERT(old_size == 0 || old_size == new_size);
+    table_[index] = ClassAndSize(raw_cls, new_size);
   }
 }

41 changes: 41 additions & 0 deletions runtime/vm/class_table.h
@@ -193,6 +193,47 @@ class ClassTable {
   explicit ClassTable(ClassTable* original);
   ~ClassTable();
 
+  void CopyBeforeHotReload(ClassAndSize** copy, intptr_t* copy_num_cids) {
+    // The [IsolateReloadContext] will need to maintain a copy of the old class
+    // table until instances have been morphed.
+    const intptr_t bytes = sizeof(ClassAndSize) * NumCids();
+    *copy_num_cids = NumCids();
+    *copy = static_cast<ClassAndSize*>(malloc(bytes));
+    memmove(*copy, table_, bytes);
+  }
+
+  void ResetBeforeHotReload() {
+    // The [IsolateReloadContext] is now the source of truth for GC.
+    //
+    // We cannot clear out the class pointers, though, because a hot-reload
+    // contains only a diff: if e.g. a class included in the hot-reload has a
+    // superclass not included in the diff, the VM will look in this class
+    // table to find the superclass (e.g. `cls.SuperClass` will bring us
+    // here).
+    for (intptr_t i = 0; i < top_; ++i) {
+      table_[i].size_ = 0;
+    }
+  }
+
+  void ResetAfterHotReload(ClassAndSize* old_table,
+                           intptr_t num_old_cids,
+                           bool is_rollback) {
+    // The [IsolateReloadContext] is no longer the source of truth for GC after
+    // we return, so we restore size information for all classes.
+    if (is_rollback) {
+      SetNumCids(num_old_cids);
+      memmove(table_, old_table, num_old_cids * sizeof(ClassAndSize));
+    } else {
+      CopySizesFromClassObjects();
+    }
+
+    // Can't free this table immediately as another thread (e.g., concurrent
+    // marker or sweeper) may be between loading the table pointer and loading
+    // the table element. The table will be freed at the next major GC or
+    // isolate shutdown.
+    AddOldTable(old_table);
+  }
+
   // Thread-safe.
   RawClass* At(intptr_t index) const {
     ASSERT(IsValidIndex(index));
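
Taken together, the three new hooks define a small protocol around a hot reload. Below is a compact pseudo-driver sketching the intended sequence; `PerformReload` and `reload_succeeded` are invented names for illustration, the real caller is `IsolateReloadContext`, and this fragment will not compile outside the VM tree:

```cpp
void PerformReload(ClassTable* class_table, bool reload_succeeded) {
  ClassAndSize* saved_table = nullptr;
  intptr_t saved_num_cids = 0;

  // 1. Snapshot the old table; the snapshot becomes the GC's source of
  //    truth while the reload is in flight.
  class_table->CopyBeforeHotReload(&saved_table, &saved_num_cids);

  // 2. Zero the size entries in the live table, so every size written
  //    during the reload is a 0 -> final-size transition (per the new
  //    ASSERT in ClassTable::SetAt).
  class_table->ResetBeforeHotReload();

  // ... apply the reload diff, morph instances ...

  // 3. On rollback, restore the old entries wholesale; on success, refresh
  //    sizes from the (possibly changed) class objects. Either way the
  //    snapshot stays alive until the next major GC or isolate shutdown.
  class_table->ResetAfterHotReload(saved_table, saved_num_cids,
                                   /*is_rollback=*/!reload_succeeded);
}
```
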
174 changes: 21 additions & 153 deletions runtime/vm/isolate_reload.cc
@@ -972,28 +972,21 @@ void IsolateReloadContext::CheckpointClasses() {
   // Copy the size of the class table.
   saved_num_cids_ = I->class_table()->NumCids();
 
-  // Copy of the class table.
-  ClassAndSize* local_saved_class_table = reinterpret_cast<ClassAndSize*>(
-      malloc(sizeof(ClassAndSize) * saved_num_cids_));
+  ClassAndSize* saved_class_table = nullptr;
+  class_table->CopyBeforeHotReload(&saved_class_table, &saved_num_cids_);
 
   // Copy classes into saved_class_table_ first. Make sure there are no
   // safepoints until saved_class_table_ is filled up and saved so class raw
   // pointers in saved_class_table_ are properly visited by GC.
   {
     NoSafepointScope no_safepoint_scope(Thread::Current());
 
-    for (intptr_t i = 0; i < saved_num_cids_; i++) {
-      if (class_table->IsValidIndex(i) && class_table->HasValidClassAt(i)) {
-        // Copy the class into the saved class table.
-        local_saved_class_table[i] = class_table->PairAt(i);
-      } else {
-        // No class at this index, mark it as NULL.
-        local_saved_class_table[i] = ClassAndSize(NULL);
-      }
-    }
+    // The saved_class_table_ is now the source of truth for GC.
+    AtomicOperations::StoreRelease(&saved_class_table_, saved_class_table);
 
-    // Elements of saved_class_table_ are now visible to GC.
-    saved_class_table_ = local_saved_class_table;
+    // We can therefore wipe out all of the old entries (if that table were
+    // used for GC during the hot-reload, we would have a bug).
+    class_table->ResetBeforeHotReload();
   }
 
   // Add classes to the set. Set is stored in the Array, so adding an element
@@ -1026,20 +1019,6 @@ bool IsolateReloadContext::ScriptModifiedSince(const Script& script,
   return (*file_modified_callback_)(url_chars, since);
 }
 
-static void PropagateLibraryModified(
-    const ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by,
-    intptr_t lib_index,
-    BitVector* modified_libs) {
-  ZoneGrowableArray<intptr_t>* dep_libs = (*imported_by)[lib_index];
-  for (intptr_t i = 0; i < dep_libs->length(); i++) {
-    intptr_t dep_lib_index = (*dep_libs)[i];
-    if (!modified_libs->Contains(dep_lib_index)) {
-      modified_libs->Add(dep_lib_index);
-      PropagateLibraryModified(imported_by, dep_lib_index, modified_libs);
-    }
-  }
-}
-
 static bool ContainsScriptUri(const GrowableArray<const char*>& seen_uris,
                               const char* uri) {
   for (intptr_t i = 0; i < seen_uris.length(); i++) {
@@ -1112,109 +1091,6 @@ void IsolateReloadContext::FindModifiedSources(
   }
 }
 
-BitVector* IsolateReloadContext::FindModifiedLibraries(bool force_reload,
-                                                       bool root_lib_modified) {
-  Thread* thread = Thread::Current();
-  int64_t last_reload = I->last_reload_timestamp();
-
-  const GrowableObjectArray& libs =
-      GrowableObjectArray::Handle(object_store()->libraries());
-  Library& lib = Library::Handle();
-  Array& scripts = Array::Handle();
-  Script& script = Script::Handle();
-  intptr_t num_libs = libs.Length();
-
-  // Construct the imported-by graph.
-  ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>* imported_by = new (zone_)
-      ZoneGrowableArray<ZoneGrowableArray<intptr_t>*>(zone_, num_libs);
-  imported_by->SetLength(num_libs);
-  for (intptr_t i = 0; i < num_libs; i++) {
-    (*imported_by)[i] = new (zone_) ZoneGrowableArray<intptr_t>(zone_, 0);
-  }
-  Array& ports = Array::Handle();
-  Namespace& ns = Namespace::Handle();
-  Library& target = Library::Handle();
-
-  for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
-    lib ^= libs.At(lib_idx);
-    ASSERT(lib_idx == lib.index());
-    if (lib.is_dart_scheme()) {
-      // We don't care about imports among dart scheme libraries.
-      continue;
-    }
-
-    // Add imports to the import-by graph.
-    ports = lib.imports();
-    for (intptr_t import_idx = 0; import_idx < ports.Length(); import_idx++) {
-      ns ^= ports.At(import_idx);
-      if (!ns.IsNull()) {
-        target = ns.library();
-        (*imported_by)[target.index()]->Add(lib.index());
-      }
-    }
-
-    // Add exports to the import-by graph.
-    ports = lib.exports();
-    for (intptr_t export_idx = 0; export_idx < ports.Length(); export_idx++) {
-      ns ^= ports.At(export_idx);
-      if (!ns.IsNull()) {
-        target = ns.library();
-        (*imported_by)[target.index()]->Add(lib.index());
-      }
-    }
-
-    // Add prefixed imports to the import-by graph.
-    DictionaryIterator entries(lib);
-    Object& entry = Object::Handle();
-    LibraryPrefix& prefix = LibraryPrefix::Handle();
-    while (entries.HasNext()) {
-      entry = entries.GetNext();
-      if (entry.IsLibraryPrefix()) {
-        prefix ^= entry.raw();
-        ports = prefix.imports();
-        for (intptr_t import_idx = 0; import_idx < ports.Length();
-             import_idx++) {
-          ns ^= ports.At(import_idx);
-          if (!ns.IsNull()) {
-            target = ns.library();
-            (*imported_by)[target.index()]->Add(lib.index());
-          }
-        }
-      }
-    }
-  }
-
-  BitVector* modified_libs = new (Z) BitVector(Z, num_libs);
-
-  if (root_lib_modified) {
-    // The root library was either moved or replaced. Mark it as modified to
-    // force a reload of the potential root library replacement.
-    lib = object_store()->root_library();
-    modified_libs->Add(lib.index());
-  }
-
-  for (intptr_t lib_idx = 0; lib_idx < num_libs; lib_idx++) {
-    lib ^= libs.At(lib_idx);
-    if (lib.is_dart_scheme() || modified_libs->Contains(lib_idx)) {
-      // We don't consider dart scheme libraries during reload. If
-      // the modified libs set already contains this library, then we
-      // have already visited it.
-      continue;
-    }
-    scripts = lib.LoadedScripts();
-    for (intptr_t script_idx = 0; script_idx < scripts.Length(); script_idx++) {
-      script ^= scripts.At(script_idx);
-      if (force_reload || ScriptModifiedSince(script, last_reload)) {
-        modified_libs->Add(lib_idx);
-        PropagateLibraryModified(imported_by, lib_idx, modified_libs);
-        break;
-      }
-    }
-  }
-
-  return modified_libs;
-}
-
 void IsolateReloadContext::CheckpointLibraries() {
   TIMELINE_SCOPE(CheckpointLibraries);
   TIR_Print("---- CHECKPOINTING LIBRARIES\n");
@@ -1272,16 +1148,8 @@ void IsolateReloadContext::RollbackClasses() {
   TIR_Print("---- ROLLING BACK CLASS TABLE\n");
   ASSERT(saved_num_cids_ > 0);
   ASSERT(saved_class_table_ != NULL);
-  ClassTable* class_table = I->class_table();
-  class_table->SetNumCids(saved_num_cids_);
-  // Overwrite classes in class table with the saved classes.
-  for (intptr_t i = 0; i < saved_num_cids_; i++) {
-    if (class_table->IsValidIndex(i)) {
-      class_table->SetAt(i, saved_class_table_[i].get_raw_class());
-    }
-  }
 
-  DiscardSavedClassTable();
+  DiscardSavedClassTable(/*is_rollback=*/true);
 }
 
 void IsolateReloadContext::RollbackLibraries() {
@@ -1676,7 +1544,7 @@ void IsolateReloadContext::MorphInstancesAndApplyNewClassTable() {
   TIMELINE_SCOPE(MorphInstances);
   if (!HasInstanceMorphers()) {
     // Fast path: no class had a shape change.
-    DiscardSavedClassTable();
+    DiscardSavedClassTable(/*is_rollback=*/false);
     return;
   }
 
@@ -1698,7 +1566,7 @@ void IsolateReloadContext::MorphInstancesAndApplyNewClassTable() {
   intptr_t count = locator.count();
   if (count == 0) {
     // Fast path: classes with shape change have no instances.
-    DiscardSavedClassTable();
+    DiscardSavedClassTable(/*is_rollback=*/false);
     return;
   }
 
@@ -1742,8 +1610,10 @@ void IsolateReloadContext::MorphInstancesAndApplyNewClassTable() {
     saved_class_table_[i] = ClassAndSize(nullptr, -1);
   }
 #endif
-  free(saved_class_table_);
-  saved_class_table_ = nullptr;
+
+  // We accepted the hot-reload and morphed instances, so now we can commit to
+  // the changed class table and delete the saved one.
+  DiscardSavedClassTable(/*is_rollback=*/false);
 
   Become::ElementsForwardIdentity(before, after);
   // The heap now contains only instances with the new size. Ordinary GC is safe
@@ -1808,7 +1678,7 @@ RawClass* IsolateReloadContext::FindOriginalClass(const Class& cls) {
 
 RawClass* IsolateReloadContext::GetClassForHeapWalkAt(intptr_t cid) {
   ClassAndSize* class_table =
-      AtomicOperations::LoadRelaxed(&saved_class_table_);
+      AtomicOperations::LoadAcquire(&saved_class_table_);
   if (class_table != NULL) {
     ASSERT(cid > 0);
     ASSERT(cid < saved_num_cids_);
@@ -1820,7 +1690,7 @@ RawClass* IsolateReloadContext::GetClassForHeapWalkAt(intptr_t cid) {
 
 intptr_t IsolateReloadContext::GetClassSizeForHeapWalkAt(intptr_t cid) {
   ClassAndSize* class_table =
-      AtomicOperations::LoadRelaxed(&saved_class_table_);
+      AtomicOperations::LoadAcquire(&saved_class_table_);
   if (class_table != NULL) {
     ASSERT(cid > 0);
     ASSERT(cid < saved_num_cids_);
@@ -1830,14 +1700,12 @@ intptr_t IsolateReloadContext::GetClassSizeForHeapWalkAt(intptr_t cid) {
   }
 }
 
-void IsolateReloadContext::DiscardSavedClassTable() {
+void IsolateReloadContext::DiscardSavedClassTable(bool is_rollback) {
   ClassAndSize* local_saved_class_table = saved_class_table_;
-  saved_class_table_ = nullptr;
-  // Can't free this table immediately as another thread (e.g., concurrent
-  // marker or sweeper) may be between loading the table pointer and loading the
-  // table element. The table will be freed at the next major GC or isolate
-  // shutdown.
-  I->class_table()->AddOldTable(local_saved_class_table);
+  I->class_table()->ResetAfterHotReload(local_saved_class_table,
+                                        saved_num_cids_, is_rollback);
+  AtomicOperations::StoreRelease(&saved_class_table_,
+                                 static_cast<ClassAndSize*>(nullptr));
 }
 
 RawLibrary* IsolateReloadContext::saved_root_library() const {
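
The LoadRelaxed to LoadAcquire change pairs the GC-side reads with the StoreRelease that publishes (and later clears) saved_class_table_: a thread that observes the snapshot pointer is then guaranteed to also observe the snapshot's contents. A standalone `std::atomic` analogue of that pairing (illustrative only, not the VM's AtomicOperations code):

```cpp
#include <atomic>
#include <cstdint>
#include <thread>

struct Entry {
  intptr_t size;
};

std::atomic<Entry*> saved_table{nullptr};

// Reload thread: fill the snapshot first, then publish it with a release
// store, so the writes to the entries happen-before the pointer becomes
// visible to other threads.
void Publish(Entry* snapshot) {
  snapshot[0].size = 32;
  saved_table.store(snapshot, std::memory_order_release);
}

// GC thread: an acquire load pairs with the release store above; if it sees
// the pointer, it also sees the fully initialized entries. A relaxed load
// would not give that guarantee.
intptr_t SizeForHeapWalk(intptr_t cid) {
  Entry* table = saved_table.load(std::memory_order_acquire);
  if (table != nullptr) {
    return table[cid].size;
  }
  return -1;  // the real VM falls back to the live class table here
}

int main() {
  static Entry snapshot[1];
  std::thread reloader(Publish, snapshot);
  while (SizeForHeapWalk(0) == -1) {
    // Spin until the snapshot is published.
  }
  reloader.join();
  return 0;
}
```
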
3 changes: 1 addition & 2 deletions runtime/vm/isolate_reload.h
@@ -173,7 +173,7 @@ class IsolateReloadContext {
   // Prefers old classes when we are in the middle of a reload.
   RawClass* GetClassForHeapWalkAt(intptr_t cid);
   intptr_t GetClassSizeForHeapWalkAt(intptr_t cid);
-  void DiscardSavedClassTable();
+  void DiscardSavedClassTable(bool is_rollback);
 
   void RegisterClass(const Class& new_cls);
 
@@ -236,7 +236,6 @@ class IsolateReloadContext {
   void CheckpointClasses();
 
   bool ScriptModifiedSince(const Script& script, int64_t since);
-  BitVector* FindModifiedLibraries(bool force_reload, bool root_lib_modified);
   void FindModifiedSources(Thread* thread,
                            bool force_reload,
                            Dart_SourceFile** modified_sources,