Revert of Reland of Remove slots that point to unboxed doubles from the StoreBuffer/SlotsBuffer. (patchset #3 id:40001 of https://codereview.chromium.org/988363002/)

Reason for revert:
Increased rate of Chrome crashes. Requires further investigation.

Original issue's description:
> Reland of Remove slots that point to unboxed doubles from the StoreBuffer/SlotsBuffer.
>
> The problem is that a tagged slot could become a double slot after migrating an object to another map with "shifted" fields (for example, as a result of generalizing an immutable data property to a data field).
> This CL also adds useful machinery that helps trigger incremental write barriers.
>
> BUG=chromium:454297, chromium:465273
> LOG=Y
>
> Committed: https://crrev.com/6d0677d845c47ab9fa297de61d0e3d8e5480a02a
> Cr-Commit-Position: refs/heads/master@{#27141}

TBR=hpayer@chromium.org
NOPRESUBMIT=true
NOTREECHECKS=true
NOTRY=true
BUG=chromium:454297, chromium:465273

Review URL: https://codereview.chromium.org/1004623003

Cr-Commit-Position: refs/heads/master@{#27207}
isheludko authored and Commit bot committed Mar 16, 2015
1 parent 8db09a3 commit 52cb51f
Showing 8 changed files with 10 additions and 389 deletions.
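
For context on the reverted change: with --unbox_double_fields, a map migration that "shifts" fields can turn a slot that previously held a tagged pointer into storage for a raw double, so any address recorded for that slot in the StoreBuffer or a SlotsBuffer becomes stale and must not be visited as a pointer. The reverted CL handled this by filtering such addresses out of the buffers before the doubles were written. Below is a minimal standalone sketch of that filtering idea; the names and types are illustrative, not V8's actual API.

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    using Address = std::uintptr_t;

    // Hypothetical sentinel: an address that is never treated as a live slot.
    constexpr Address kRemovedSlotSentinel = 0;

    // Overwrite every recorded slot address that points into
    // [start_address, end_address) so the GC never interprets the future
    // raw double bits stored there as a heap pointer.
    void RemoveSlotsInRange(std::vector<Address>* recorded_slots,
                            Address start_address, Address end_address) {
      std::replace_if(recorded_slots->begin(), recorded_slots->end(),
                      [=](Address slot) {
                        return start_address <= slot && slot < end_address;
                      },
                      kRemovedSlotSentinel);
    }

In the diff below, StoreBuffer::RemoveSlots played this role, using the address of the immovable undefined value as the sentinel instead of erasing entries.
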
5 changes: 0 additions & 5 deletions src/flag-definitions.h
@@ -759,11 +759,6 @@ DEFINE_BOOL(stress_compaction, false,
"stress the GC compactor to flush out bugs (implies "
"--force_marking_deque_overflows)")

DEFINE_BOOL(manual_evacuation_candidates_selection, false,
"Test mode only flag. It allows an unit test to select evacuation "
"candidates pages (requires --stress_compaction).")


//
// Dev shell flags
//
29 changes: 3 additions & 26 deletions src/heap/mark-compact.cc
@@ -721,7 +721,6 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {

int count = 0;
int fragmentation = 0;
int page_number = 0;
Candidate* least = NULL;

PageIterator it(space);
@@ -736,16 +735,9 @@ void MarkCompactCollector::CollectEvacuationCandidates(PagedSpace* space) {
CHECK(p->slots_buffer() == NULL);

if (FLAG_stress_compaction) {
if (FLAG_manual_evacuation_candidates_selection) {
if (p->IsFlagSet(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING)) {
p->ClearFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
fragmentation = 1;
}
} else {
unsigned int counter = space->heap()->ms_count();
if ((counter & 1) == (page_number & 1)) fragmentation = 1;
page_number++;
}
unsigned int counter = space->heap()->ms_count();
uintptr_t page_number = reinterpret_cast<uintptr_t>(p) >> kPageSizeBits;
if ((counter & 1) == (page_number & 1)) fragmentation = 1;
} else if (mode == REDUCE_MEMORY_FOOTPRINT) {
// Don't try to release too many pages.
if (estimated_release >= over_reserved) {
@@ -4491,21 +4483,6 @@ bool SlotsBuffer::AddTo(SlotsBufferAllocator* allocator,
}


static Object* g_smi_slot = NULL;


void SlotsBuffer::RemoveSlot(SlotsBuffer* buffer, ObjectSlot slot_to_remove) {
DCHECK_EQ(Smi::FromInt(0), g_smi_slot);
DCHECK(!IsTypedSlot(slot_to_remove));
while (buffer != NULL) {
ObjectSlot* slots = buffer->slots_;
// Remove entries by replacing them with a dummy slot containing a smi.
std::replace(&slots[0], &slots[buffer->idx_], slot_to_remove, &g_smi_slot);
buffer = buffer->next_;
}
}


static inline SlotsBuffer::SlotType SlotTypeForRMode(RelocInfo::Mode rmode) {
if (RelocInfo::IsCodeTarget(rmode)) {
return SlotsBuffer::CODE_TARGET_SLOT;
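
The SlotsBuffer::RemoveSlot helper deleted above walked the chain of fixed-size slot buffers and blanked out matching entries with std::replace rather than compacting the chain. A rough standalone sketch of that pattern follows; the types and names here are illustrative, not V8's.

    #include <algorithm>

    // Dummy target whose address replaces removed entries; "updating" it
    // during evacuation is always harmless.
    static void* g_dummy_slot_target = nullptr;

    struct SlotChunk {
      static const int kCapacity = 1021;
      void** slots[kCapacity];   // recorded object slots
      int count = 0;             // number of live entries in this chunk
      SlotChunk* next = nullptr; // next buffer in the chain
    };

    // Walk the whole chain and overwrite every occurrence of the slot
    // in place; the chain is never shrunk or relinked.
    void RemoveSlotFromChain(SlotChunk* chunk, void** slot_to_remove) {
      for (; chunk != nullptr; chunk = chunk->next) {
        std::replace(&chunk->slots[0], &chunk->slots[chunk->count],
                     slot_to_remove, &g_dummy_slot_target);
      }
    }

Overwriting in place keeps removal simple while other code may still hold references into the buffers.
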
3 changes: 0 additions & 3 deletions src/heap/mark-compact.h
@@ -363,9 +363,6 @@ class SlotsBuffer {
SlotsBuffer** buffer_address, SlotType type, Address addr,
AdditionMode mode);

// Removes all occurrences of given slot from the chain of slots buffers.
static void RemoveSlot(SlotsBuffer* buffer, ObjectSlot slot_to_remove);

static const int kNumberOfElements = 1021;

private:
6 changes: 0 additions & 6 deletions src/heap/spaces.h
@@ -385,12 +385,6 @@ class MemoryChunk {
// to grey transition is performed in the value.
HAS_PROGRESS_BAR,

// This flag is intended to be used for testing. Works only when both
// FLAG_stress_compaction and FLAG_manual_evacuation_candidates_selection
// are set. It forces the page to become an evacuation candidate at next
// candidates selection cycle.
FORCE_EVACUATION_CANDIDATE_FOR_TESTING,

// Last flag, keep at bottom.
NUM_MEMORY_CHUNK_FLAGS
};
61 changes: 3 additions & 58 deletions src/heap/store-buffer.cc
@@ -247,55 +247,6 @@ void StoreBuffer::Filter(int flag) {
}


void StoreBuffer::RemoveSlots(Address start_address, Address end_address) {
struct IsValueInRangePredicate {
Address start_address_;
Address end_address_;

IsValueInRangePredicate(Address start_address, Address end_address)
: start_address_(start_address), end_address_(end_address) {}

bool operator()(Address addr) {
return start_address_ <= addr && addr < end_address_;
}
};

IsValueInRangePredicate predicate(start_address, end_address);
// Some address in old space that does not move.
const Address kRemovedSlot = heap_->undefined_value()->address();
DCHECK(Page::FromAddress(kRemovedSlot)->NeverEvacuate());

{
Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
std::replace_if(start_, top, predicate, kRemovedSlot);
}

if (old_buffer_is_sorted_) {
// Remove slots from an old buffer preserving the order.
Address* lower = std::lower_bound(old_start_, old_top_, start_address);
if (lower != old_top_) {
// [lower, old_top_) range contain elements that are >= |start_address|.
Address* upper = std::lower_bound(lower, old_top_, end_address);
// Remove [lower, upper) from the buffer.
if (upper == old_top_) {
// All elements in [lower, old_top_) range are < |end_address|.
old_top_ = lower;
} else if (lower != upper) {
// [upper, old_top_) range contain elements that are >= |end_address|,
// move [upper, old_top_) range to [lower, ...) and update old_top_.
Address* new_top = lower;
for (Address* p = upper; p < old_top_; p++) {
*new_top++ = *p;
}
old_top_ = new_top;
}
}
} else {
std::replace_if(old_start_, old_top_, predicate, kRemovedSlot);
}
}


void StoreBuffer::SortUniq() {
Compact();
if (old_buffer_is_sorted_) return;
@@ -346,18 +297,12 @@ static Address* in_store_buffer_1_element_cache = NULL;


bool StoreBuffer::CellIsInStoreBuffer(Address cell_address) {
DCHECK_NOT_NULL(cell_address);
Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
if (!FLAG_enable_slow_asserts) return true;
if (in_store_buffer_1_element_cache != NULL &&
*in_store_buffer_1_element_cache == cell_address) {
// Check if the cache still points into the active part of the buffer.
if ((start_ <= in_store_buffer_1_element_cache &&
in_store_buffer_1_element_cache < top) ||
(old_start_ <= in_store_buffer_1_element_cache &&
in_store_buffer_1_element_cache < old_top_)) {
return true;
}
return true;
}
Address* top = reinterpret_cast<Address*>(heap_->store_buffer_top());
for (Address* current = top - 1; current >= start_; current--) {
if (*current == cell_address) {
in_store_buffer_1_element_cache = current;
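
For the sorted part of the store buffer, the removed RemoveSlots preserved ordering: two lower_bound searches locate the [lower, upper) range of recorded addresses that fall inside the object, and the surviving tail is shifted down over it. A compressed sketch of the same idea, with a std::vector standing in for the raw buffer (illustrative only, not V8's code):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    using Address = std::uintptr_t;

    // buffer must be sorted; drops every entry in [start_address, end_address)
    // while keeping the remaining entries in order.
    void RemoveRangeFromSortedBuffer(std::vector<Address>* buffer,
                                     Address start_address,
                                     Address end_address) {
      auto lower = std::lower_bound(buffer->begin(), buffer->end(),
                                    start_address);
      auto upper = std::lower_bound(lower, buffer->end(), end_address);
      // Equivalent to the copy-down loop in the original: move the tail over
      // the removed range and shrink the buffer.
      buffer->erase(lower, upper);
    }
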
4 changes: 0 additions & 4 deletions src/heap/store-buffer.h
@@ -107,10 +107,6 @@ class StoreBuffer {
void ClearInvalidStoreBufferEntries();
void VerifyValidStoreBufferEntries();

// Removes all the slots in [start_address, end_address) range from the store
// buffer.
void RemoveSlots(Address start_address, Address end_address);

private:
Heap* heap_;

66 changes: 4 additions & 62 deletions src/objects.cc
@@ -1923,50 +1923,6 @@ void JSObject::MigrateToMap(Handle<JSObject> object, Handle<Map> new_map) {
}


// Returns true if during migration from |old_map| to |new_map| "tagged"
// inobject fields are going to be replaced with unboxed double fields.
static bool ShouldClearSlotsRecorded(Map* old_map, Map* new_map,
int new_number_of_fields) {
DisallowHeapAllocation no_gc;
int inobject = new_map->inobject_properties();
DCHECK(inobject <= old_map->inobject_properties());

int limit = Min(inobject, new_number_of_fields);
for (int i = 0; i < limit; i++) {
FieldIndex index = FieldIndex::ForPropertyIndex(new_map, i);
if (new_map->IsUnboxedDoubleField(index) &&
!old_map->IsUnboxedDoubleField(index)) {
return true;
}
}
return false;
}


static void RemoveOldToOldSlotsRecorded(Heap* heap, JSObject* object,
FieldIndex index) {
DisallowHeapAllocation no_gc;

Object* old_value = object->RawFastPropertyAt(index);
if (old_value->IsHeapObject()) {
HeapObject* ho = HeapObject::cast(old_value);
if (heap->InNewSpace(ho)) {
// At this point there must be no old-to-new slots recorded for this
// object.
SLOW_DCHECK(
!heap->store_buffer()->CellIsInStoreBuffer(reinterpret_cast<Address>(
HeapObject::RawField(object, index.offset()))));
} else {
Page* p = Page::FromAddress(reinterpret_cast<Address>(ho));
if (p->IsEvacuationCandidate()) {
Object** slot = HeapObject::RawField(object, index.offset());
SlotsBuffer::RemoveSlot(p->slots_buffer(), slot);
}
}
}
}


// To migrate a fast instance to a fast map:
// - First check whether the instance needs to be rewritten. If not, simply
// change the map.
@@ -2120,39 +2076,25 @@ void JSObject::MigrateFastToFast(Handle<JSObject> object, Handle<Map> new_map) {
// From here on we cannot fail and we shouldn't GC anymore.
DisallowHeapAllocation no_allocation;

Heap* heap = isolate->heap();

// If we are going to put an unboxed double to the field that used to
// contain HeapObject we should ensure that this slot is removed from
// both StoreBuffer and respective SlotsBuffer.
bool clear_slots_recorded =
FLAG_unbox_double_fields && !heap->InNewSpace(object->address()) &&
ShouldClearSlotsRecorded(*old_map, *new_map, number_of_fields);
if (clear_slots_recorded) {
Address obj_address = object->address();
Address start_address = obj_address + JSObject::kHeaderSize;
Address end_address = obj_address + old_map->instance_size();
heap->store_buffer()->RemoveSlots(start_address, end_address);
}

// Copy (real) inobject properties. If necessary, stop at number_of_fields to
// avoid overwriting |one_pointer_filler_map|.
int limit = Min(inobject, number_of_fields);
for (int i = 0; i < limit; i++) {
FieldIndex index = FieldIndex::ForPropertyIndex(*new_map, i);
Object* value = array->get(external + i);
// Can't use JSObject::FastPropertyAtPut() because proper map was not set
// yet.
if (new_map->IsUnboxedDoubleField(index)) {
DCHECK(value->IsMutableHeapNumber());
if (clear_slots_recorded && !old_map->IsUnboxedDoubleField(index)) {
RemoveOldToOldSlotsRecorded(heap, *object, index);
}
object->RawFastDoublePropertyAtPut(index,
HeapNumber::cast(value)->value());
} else {
object->RawFastPropertyAtPut(index, value);
}
}

Heap* heap = isolate->heap();

// If there are properties in the new backing store, trim it to the correct
// size and install the backing store into the object.
if (external > 0) {
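
The objects.cc hunks removed the decision logic that triggered the clearing: slots only had to be purged when migration turns a field that was tagged under the old map into an unboxed double under the new map. A simplified sketch of that check, with a hypothetical FieldLayout type standing in for V8's Map/FieldIndex machinery:

    #include <algorithm>
    #include <vector>

    struct FieldLayout {
      // true at index i means in-object field i stores a raw (unboxed) double.
      std::vector<bool> is_unboxed_double;
    };

    bool ShouldClearRecordedSlots(const FieldLayout& old_layout,
                                  const FieldLayout& new_layout,
                                  int new_number_of_fields) {
      int inobject = static_cast<int>(new_layout.is_unboxed_double.size());
      int limit = std::min(inobject, new_number_of_fields);
      for (int i = 0; i < limit; i++) {
        bool was_tagged =
            i >= static_cast<int>(old_layout.is_unboxed_double.size()) ||
            !old_layout.is_unboxed_double[i];
        if (new_layout.is_unboxed_double[i] && was_tagged) {
          // A tagged field becomes a raw double; recorded slots for it are stale.
          return true;
        }
      }
      return false;
    }
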
