Skip to content
This repository has been archived by the owner on Jul 18, 2018. It is now read-only.

Commit

Permalink
Oilpan: Disable promptly free for weak collection backings
Browse files Browse the repository at this point in the history
In anticipation of incremental marking, disable promptly freeing weak collection backings. This is required to avoid running callbacks for objects that may be destroyed by the mutator between incremental marking steps.

This CL:
- Disables promptly free for weak hash table backings
- Adds CheckObjectNotInCallbackStacks which checks that an object doesn't have any callback registered

Bug: 757440
Change-Id: Ibeffed9e5a2f15a1b88c934f68e5d86f54c2e2c9
Reviewed-on: https://chromium-review.googlesource.com/836299
Commit-Queue: Keishi Hattori <keishi@chromium.org>
Reviewed-by: Kentaro Hara <haraken@chromium.org>
Reviewed-by: Michael Lippautz <mlippautz@chromium.org>
Cr-Commit-Position: refs/heads/master@{#532399}
  • Loading branch information
Keishi Hattori authored and Commit Bot committed Jan 29, 2018
1 parent 0279ee3 commit 6e19c26
Show file tree
Hide file tree
Showing 9 changed files with 63 additions and 8 deletions.
8 changes: 6 additions & 2 deletions third_party/WebKit/Source/platform/heap/HeapAllocator.cpp
Expand Up @@ -21,6 +21,8 @@ void HeapAllocator::BackingFree(void* address) {
if (page->IsLargeObjectPage() || page->Arena()->GetThreadState() != state)
return;

state->CheckObjectNotInCallbackStacks(address);

HeapObjectHeader* header = HeapObjectHeader::FromPayload(address);
NormalPageArena* arena = static_cast<NormalPage*>(page)->ArenaForNormalPage();
state->Heap().PromptlyFreed(header->GcInfoIndex());
Expand All @@ -35,8 +37,10 @@ void HeapAllocator::FreeInlineVectorBacking(void* address) {
BackingFree(address);
}

void HeapAllocator::FreeHashTableBacking(void* address) {
BackingFree(address);
// Releases a hash table backing store. While incremental marking is in
// progress, weak table backings are deliberately NOT promptly freed and are
// instead left for the GC: a weak callback registered for the backing could
// otherwise run against already-freed memory.
void HeapAllocator::FreeHashTableBacking(void* address, bool is_weak_table) {
  const bool defer_to_gc =
      is_weak_table && ThreadState::Current()->IsIncrementalMarkingInProgress();
  if (defer_to_gc)
    return;
  BackingFree(address);
}

bool HeapAllocator::BackingExpand(void* address, size_t new_size) {
Expand Down
2 changes: 1 addition & 1 deletion third_party/WebKit/Source/platform/heap/HeapAllocator.h
Expand Up @@ -124,7 +124,7 @@ class PLATFORM_EXPORT HeapAllocator {
static T* AllocateZeroedHashTableBacking(size_t size) {
return AllocateHashTableBacking<T, HashTable>(size);
}
static void FreeHashTableBacking(void* address);
static void FreeHashTableBacking(void* address, bool is_weak_table);
static bool ExpandHashTableBacking(void*, size_t);

template <typename Return, typename Metadata>
Expand Down
1 change: 1 addition & 0 deletions third_party/WebKit/Source/platform/heap/HeapPage.h
Expand Up @@ -870,6 +870,7 @@ class PLATFORM_EXPORT NormalPageArena final : public BaseArena {
bool ExpandObject(HeapObjectHeader*, size_t);
bool ShrinkObject(HeapObjectHeader*, size_t);
void DecreasePromptlyFreedSize(size_t size) { promptly_freed_size_ -= size; }
size_t promptly_freed_size() const { return promptly_freed_size_; }

bool IsObjectAllocatedAtAllocationPoint(HeapObjectHeader* header) {
return header->PayloadEnd() == current_allocation_point_;
Expand Down
36 changes: 36 additions & 0 deletions third_party/WebKit/Source/platform/heap/IncrementalMarkingTest.cpp
Expand Up @@ -1382,6 +1382,42 @@ TEST(IncrementalMarkingTest, HeapHashMapCopyValuesToVectorMember) {
}
}

// Verifies that while incremental marking is underway, backings of *weak*
// hash tables are not promptly freed (their pending weak callbacks could
// otherwise touch freed memory), whereas strong hash table backings still
// are promptly freed. Progress is observed via the hash table arena's
// promptly_freed_size() counter.
TEST(IncrementalMarkingTest, WeakHashMapPromptlyFreeDisabled) {
ThreadState* state = ThreadState::Current();
// Simulate an incremental-marking cycle being in progress.
state->SetGCState(ThreadState::kIncrementalMarkingStartScheduled);
state->SetGCState(ThreadState::kIncrementalMarkingStepScheduled);
Persistent<Object> obj1 = Object::Create();
NormalPageArena* arena = static_cast<NormalPageArena*>(
ThreadState::Current()->Heap().Arena(BlinkGC::kHashTableArenaIndex));
CHECK(arena);
{
size_t before = arena->promptly_freed_size();
// Create two maps so we don't promptly free at the allocation point.
HeapHashMap<WeakMember<Object>, Member<Object>> weak_map1;
HeapHashMap<WeakMember<Object>, Member<Object>> weak_map2;
weak_map1.insert(obj1, obj1);
weak_map2.insert(obj1, obj1);
weak_map1.clear();
size_t after = arena->promptly_freed_size();
// Weak hash table backings should not be promptly freed.
EXPECT_EQ(after, before);
}
{
size_t before = arena->promptly_freed_size();
// Create two maps so we don't promptly free at the allocation point.
HeapHashMap<Member<Object>, Member<Object>> map1;
HeapHashMap<Member<Object>, Member<Object>> map2;
map1.insert(obj1, obj1);
map2.insert(obj1, obj1);
map1.clear();
size_t after = arena->promptly_freed_size();
// Non-weak hash table backings should be promptly freed.
EXPECT_GT(after, before);
}
// Unwind the simulated GC state so subsequent tests start clean.
state->SetGCState(ThreadState::kIncrementalMarkingFinalizeScheduled);
state->SetGCState(ThreadState::kNoGCScheduled);
}

} // namespace incremental_marking_test
} // namespace blink

Expand Down
9 changes: 9 additions & 0 deletions third_party/WebKit/Source/platform/heap/ThreadState.cpp
Expand Up @@ -1431,4 +1431,13 @@ void ThreadState::CollectAllGarbage() {
}
}

// Debug-only sanity check: asserts that |object| has no callback registered
// on any of the heap's callback stacks. Called before a backing store is
// promptly freed, since a still-queued callback would otherwise later run
// against freed memory. Compiles to nothing when DCHECKs are disabled.
void ThreadState::CheckObjectNotInCallbackStacks(const void* object) {
#if DCHECK_IS_ON()
  auto& heap = Heap();
  DCHECK(!heap.MarkingStack()->HasCallbackForObject(object));
  DCHECK(!heap.PostMarkingCallbackStack()->HasCallbackForObject(object));
  DCHECK(!heap.WeakCallbackStack()->HasCallbackForObject(object));
  DCHECK(!heap.EphemeronStack()->HasCallbackForObject(object));
#endif
}

} // namespace blink
5 changes: 3 additions & 2 deletions third_party/WebKit/Source/platform/heap/ThreadState.h
Expand Up @@ -270,8 +270,7 @@ class PLATFORM_EXPORT ThreadState {
void IncrementalMarkingFinalize();

bool IsIncrementalMarkingInProgress() const {
return GcState() == kIncrementalMarkingStartScheduled ||
GcState() == kIncrementalMarkingStepScheduled ||
return GcState() == kIncrementalMarkingStepScheduled ||
GcState() == kIncrementalMarkingFinalizeScheduled;
}

Expand Down Expand Up @@ -342,6 +341,8 @@ class PLATFORM_EXPORT ThreadState {
bool IsIncrementalMarking() const { return incremental_marking_; }
void SetIncrementalMarking(bool value) { incremental_marking_ = value; }

void CheckObjectNotInCallbackStacks(const void*);

class MainThreadGCForbiddenScope final {
STACK_ALLOCATED();

Expand Down
5 changes: 4 additions & 1 deletion third_party/WebKit/Source/platform/wtf/HashTable.h
Expand Up @@ -1629,7 +1629,10 @@ void HashTable<Key,
}
}
}
Allocator::FreeHashTableBacking(table);
// Notify if this is a weak table since immediately freeing a weak hash table
// backing may cause a use-after-free when the weak callback is called.
Allocator::FreeHashTableBacking(
table, Traits::kWeakHandlingFlag == kWeakHandlingInCollections);
}

template <typename Key,
Expand Down
Expand Up @@ -17,7 +17,8 @@ void PartitionAllocator::FreeVectorBacking(void* address) {
Partitions::BufferFree(address);
}

void PartitionAllocator::FreeHashTableBacking(void* address) {
// Frees a hash table backing allocated from PartitionAlloc.
// |is_weak_table| exists only to keep the signature in sync with
// HeapAllocator::FreeHashTableBacking and is deliberately ignored here:
// this allocator frees unconditionally. NOTE(review): presumably no
// Oilpan incremental-marking deferral applies to PartitionAlloc-backed
// tables — confirm against the HeapAllocator counterpart.
void PartitionAllocator::FreeHashTableBacking(void* address,
bool is_weak_table) {
Partitions::BufferFree(address);
}

Expand Down
Expand Up @@ -82,7 +82,7 @@ class WTF_EXPORT PartitionAllocator {
memset(result, 0, size);
return reinterpret_cast<T*>(result);
}
static void FreeHashTableBacking(void* address);
static void FreeHashTableBacking(void* address, bool is_weak_table);

template <typename Return, typename Metadata>
static Return Malloc(size_t size, const char* type_name) {
Expand Down

0 comments on commit 6e19c26

Please sign in to comment.