Skip to content

Commit

Permalink
[discardable_memory] Keep track of how much memory is dirty in
Browse files Browse the repository at this point in the history
freelists.

Memory retained in freelists may be dirty (if it was previously used).
How much dirty memory is in the freelists is not accounted for anywhere.
This CL fixes that by tracking the amount of dirty memory in the
freelists.

Bug: None
Change-Id: I3739bfbdaf07017ae9901de43a82f0a979cab8ce
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2820649
Reviewed-by: Peng Huang <penghuang@chromium.org>
Reviewed-by: Benoit L <lizeb@chromium.org>
Commit-Queue: Thiabaud Engelbrecht <thiabaud@google.com>
Cr-Commit-Position: refs/heads/master@{#880976}
  • Loading branch information
Thiabaud Engelbrecht authored and Chromium LUCI CQ committed May 10, 2021
1 parent 92f2d98 commit 0b8370a
Show file tree
Hide file tree
Showing 6 changed files with 252 additions and 22 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -321,6 +321,9 @@ ClientDiscardableSharedMemoryManager::AllocateLockedDiscardableMemory(
// at least one span from the free lists.
MemoryUsageChanged(heap_->GetSize(), heap_->GetFreelistSize());

// Memory in this span is no longer held in the freelist, so we don't want
// to count it towards the total of dirty freelist memory.
heap_->dirty_freed_memory_page_count_ -= free_span->MarkAsClean();
auto discardable_memory =
std::make_unique<DiscardableMemoryImpl>(this, std::move(free_span));
allocated_memory_.insert(discardable_memory.get());
Expand Down Expand Up @@ -373,7 +376,7 @@ ClientDiscardableSharedMemoryManager::AllocateLockedDiscardableMemory(
reinterpret_cast<size_t>(leftover->shared_memory()->memory()),
leftover->length() * base::GetPageSize());
leftover->set_is_locked(false);
heap_->MergeIntoFreeLists(std::move(leftover));
heap_->MergeIntoFreeListsClean(std::move(leftover));
}

if (pages >= allocation_pages) {
Expand Down Expand Up @@ -405,6 +408,11 @@ bool ClientDiscardableSharedMemoryManager::OnMemoryDump(
base::UmaHistogramCounts1M("Memory.Discardable.Size.Foreground",
total_size - freelist_size);
}

base::UmaHistogramCounts1M(
"Memory.Discardable.FreelistSize.Dirty",
heap_->dirty_freed_memory_page_count_ * base::GetPageSize() / 1024);

return heap_->OnMemoryDump(args, pmd);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,11 @@ class TestClientDiscardableSharedMemoryManager
return heap_->GetFreelistSize();
}

// Test-only accessor: returns the heap's running count of dirty pages
// currently sitting in the freelists. Takes |lock_| because the heap is
// otherwise accessed under the manager's lock.
size_t GetDirtyFreedMemoryPageCount() const {
  base::AutoLock lock(lock_);
  return heap_->dirty_freed_memory_page_count_;
}

bool IsPurgeScheduled() const {
base::AutoLock lock(lock_);
return is_purge_scheduled_;
Expand Down Expand Up @@ -458,5 +463,79 @@ TEST_F(ClientDiscardableSharedMemoryManagerTest,
EXPECT_FALSE(client->IsPurgeScheduled());
}

// Verifies that returning allocations to the freelist marks their pages as
// dirty (counted by GetDirtyFreedMemoryPageCount), and that releasing all
// free memory resets the dirty-page count to zero.
TEST_F(ClientDiscardableSharedMemoryManagerTest, MarkDirtyFreelistPages) {
  auto client =
      base::MakeRefCounted<TestClientDiscardableSharedMemoryManager>();

  // Nothing has been freed yet, so no freelist pages are dirty.
  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());

  auto mem1 = client->AllocateLockedDiscardableMemory(base::GetPageSize() / 2u);

  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());

  auto mem2 =
      client->AllocateLockedDiscardableMemory(base::GetPageSize() * 1.2);

  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());

  // Allocate 5 MiB. This is to test large allocations, which are special-cased
  // when allocating.
  auto mem3 = client->AllocateLockedDiscardableMemory(5 * 1024 * 1024);

  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());

  // Dropping the last reference returns the allocation to the freelist,
  // dirtying its single page.
  mem1 = nullptr;

  ASSERT_EQ(1u, client->GetDirtyFreedMemoryPageCount());

  mem2 = nullptr;

  // Allocations are done in multiples of the page size, so we have 3 pages
  // dirtied, even though we only actually touched 1.7 pages (since the 0.5 page
  // allocation used 1 page, and the 1.2 page allocation used 2).
  ASSERT_EQ(3u, client->GetDirtyFreedMemoryPageCount());

  mem3 = nullptr;

  // 5 MiB is 1280 pages plus the 3 pages already dirty above gives 1283.
  // NOTE(review): this assumes a 4 KiB page size — confirm on all platforms
  // this test runs on.
  ASSERT_EQ(1283u, client->GetDirtyFreedMemoryPageCount());

  client->ReleaseFreeMemory();

  // All pages should be freed now, so there are no dirty pages in the freelist.
  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());
}

// Same scenario as MarkDirtyFreelistPages, but with the
// kReleaseDiscardableFreeListPages feature enabled: freed memory is released
// as soon as it enters the freelist, so the dirty-page count stays zero
// throughout.
TEST_F(ClientDiscardableSharedMemoryManagerTest,
       MarkDirtyFreelistPagesReleaseFreeListPages) {
  base::test::ScopedFeatureList fl;
  fl.InitAndEnableFeature(discardable_memory::kReleaseDiscardableFreeListPages);
  auto client =
      base::MakeRefCounted<TestClientDiscardableSharedMemoryManager>();

  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());

  auto mem1 = client->AllocateLockedDiscardableMemory(base::GetPageSize() / 2u);

  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());

  auto mem2 =
      client->AllocateLockedDiscardableMemory(base::GetPageSize() * 1.2);

  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());

  // Even after freeing, nothing is counted as dirty under this feature.
  mem1 = nullptr;

  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());

  mem2 = nullptr;

  // Freelist memory is released immediately, so there's no dirty memory.
  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());

  client->ReleaseFreeMemory();

  ASSERT_EQ(0u, client->GetDirtyFreedMemoryPageCount());
}

} // namespace
} // namespace discardable_memory
Original file line number Diff line number Diff line change
Expand Up @@ -36,31 +36,70 @@ bool IsInFreeList(DiscardableSharedMemoryHeap::Span* span) {
DiscardableSharedMemoryHeap::Span::Span(
base::DiscardableSharedMemory* shared_memory,
size_t start,
size_t length)
: shared_memory_(shared_memory),
size_t length,
DiscardableSharedMemoryHeap::ScopedMemorySegment* memory_segment)
: memory_segment_(memory_segment),
shared_memory_(shared_memory),
start_(start),
length_(length),
is_locked_(false) {}

DiscardableSharedMemoryHeap::Span::~Span() {}

DiscardableSharedMemoryHeap::ScopedMemorySegment::ScopedMemorySegment(
DiscardableSharedMemoryHeap* heap,
std::unique_ptr<base::DiscardableSharedMemory> shared_memory,
size_t size,
int32_t id,
base::OnceClosure deleted_callback)
: heap_(heap),
: dirty_pages_(std::vector<bool>(size / base::GetPageSize())),
heap_(heap),
shared_memory_(std::move(shared_memory)),
size_(size),
id_(id),
deleted_callback_(std::move(deleted_callback)) {}

// Clears the dirty bit for every page covered by this span, via the owning
// segment's bookkeeping. Returns the number of pages that actually changed
// from dirty to clean, so the caller can decrement the heap-wide counter.
size_t DiscardableSharedMemoryHeap::Span::MarkAsClean() {
  return memory_segment_->MarkPages(start_, length_, false);
}

// Sets the dirty bit for every page covered by this span, via the owning
// segment's bookkeeping. Returns the number of pages that actually changed
// from clean to dirty, so the caller can increment the heap-wide counter.
size_t DiscardableSharedMemoryHeap::Span::MarkAsDirty() {
  return memory_segment_->MarkPages(start_, length_, true);
}

// Test-only accessor for the memory segment that backs this span. The
// returned pointer is non-owning; the segment is owned by the heap.
DiscardableSharedMemoryHeap::ScopedMemorySegment*
DiscardableSharedMemoryHeap::Span::GetScopedMemorySegmentForTesting() const {
  return memory_segment_;
}

// Tears down the segment. Before the backing memory is released, every page
// in the segment is marked clean and the heap-wide dirty counter is reduced
// by the number of pages that were still dirty, so the counter never refers
// to pages of a destroyed segment. Order matters: the dirty bookkeeping must
// happen before ReleaseMemory(), and the deleted callback runs last.
DiscardableSharedMemoryHeap::ScopedMemorySegment::~ScopedMemorySegment() {
  // MarkPages(..., false) returns how many pages flipped dirty -> clean.
  heap_->dirty_freed_memory_page_count_ -= MarkPages(
      reinterpret_cast<size_t>(shared_memory_->memory()) / base::GetPageSize(),
      dirty_pages_.size(), false);
  heap_->ReleaseMemory(shared_memory_.get(), size_);
  std::move(deleted_callback_).Run();
}

// Sets the dirty flag of |length| consecutive pages to |value|. |start| is an
// absolute page index (address divided by the page size); it is translated to
// an index into |dirty_pages_|, which covers this segment's pages starting at
// its base address. Returns how many pages actually changed state, so callers
// can adjust the heap-wide dirty-page counter. No-op (returns 0) once the
// backing |shared_memory_| is gone.
size_t DiscardableSharedMemoryHeap::ScopedMemorySegment::MarkPages(
    size_t start,
    size_t length,
    bool value) {
  if (!shared_memory_)
    return 0;

  // Page index of this segment's first page; |start| is relative to the
  // whole address space.
  const size_t first_page =
      reinterpret_cast<size_t>(shared_memory_->memory()) / base::GetPageSize();
  const size_t begin = start - first_page;

  size_t flipped = 0;
  for (size_t page = begin; page != begin + length; ++page) {
    if (dirty_pages_[page] == value)
      continue;
    dirty_pages_[page] = value;
    ++flipped;
  }

  return flipped;
}

// Returns true if any of this segment's memory is currently in use;
// delegates the check to the owning heap.
bool DiscardableSharedMemoryHeap::ScopedMemorySegment::IsUsed() const {
  return heap_->IsMemoryUsed(shared_memory_.get(), size_);
}
Expand All @@ -74,6 +113,11 @@ bool DiscardableSharedMemoryHeap::ScopedMemorySegment::ContainsSpan(
return shared_memory_.get() == span->shared_memory();
}

// Returns how many of this segment's pages are currently flagged dirty in
// |dirty_pages_|. O(pages in segment).
size_t DiscardableSharedMemoryHeap::ScopedMemorySegment::CountMarkedPages()
    const {
  return std::count(dirty_pages_.begin(), dirty_pages_.end(), true);
}

base::trace_event::MemoryAllocatorDump*
DiscardableSharedMemoryHeap::ScopedMemorySegment::CreateMemoryAllocatorDump(
Span* span,
Expand Down Expand Up @@ -124,29 +168,36 @@ DiscardableSharedMemoryHeap::Grow(
DCHECK(base::IsAligned(shared_memory->memory(), block_size_));
DCHECK(base::IsAligned(size, block_size_));

std::unique_ptr<Span> span(
new Span(shared_memory.get(),
reinterpret_cast<size_t>(shared_memory->memory()) / block_size_,
size / block_size_));
auto* raw_shared_memory = shared_memory.get();
auto scoped_memory_segment = std::make_unique<ScopedMemorySegment>(
this, std::move(shared_memory), size, id, std::move(deleted_callback));
std::unique_ptr<Span> span(new Span(
raw_shared_memory,
reinterpret_cast<size_t>(raw_shared_memory->memory()) / block_size_,
size / block_size_, scoped_memory_segment.get()));
DCHECK(spans_.find(span->start_) == spans_.end());
DCHECK(spans_.find(span->start_ + span->length_ - 1) == spans_.end());
RegisterSpan(span.get());

num_blocks_ += span->length_;

// Start tracking if segment is resident by adding it to |memory_segments_|.
memory_segments_.push_back(std::make_unique<ScopedMemorySegment>(
this, std::move(shared_memory), size, id, std::move(deleted_callback)));
memory_segments_.push_back(std::move(scoped_memory_segment));

return span;
}

// Returns |span| to the free lists. Unless kReleaseDiscardableFreeListPages
// is enabled (in which case freelist pages are released immediately and
// therefore hold no data), the span's pages may still carry previous
// contents, so they are flagged dirty and the heap-wide dirty counter is
// bumped by the number of pages that actually changed state.
void DiscardableSharedMemoryHeap::MergeIntoFreeLists(
    std::unique_ptr<Span> span) {
  DCHECK(span->shared_memory_);

  const bool releases_freelist_pages =
      base::FeatureList::IsEnabled(kReleaseDiscardableFreeListPages);
  if (!releases_freelist_pages)
    dirty_freed_memory_page_count_ += span->MarkAsDirty();

  MergeIntoFreeListsClean(std::move(span));
}

// First add length of |span| to |num_free_blocks_|.
num_free_blocks_ += span->length_;
void DiscardableSharedMemoryHeap::MergeIntoFreeListsClean(
std::unique_ptr<Span> span) {
DCHECK(span->shared_memory_);

if (base::FeatureList::IsEnabled(kReleaseDiscardableFreeListPages)) {
SCOPED_UMA_HISTOGRAM_SHORT_TIMER("Memory.Discardable.FreeListReleaseTime");
Expand All @@ -167,6 +218,9 @@ void DiscardableSharedMemoryHeap::MergeIntoFreeLists(
offset, span->length_ * base::GetPageSize());
}

// First add length of |span| to |num_free_blocks_|.
num_free_blocks_ += span->length_;

// Merge with previous span if possible.
auto prev_it = spans_.find(span->start_ - 1);
if (prev_it != spans_.end() && IsInFreeList(prev_it->second)) {
Expand Down Expand Up @@ -200,8 +254,9 @@ DiscardableSharedMemoryHeap::Split(Span* span, size_t blocks) {
DCHECK(blocks);
DCHECK_LT(blocks, span->length_);

std::unique_ptr<Span> leftover(new Span(
span->shared_memory_, span->start_ + blocks, span->length_ - blocks));
std::unique_ptr<Span> leftover(
new Span(span->shared_memory_, span->start_ + blocks,
span->length_ - blocks, span->memory_segment_));
DCHECK(leftover->length_ == 1 ||
spans_.find(leftover->start_) == spans_.end());
RegisterSpan(leftover.get());
Expand Down Expand Up @@ -363,8 +418,9 @@ DiscardableSharedMemoryHeap::Carve(Span* span, size_t blocks) {

const size_t extra = serving->length_ - blocks;
if (extra) {
std::unique_ptr<Span> leftover(
new Span(serving->shared_memory_, serving->start_ + blocks, extra));
std::unique_ptr<Span> leftover(new Span(serving->shared_memory_,
serving->start_ + blocks, extra,
serving->memory_segment_));
leftover->set_is_locked(false);
DCHECK(extra == 1 || spans_.find(leftover->start_) == spans_.end());
RegisterSpan(leftover.get());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

#include "base/callback.h"
#include "base/containers/linked_list.h"
#include "base/feature_list.h"
#include "base/macros.h"
#include "base/trace_event/process_memory_dump.h"
#include "components/discardable_memory/common/discardable_memory_export.h"
Expand All @@ -24,26 +25,43 @@ class DiscardableSharedMemory;

namespace discardable_memory {

DISCARDABLE_MEMORY_EXPORT extern const base::Feature
kReleaseDiscardableFreeListPages;

// Implements a heap of discardable shared memory. An array of free lists
// is used to keep track of free blocks.
class DISCARDABLE_MEMORY_EXPORT DiscardableSharedMemoryHeap {
private:
class ScopedMemorySegment;

public:
class DISCARDABLE_MEMORY_EXPORT Span : public base::LinkNode<Span> {
public:
~Span();
~Span() = default;

base::DiscardableSharedMemory* shared_memory() { return shared_memory_; }
size_t start() const { return start_; }
size_t length() const { return length_; }
void set_is_locked(bool is_locked) { is_locked_ = is_locked; }

// Marks all pages in this Span as dirty and returns the number of pages
// newly marked as dirty this way.
size_t MarkAsDirty();
// Marks all bytes in this Span as non-dirty, returning the number of
// pages marked as non-dirty this way.
size_t MarkAsClean();

ScopedMemorySegment* GetScopedMemorySegmentForTesting() const;

private:
friend class DiscardableSharedMemoryHeap;

Span(base::DiscardableSharedMemory* shared_memory,
size_t start,
size_t length);
size_t length,
DiscardableSharedMemoryHeap::ScopedMemorySegment* memory_segment);

DiscardableSharedMemoryHeap::ScopedMemorySegment* const memory_segment_;
base::DiscardableSharedMemory* shared_memory_;
size_t start_;
size_t length_;
Expand All @@ -69,6 +87,11 @@ class DISCARDABLE_MEMORY_EXPORT DiscardableSharedMemoryHeap {
// neighboring free spans when possible.
void MergeIntoFreeLists(std::unique_ptr<Span> span);

// Same as |MergeIntoFreeLists|, but doesn't mark the memory in the span as
// dirtied (this is used for keeping track of how much memory is dirtied in
// the freelist at any given time).
void MergeIntoFreeListsClean(std::unique_ptr<Span> span);

// Split an allocated span into two spans, one of length |blocks| followed
// by another span of length "span->length - blocks" blocks. Modifies |span|
// to point to the first span of length |blocks|. Return second span.
Expand Down Expand Up @@ -104,8 +127,10 @@ class DISCARDABLE_MEMORY_EXPORT DiscardableSharedMemoryHeap {
const char* name,
base::trace_event::ProcessMemoryDump* pmd) const;

size_t dirty_freed_memory_page_count_ = 0;

private:
class ScopedMemorySegment {
class DISCARDABLE_MEMORY_EXPORT ScopedMemorySegment {
public:
ScopedMemorySegment(
DiscardableSharedMemoryHeap* heap,
Expand All @@ -120,6 +145,8 @@ class DISCARDABLE_MEMORY_EXPORT DiscardableSharedMemoryHeap {

bool ContainsSpan(Span* span) const;

size_t CountMarkedPages() const;

base::trace_event::MemoryAllocatorDump* CreateMemoryAllocatorDump(
Span* span,
size_t block_size,
Expand All @@ -129,7 +156,10 @@ class DISCARDABLE_MEMORY_EXPORT DiscardableSharedMemoryHeap {
// Used for dumping memory statistics from the segment to chrome://tracing.
void OnMemoryDump(base::trace_event::ProcessMemoryDump* pmd) const;

size_t MarkPages(size_t start, size_t length, bool value);

private:
std::vector<bool> dirty_pages_;
DiscardableSharedMemoryHeap* const heap_;
std::unique_ptr<base::DiscardableSharedMemory> shared_memory_;
const size_t size_;
Expand Down

0 comments on commit 0b8370a

Please sign in to comment.