Skip to content

Commit

Permalink
Move recursion protection to PoissonAllocationSampler
Browse files Browse the repository at this point in the history
Moving the ReentryGuard from the dispatcher and hooks into the
PoissonAllocationSampler enables observers to receive notifications on
other observers' memory allocations.

Furthermore, this defers the overhead of creating a ReentryGuard (which
involves accessing thread-local storage) until it is really necessary.

Bug: 1137393
Change-Id: Ia3fdede065af0b240357cb85508a4efbe953ce8d
Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/4184715
Reviewed-by: Benoit Lize <lizeb@chromium.org>
Reviewed-by: Joe Mason <joenotcharles@google.com>
Commit-Queue: Richard Townsend <richard.townsend@arm.com>
Cr-Commit-Position: refs/heads/main@{#1096754}
  • Loading branch information
andre-kempe-arm authored and Chromium LUCI CQ committed Jan 25, 2023
1 parent fa811b4 commit 1f9ea58
Show file tree
Hide file tree
Showing 3 changed files with 76 additions and 95 deletions.
79 changes: 36 additions & 43 deletions base/allocator/dispatcher/dispatcher.cc
Expand Up @@ -6,7 +6,6 @@

#include "base/allocator/buildflags.h"
#include "base/allocator/dispatcher/internal/dispatch_data.h"
#include "base/allocator/dispatcher/reentry_guard.h"
#include "base/allocator/partition_allocator/partition_alloc.h"
#include "base/allocator/partition_allocator/partition_alloc_buildflags.h"
#include "base/allocator/partition_allocator/shim/allocator_shim.h"
Expand All @@ -26,68 +25,63 @@ namespace {
using allocator_shim::AllocatorDispatch;

// Shim hook for plain malloc-style allocation. Forwards to the next dispatch
// in the chain, then reports the result to the sampler. Recursion protection
// now lives inside PoissonAllocationSampler, so no ReentryGuard here.
void* AllocFn(const AllocatorDispatch* self, size_t size, void* context) {
  void* address = self->next->alloc_function(self->next, size, context);

  PoissonAllocationSampler::RecordAlloc(
      address, size, AllocationSubsystem::kAllocatorShim, nullptr);

  return address;
}

// Shim hook for the non-crashing (unchecked) allocation path; |address| may be
// nullptr on failure, which the sampler tolerates.
void* AllocUncheckedFn(const AllocatorDispatch* self,
                       size_t size,
                       void* context) {
  void* address =
      self->next->alloc_unchecked_function(self->next, size, context);

  PoissonAllocationSampler::RecordAlloc(
      address, size, AllocationSubsystem::kAllocatorShim, nullptr);

  return address;
}

// Shim hook for calloc-style allocation. The sampler is told the total byte
// count (n * size), matching what the caller actually receives.
void* AllocZeroInitializedFn(const AllocatorDispatch* self,
                             size_t n,
                             size_t size,
                             void* context) {
  void* address =
      self->next->alloc_zero_initialized_function(self->next, n, size, context);

  PoissonAllocationSampler::RecordAlloc(
      address, n * size, AllocationSubsystem::kAllocatorShim, nullptr);

  return address;
}

// Shim hook for aligned allocation (e.g. memalign). Alignment is handled by
// the next dispatch; only the size is reported to the sampler.
void* AllocAlignedFn(const AllocatorDispatch* self,
                     size_t alignment,
                     size_t size,
                     void* context) {
  void* address =
      self->next->alloc_aligned_function(self->next, alignment, size, context);

  PoissonAllocationSampler::RecordAlloc(
      address, size, AllocationSubsystem::kAllocatorShim, nullptr);

  return address;
}

// Shim hook for realloc. The old block is reported as freed before the
// underlying realloc runs (once it returns, the old address may already be
// reused by another thread), and the new block is reported afterwards.
void* ReallocFn(const AllocatorDispatch* self,
                void* address,
                size_t size,
                void* context) {
  // Note: size == 0 actually performs free.
  PoissonAllocationSampler::RecordFree(address);
  address = self->next->realloc_function(self->next, address, size, context);

  PoissonAllocationSampler::RecordAlloc(
      address, size, AllocationSubsystem::kAllocatorShim, nullptr);

  return address;
}

Expand Down Expand Up @@ -118,24 +112,25 @@ unsigned BatchMallocFn(const AllocatorDispatch* self,
void** results,
unsigned num_requested,
void* context) {
ReentryGuard guard;
unsigned num_allocated = self->next->batch_malloc_function(
self->next, size, results, num_requested, context);
if (LIKELY(guard)) {
for (unsigned i = 0; i < num_allocated; ++i) {
PoissonAllocationSampler::RecordAlloc(
results[i], size, AllocationSubsystem::kAllocatorShim, nullptr);
}

for (unsigned i = 0; i < num_allocated; ++i) {
PoissonAllocationSampler::RecordAlloc(
results[i], size, AllocationSubsystem::kAllocatorShim, nullptr);
}

return num_allocated;
}

// Shim hook for batch free (macOS malloc zones). Frees are reported before
// forwarding, while the addresses are still guaranteed not to be reused.
void BatchFreeFn(const AllocatorDispatch* self,
                 void** to_be_freed,
                 unsigned num_to_be_freed,
                 void* context) {
  for (unsigned i = 0; i < num_to_be_freed; ++i) {
    PoissonAllocationSampler::RecordFree(to_be_freed[i]);
  }

  self->next->batch_free_function(self->next, to_be_freed, num_to_be_freed,
                                  context);
}
Expand All @@ -159,13 +154,12 @@ static void* AlignedMallocFn(const AllocatorDispatch* self,
size_t size,
size_t alignment,
void* context) {
ReentryGuard guard;
void* address =
self->next->aligned_malloc_function(self->next, size, alignment, context);
if (LIKELY(guard)) {
PoissonAllocationSampler::RecordAlloc(
address, size, AllocationSubsystem::kAllocatorShim, nullptr);
}

PoissonAllocationSampler::RecordAlloc(
address, size, AllocationSubsystem::kAllocatorShim, nullptr);

return address;
}

Expand All @@ -174,15 +168,14 @@ static void* AlignedReallocFn(const AllocatorDispatch* self,
size_t size,
size_t alignment,
void* context) {
ReentryGuard guard;
// Note: size == 0 actually performs free.
PoissonAllocationSampler::RecordFree(address);
address = self->next->aligned_realloc_function(self->next, address, size,
alignment, context);
if (LIKELY(guard)) {
PoissonAllocationSampler::RecordAlloc(
address, size, AllocationSubsystem::kAllocatorShim, nullptr);
}

PoissonAllocationSampler::RecordAlloc(
address, size, AllocationSubsystem::kAllocatorShim, nullptr);

return address;
}

Expand Down
77 changes: 25 additions & 52 deletions base/allocator/dispatcher/internal/dispatcher_internal.h
Expand Up @@ -9,9 +9,9 @@
#include "base/allocator/dispatcher/configuration.h"
#include "base/allocator/dispatcher/internal/dispatch_data.h"
#include "base/allocator/dispatcher/internal/tools.h"
#include "base/allocator/dispatcher/reentry_guard.h"
#include "base/allocator/dispatcher/subsystem.h"
#include "base/allocator/partition_allocator/partition_alloc_buildflags.h"
#include "base/check.h"
#include "base/compiler_specific.h"
#include "build/build_config.h"

Expand Down Expand Up @@ -115,71 +115,60 @@ struct DispatcherImpl {
// Dispatcher hook for plain allocation: forward down the chain, then notify
// all registered observers. Reentrancy protection is handled inside the
// observers (see PoissonAllocationSampler), not here.
static void* AllocFn(const AllocatorDispatch* self,
                     size_t size,
                     void* context) {
  void* const address = self->next->alloc_function(self->next, size, context);

  DoNotifyAllocation(address, size, AllocationSubsystem::kAllocatorShim);

  return address;
}

// Dispatcher hook for the unchecked (non-crashing) allocation path; |address|
// may be nullptr on failure.
static void* AllocUncheckedFn(const AllocatorDispatch* self,
                              size_t size,
                              void* context) {
  void* const address =
      self->next->alloc_unchecked_function(self->next, size, context);

  DoNotifyAllocation(address, size, AllocationSubsystem::kAllocatorShim);

  return address;
}

// Dispatcher hook for calloc-style allocation; observers are notified with
// the total byte count (n * size).
static void* AllocZeroInitializedFn(const AllocatorDispatch* self,
                                    size_t n,
                                    size_t size,
                                    void* context) {
  void* const address = self->next->alloc_zero_initialized_function(
      self->next, n, size, context);

  DoNotifyAllocation(address, n * size, AllocationSubsystem::kAllocatorShim);

  return address;
}

// Dispatcher hook for aligned allocation; only the size is passed on to
// observers.
static void* AllocAlignedFn(const AllocatorDispatch* self,
                            size_t alignment,
                            size_t size,
                            void* context) {
  void* const address = self->next->alloc_aligned_function(
      self->next, alignment, size, context);

  DoNotifyAllocation(address, size, AllocationSubsystem::kAllocatorShim);

  return address;
}

// Dispatcher hook for realloc: notify the free of the old block before
// forwarding (afterwards the old address could already be reused by another
// thread), then notify the new allocation.
static void* ReallocFn(const AllocatorDispatch* self,
                       void* address,
                       size_t size,
                       void* context) {
  // Note: size == 0 actually performs free.
  // Note: ReentryGuard prevents from recursions introduced by malloc and
  // initialization of thread local storage which happen in the allocation
  // path only (please see docs of ReentryGuard for full details). Therefore,
  // the DoNotifyFree doesn't need to be guarded. Instead, making it unguarded
  // also ensures proper notification.
  DoNotifyFree(address);
  void* const reallocated_address =
      self->next->realloc_function(self->next, address, size, context);

  DoNotifyAllocation(reallocated_address, size,
                     AllocationSubsystem::kAllocatorShim);

  return reallocated_address;
}

Expand All @@ -191,8 +180,6 @@ struct DispatcherImpl {
// being freed before calling free_function, as once the latter is executed
// the address becomes available and can be allocated by another thread.
// That would be racy otherwise.
// Note: The code doesn't need to protect from recursions using
// ReentryGuard, see ReallocFn for details.
DoNotifyFree(address);
self->next->free_function(self->next, address, context);
}
Expand All @@ -214,14 +201,10 @@ struct DispatcherImpl {
void** results,
unsigned num_requested,
void* context) {
ReentryGuard guard;
unsigned const num_allocated = self->next->batch_malloc_function(
self->next, size, results, num_requested, context);
if (LIKELY(guard)) {
for (unsigned i = 0; i < num_allocated; ++i) {
DoNotifyAllocation(results[i], size,
AllocationSubsystem::kAllocatorShim);
}
for (unsigned i = 0; i < num_allocated; ++i) {
DoNotifyAllocation(results[i], size, AllocationSubsystem::kAllocatorShim);
}
return num_allocated;
}
Expand All @@ -243,8 +226,6 @@ struct DispatcherImpl {
void* address,
size_t size,
void* context) {
// Note: The code doesn't need to protect from recursions using
// ReentryGuard, see ReallocFn for details.
DoNotifyFree(address);
self->next->free_definite_size_function(self->next, address, size, context);
}
Expand All @@ -260,12 +241,11 @@ struct DispatcherImpl {
size_t size,
size_t alignment,
void* context) {
ReentryGuard guard;
void* const address = self->next->aligned_malloc_function(
self->next, size, alignment, context);
if (LIKELY(guard)) {
DoNotifyAllocation(address, size, AllocationSubsystem::kAllocatorShim);
}

DoNotifyAllocation(address, size, AllocationSubsystem::kAllocatorShim);

return address;
}

Expand All @@ -274,26 +254,19 @@ struct DispatcherImpl {
size_t size,
size_t alignment,
void* context) {
ReentryGuard guard;
// Note: size == 0 actually performs free.
// Note: DoNotifyFree doesn't need to protect from recursions using
// ReentryGuard, see ReallocFn for details.
// Instead, making it unguarded also ensures proper notification of the free
// portion.
DoNotifyFree(address);
address = self->next->aligned_realloc_function(self->next, address, size,
alignment, context);
if (LIKELY(guard)) {
DoNotifyAllocation(address, size, AllocationSubsystem::kAllocatorShim);
}

DoNotifyAllocation(address, size, AllocationSubsystem::kAllocatorShim);

return address;
}

// Dispatcher hook for Windows _aligned_free: notify observers before
// forwarding so the address cannot have been reused yet.
static void AlignedFreeFn(const AllocatorDispatch* self,
                          void* address,
                          void* context) {
  DoNotifyFree(address);
  self->next->aligned_free_function(self->next, address, context);
}
Expand Down
15 changes: 15 additions & 0 deletions base/sampling_heap_profiler/poisson_allocation_sampler.h
Expand Up @@ -8,6 +8,7 @@
#include <atomic>
#include <vector>

#include "base/allocator/dispatcher/reentry_guard.h"
#include "base/allocator/dispatcher/subsystem.h"
#include "base/base_export.h"
#include "base/compiler_specific.h"
Expand Down Expand Up @@ -261,6 +262,15 @@ inline void PoissonAllocationSampler::OnAllocation(
return;
}

// Note: ReentryGuard prevents from recursions introduced by malloc and
// initialization of thread local storage which happen in the allocation path
// only (please see docs of ReentryGuard for full details).
allocator::dispatcher::ReentryGuard reentry_guard;

if (UNLIKELY(!reentry_guard)) {
return;
}

DoRecordAllocation(state, address, size, type, context);
}

Expand Down Expand Up @@ -327,6 +337,11 @@ inline void PoissonAllocationSampler::OnFree(void* address) {
return;
}

// Note: ReentryGuard prevents from recursions introduced by malloc and
// initialization of thread local storage which happen in the allocation path
// only (please see docs of ReentryGuard for full details). Therefore, the
// DoNotifyFree doesn't need to be guarded.

DoRecordFree(address);
}

Expand Down

0 comments on commit 1f9ea58

Please sign in to comment.