Skip to content

Commit

Permalink
[ VM ] Add support for sampling heap profiler in PRODUCT mode
Browse files Browse the repository at this point in the history
Requires FORCE_INCLUDE_SAMPLING_HEAP_PROFILER to be defined.

TEST=DartAPI_HeapSampling_*

Change-Id: I1c95be4747b295823a8fae1f369f9dc5d95a274e
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/290620
Commit-Queue: Ben Konyi <bkonyi@google.com>
Reviewed-by: Ryan Macnak <rmacnak@google.com>
  • Loading branch information
bkonyi authored and Commit Queue committed Mar 24, 2023
1 parent 6890420 commit c927840
Show file tree
Hide file tree
Showing 21 changed files with 86 additions and 46 deletions.
4 changes: 4 additions & 0 deletions runtime/BUILD.gn
Expand Up @@ -160,6 +160,10 @@ config("dart_config") {
include_dirs += [ "../third_party/tcmalloc/gperftools/src" ]
}

if (dart_include_sampling_heap_profiler) {
defines += [ "FORCE_INCLUDE_SAMPLING_HEAP_PROFILER" ]
}

if (dart_use_compressed_pointers) {
defines += [ "DART_COMPRESSED_POINTERS" ]
}
Expand Down
3 changes: 3 additions & 0 deletions runtime/runtime_args.gni
Expand Up @@ -77,6 +77,9 @@ declare_args() {

# Whether to use compressed pointers.
dart_use_compressed_pointers = false

# Whether the sampling heap profiler should be included in product mode.
dart_include_sampling_heap_profiler = false
}

declare_args() {
Expand Down
2 changes: 1 addition & 1 deletion runtime/vm/class_finalizer.cc
Expand Up @@ -215,7 +215,7 @@ bool ClassFinalizer::ProcessPendingClasses() {
for (intptr_t i = 0; i < class_array.Length(); i++) {
cls ^= class_array.At(i);
FinalizeTypesInClass(cls);
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
cls.SetUserVisibleNameInClassTable();
#endif
}
Expand Down
14 changes: 8 additions & 6 deletions runtime/vm/class_table.cc
Expand Up @@ -52,14 +52,14 @@ ClassTable::ClassTable(ClassTableAllocator* allocator)
}

ClassTable::~ClassTable() {
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
for (intptr_t i = 1; i < classes_.num_cids(); i++) {
const char* name = UserVisibleNameFor(i);
if (name != nullptr) {
free(const_cast<char*>(name));
}
}
#endif // !defined(PRODUCT)
#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
}

void ClassTable::Register(const Class& cls) {
Expand All @@ -76,9 +76,9 @@ void ClassTable::Register(const Class& cls) {
cls.set_id(cid);
classes_.At<kClassIndex>(cid) = cls.ptr();
classes_.At<kSizeIndex>(cid) = static_cast<int32_t>(instance_size);
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
classes_.At<kClassNameIndex>(cid) = nullptr;
#endif // !defined(PRODUCT)
#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

if (did_grow) {
IsolateGroup::Current()->set_cached_class_table_table(
Expand Down Expand Up @@ -274,8 +274,7 @@ void ClassTable::PrintObjectLayout(const char* filename) {
}
#endif // defined(DART_PRECOMPILER)

#ifndef PRODUCT

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
void ClassTable::PopulateUserVisibleNames() {
Class& cls = Class::Handle();
for (intptr_t i = 0; i < classes_.num_cids(); ++i) {
Expand All @@ -285,6 +284,9 @@ void ClassTable::PopulateUserVisibleNames() {
}
}
}
#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

#if !defined(PRODUCT)

void ClassTable::PrintToJSONObject(JSONObject* object) {
Class& cls = Class::Handle();
Expand Down
21 changes: 16 additions & 5 deletions runtime/vm/class_table.h
Expand Up @@ -424,7 +424,11 @@ class ClassTable : public MallocAllocated {
cached_allocation_tracing_state_table_.store(
classes_.GetColumn<kAllocationTracingStateIndex>());
}
#else
void UpdateCachedAllocationTracingStateTablePointer() {}
#endif // !defined(PRODUCT)

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
void PopulateUserVisibleNames();

const char* UserVisibleNameFor(intptr_t cid) {
Expand All @@ -438,9 +442,7 @@ class ClassTable : public MallocAllocated {
ASSERT(classes_.At<kClassNameIndex>(cid) == nullptr);
classes_.At<kClassNameIndex>(cid) = name;
}
#else
void UpdateCachedAllocationTracingStateTablePointer() {}
#endif // !defined(PRODUCT)
#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

intptr_t NumCids() const {
return classes_.num_cids();
Expand Down Expand Up @@ -529,7 +531,7 @@ class ClassTable : public MallocAllocated {
top_level_classes_(original.allocator_) {
classes_.CopyFrom(original.classes_);

#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
// Copying classes_ doesn't perform a deep copy. Ensure we duplicate
// the class names to avoid double free crashes at shutdown.
for (intptr_t cid = 1; cid < classes_.num_cids(); ++cid) {
Expand All @@ -540,7 +542,7 @@ class ClassTable : public MallocAllocated {
}
}
}
#endif // !defined(PRODUCT)
#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

top_level_classes_.CopyFrom(original.top_level_classes_);
UpdateCachedAllocationTracingStateTablePointer();
Expand Down Expand Up @@ -571,6 +573,8 @@ class ClassTable : public MallocAllocated {
kUnboxedFieldBitmapIndex,
#if !defined(PRODUCT)
kAllocationTracingStateIndex,
#endif
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
kClassNameIndex,
#endif
};
Expand All @@ -583,6 +587,13 @@ class ClassTable : public MallocAllocated {
uint8_t,
const char*>
classes_;
#elif defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
CidIndexedTable<ClassIdTagType,
ClassPtr,
uint32_t,
UnboxedFieldBitmap,
const char*>
classes_;
#else
CidIndexedTable<ClassIdTagType, ClassPtr, uint32_t, UnboxedFieldBitmap>
classes_;
Expand Down
2 changes: 1 addition & 1 deletion runtime/vm/dart.cc
Expand Up @@ -898,7 +898,7 @@ ErrorPtr Dart::InitIsolateFromSnapshot(Thread* T,
return ApiError::New(message);
}
}
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
I->group()->class_table()->PopulateUserVisibleNames();
#endif

Expand Down
10 changes: 5 additions & 5 deletions runtime/vm/dart_api_impl.cc
Expand Up @@ -1847,29 +1847,29 @@ DART_EXPORT void Dart_NotifyDestroyed() {
}

DART_EXPORT void Dart_EnableHeapSampling() {
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
HeapProfileSampler::Enable(true);
#endif
}

DART_EXPORT void Dart_DisableHeapSampling() {
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
HeapProfileSampler::Enable(false);
#endif
}

DART_EXPORT void Dart_RegisterHeapSamplingCallback(
Dart_HeapSamplingCreateCallback create_callback,
Dart_HeapSamplingDeleteCallback delete_callback) {
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
HeapProfileSampler::SetSamplingCallback(create_callback, delete_callback);
#endif
}

DART_EXPORT void Dart_ReportSurvivingAllocations(
Dart_HeapSamplingReportCallback callback,
void* context) {
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
CHECK_NO_ISOLATE(Thread::Current());
IsolateGroup::ForEach([&](IsolateGroup* group) {
Thread::EnterIsolateGroupAsHelper(group, Thread::kUnknownTask,
Expand All @@ -1881,7 +1881,7 @@ DART_EXPORT void Dart_ReportSurvivingAllocations(
}

DART_EXPORT void Dart_SetHeapSamplingPeriod(intptr_t bytes) {
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
HeapProfileSampler::SetSamplingInterval(bytes);
#endif
}
Expand Down
16 changes: 13 additions & 3 deletions runtime/vm/dart_api_impl_test.cc
Expand Up @@ -10483,6 +10483,9 @@ TEST_CASE(DartAPI_UserTags) {
"Dart_SetCurrentUserTag expects argument 'user_tag' to be non-null");
}

#endif // !PRODUCT

#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
static void* HeapSamplingCreate(Dart_Isolate isolate,
Dart_IsolateGroup isolate_group) {
return strdup("test data");
Expand Down Expand Up @@ -10591,9 +10594,13 @@ TEST_CASE(DartAPI_HeapSampling_APIAllocations) {

Dart_ReportSurvivingAllocations(HeapSamplingReport, nullptr);
EXPECT(heap_samples > 0);
#if !defined(PRODUCT)
EXPECT_STREQ("List", last_allocation_cls);

ResetHeapSamplingState("String");
#else
EXPECT_STREQ("_List", last_allocation_cls);
ResetHeapSamplingState("_OneByteString");
#endif

// Re-enter the isolate.
Dart_EnterIsolate(isolate);
Expand All @@ -10610,7 +10617,11 @@ TEST_CASE(DartAPI_HeapSampling_APIAllocations) {
EXPECT(heap_samples > 0);
EXPECT(found_allocation);

#if !defined(PRODUCT)
ResetHeapSamplingState("String");
#else
ResetHeapSamplingState("_OneByteString");
#endif

// Re-enter the isolate.
Dart_EnterIsolate(isolate);
Expand Down Expand Up @@ -10702,8 +10713,7 @@ TEST_CASE(DartAPI_HeapSampling_NonTrivialSamplingPeriod) {
Dart_EnterIsolate(isolate);
Dart_DisableHeapSampling();
}

#endif // !PRODUCT
#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

#if defined(DART_ENABLE_HEAP_SNAPSHOT_WRITER)
TEST_CASE(DartAPI_WriteHeapSnapshot) {
Expand Down
4 changes: 2 additions & 2 deletions runtime/vm/heap/heap.cc
Expand Up @@ -63,7 +63,7 @@ Heap::Heap(IsolateGroup* isolate_group,
}

Heap::~Heap() {
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
Dart_HeapSamplingDeleteCallback cleanup =
HeapProfileSampler::delete_callback();
if (cleanup != nullptr) {
Expand Down Expand Up @@ -105,7 +105,7 @@ uword Heap::AllocateNew(Thread* thread, intptr_t size) {
uword Heap::AllocateOld(Thread* thread, intptr_t size, Page::PageType type) {
ASSERT(thread->no_safepoint_scope_depth() == 0);

#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
if (HeapProfileSampler::enabled()) {
thread->heap_sampler().SampleOldSpaceAllocation(size);
}
Expand Down
6 changes: 3 additions & 3 deletions runtime/vm/heap/heap.h
Expand Up @@ -48,7 +48,7 @@ class Heap {
kCanonicalHashes,
kObjectIds,
kLoadingUnits,
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
kHeapSamplingData,
#endif
kNumWeakSelectors
Expand Down Expand Up @@ -242,7 +242,7 @@ class Heap {
return GetWeakEntry(raw_obj, kLoadingUnits);
}

#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
void SetHeapSamplingData(ObjectPtr obj, void* data) {
SetWeakEntry(obj, kHeapSamplingData, reinterpret_cast<intptr_t>(data));
}
Expand Down Expand Up @@ -274,7 +274,7 @@ class Heap {
void ForwardWeakEntries(ObjectPtr before_object, ObjectPtr after_object);
void ForwardWeakTables(ObjectPointerVisitor* visitor);

#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
void ReportSurvivingAllocations(Dart_HeapSamplingReportCallback callback,
void* context) {
new_weak_tables_[kHeapSamplingData]->ReportSurvivingAllocations(callback,
Expand Down
2 changes: 1 addition & 1 deletion runtime/vm/heap/marker.cc
Expand Up @@ -601,7 +601,7 @@ void GCMarker::ProcessWeakTables(Thread* thread) {
TIMELINE_FUNCTION_GC_DURATION(thread, "ProcessWeakTables");
for (int sel = 0; sel < Heap::kNumWeakSelectors; sel++) {
Dart_HeapSamplingDeleteCallback cleanup = nullptr;
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
if (sel == Heap::kHeapSamplingData) {
cleanup = HeapProfileSampler::delete_callback();
}
Expand Down
2 changes: 1 addition & 1 deletion runtime/vm/heap/page.h
Expand Up @@ -257,7 +257,7 @@ class Page {
thread->set_top(0);
thread->set_end(0);
thread->set_true_end(0);
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
thread->heap_sampler().HandleReleasedTLAB(Thread::Current());
#endif
}
Expand Down
4 changes: 2 additions & 2 deletions runtime/vm/heap/sampler.cc
Expand Up @@ -2,7 +2,7 @@
// for details. All rights reserved. Use of this source code is governed by a
// BSD-style license that can be found in the LICENSE file.

#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

#include <math.h>
#include <algorithm>
Expand Down Expand Up @@ -366,4 +366,4 @@ void HeapProfileSampler::SetNextSamplingIntervalLocked(intptr_t next_interval) {

} // namespace dart

#endif // !defined(PRODUCT)
#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
4 changes: 2 additions & 2 deletions runtime/vm/heap/sampler.h
Expand Up @@ -5,7 +5,7 @@
#ifndef RUNTIME_VM_HEAP_SAMPLER_H_
#define RUNTIME_VM_HEAP_SAMPLER_H_

#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)

#include <atomic>

Expand Down Expand Up @@ -173,5 +173,5 @@ class HeapProfileSampler {

} // namespace dart

#endif // !defined(PRODUCT)
#endif // !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
#endif // RUNTIME_VM_HEAP_SAMPLER_H_
8 changes: 4 additions & 4 deletions runtime/vm/heap/scavenger.cc
Expand Up @@ -1446,7 +1446,7 @@ void Scavenger::MournWeakTables() {
auto table_new = WeakTable::NewFrom(table);

Dart_HeapSamplingDeleteCallback cleanup = nullptr;
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
if (sel == Heap::kHeapSamplingData) {
cleanup = HeapProfileSampler::delete_callback();
}
Expand Down Expand Up @@ -1617,7 +1617,7 @@ void Scavenger::TryAllocateNewTLAB(Thread* thread,
ASSERT(heap_ != Dart::vm_isolate_group()->heap());
ASSERT(!scavenging_);

#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
// Find the remaining space available in the TLAB before abandoning it so we
// can reset the heap sampling offset in the new TLAB.
intptr_t remaining = thread->true_end() - thread->top();
Expand All @@ -1643,7 +1643,7 @@ void Scavenger::TryAllocateNewTLAB(Thread* thread,
(page->end() - kAllocationRedZoneSize) - page->object_end();
if (available >= min_size) {
page->Acquire(thread);
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
thread->heap_sampler().HandleNewTLAB(remaining, /*is_first_tlab=*/false);
#endif
return;
Expand All @@ -1655,7 +1655,7 @@ void Scavenger::TryAllocateNewTLAB(Thread* thread,
return;
}
page->Acquire(thread);
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
thread->heap_sampler().HandleNewTLAB(remaining, is_first_tlab);
#endif
}
Expand Down
2 changes: 1 addition & 1 deletion runtime/vm/heap/weak_table.cc
Expand Up @@ -140,7 +140,7 @@ void WeakTable::Forward(ObjectPointerVisitor* visitor) {
Rehash();
}

#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
void WeakTable::ReportSurvivingAllocations(
Dart_HeapSamplingReportCallback callback,
void* context) {
Expand Down
4 changes: 2 additions & 2 deletions runtime/vm/isolate.cc
Expand Up @@ -633,7 +633,7 @@ Thread* IsolateGroup::ScheduleThreadLocked(MonitorLocker* ml,
thread->set_safepoint_state(
Thread::SetBypassSafepoints(bypass_safepoint, 0));
thread->set_vm_tag(VMTag::kVMTagId);
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
thread->heap_sampler().Initialize();
#endif
ASSERT(thread->no_safepoint_scope_depth() == 0);
Expand Down Expand Up @@ -692,7 +692,7 @@ void IsolateGroup::UnscheduleThreadLocked(MonitorLocker* ml,
thread->set_execution_state(Thread::kThreadInNative);
thread->set_safepoint_state(Thread::AtSafepointField::encode(true) |
Thread::AtDeoptSafepointField::encode(true));
#if !defined(PRODUCT)
#if !defined(PRODUCT) || defined(FORCE_INCLUDE_SAMPLING_HEAP_PROFILER)
thread->heap_sampler().Cleanup();
#endif

Expand Down

0 comments on commit c927840

Please sign in to comment.