[sampling heap profiler] Fix a deadlock.
Profiler functions used to lock the mutex first and then update the
entered_ TLS variable. That allowed the following sequence, which ends
in a deadlock:

1. DoRecordAlloc locks the mutex and then calls entered_.Set(true).
2. If this happens to be the very first TLS access on the thread, it
   allocates the internal TLS vector.
3. That allocation reenters DoRecordAlloc, where entered_.Get() still
   returns false because step 1 has not finished.
4. The reentrant call therefore tries to lock the non-reentrant mutex
   again and deadlocks.

The fix swaps the order of locking the mutex and calling
entered_.Set(true), as sketched below.
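
For illustration only, a minimal self-contained sketch of the corrected
ordering. std::mutex, the thread_local flag, and RecordSample() are
stand-ins for base::Lock, base::ThreadLocalBoolean, and the real
profiler body; this is not the Chromium code.

    // Reentrancy-guard ordering adopted by this CL, sketched with
    // standard C++ stand-ins (std::mutex for base::Lock, thread_local
    // bool for base::ThreadLocalBoolean).
    #include <iostream>
    #include <mutex>

    std::mutex g_mutex;                   // Non-reentrant, like base::Lock.
    thread_local bool t_entered = false;  // Per-thread reentrancy guard.

    void RecordSample();  // Placeholder for the real sampling work.

    void DoRecordAlloc() {
      if (t_entered)  // A reentrant call bails out here...
        return;
      // ...because the guard is set BEFORE the mutex is taken. With the
      // old order (lock, then set), the first-ever TLS write could
      // allocate, reenter DoRecordAlloc while the guard was still
      // false, and attempt to lock the already-held mutex: a deadlock.
      t_entered = true;
      {
        std::lock_guard<std::mutex> lock(g_mutex);
        RecordSample();
      }
      t_entered = false;
    }

    void RecordSample() { std::cout << "sample recorded\n"; }

    int main() {
      DoRecordAlloc();
      return 0;
    }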

BUG=842396

Change-Id: I926c05755321c082215ca84bfc312348c89ecdca
Reviewed-on: https://chromium-review.googlesource.com/1056412
Commit-Queue: Alexei Filippov <alph@chromium.org>
Reviewed-by: Dmitry Gozman <dgozman@chromium.org>
Cr-Commit-Position: refs/heads/master@{#558122}
a1ph authored and Commit Bot committed May 12, 2018
1 parent 9ca6108 commit 0bd4e5d
Showing 1 changed file with 48 additions and 37 deletions.

base/sampling_heap_profiler/sampling_heap_profiler.cc
@@ -329,36 +329,39 @@ void SamplingHeapProfiler::DoRecordAlloc(size_t total_allocated,
                                          uint32_t skip_frames) {
   if (entered_.Get())
     return;
-  base::AutoLock lock(mutex_);
   entered_.Set(true);
-
-  Sample sample(size, total_allocated, ++g_last_sample_ordinal);
-  RecordStackTrace(&sample, skip_frames);
-
-  // Close the fast-path as inserting an element into samples_ may cause
-  // rehashing that invalidates iterators affecting all the concurrent
-  // readers.
-  base::subtle::Release_Store(&g_fast_path_is_closed, 1);
-  while (base::subtle::Acquire_Load(&g_operations_in_flight)) {
-    while (base::subtle::NoBarrier_Load(&g_operations_in_flight)) {
+  {
+    base::AutoLock lock(mutex_);
+
+    Sample sample(size, total_allocated, ++g_last_sample_ordinal);
+    RecordStackTrace(&sample, skip_frames);
+
+    // Close the fast-path as inserting an element into samples_ may cause
+    // rehashing that invalidates iterators affecting all the concurrent
+    // readers.
+    base::subtle::Release_Store(&g_fast_path_is_closed, 1);
+    while (base::subtle::Acquire_Load(&g_operations_in_flight)) {
+      while (base::subtle::NoBarrier_Load(&g_operations_in_flight)) {
+      }
     }
+    for (auto* observer : observers_)
+      observer->SampleAdded(sample.ordinal, size, total_allocated);
+    // TODO(alph): We can do better by keeping the fast-path open when
+    // we know insert won't cause rehashing.
+    samples_.emplace(address, std::move(sample));
+    base::subtle::Release_Store(&g_fast_path_is_closed, 0);
   }
-  for (auto* observer : observers_)
-    observer->SampleAdded(sample.ordinal, size, total_allocated);
-  // TODO(alph): We can do better by keeping the fast-path open when
-  // we know insert won't cause rehashing.
-  samples_.emplace(address, std::move(sample));
-  base::subtle::Release_Store(&g_fast_path_is_closed, 0);
-
   entered_.Set(false);
 }
 
 // static
 void SamplingHeapProfiler::RecordFree(void* address) {
   bool maybe_sampled = true;  // Pessimistically assume allocation was sampled.
   base::subtle::Barrier_AtomicIncrement(&g_operations_in_flight, 1);
-  if (LIKELY(!base::subtle::NoBarrier_Load(&g_fast_path_is_closed)))
-    maybe_sampled = instance_->samples_.count(address);
+  if (LIKELY(!base::subtle::NoBarrier_Load(&g_fast_path_is_closed))) {
+    maybe_sampled =
+        instance_->samples_.find(address) != instance_->samples_.end();
+  }
   base::subtle::Barrier_AtomicIncrement(&g_operations_in_flight, -1);
   if (maybe_sampled)
     instance_->DoRecordFree(address);
@@ -369,13 +372,15 @@ void SamplingHeapProfiler::DoRecordFree(void* address) {
     return;
   if (entered_.Get())
     return;
-  base::AutoLock lock(mutex_);
   entered_.Set(true);
-  auto it = samples_.find(address);
-  if (it != samples_.end()) {
-    for (auto* observer : observers_)
-      observer->SampleRemoved(it->second.ordinal);
-    samples_.erase(it);
+  {
+    base::AutoLock lock(mutex_);
+    auto it = samples_.find(address);
+    if (it != samples_.end()) {
+      for (auto* observer : observers_)
+        observer->SampleRemoved(it->second.ordinal);
+      samples_.erase(it);
+    }
   }
   entered_.Set(false);
 }
@@ -392,33 +397,39 @@ void SamplingHeapProfiler::SuppressRandomnessForTest(bool suppress) {
 }
 
 void SamplingHeapProfiler::AddSamplesObserver(SamplesObserver* observer) {
-  base::AutoLock lock(mutex_);
   CHECK(!entered_.Get());
   entered_.Set(true);
-  observers_.push_back(observer);
+  {
+    base::AutoLock lock(mutex_);
+    observers_.push_back(observer);
+  }
   entered_.Set(false);
 }
 
 void SamplingHeapProfiler::RemoveSamplesObserver(SamplesObserver* observer) {
-  base::AutoLock lock(mutex_);
   CHECK(!entered_.Get());
   entered_.Set(true);
-  auto it = std::find(observers_.begin(), observers_.end(), observer);
-  CHECK(it != observers_.end());
-  observers_.erase(it);
+  {
+    base::AutoLock lock(mutex_);
+    auto it = std::find(observers_.begin(), observers_.end(), observer);
+    CHECK(it != observers_.end());
+    observers_.erase(it);
+  }
   entered_.Set(false);
 }
 
 std::vector<SamplingHeapProfiler::Sample> SamplingHeapProfiler::GetSamples(
     uint32_t profile_id) {
-  base::AutoLock lock(mutex_);
   CHECK(!entered_.Get());
   entered_.Set(true);
   std::vector<Sample> samples;
-  for (auto& it : samples_) {
-    Sample& sample = it.second;
-    if (sample.ordinal > profile_id)
-      samples.push_back(sample);
+  {
+    base::AutoLock lock(mutex_);
+    for (auto& it : samples_) {
+      Sample& sample = it.second;
+      if (sample.ordinal > profile_id)
+        samples.push_back(sample);
+    }
   }
   entered_.Set(false);
   return samples;
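
For reference, the close-and-drain protocol used by DoRecordAlloc and
RecordFree above can be sketched with std::atomic stand-ins for the
base::subtle atomics. Illustrative only; types and names are simplified
and this is not the Chromium implementation.

    #include <atomic>
    #include <unordered_map>

    std::atomic<int> g_fast_path_is_closed{0};
    std::atomic<int> g_operations_in_flight{0};
    std::unordered_map<void*, int> g_samples;  // Toy stand-in for samples_.

    // Reader side (RecordFree fast path): announce the lookup, skip it
    // if the writer has closed the fast path, then retract the
    // announcement.
    bool MaybeSampled(void* address) {
      bool maybe_sampled = true;  // Pessimistic default.
      g_operations_in_flight.fetch_add(1);
      if (!g_fast_path_is_closed.load(std::memory_order_relaxed))
        maybe_sampled = g_samples.find(address) != g_samples.end();
      g_operations_in_flight.fetch_sub(1);
      return maybe_sampled;
    }

    // Writer side (DoRecordAlloc): close the fast path, spin until
    // in-flight readers drain, mutate the map (emplace may rehash and
    // invalidate iterators), then reopen.
    void InsertSample(void* address, int sample) {
      g_fast_path_is_closed.store(1, std::memory_order_release);
      while (g_operations_in_flight.load(std::memory_order_acquire)) {
      }
      g_samples.emplace(address, sample);
      g_fast_path_is_closed.store(0, std::memory_order_release);
    }

    int main() {
      int x;
      InsertSample(&x, 42);
      return MaybeSampled(&x) ? 0 : 1;
    }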
