[vm] Bypass malloc for large Zone allocations to avoid jemalloc leaks.
Revert 9a07ad8.

Bug: flutter/flutter#29007
Change-Id: I6a5f51f0c3a54d354ec5f8495677d46f94d8a1d3
Reviewed-on: https://dart-review.googlesource.com/c/sdk/+/100568
Commit-Queue: Ryan Macnak <rmacnak@google.com>
Reviewed-by: Zach Anderson <zra@google.com>
Reviewed-by: Vyacheslav Egorov <vegorov@google.com>
rmacnak-google authored and commit-bot@chromium.org committed Apr 30, 2019
Parent: a7325f9 · Commit: 347b72c
Showing 8 changed files with 151 additions and 38 deletions.
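For context, the failure mode tracked in flutter/flutter#29007 is allocator retention rather than a classic leak: the regexp interpreter repeatedly allocated and freed its 64 K-entry backtrack stack (512 KB on 64-bit targets) through malloc, and under jemalloc such freed blocks can be retained in size-class caches, so resident memory keeps climbing even though nothing is leaked in the accounting sense. The sketch below is not part of the commit; it is a condensed, standalone analogue of the new StressMallocDirectly test added in zone_test.cc, showing the allocation pattern in isolation. Backing these buffers with VirtualMemory (as this commit does for large zone segments) returns the pages to the OS instead.

// Standalone sketch of the problematic pattern, assuming a jemalloc-backed
// malloc. Every block is freed, yet RSS after the loop can stay well above
// the baseline because freed blocks sit in the allocator's size-class caches.
#include <cstdio>
#include <cstdlib>

int main() {
  const size_t kKB = 1024;
  const size_t sizes[] = {64 * kKB, 128 * kKB, 256 * kKB, 512 * kKB};
  void* blocks[4];
  for (int iteration = 0; iteration < 6000; iteration++) {
    for (int j = 0; j < 4; j++) blocks[j] = malloc(sizes[j]);
    for (int j = 0; j < 4; j++) free(blocks[j]);
  }
  // Compare process RSS here against its value before the loop: with a
  // retaining allocator it can remain far above the baseline, which is the
  // growth reported in flutter/flutter#29007. The same sizes obtained via
  // mmap/munmap would have been returned to the OS.
  printf("done\n");
  return 0;
}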
6 changes: 1 addition & 5 deletions runtime/vm/isolate.cc
@@ -949,8 +949,7 @@ Isolate::Isolate(const Dart_IsolateFlags& api_flags)
       catch_entry_moves_cache_(),
       embedder_entry_points_(NULL),
       obfuscation_map_(NULL),
-      reverse_pc_lookup_cache_(nullptr),
-      irregexp_backtrack_stack_(nullptr) {
+      reverse_pc_lookup_cache_(nullptr) {
   FlagsCopyFrom(api_flags);
   SetErrorsFatal(true);
   set_compilation_allowed(true);
@@ -982,9 +981,6 @@ Isolate::~Isolate() {
   // RELEASE_ASSERT(reload_context_ == NULL);
 #endif  // !defined(PRODUCT) && !defined(DART_PRECOMPILED_RUNTIME)
 
-  delete[] irregexp_backtrack_stack_;
-  irregexp_backtrack_stack_ = nullptr;
-
   delete reverse_pc_lookup_cache_;
   reverse_pc_lookup_cache_ = nullptr;
 
14 changes: 0 additions & 14 deletions runtime/vm/isolate.h
@@ -734,18 +734,6 @@ class Isolate : public BaseIsolate {
     reverse_pc_lookup_cache_ = table;
   }
 
-  // This doesn't belong here, but to avoid triggering bugs in jemalloc we
-  // allocate the irregexp interpreter's stack once per isolate instead of once
-  // per regexp execution.
-  // See https://github.com/flutter/flutter/issues/29007
-  static constexpr intptr_t kIrregexpBacktrackStackSize = 1 << 16;
-  intptr_t* irregexp_backtrack_stack() {
-    if (irregexp_backtrack_stack_ == nullptr) {
-      irregexp_backtrack_stack_ = new intptr_t[kIrregexpBacktrackStackSize];
-    }
-    return irregexp_backtrack_stack_;
-  }
-
   // Isolate-specific flag handling.
   static void FlagsInitialize(Dart_IsolateFlags* api_flags);
   void FlagsCopyTo(Dart_IsolateFlags* api_flags) const;
@@ -1058,8 +1046,6 @@ class Isolate : public BaseIsolate {
 
   ReversePcLookupCache* reverse_pc_lookup_cache_;
 
-  intptr_t* irregexp_backtrack_stack_;
-
   static Dart_IsolateCreateCallback create_callback_;
   static Dart_IsolateShutdownCallback shutdown_callback_;
   static Dart_IsolateCleanupCallback cleanup_callback_;
2 changes: 1 addition & 1 deletion runtime/vm/regexp_assembler_bytecode.cc
@@ -494,7 +494,7 @@ static IrregexpInterpreter::IrregexpResult ExecRaw(const RegExp& regexp,
       TypedData::Handle(zone, regexp.bytecode(is_one_byte, sticky));
   ASSERT(!bytecode.IsNull());
   IrregexpInterpreter::IrregexpResult result =
-      IrregexpInterpreter::Match(bytecode, subject, raw_output, index);
+      IrregexpInterpreter::Match(bytecode, subject, raw_output, index, zone);
 
   if (result == IrregexpInterpreter::RE_SUCCESS) {
     // Copy capture results to the start of the registers array.
19 changes: 10 additions & 9 deletions runtime/vm/regexp_interpreter.cc
@@ -127,17 +127,16 @@ static int32_t Load16Aligned(const uint8_t* pc) {
 // matching terminates.
 class BacktrackStack {
  public:
-  explicit BacktrackStack(Isolate* isolate) {
-    data_ = isolate->irregexp_backtrack_stack();
+  explicit BacktrackStack(Zone* zone) {
+    data_ = zone->Alloc<intptr_t>(kBacktrackStackSize);
   }
 
   intptr_t* data() const { return data_; }
 
   intptr_t max_size() const { return kBacktrackStackSize; }
 
  private:
-  static constexpr intptr_t kBacktrackStackSize =
-      Isolate::kIrregexpBacktrackStackSize;
+  static const intptr_t kBacktrackStackSize = 1 << 16;
 
   intptr_t* data_;
 
@@ -149,12 +148,13 @@ static IrregexpInterpreter::IrregexpResult RawMatch(const uint8_t* code_base,
                                                     const String& subject,
                                                     int32_t* registers,
                                                     intptr_t current,
-                                                    uint32_t current_char) {
+                                                    uint32_t current_char,
+                                                    Zone* zone) {
   const uint8_t* pc = code_base;
   // BacktrackStack ensures that the memory allocated for the backtracking stack
   // is returned to the system or cached if there is no stack being cached at
   // the moment.
-  BacktrackStack backtrack_stack(Isolate::Current());
+  BacktrackStack backtrack_stack(zone);
   intptr_t* backtrack_stack_base = backtrack_stack.data();
   intptr_t* backtrack_sp = backtrack_stack_base;
   intptr_t backtrack_stack_space = backtrack_stack.max_size();
@@ -627,7 +627,8 @@ IrregexpInterpreter::IrregexpResult IrregexpInterpreter::Match(
     const TypedData& bytecode,
     const String& subject,
     int32_t* registers,
-    intptr_t start_position) {
+    intptr_t start_position,
+    Zone* zone) {
   NoSafepointScope no_safepoint;
   const uint8_t* code_base = reinterpret_cast<uint8_t*>(bytecode.DataAddr(0));
 
@@ -638,10 +639,10 @@ IrregexpInterpreter::IrregexpResult IrregexpInterpreter::Match(
 
   if (subject.IsOneByteString() || subject.IsExternalOneByteString()) {
     return RawMatch<uint8_t>(code_base, subject, registers, start_position,
-                             previous_char);
+                             previous_char, zone);
   } else if (subject.IsTwoByteString() || subject.IsExternalTwoByteString()) {
     return RawMatch<uint16_t>(code_base, subject, registers, start_position,
-                              previous_char);
+                              previous_char, zone);
   } else {
     UNREACHABLE();
     return IrregexpInterpreter::RE_FAILURE;
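The net effect on the interpreter is that the backtrack stack becomes an ordinary zone allocation with the same lifetime as the regexp execution that needs it, instead of a heap array cached on the Isolate. A minimal usage sketch of that pattern is below; it is illustrative only (UseScratchBuffer is a made-up name), but StackZone, GetZone, and Zone::Alloc are the same VM-internal APIs the new zone_test.cc cases exercise.

// Illustrative sketch, not part of the commit: carving a transient 64 K-entry
// scratch buffer out of the current zone. No explicit free is needed; the
// buffer goes away with the StackZone, and after this change an allocation of
// this size is backed by VirtualMemory rather than malloc.
void UseScratchBuffer(Thread* thread) {
  StackZone stack_zone(thread);
  Zone* zone = stack_zone.GetZone();
  intptr_t* scratch = zone->Alloc<intptr_t>(1 << 16);  // Same size as the
                                                       // backtrack stack.
  // ... use scratch ...
}  // Zone destroyed here; its large segment is unmapped, not free()d.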
3 changes: 2 additions & 1 deletion runtime/vm/regexp_interpreter.h
@@ -20,7 +20,8 @@ class IrregexpInterpreter : public AllStatic {
   static IrregexpResult Match(const TypedData& bytecode,
                               const String& subject,
                               int32_t* captures,
-                              intptr_t start_position);
+                              intptr_t start_position,
+                              Zone* zone);
 };
 
 }  // namespace dart
59 changes: 51 additions & 8 deletions runtime/vm/zone.cc
@@ -11,6 +11,7 @@
 #include "vm/handles_impl.h"
 #include "vm/heap/heap.h"
 #include "vm/os.h"
+#include "vm/virtual_memory.h"
 
 namespace dart {
 
@@ -21,25 +22,28 @@ class Zone::Segment {
  public:
   Segment* next() const { return next_; }
   intptr_t size() const { return size_; }
+  VirtualMemory* memory() const { return memory_; }
 
   uword start() { return address(sizeof(Segment)); }
   uword end() { return address(size_); }
 
   // Allocate or delete individual segments.
   static Segment* New(intptr_t size, Segment* next);
+  static Segment* NewLarge(intptr_t size, Segment* next);
   static void DeleteSegmentList(Segment* segment);
+  static void DeleteLargeSegmentList(Segment* segment);
   static void IncrementMemoryCapacity(uintptr_t size);
   static void DecrementMemoryCapacity(uintptr_t size);
 
  private:
   Segment* next_;
   intptr_t size_;
+  VirtualMemory* memory_;
+  void* alignment_;
 
   // Computes the address of the nth byte in this segment.
   uword address(int n) { return reinterpret_cast<uword>(this) + n; }
 
-  static void Delete(Segment* segment) { free(segment); }
-
   DISALLOW_IMPLICIT_CONSTRUCTORS(Segment);
 };
 
@@ -49,13 +53,36 @@ Zone::Segment* Zone::Segment::New(intptr_t size, Zone::Segment* next) {
   if (result == NULL) {
     OUT_OF_MEMORY();
   }
+  ASSERT(Utils::IsAligned(result->start(), Zone::kAlignment));
 #ifdef DEBUG
   // Zap the entire allocated segment (including the header).
   memset(result, kZapUninitializedByte, size);
 #endif
   result->next_ = next;
   result->size_ = size;
+  result->memory_ = nullptr;
+  result->alignment_ = nullptr;  // Avoid unused variable warnings.
+  IncrementMemoryCapacity(size);
+  return result;
+}
+
+Zone::Segment* Zone::Segment::NewLarge(intptr_t size, Zone::Segment* next) {
+  size = Utils::RoundUp(size, VirtualMemory::PageSize());
+  VirtualMemory* memory = VirtualMemory::Allocate(size, false, "dart-zone");
+  if (memory == NULL) {
+    OUT_OF_MEMORY();
+  }
+  Segment* result = reinterpret_cast<Segment*>(memory->start());
+#ifdef DEBUG
+  // Zap the entire allocated segment (including the header).
+  memset(result, kZapUninitializedByte, size);
+#endif
+  result->next_ = next;
+  result->size_ = size;
+  result->memory_ = memory;
+  result->alignment_ = nullptr;  // Avoid unused variable warnings.
+
+  LSAN_REGISTER_ROOT_REGION(result, sizeof(*result));
+
   IncrementMemoryCapacity(size);
   return result;
 }
@@ -69,7 +96,23 @@ void Zone::Segment::DeleteSegmentList(Segment* head) {
     // Zap the entire current segment (including the header).
     memset(current, kZapDeletedByte, current->size());
 #endif
-    Segment::Delete(current);
+    free(current);
+    current = next;
+  }
+}
+
+void Zone::Segment::DeleteLargeSegmentList(Segment* head) {
+  Segment* current = head;
+  while (current != NULL) {
+    DecrementMemoryCapacity(current->size());
+    Segment* next = current->next();
+    VirtualMemory* memory = current->memory();
+#ifdef DEBUG
+    // Zap the entire current segment (including the header).
+    memset(current, kZapDeletedByte, current->size());
+#endif
+    LSAN_UNREGISTER_ROOT_REGION(current, sizeof(*current));
+    delete memory;
     current = next;
   }
 }
@@ -129,7 +172,7 @@ void Zone::DeleteAll() {
     Segment::DeleteSegmentList(head_);
   }
   if (large_segments_ != NULL) {
-    Segment::DeleteSegmentList(large_segments_);
+    Segment::DeleteLargeSegmentList(large_segments_);
  }
   if (large_segments_ != NULL) {
   }
   // Reset zone state.
 #ifdef DEBUG
@@ -214,9 +257,9 @@ uword Zone::AllocateLargeSegment(intptr_t size) {
   ASSERT(free_size < size);
 
   // Create a new large segment and chain it up.
-  ASSERT(Utils::IsAligned(sizeof(Segment), kAlignment));
-  size += sizeof(Segment);  // Account for book keeping fields in size.
-  large_segments_ = Segment::New(size, large_segments_);
+  // Account for book keeping fields in size.
+  size += Utils::RoundUp(sizeof(Segment), kAlignment);
+  large_segments_ = Segment::NewLarge(size, large_segments_);
 
   uword result = Utils::RoundUp(large_segments_->start(), kAlignment);
   return result;
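To see what NewLarge actually requests, here is a worked size calculation for the 64 K-entry backtrack stack. It is a sketch under stated assumptions: a 64-bit build, 4 KB pages, kAlignment of 8, and a Segment header of exactly the four pointer-sized fields shown above; the real constants come from zone.h and VirtualMemory::PageSize(), and the RoundUp helper below only mirrors Utils::RoundUp.

// Worked example (assumptions noted above): Zone::Alloc<intptr_t>(1 << 16)
// reaches AllocateLargeSegment, which adds the rounded-up Segment header, and
// NewLarge then rounds the total to whole pages before VirtualMemory::Allocate.
#include <cstdint>
#include <cstdio>

static uintptr_t RoundUp(uintptr_t x, uintptr_t n) { return ((x + n - 1) / n) * n; }

int main() {
  const uintptr_t kAlignment = 8;               // assumed
  const uintptr_t kPageSize = 4096;             // assumed
  const uintptr_t kHeader = 4 * sizeof(void*);  // next_, size_, memory_, alignment_
  uintptr_t payload = (uintptr_t{1} << 16) * sizeof(intptr_t);      // 524288 bytes (512 KB)
  uintptr_t with_header = payload + RoundUp(kHeader, kAlignment);   // 524320 bytes
  uintptr_t mapped = RoundUp(with_header, kPageSize);               // 528384 bytes (516 KB)
  printf("payload=%zu with_header=%zu mapped=%zu\n",
         (size_t)payload, (size_t)with_header, (size_t)mapped);
  return 0;
}

The whole mapped region is handed back to the OS in DeleteLargeSegmentList via delete memory, which is what keeps these buffers out of malloc's caches.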
62 changes: 62 additions & 0 deletions runtime/vm/zone_test.cc
@@ -176,4 +176,66 @@ VM_UNIT_TEST_CASE(NativeScopeZoneAllocation) {
   EXPECT_EQ(0UL, ApiNativeScope::current_memory_usage());
 }
 
+#if !defined(PRODUCT)
+// Allow for pooling in the malloc implementation.
+static const int64_t kRssSlack = 20 * MB;
+#endif  // !defined(PRODUCT)
+
+// clang-format off
+static const size_t kSizes[] = {
+    64 * KB,
+    64 * KB + 2 * kWordSize,
+    64 * KB - 2 * kWordSize,
+    128 * KB,
+    128 * KB + 2 * kWordSize,
+    128 * KB - 2 * kWordSize,
+    256 * KB,
+    256 * KB + 2 * kWordSize,
+    256 * KB - 2 * kWordSize,
+    512 * KB,
+    512 * KB + 2 * kWordSize,
+    512 * KB - 2 * kWordSize,
+};
+// clang-format on
+
+TEST_CASE(StressMallocDirectly) {
+#if !defined(PRODUCT)
+  int64_t start_rss = Service::CurrentRSS();
+#endif  // !defined(PRODUCT)
+
+  void* allocations[ARRAY_SIZE(kSizes)];
+  for (size_t i = 0; i < ((3u * GB) / (512u * KB)); i++) {
+    for (size_t j = 0; j < ARRAY_SIZE(kSizes); j++) {
+      allocations[j] = malloc(kSizes[j]);
+    }
+    for (size_t j = 0; j < ARRAY_SIZE(kSizes); j++) {
+      free(allocations[j]);
+    }
+  }
+
+#if !defined(PRODUCT)
+  int64_t stop_rss = Service::CurrentRSS();
+  EXPECT_LT(stop_rss, start_rss + kRssSlack);
+#endif  // !defined(PRODUCT)
+}
+
+TEST_CASE(StressMallocThroughZones) {
+#if !defined(PRODUCT)
+  int64_t start_rss = Service::CurrentRSS();
+#endif  // !defined(PRODUCT)
+
+  for (size_t i = 0; i < ((3u * GB) / (512u * KB)); i++) {
+    StackZone stack_zone(Thread::Current());
+    Zone* zone = stack_zone.GetZone();
+    for (size_t j = 0; j < ARRAY_SIZE(kSizes); j++) {
+      zone->Alloc<uint8_t>(kSizes[j]);
+    }
+  }
+
+#if !defined(PRODUCT)
+  int64_t stop_rss = Service::CurrentRSS();
+  EXPECT_LT(stop_rss, start_rss + kRssSlack);
+#endif  // !defined(PRODUCT)
+}
+
 }  // namespace dart
24 changes: 24 additions & 0 deletions tests/corelib_2/regexp/jemalloc_leak_backtracking_stack_test.dart
@@ -0,0 +1,24 @@
+// Copyright (c) 2019, the Dart project authors. Please see the AUTHORS file
+// for details. All rights reserved. Use of this source code is governed by a
+// BSD-style license that can be found in the LICENSE file.
+
+// Regression test for https://github.com/flutter/flutter/issues/29007
+
+String escape(String string) {
+  var regex = new RegExp("(\\?|\\\$|\\*|\\(|\\)|\\[)|\\+|\\.|\\\\");
+  return string.replaceAllMapped(
+      regex, (Match m) => "\\" + string.substring(m.start, m.end));
+}
+
+main() {
+  var text = """
+Yet but three? Come one more.
+Two of both kinds make up four.
+""";
+  var accumulate = 0;
+  for (var i = 0; i < 65536; i++) {
+    accumulate += escape(text).length;
+  }
+
+  print(accumulate);
+}
