Permalink
Browse files

Version 3.12.0

Fixed Chromium issues: 115100, 129628, 131994, 132727, 132741, 132742, 133211

Fixed V8 issues: 915, 1914, 2034, 2087, 2094, 2134, 2156, 2166, 2172, 2177, 2179, 2185

Added --extra-code flag to mksnapshot to load JS code into the VM before creating the snapshot.

Support 'restart call frame' command in the debugger.

Performance and stability improvements on all platforms.

git-svn-id: https://v8.googlecode.com/svn/trunk@11882 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
  • Loading branch information...
yangguo@chromium.org
yangguo@chromium.org committed Jun 20, 2012
1 parent 4a237ec commit 013eb60d1aebfca17bc253588a646be132d4a27b
Showing with 3,666 additions and 1,167 deletions.
  1. +16 −0 ChangeLog
  2. +4 −0 Makefile
  3. +8 −0 build/common.gypi
  4. +3 −3 preparser/preparser-process.cc
  5. +1 −1 samples/lineprocessor.cc
  6. +7 −7 samples/process.cc
  7. +1 −1 samples/shell.cc
  8. +19 −7 src/arm/assembler-arm.cc
  9. +19 −0 src/arm/assembler-arm.h
  10. +88 −6 src/arm/code-stubs-arm.cc
  11. +31 −8 src/arm/deoptimizer-arm.cc
  12. +2 −2 src/arm/full-codegen-arm.cc
  13. +5 −2 src/arm/lithium-codegen-arm.cc
  14. +10 −12 src/arm/lithium-codegen-arm.h
  15. +1 −8 src/arm/regexp-macro-assembler-arm.h
  16. +2 −2 src/arm/simulator-arm.cc
  17. +88 −63 src/array.js
  18. +42 −7 src/assembler.cc
  19. +12 −2 src/assembler.h
  20. +5 −0 src/ast.cc
  21. +5 −5 src/ast.h
  22. +6 −1 src/collection.js
  23. +80 −29 src/compiler.cc
  24. +33 −5 src/compiler.h
  25. +4 −4 src/d8.cc
  26. +33 −3 src/debug-debugger.js
  27. +124 −59 src/debug.cc
  28. +10 −4 src/debug.h
  29. +13 −4 src/deoptimizer.cc
  30. +3 −0 src/deoptimizer.h
  31. +36 −10 src/factory.cc
  32. +10 −0 src/flag-definitions.h
  33. +8 −4 src/flags.cc
  34. +14 −6 src/frames.cc
  35. +2 −0 src/frames.h
  36. +2 −2 src/full-codegen.cc
  37. +7 −6 src/full-codegen.h
  38. +7 −10 src/heap.cc
  39. +4 −15 src/heap.h
  40. +2 −10 src/hydrogen-instructions.h
  41. +334 −71 src/hydrogen.cc
  42. +7 −11 src/hydrogen.h
  43. +81 −4 src/ia32/code-stubs-ia32.cc
  44. +32 −9 src/ia32/deoptimizer-ia32.cc
  45. +1 −5 src/ia32/full-codegen-ia32.cc
  46. +4 −2 src/ia32/lithium-codegen-ia32.cc
  47. +9 −11 src/ia32/lithium-codegen-ia32.h
  48. +5 −0 src/ia32/regexp-macro-assembler-ia32.cc
  49. +1 −8 src/ia32/regexp-macro-assembler-ia32.h
  50. +42 −9 src/isolate.cc
  51. +7 −3 src/isolate.h
  52. +1 −1 src/json-parser.h
  53. +22 −26 src/jsregexp.cc
  54. +8 −13 src/jsregexp.h
  55. +5 −0 src/list.h
  56. +1 −1 src/lithium.h
  57. +16 −2 src/liveedit-debugger.js
  58. +102 −60 src/liveedit.cc
  59. +5 −1 src/liveedit.h
  60. +7 −11 src/mark-compact.cc
  61. +17 −12 src/messages.js
  62. +88 −6 src/mips/code-stubs-mips.cc
  63. +31 −8 src/mips/deoptimizer-mips.cc
  64. +2 −2 src/mips/full-codegen-mips.cc
  65. +5 −2 src/mips/lithium-codegen-mips.cc
  66. +10 −12 src/mips/lithium-codegen-mips.h
  67. +2 −1 src/mips/macro-assembler-mips.cc
  68. +1 −7 src/mips/regexp-macro-assembler-mips.h
  69. +10 −1 src/mirror-debugger.js
  70. +59 −0 src/mksnapshot.cc
  71. +6 −0 src/objects-debug.cc
  72. +25 −4 src/objects-inl.h
  73. +4 −2 src/objects-printer.cc
  74. +87 −14 src/objects.cc
  75. +38 −5 src/objects.h
  76. +49 −50 src/parser.cc
  77. +13 −10 src/parser.h
  78. +4 −2 src/regexp-macro-assembler-irregexp.cc
  79. +1 −1 src/regexp-macro-assembler-irregexp.h
  80. +4 −4 src/rewriter.cc
  81. +4 −1 src/runtime-profiler.cc
  82. +214 −157 src/runtime.cc
  83. +1 −0 src/runtime.h
  84. +8 −7 src/scopes.cc
  85. +2 −2 src/scopes.h
  86. +33 −50 src/serialize.cc
  87. +0 −1 src/serialize.h
  88. +5 −0 src/snapshot-common.cc
  89. +2 −0 src/snapshot.h
  90. +11 −2 src/splay-tree.h
  91. +5 −4 src/stub-cache.cc
  92. +2 −3 src/stub-cache.h
  93. +2 −1 src/type-info.cc
  94. +3 −4 src/v8-counters.h
  95. +3 −3 src/version.cc
  96. +88 −5 src/x64/code-stubs-x64.cc
  97. +32 −9 src/x64/deoptimizer-x64.cc
  98. +1 −5 src/x64/full-codegen-x64.cc
  99. +5 −2 src/x64/lithium-codegen-x64.cc
  100. +10 −12 src/x64/lithium-codegen-x64.h
  101. +8 −0 src/x64/regexp-macro-assembler-x64.cc
  102. +6 −11 src/zone-inl.h
  103. +5 −5 src/zone.cc
  104. +4 −5 src/zone.h
  105. +25 −1 test/cctest/test-api.cc
  106. +4 −2 test/cctest/test-ast.cc
  107. +29 −0 test/cctest/test-compiler.cc
  108. +2 −2 test/cctest/test-dataflow.cc
  109. +2 −3 test/cctest/test-debug.cc
  110. +14 −1 test/cctest/test-flags.cc
  111. +1 −0 test/cctest/test-heap.cc
  112. +3 −3 test/cctest/test-liveedit.cc
  113. +6 −7 test/cctest/test-parsing.cc
  114. +64 −54 test/cctest/test-regexp.cc
  115. +97 −87 test/cctest/test-serialize.cc
  116. +5 −5 test/cctest/test-strings.cc
  117. +57 −0 test/mjsunit/compiler/optimized-closures.js
  118. +153 −0 test/mjsunit/debug-liveedit-restart-frame.js
  119. +67 −0 test/mjsunit/debug-script-breakpoints-closure.js
  120. +82 −0 test/mjsunit/debug-script-breakpoints-nested.js
  121. +203 −0 test/mjsunit/eval-stack-trace.js
  122. +5 −3 test/mjsunit/harmony/collections.js
  123. +11 −8 test/mjsunit/mjsunit.status
  124. +36 −0 test/mjsunit/regress/regress-115100.js
  125. +70 −0 test/mjsunit/regress/regress-131994.js
  126. +35 −0 test/mjsunit/regress/regress-133211.js
  127. +39 −0 test/mjsunit/regress/regress-133211b.js
  128. +35 −0 test/mjsunit/regress/regress-2172.js
  129. +145 −0 test/mjsunit/regress/regress-2185-2.js
  130. +36 −0 test/mjsunit/regress/regress-2185.js
  131. +49 −0 test/mjsunit/regress/regress-2186.js
  132. +2 −2 tools/gen-postmortem-metadata.py
  133. +22 −1 tools/grokdump.py
View
@@ -1,3 +1,19 @@
+2012-06-20: Version 3.12.0
+
+ Fixed Chromium issues:
+ 115100, 129628, 131994, 132727, 132741, 132742, 133211
+
+ Fixed V8 issues:
+ 915, 1914, 2034, 2087, 2094, 2134, 2156, 2166, 2172, 2177, 2179, 2185
+
+ Added --extra-code flag to mksnapshot to load JS code into the VM
+ before creating the snapshot.
+
+ Support 'restart call frame' command in the debugger.
+
+ Performance and stability improvements on all platforms.
+
+
2012-06-13: Version 3.11.10
Implemented heap profiler memory usage reporting.
View
@@ -95,6 +95,10 @@ endif
ifeq ($(strictaliasing), off)
GYPFLAGS += -Dv8_no_strict_aliasing=1
endif
+# regexp=interpreted
+ifeq ($(regexp), interpreted)
+ GYPFLAGS += -Dv8_interpreted_regexp=1
+endif
# ----------------- available targets: --------------------
# - "dependencies": pulls in external dependencies (currently: GYP)
View
@@ -95,6 +95,10 @@
# For a shared library build, results in "libv8-<(soname_version).so".
'soname_version%': '',
+
+  # The interpreted regexp engine exists as a platform-independent
+  # alternative in which the regular expression is compiled to bytecode.
+ 'v8_interpreted_regexp%': 0,
},
'target_defaults': {
'conditions': [
@@ -110,6 +114,9 @@
['v8_enable_gdbjit==1', {
'defines': ['ENABLE_GDB_JIT_INTERFACE',],
}],
+ ['v8_interpreted_regexp==1', {
+ 'defines': ['V8_INTERPRETED_REGEXP',],
+ }],
['v8_target_arch=="arm"', {
'defines': [
'V8_TARGET_ARCH_ARM',
@@ -220,6 +227,7 @@
'StackReserveSize': '2097152',
},
},
+ 'msvs_configuration_platform': 'x64',
}], # v8_target_arch=="x64"
['v8_use_liveobjectlist=="true"', {
'defines': [
@@ -1,4 +1,4 @@
-// Copyright 2011 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -202,7 +202,7 @@ void fail(v8::PreParserData* data, const char* message, ...) {
fflush(stderr);
if (data != NULL) {
// Print preparser data to stdout.
- uint32_t size = data->size();
+ uint32_t size = static_cast<uint32_t>(data->size());
fprintf(stderr, "LOG: data size: %u\n", size);
if (!WriteBuffer(stdout, data->data(), size)) {
perror("ERROR: Writing data");
@@ -232,7 +232,7 @@ struct ExceptionExpectation {
void CheckException(v8::PreParserData* data,
ExceptionExpectation* expects) {
- PreparseDataInterpreter reader(data->data(), data->size());
+ PreparseDataInterpreter reader(data->data(), static_cast<int>(data->size()));
if (expects->throws) {
if (!reader.throws()) {
if (expects->type == NULL) {
View
@@ -347,7 +347,7 @@ v8::Handle<v8::String> ReadFile(const char* name) {
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
- int read = fread(&chars[i], 1, size - i, file);
+ int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
i += read;
}
fclose(file);
View
@@ -1,4 +1,4 @@
-// Copyright 2008 the V8 project authors. All rights reserved.
+// Copyright 2012 the V8 project authors. All rights reserved.
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
@@ -351,7 +351,7 @@ Handle<Value> JsHttpRequestProcessor::MapGet(Local<String> name,
// Otherwise fetch the value and wrap it in a JavaScript string
const string& value = (*iter).second;
- return String::New(value.c_str(), value.length());
+ return String::New(value.c_str(), static_cast<int>(value.length()));
}
@@ -443,31 +443,31 @@ Handle<Value> JsHttpRequestProcessor::GetPath(Local<String> name,
const string& path = request->Path();
// Wrap the result in a JavaScript string and return it.
- return String::New(path.c_str(), path.length());
+ return String::New(path.c_str(), static_cast<int>(path.length()));
}
Handle<Value> JsHttpRequestProcessor::GetReferrer(Local<String> name,
const AccessorInfo& info) {
HttpRequest* request = UnwrapRequest(info.Holder());
const string& path = request->Referrer();
- return String::New(path.c_str(), path.length());
+ return String::New(path.c_str(), static_cast<int>(path.length()));
}
Handle<Value> JsHttpRequestProcessor::GetHost(Local<String> name,
const AccessorInfo& info) {
HttpRequest* request = UnwrapRequest(info.Holder());
const string& path = request->Host();
- return String::New(path.c_str(), path.length());
+ return String::New(path.c_str(), static_cast<int>(path.length()));
}
Handle<Value> JsHttpRequestProcessor::GetUserAgent(Local<String> name,
const AccessorInfo& info) {
HttpRequest* request = UnwrapRequest(info.Holder());
const string& path = request->UserAgent();
- return String::New(path.c_str(), path.length());
+ return String::New(path.c_str(), static_cast<int>(path.length()));
}
@@ -557,7 +557,7 @@ Handle<String> ReadFile(const string& name) {
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
- int read = fread(&chars[i], 1, size - i, file);
+ int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
i += read;
}
fclose(file);
View
@@ -205,7 +205,7 @@ v8::Handle<v8::String> ReadFile(const char* name) {
char* chars = new char[size + 1];
chars[size] = '\0';
for (int i = 0; i < size;) {
- int read = fread(&chars[i], 1, size - i, file);
+ int read = static_cast<int>(fread(&chars[i], 1, size - i, file));
i += read;
}
fclose(file);
View
@@ -2435,6 +2435,14 @@ void Assembler::RecordComment(const char* msg) {
}
+void Assembler::RecordConstPool(int size) {
+ // We only need this for debugger support, to correctly compute offsets in the
+ // code.
+#ifdef ENABLE_DEBUGGER_SUPPORT
+ RecordRelocInfo(RelocInfo::CONST_POOL, static_cast<intptr_t>(size));
+#endif
+}
+
void Assembler::GrowBuffer() {
if (!own_buffer_) FATAL("external code buffer is too small");
@@ -2511,12 +2519,15 @@ void Assembler::dd(uint32_t data) {
void Assembler::RecordRelocInfo(RelocInfo::Mode rmode, intptr_t data) {
// We do not try to reuse pool constants.
RelocInfo rinfo(pc_, rmode, data, NULL);
- if (rmode >= RelocInfo::JS_RETURN && rmode <= RelocInfo::DEBUG_BREAK_SLOT) {
+ if (((rmode >= RelocInfo::JS_RETURN) &&
+ (rmode <= RelocInfo::DEBUG_BREAK_SLOT)) ||
+ (rmode == RelocInfo::CONST_POOL)) {
// Adjust code for new modes.
ASSERT(RelocInfo::IsDebugBreakSlot(rmode)
|| RelocInfo::IsJSReturn(rmode)
|| RelocInfo::IsComment(rmode)
- || RelocInfo::IsPosition(rmode));
+ || RelocInfo::IsPosition(rmode)
+ || RelocInfo::IsConstPool(rmode));
// These modes do not need an entry in the constant pool.
} else {
ASSERT(num_pending_reloc_info_ < kMaxNumPendingRelocInfo);
@@ -2602,22 +2613,22 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
// pool (include the jump over the pool and the constant pool marker and
// the gap to the relocation information).
int jump_instr = require_jump ? kInstrSize : 0;
- int needed_space = jump_instr + kInstrSize +
- num_pending_reloc_info_ * kInstrSize + kGap;
+ int size = jump_instr + kInstrSize + num_pending_reloc_info_ * kPointerSize;
+ int needed_space = size + kGap;
while (buffer_space() <= needed_space) GrowBuffer();
{
// Block recursive calls to CheckConstPool.
BlockConstPoolScope block_const_pool(this);
+ RecordComment("[ Constant Pool");
+ RecordConstPool(size);
// Emit jump over constant pool if necessary.
Label after_pool;
if (require_jump) {
b(&after_pool);
}
- RecordComment("[ Constant Pool");
-
// Put down constant pool marker "Undefined instruction" as specified by
// A5.6 (ARMv7) Instruction set encoding.
emit(kConstantPoolMarker | num_pending_reloc_info_);
@@ -2627,7 +2638,8 @@ void Assembler::CheckConstPool(bool force_emit, bool require_jump) {
RelocInfo& rinfo = pending_reloc_info_[i];
ASSERT(rinfo.rmode() != RelocInfo::COMMENT &&
rinfo.rmode() != RelocInfo::POSITION &&
- rinfo.rmode() != RelocInfo::STATEMENT_POSITION);
+ rinfo.rmode() != RelocInfo::STATEMENT_POSITION &&
+ rinfo.rmode() != RelocInfo::CONST_POOL);
Instr instr = instr_at(rinfo.pc());
// Instruction to patch must be 'ldr rd, [pc, #offset]' with offset == 0.
View
@@ -1219,6 +1219,25 @@ class Assembler : public AssemblerBase {
// Use --code-comments to enable.
void RecordComment(const char* msg);
+ // Record the emission of a constant pool.
+ //
+  // The emission of a constant pool depends on the size of the code generated
+  // and the number of RelocInfo entries recorded.
+ // The Debug mechanism needs to map code offsets between two versions of a
+ // function, compiled with and without debugger support (see for example
+ // Debug::PrepareForBreakPoints()).
+ // Compiling functions with debugger support generates additional code
+ // (Debug::GenerateSlot()). This may affect the emission of the constant
+ // pools and cause the version of the code with debugger support to have
+ // constant pools generated in different places.
+  // Recording the position and size of emitted constant pools makes it
+  // possible to correctly compute the offset mappings between the different
+  // versions of a function in all situations.
+ //
+ // The parameter indicates the size of the constant pool (in bytes), including
+ // the marker and branch over the data.
+ void RecordConstPool(int size);
+
// Writes a single byte or word of data in the code stream. Used
// for inline tables, e.g., jump-tables. The constant pool should be
// emitted before any use of db and dd to ensure that constant pools
View
@@ -85,6 +85,8 @@ void ToNumberStub::Generate(MacroAssembler* masm) {
void FastNewClosureStub::Generate(MacroAssembler* masm) {
// Create a new closure from the given function info in new
// space. Set the context to the current context in cp.
+ Counters* counters = masm->isolate()->counters();
+
Label gc;
// Pop the function info from the stack.
@@ -98,6 +100,8 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
&gc,
TAG_OBJECT);
+ __ IncrementCounter(counters->fast_new_closure_total(), 1, r6, r7);
+
int map_index = (language_mode_ == CLASSIC_MODE)
? Context::FUNCTION_MAP_INDEX
: Context::STRICT_MODE_FUNCTION_MAP_INDEX;
@@ -106,31 +110,107 @@ void FastNewClosureStub::Generate(MacroAssembler* masm) {
// as the map of the allocated object.
__ ldr(r2, MemOperand(cp, Context::SlotOffset(Context::GLOBAL_INDEX)));
__ ldr(r2, FieldMemOperand(r2, GlobalObject::kGlobalContextOffset));
- __ ldr(r2, MemOperand(r2, Context::SlotOffset(map_index)));
- __ str(r2, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ ldr(r5, MemOperand(r2, Context::SlotOffset(map_index)));
+ __ str(r5, FieldMemOperand(r0, HeapObject::kMapOffset));
// Initialize the rest of the function. We don't have to update the
// write barrier because the allocated object is in new space.
__ LoadRoot(r1, Heap::kEmptyFixedArrayRootIndex);
- __ LoadRoot(r2, Heap::kTheHoleValueRootIndex);
- __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
+ __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
__ str(r1, FieldMemOperand(r0, JSObject::kPropertiesOffset));
__ str(r1, FieldMemOperand(r0, JSObject::kElementsOffset));
- __ str(r2, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
+ __ str(r5, FieldMemOperand(r0, JSFunction::kPrototypeOrInitialMapOffset));
__ str(r3, FieldMemOperand(r0, JSFunction::kSharedFunctionInfoOffset));
__ str(cp, FieldMemOperand(r0, JSFunction::kContextOffset));
__ str(r1, FieldMemOperand(r0, JSFunction::kLiteralsOffset));
- __ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset));
// Initialize the code pointer in the function to be the one
// found in the shared function info object.
+ // But first check if there is an optimized version for our context.
+ Label check_optimized;
+ Label install_unoptimized;
+ if (FLAG_cache_optimized_code) {
+ __ ldr(r1,
+ FieldMemOperand(r3, SharedFunctionInfo::kOptimizedCodeMapOffset));
+ __ tst(r1, r1);
+ __ b(ne, &check_optimized);
+ }
+ __ bind(&install_unoptimized);
+ __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
+ __ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset));
__ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
__ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
__ str(r3, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
// Return result. The argument function info has been popped already.
__ Ret();
+ __ bind(&check_optimized);
+
+ __ IncrementCounter(counters->fast_new_closure_try_optimized(), 1, r6, r7);
+
+ // r2 holds global context, r1 points to fixed array of 3-element entries
+ // (global context, optimized code, literals).
+ // The optimized code map must never be empty, so check the first elements.
+ Label install_optimized;
+ // Speculatively move code object into r4.
+ __ ldr(r4, FieldMemOperand(r1, FixedArray::kHeaderSize + kPointerSize));
+ __ ldr(r5, FieldMemOperand(r1, FixedArray::kHeaderSize));
+ __ cmp(r2, r5);
+ __ b(eq, &install_optimized);
+
+ // Iterate through the rest of map backwards. r4 holds an index as a Smi.
+ Label loop;
+ __ ldr(r4, FieldMemOperand(r1, FixedArray::kLengthOffset));
+ __ bind(&loop);
+ // Do not double check first entry.
+
+ __ cmp(r4, Operand(Smi::FromInt(SharedFunctionInfo::kEntryLength)));
+ __ b(eq, &install_unoptimized);
+ __ sub(r4, r4, Operand(
+ Smi::FromInt(SharedFunctionInfo::kEntryLength))); // Skip an entry.
+ __ add(r5, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(r5, r5, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ ldr(r5, MemOperand(r5));
+ __ cmp(r2, r5);
+ __ b(ne, &loop);
+ // Hit: fetch the optimized code.
+ __ add(r5, r1, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(r5, r5, Operand(r4, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(r5, r5, Operand(kPointerSize));
+ __ ldr(r4, MemOperand(r5));
+
+ __ bind(&install_optimized);
+ __ IncrementCounter(counters->fast_new_closure_install_optimized(),
+ 1, r6, r7);
+
+ // TODO(fschneider): Idea: store proper code pointers in the map and either
+ // unmangle them on marking or do nothing as the whole map is discarded on
+ // major GC anyway.
+ __ add(r4, r4, Operand(Code::kHeaderSize - kHeapObjectTag));
+ __ str(r4, FieldMemOperand(r0, JSFunction::kCodeEntryOffset));
+
+ // Now link a function into a list of optimized functions.
+ __ ldr(r4, ContextOperand(r2, Context::OPTIMIZED_FUNCTIONS_LIST));
+
+ __ str(r4, FieldMemOperand(r0, JSFunction::kNextFunctionLinkOffset));
+  // No need for write barrier as JSFunction (r0) is in the new space.
+
+ __ str(r0, ContextOperand(r2, Context::OPTIMIZED_FUNCTIONS_LIST));
+  // Store JSFunction (r0) into r4 before issuing the write barrier as
+ // it clobbers all the registers passed.
+ __ mov(r4, r0);
+ __ RecordWriteContextSlot(
+ r2,
+ Context::SlotOffset(Context::OPTIMIZED_FUNCTIONS_LIST),
+ r4,
+ r1,
+ kLRHasNotBeenSaved,
+ kDontSaveFPRegs);
+
+ // Return result. The argument function info has been popped already.
+ __ Ret();
+
// Create a new closure through the slower runtime call.
__ bind(&gc);
__ LoadRoot(r4, Heap::kFalseValueRootIndex);
@@ -7131,6 +7211,8 @@ static const AheadOfTimeWriteBarrierStubList kAheadOfTime[] = {
{ REG(r2), REG(r6), REG(r9), EMIT_REMEMBERED_SET },
// StoreArrayLiteralElementStub::Generate
{ REG(r5), REG(r0), REG(r6), EMIT_REMEMBERED_SET },
+ // FastNewClosureStub::Generate
+ { REG(r2), REG(r4), REG(r1), EMIT_REMEMBERED_SET },
// Null termination.
{ REG(no_reg), REG(no_reg), REG(no_reg), EMIT_REMEMBERED_SET}
};
Oops, something went wrong.

0 comments on commit 013eb60

Please sign in to comment.