
Version 3.11.3

Disable optimization for functions that have scopes that cannot be reconstructed from the context chain. (issue 2071)

Define V8_EXPORT to nothing for clients of v8. (Chromium issue 90078)

Correctly check for native error objects. (Chromium issue 2138)

Performance and stability improvements on all platforms.

git-svn-id: https://v8.googlecode.com/svn/trunk@11594 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
1 parent 8843e8d · commit 9ba30cb78331d1183b251c1402466f0992758f36 · ulan@chromium.org committed May 18, 2012
@@ -1,3 +1,15 @@
+2012-05-18: Version 3.11.3
+
+ Disable optimization for functions that have scopes that cannot be
+ reconstructed from the context chain. (issue 2071)
+
+ Define V8_EXPORT to nothing for clients of v8. (Chromium issue 90078)
+
+ Correctly check for native error objects. (Chromium issue 2138)
+
+ Performance and stability improvements on all platforms.
+
+
2012-05-16: Version 3.11.2
Revert r11496. (Chromium issue 128146)
@@ -101,14 +101,14 @@ LIBRARY_FLAGS = {
    'os:linux': {
      'CCFLAGS': ['-ansi'] + GCC_EXTRA_CCFLAGS,
      'library:shared': {
-       'CPPDEFINES': ['V8_SHARED'],
+       'CPPDEFINES': ['V8_SHARED', 'BUILDING_V8_SHARED'],
        'LIBS': ['pthread']
      }
    },
    'os:macos': {
      'CCFLAGS': ['-ansi', '-mmacosx-version-min=10.4'],
      'library:shared': {
-       'CPPDEFINES': ['V8_SHARED']
+       'CPPDEFINES': ['V8_SHARED', 'BUILDING_V8_SHARED'],
      }
    },
    'os:freebsd': {
@@ -62,11 +62,13 @@
#else // _WIN32
-// Setup for Linux shared library export. There is no need to distinguish
-// between building or using the V8 shared library, but we should not
-// export symbols when we are building a static library.
+// Setup for Linux shared library export.
#if defined(__GNUC__) && (__GNUC__ >= 4) && defined(V8_SHARED)
+#ifdef BUILDING_V8_SHARED
#define V8EXPORT __attribute__ ((visibility("default")))
+#else
+#define V8EXPORT
+#endif
#else // defined(__GNUC__) && (__GNUC__ >= 4)
#define V8EXPORT
#endif // defined(__GNUC__) && (__GNUC__ >= 4)
@@ -3466,104 +3466,6 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
}
-void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 3);
- VisitForStackValue(args->at(0));
- VisitForStackValue(args->at(1));
- VisitForStackValue(args->at(2));
- Label done;
- Label slow_case;
- Register object = r0;
- Register index1 = r1;
- Register index2 = r2;
- Register elements = r3;
- Register scratch1 = r4;
- Register scratch2 = r5;
-
- __ ldr(object, MemOperand(sp, 2 * kPointerSize));
- // Fetch the map and check if array is in fast case.
- // Check that object doesn't require security checks and
- // has no indexed interceptor.
- __ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE);
- __ b(ne, &slow_case);
- // Map is now in scratch1.
-
- __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
- __ tst(scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
- __ b(ne, &slow_case);
-
- // Check the object's elements are in fast case and writable.
- __ ldr(elements, FieldMemOperand(object, JSObject::kElementsOffset));
- __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
- __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
- __ cmp(scratch1, ip);
- __ b(ne, &slow_case);
-
- // Check that both indices are smis.
- __ ldr(index1, MemOperand(sp, 1 * kPointerSize));
- __ ldr(index2, MemOperand(sp, 0));
- __ JumpIfNotBothSmi(index1, index2, &slow_case);
-
- // Check that both indices are valid.
- __ ldr(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
- __ cmp(scratch1, index1);
- __ cmp(scratch1, index2, hi);
- __ b(ls, &slow_case);
-
- // Bring the address of the elements into index1 and index2.
- __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ add(index1,
- scratch1,
- Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
- __ add(index2,
- scratch1,
- Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
-
- // Swap elements.
- __ ldr(scratch1, MemOperand(index1, 0));
- __ ldr(scratch2, MemOperand(index2, 0));
- __ str(scratch1, MemOperand(index2, 0));
- __ str(scratch2, MemOperand(index1, 0));
-
- Label no_remembered_set;
- __ CheckPageFlag(elements,
- scratch1,
- 1 << MemoryChunk::SCAN_ON_SCAVENGE,
- ne,
- &no_remembered_set);
- // Possible optimization: do a check that both values are Smis
- // (or them and test against Smi mask.)
-
- // We are swapping two objects in an array and the incremental marker never
- // pauses in the middle of scanning a single object. Therefore the
- // incremental marker is not disturbed, so we don't need to call the
- // RecordWrite stub that notifies the incremental marker.
- __ RememberedSetHelper(elements,
- index1,
- scratch2,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
- __ RememberedSetHelper(elements,
- index2,
- scratch2,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
-
- __ bind(&no_remembered_set);
- // We are done. Drop elements from the stack, and return undefined.
- __ Drop(3);
- __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
- __ jmp(&done);
-
- __ bind(&slow_case);
- __ CallRuntime(Runtime::kSwapElements, 3);
-
- __ bind(&done);
- context()->Plug(r0);
-}
-
-
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());
@@ -827,7 +827,8 @@ function ArraySort(comparefn) {
       var element = a[i];
       var order = %_CallFunction(receiver, element, pivot, comparefn);
       if (order < 0) {
-        %_SwapElements(a, i, low_end);
+        a[i] = a[low_end];
+        a[low_end] = element;
         low_end++;
       } else if (order > 0) {
         do {
@@ -836,9 +837,12 @@ function ArraySort(comparefn) {
           var top_elem = a[high_start];
           order = %_CallFunction(receiver, top_elem, pivot, comparefn);
         } while (order > 0);
-        %_SwapElements(a, i, high_start);
+        a[i] = a[high_start];
+        a[high_start] = element;
         if (order < 0) {
-          %_SwapElements(a, i, low_end);
+          element = a[i];
+          a[i] = a[low_end];
+          a[low_end] = element;
           low_end++;
         }
       }
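
The two array.js hunks above replace the removed %_SwapElements intrinsic with ordinary element assignments in ArraySort's partition loop; since `element` already holds a[i], the first two swaps need only two assignments, and the third reloads a[i] because it was just overwritten. For illustration only (not part of the commit; the helper name `swapInPlace` is ours), the same in-place swap written as plain JavaScript:

// Swap a[i] and a[j] through a temporary; this is the whole effect the
// %_SwapElements fast path used to provide for ArraySort.
function swapInPlace(a, i, j) {
  var tmp = a[i];  // hold one element
  a[i] = a[j];     // overwrite it with the other
  a[j] = tmp;      // write the held value back
}

var a = [3, 1, 2];
swapInPlace(a, 0, 2);  // a is now [2, 1, 3]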
@@ -118,7 +118,7 @@ bool CompilationInfo::ShouldSelfOptimize() {
FLAG_crankshaft &&
!function()->flags()->Contains(kDontSelfOptimize) &&
!function()->flags()->Contains(kDontOptimize) &&
- function()->scope()->allows_lazy_recompilation() &&
+ function()->scope()->AllowsLazyRecompilation() &&
(shared_info().is_null() || !shared_info()->optimization_disabled());
}
@@ -26,7 +26,8 @@
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-#ifdef USING_V8_SHARED // Defined when linking against shared lib on Windows.
+// Defined when linking against shared lib on Windows.
+#if defined(USING_V8_SHARED) && !defined(V8_SHARED)
#define V8_SHARED
#endif
@@ -314,7 +314,8 @@ bool FullCodeGenerator::MakeCode(CompilationInfo* info) {
Code::Flags flags = Code::ComputeFlags(Code::FUNCTION);
Handle<Code> code = CodeGenerator::MakeCodeEpilogue(&masm, flags, info);
code->set_optimizable(info->IsOptimizable() &&
- !info->function()->flags()->Contains(kDontOptimize));
+ !info->function()->flags()->Contains(kDontOptimize) &&
+ info->function()->scope()->AllowsLazyRecompilation());
cgen.PopulateDeoptimizationData(code);
cgen.PopulateTypeFeedbackInfo(code);
cgen.PopulateTypeFeedbackCells(code);
@@ -8135,14 +8135,6 @@ void HGraphBuilder::GenerateNumberToString(CallRuntime* call) {
}
-// Fast swapping of elements. Takes three expressions, the object and two
-// indices. This should only be used if the indices are known to be
-// non-negative and within bounds of the elements array at the call site.
-void HGraphBuilder::GenerateSwapElements(CallRuntime* call) {
- return Bailout("inlined runtime function: SwapElements");
-}
-
-
// Fast call for custom callbacks.
void HGraphBuilder::GenerateCallFunction(CallRuntime* call) {
// 1 ~ The function to call is not itself an argument to the call.
@@ -175,7 +175,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
// Read current padding counter and skip corresponding number of words.
__ pop(unused_reg);
// We divide stored value by 2 (untagging) and multiply it by word's size.
- STATIC_ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kSmiTagSize == 1 && kSmiShiftSize == 0);
__ lea(esp, Operand(esp, unused_reg, times_half_pointer_size, 0));
// Get rid of the internal frame.
@@ -3405,99 +3405,6 @@ void FullCodeGenerator::EmitRegExpConstructResult(CallRuntime* expr) {
}
-void FullCodeGenerator::EmitSwapElements(CallRuntime* expr) {
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 3);
- VisitForStackValue(args->at(0));
- VisitForStackValue(args->at(1));
- VisitForStackValue(args->at(2));
- Label done;
- Label slow_case;
- Register object = eax;
- Register index_1 = ebx;
- Register index_2 = ecx;
- Register elements = edi;
- Register temp = edx;
- __ mov(object, Operand(esp, 2 * kPointerSize));
- // Fetch the map and check if array is in fast case.
- // Check that object doesn't require security checks and
- // has no indexed interceptor.
- __ CmpObjectType(object, JS_ARRAY_TYPE, temp);
- __ j(not_equal, &slow_case);
- __ test_b(FieldOperand(temp, Map::kBitFieldOffset),
- KeyedLoadIC::kSlowCaseBitFieldMask);
- __ j(not_zero, &slow_case);
-
- // Check the object's elements are in fast case and writable.
- __ mov(elements, FieldOperand(object, JSObject::kElementsOffset));
- __ cmp(FieldOperand(elements, HeapObject::kMapOffset),
- Immediate(isolate()->factory()->fixed_array_map()));
- __ j(not_equal, &slow_case);
-
- // Check that both indices are smis.
- __ mov(index_1, Operand(esp, 1 * kPointerSize));
- __ mov(index_2, Operand(esp, 0));
- __ mov(temp, index_1);
- __ or_(temp, index_2);
- __ JumpIfNotSmi(temp, &slow_case);
-
- // Check that both indices are valid.
- __ mov(temp, FieldOperand(object, JSArray::kLengthOffset));
- __ cmp(temp, index_1);
- __ j(below_equal, &slow_case);
- __ cmp(temp, index_2);
- __ j(below_equal, &slow_case);
-
- // Bring addresses into index1 and index2.
- __ lea(index_1, CodeGenerator::FixedArrayElementOperand(elements, index_1));
- __ lea(index_2, CodeGenerator::FixedArrayElementOperand(elements, index_2));
-
- // Swap elements. Use object and temp as scratch registers.
- __ mov(object, Operand(index_1, 0));
- __ mov(temp, Operand(index_2, 0));
- __ mov(Operand(index_2, 0), object);
- __ mov(Operand(index_1, 0), temp);
-
- Label no_remembered_set;
- __ CheckPageFlag(elements,
- temp,
- 1 << MemoryChunk::SCAN_ON_SCAVENGE,
- not_zero,
- &no_remembered_set,
- Label::kNear);
- // Possible optimization: do a check that both values are Smis
- // (or them and test against Smi mask.)
-
- // We are swapping two objects in an array and the incremental marker never
- // pauses in the middle of scanning a single object. Therefore the
- // incremental marker is not disturbed, so we don't need to call the
- // RecordWrite stub that notifies the incremental marker.
- __ RememberedSetHelper(elements,
- index_1,
- temp,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
- __ RememberedSetHelper(elements,
- index_2,
- temp,
- kDontSaveFPRegs,
- MacroAssembler::kFallThroughAtEnd);
-
- __ bind(&no_remembered_set);
-
- // We are done. Drop elements from the stack, and return undefined.
- __ add(esp, Immediate(3 * kPointerSize));
- __ mov(eax, isolate()->factory()->undefined_value());
- __ jmp(&done);
-
- __ bind(&slow_case);
- __ CallRuntime(Runtime::kSwapElements, 3);
-
- __ bind(&done);
- context()->Plug(eax);
-}
-
-
void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());
@@ -61,18 +61,21 @@ function FormatString(format, message) {
 // To check if something is a native error we need to check the
-// concrete native error types. It is not enough to check "obj
-// instanceof $Error" because user code can replace
-// NativeError.prototype.__proto__. User code cannot replace
-// NativeError.prototype though and therefore this is a safe test.
+// concrete native error types. It is not sufficient to use instanceof
+// since it is possible to create an object that has Error.prototype on
+// its prototype chain. This is the case for DOMException, for example.
 function IsNativeErrorObject(obj) {
-  return (obj instanceof $Error) ||
-      (obj instanceof $EvalError) ||
-      (obj instanceof $RangeError) ||
-      (obj instanceof $ReferenceError) ||
-      (obj instanceof $SyntaxError) ||
-      (obj instanceof $TypeError) ||
-      (obj instanceof $URIError);
+  switch (%_ClassOf(obj)) {
+    case 'Error':
+    case 'EvalError':
+    case 'RangeError':
+    case 'ReferenceError':
+    case 'SyntaxError':
+    case 'TypeError':
+    case 'URIError':
+      return true;
+  }
+  return false;
 }
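
The rewritten IsNativeErrorObject asks V8 for the object's internal class (via the %_ClassOf intrinsic) instead of relying on instanceof, which only inspects the prototype chain. A short illustration of the pitfall the new comment describes (the snippet and the name `notAnError` are ours, not part of the commit):

// Any object can be handed Error.prototype, so instanceof cannot tell a
// genuine native error apart from an impostor; DOMException is the
// real-world case cited in the comment above.
var notAnError = Object.create(Error.prototype);

notAnError instanceof Error;             // true, yet no native error was constructed
new TypeError("boom") instanceof Error;  // true, a genuine native error
({}) instanceof Error;                   // false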