Skip to content

Commit

Permalink
Make the CompareStub and the UnaryOpStub accept smi inputs.
Browse files Browse the repository at this point in the history
The stubs get an additional flag for including the smi code
inside the stub. This allows us to generate more compact code
if we don't want to inline the smi case outside the stub.

Review URL: http://codereview.chromium.org/3388005

git-svn-id: https://v8.googlecode.com/svn/branches/bleeding_edge@5456 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
  • Loading branch information
fschneider@chromium.org committed Sep 15, 2010
1 parent f3881c2 commit 99c1700
Show file tree
Hide file tree
Showing 14 changed files with 395 additions and 154 deletions.
99 changes: 71 additions & 28 deletions src/arm/code-stubs-arm.cc
Expand Up @@ -930,6 +930,24 @@ void CompareStub::Generate(MacroAssembler* masm) {
Label slow; // Call builtin.
Label not_smis, both_loaded_as_doubles, lhs_not_nan;

if (include_smi_compare_) {
Label not_two_smis, smi_done;
__ orr(r2, r1, r0);
__ tst(r2, Operand(kSmiTagMask));
__ b(ne, &not_two_smis);
__ sub(r0, r1, r0);
__ b(vc, &smi_done);
// Correct the sign in case of overflow.
__ rsb(r0, r0, Operand(0, RelocInfo::NONE));
__ bind(&smi_done);
__ Ret();
__ bind(&not_two_smis);
} else if (FLAG_debug_code) {
__ orr(r2, r1, r0);
__ tst(r2, Operand(kSmiTagMask));
__ Assert(nz, "CompareStub: unexpected smi operands.");
}

// NOTICE! This code is only reached after a smi-fast-case check, so
// it is certain that at least one operand isn't a smi.

Expand Down Expand Up @@ -2288,7 +2306,7 @@ void StackCheckStub::Generate(MacroAssembler* masm) {
__ push(r0);
__ TailCallRuntime(Runtime::kStackGuard, 1, 1);

__ StubReturn(1);
__ Ret();
}


Expand All @@ -2299,32 +2317,37 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
__ LoadRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);

if (op_ == Token::SUB) {
// Check whether the value is a smi.
Label try_float;
__ tst(r0, Operand(kSmiTagMask));
__ b(ne, &try_float);

// Go slow case if the value of the expression is zero
// to make sure that we switch between 0 and -0.
if (negative_zero_ == kStrictNegativeZero) {
// If we have to check for zero, then we can check for the max negative
// smi while we are at it.
__ bic(ip, r0, Operand(0x80000000), SetCC);
__ b(eq, &slow);
__ rsb(r0, r0, Operand(0, RelocInfo::NONE));
__ StubReturn(1);
} else {
// The value of the expression is a smi and 0 is OK for -0. Try
// optimistic subtraction '0 - value'.
__ rsb(r0, r0, Operand(0, RelocInfo::NONE), SetCC);
__ StubReturn(1, vc);
// We don't have to reverse the optimistic neg since the only case
// where we fall through is the minimum negative Smi, which is the case
// where the neg leaves the register unchanged.
__ jmp(&slow); // Go slow on max negative Smi.
if (include_smi_code_) {
// Check whether the value is a smi.
Label try_float;
__ tst(r0, Operand(kSmiTagMask));
__ b(ne, &try_float);

// Go slow case if the value of the expression is zero
// to make sure that we switch between 0 and -0.
if (negative_zero_ == kStrictNegativeZero) {
// If we have to check for zero, then we can check for the max negative
// smi while we are at it.
__ bic(ip, r0, Operand(0x80000000), SetCC);
__ b(eq, &slow);
__ rsb(r0, r0, Operand(0, RelocInfo::NONE));
__ Ret();
} else {
// The value of the expression is a smi and 0 is OK for -0. Try
// optimistic subtraction '0 - value'.
__ rsb(r0, r0, Operand(0, RelocInfo::NONE), SetCC);
__ Ret(vc);
// We don't have to reverse the optimistic neg since the only case
// where we fall through is the minimum negative Smi, which is the case
// where the neg leaves the register unchanged.
__ jmp(&slow); // Go slow on max negative Smi.
}
__ bind(&try_float);
} else if (FLAG_debug_code) {
__ tst(r0, Operand(kSmiTagMask));
__ Assert(ne, "Unexpected smi operand.");
}

__ bind(&try_float);
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
__ cmp(r1, heap_number_map);
Expand All @@ -2344,6 +2367,19 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
__ mov(r0, Operand(r1));
}
} else if (op_ == Token::BIT_NOT) {
if (include_smi_code_) {
Label non_smi;
__ BranchOnNotSmi(r0, &non_smi);
__ mvn(r0, Operand(r0));
// Bit-clear inverted smi-tag.
__ bic(r0, r0, Operand(kSmiTagMask));
__ Ret();
__ bind(&non_smi);
} else if (FLAG_debug_code) {
__ tst(r0, Operand(kSmiTagMask));
__ Assert(ne, "Unexpected smi operand.");
}

// Check if the operand is a heap number.
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ AssertRegisterIsRoot(heap_number_map, Heap::kHeapNumberMapRootIndex);
Expand Down Expand Up @@ -2391,7 +2427,7 @@ void GenericUnaryOpStub::Generate(MacroAssembler* masm) {
}

__ bind(&done);
__ StubReturn(1);
__ Ret();

// Handle the slow case by jumping to the JavaScript builtin.
__ bind(&slow);
Expand Down Expand Up @@ -3499,14 +3535,20 @@ const char* CompareStub::GetName() {
include_number_compare_name = "_NO_NUMBER";
}

const char* include_smi_compare_name = "";
if (!include_smi_compare_) {
include_smi_compare_name = "_NO_SMI";
}

OS::SNPrintF(Vector<char>(name_, kMaxNameLength),
"CompareStub_%s%s%s%s%s%s",
cc_name,
lhs_name,
rhs_name,
strict_name,
never_nan_nan_name,
include_number_compare_name);
include_number_compare_name,
include_smi_compare_name);
return name_;
}

Expand All @@ -3522,7 +3564,8 @@ int CompareStub::MinorKey() {
| RegisterField::encode(lhs_.is(r0))
| StrictField::encode(strict_)
| NeverNanNanField::encode(cc_ == eq ? never_nan_nan_ : false)
| IncludeNumberCompareField::encode(include_number_compare_);
| IncludeNumberCompareField::encode(include_number_compare_)
| IncludeSmiCompareField::encode(include_smi_compare_);
}


Expand Down
7 changes: 5 additions & 2 deletions src/arm/codegen-arm.cc
Expand Up @@ -1651,7 +1651,7 @@ void CodeGenerator::Comparison(Condition cc,
// Perform non-smi comparison by stub.
// CompareStub takes arguments in r0 and r1, returns <0, >0 or 0 in r0.
// We call with 0 args because there are 0 on the stack.
CompareStub stub(cc, strict, kBothCouldBeNaN, true, lhs, rhs);
CompareStub stub(cc, strict, NO_SMI_COMPARE_IN_STUB, lhs, rhs);
frame_->CallStub(&stub, 0);
__ cmp(r0, Operand(0, RelocInfo::NONE));
exit.Jump();
Expand Down Expand Up @@ -5985,6 +5985,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
GenericUnaryOpStub stub(
Token::SUB,
overwrite,
NO_UNARY_FLAGS,
no_negative_zero ? kIgnoreNegativeZero : kStrictNegativeZero);
frame_->CallStub(&stub, 0);
frame_->EmitPush(r0); // r0 has result
Expand All @@ -6009,7 +6010,9 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
not_smi_label.Bind();
frame_->SpillAll();
__ Move(r0, tos);
GenericUnaryOpStub stub(Token::BIT_NOT, overwrite);
GenericUnaryOpStub stub(Token::BIT_NOT,
overwrite,
NO_UNARY_SMI_CODE_IN_STUB);
frame_->CallStub(&stub, 0);
frame_->EmitPush(r0);

Expand Down
29 changes: 21 additions & 8 deletions src/arm/full-codegen-arm.cc
Expand Up @@ -672,7 +672,8 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {

// Perform the comparison as if via '==='.
__ ldr(r1, MemOperand(sp, 0)); // Switch value.
if (ShouldInlineSmiCase(Token::EQ_STRICT)) {
bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
if (inline_smi_code) {
Label slow_case;
__ orr(r2, r1, r0);
__ tst(r2, Operand(kSmiTagMask));
Expand All @@ -684,7 +685,10 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
__ bind(&slow_case);
}

CompareStub stub(eq, true, kBothCouldBeNaN, true, r1, r0);
CompareFlags flags = inline_smi_code
? NO_SMI_COMPARE_IN_STUB
: NO_COMPARE_FLAGS;
CompareStub stub(eq, true, flags, r1, r0);
__ CallStub(&stub);
__ cmp(r0, Operand(0, RelocInfo::NONE));
__ b(ne, &next_test);
Expand Down Expand Up @@ -2888,7 +2892,9 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
UnaryOverwriteMode overwrite =
can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
GenericUnaryOpStub stub(Token::SUB, overwrite);
GenericUnaryOpStub stub(Token::SUB,
overwrite,
NO_UNARY_FLAGS);
// GenericUnaryOpStub expects the argument to be in the
// accumulator register r0.
VisitForValue(expr->expression(), kAccumulator);
Expand All @@ -2903,7 +2909,8 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
// in the accumulator register r0.
VisitForValue(expr->expression(), kAccumulator);
Label done;
if (ShouldInlineSmiCase(expr->op())) {
bool inline_smi_code = ShouldInlineSmiCase(expr->op());
if (inline_smi_code) {
Label call_stub;
__ BranchOnNotSmi(r0, &call_stub);
__ mvn(r0, Operand(r0));
Expand All @@ -2913,9 +2920,12 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
__ bind(&call_stub);
}
bool overwrite = expr->expression()->ResultOverwriteAllowed();
UnaryOpFlags flags = inline_smi_code
? NO_UNARY_SMI_CODE_IN_STUB
: NO_UNARY_FLAGS;
UnaryOverwriteMode mode =
overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
GenericUnaryOpStub stub(Token::BIT_NOT, mode);
GenericUnaryOpStub stub(Token::BIT_NOT, mode, flags);
__ CallStub(&stub);
__ bind(&done);
Apply(context_, r0);
Expand Down Expand Up @@ -3292,16 +3302,19 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
UNREACHABLE();
}

if (ShouldInlineSmiCase(op)) {
bool inline_smi_code = ShouldInlineSmiCase(op);
if (inline_smi_code) {
Label slow_case;
__ orr(r2, r0, Operand(r1));
__ BranchOnNotSmi(r2, &slow_case);
__ cmp(r1, r0);
Split(cc, if_true, if_false, NULL);
__ bind(&slow_case);
}

CompareStub stub(cc, strict, kBothCouldBeNaN, true, r1, r0);
CompareFlags flags = inline_smi_code
? NO_SMI_COMPARE_IN_STUB
: NO_COMPARE_FLAGS;
CompareStub stub(cc, strict, flags, r1, r0);
__ CallStub(&stub);
__ cmp(r0, Operand(0, RelocInfo::NONE));
Split(cc, if_true, if_false, fall_through);
Expand Down
9 changes: 0 additions & 9 deletions src/arm/macro-assembler-arm.cc
Expand Up @@ -1242,15 +1242,6 @@ void MacroAssembler::TailCallStub(CodeStub* stub, Condition cond) {
}


// Return from a code stub, dropping all but one of its stack arguments.
// Emits a conditional sp adjustment plus a conditional return: if |cond|
// holds, argc - 1 words are popped and control returns to the caller
// (the header declares cond = al, i.e. unconditional, as the default).
// NOTE(review): presumably the remaining argument is consumed by the
// stub's own calling convention — confirm against callers.
void MacroAssembler::StubReturn(int argc, Condition cond) {
  // Only meaningful while generating a stub; stubs always have >= 1 argument.
  ASSERT(argc >= 1 && generating_stub());
  if (argc > 1) {
    // Drop the extra arguments. LeaveCC preserves the flags so that the
    // same |cond| still selects the conditional Ret below.
    add(sp, sp, Operand((argc - 1) * kPointerSize), LeaveCC, cond);
  }
  Ret(cond);
}


void MacroAssembler::IllegalOperation(int num_arguments) {
if (num_arguments > 0) {
add(sp, sp, Operand(num_arguments * kPointerSize));
Expand Down
3 changes: 0 additions & 3 deletions src/arm/macro-assembler-arm.h
Expand Up @@ -531,9 +531,6 @@ class MacroAssembler: public Assembler {
// Call a code stub.
void TailCallStub(CodeStub* stub, Condition cond = al);

// Return from a code stub after popping its arguments.
void StubReturn(int argc, Condition cond = al);

// Call a runtime routine.
void CallRuntime(Runtime::Function* f, int num_arguments);

Expand Down

0 comments on commit 99c1700

Please sign in to comment.