Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
Browse files

Improve our type feedback by recognizing never-executed IC calls for…

… binary operations.

In the case of inlined smi code in non-optimized code we could not
distinguish between the smi-only case and the case that the operation was
never executed.

With this change the first execution of a binary operation always jumps
to the stub which in turn patches the smi-check into the correct
conditional branch, so that we benefit from inlined smi code after the
first invocation.

A nop instruction after the call to the BinaryOpIC indicates that no
smi code was inlined. A "test eax" instruction says that there was smi
code inlined and encodes the delta to the patch site and the condition
code of the branch at the patch site to restore the original jump.

Review URL: http://codereview.chromium.org/5714001

git-svn-id: http://v8.googlecode.com/svn/branches/bleeding_edge@5970 ce2b1a6d-e550-0410-aec6-3dcde31c8c00
  • Loading branch information...
commit b16df72475b0a54e117113ca343791fa83a9013d 1 parent 65f98b1
fschneider@chromium.org authored
View
10 src/arm/ic-arm.cc
@@ -2360,10 +2360,8 @@ Condition CompareIC::ComputeCondition(Token::Value op) {
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
HandleScope scope;
Handle<Code> rewritten;
-#ifdef DEBUG
State previous_state = GetState();
-#endif
- State state = TargetState(x, y);
+ State state = TargetState(previous_state, x, y);
if (state == GENERIC) {
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS, r1, r0);
rewritten = stub.GetCode();
@@ -2383,6 +2381,12 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
#endif
}
+
+void PatchInlinedSmiCode(Address address) {
+ UNIMPLEMENTED();
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_ARM
View
7 src/full-codegen.h
@@ -38,6 +38,9 @@
namespace v8 {
namespace internal {
+// Forward declarations.
+class JumpPatchSite;
+
// AST node visitor which can tell whether a given statement will be breakable
// when the code is compiled by the full compiler in the debugger. This means
// that there will be an IC (load/store/call) in the code generated for the
@@ -533,6 +536,10 @@ class FullCodeGenerator: public AstVisitor {
// Helper for calling an IC stub.
void EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode);
+ // Helper for calling an IC stub with a patch site. Passing NULL for patch_site
+ // indicates no inlined smi code and emits a nop after the IC call.
+ void EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site);
+
// Set fields in the stack frame. Offsets are the frame pointer relative
// offsets defined in, e.g., StandardFrameConstants.
void StoreToFrameField(int frame_offset, Register value);
View
8 src/ia32/assembler-ia32.h
@@ -571,6 +571,14 @@ class Assembler : public Malloced {
static const byte kTestEaxByte = 0xA9;
// One byte opcode for test al, 0xXX.
static const byte kTestAlByte = 0xA8;
+ // One byte opcode for nop.
+ static const byte kNopByte = 0x90;
+
+ // One byte opcode for a short unconditional jump.
+ static const byte kJmpShortOpcode = 0xEB;
+ // One byte prefix for a short conditional jump.
+ static const byte kJccShortPrefix = 0x70;
+
// ---------------------------------------------------------------------------
// Code generation
View
4 src/ia32/code-stubs-ia32.cc
@@ -1366,8 +1366,8 @@ void TypeRecordingBinaryOpStub::GenerateSmiCode(MacroAssembler* masm,
if (op_ == Token::DIV || op_ == Token::MOD) {
left = eax;
right = ebx;
- __ mov(ebx, eax);
- __ mov(eax, edx);
+ __ mov(ebx, eax);
+ __ mov(eax, edx);
}
View
6 src/ia32/code-stubs-ia32.h
@@ -231,7 +231,8 @@ class TypeRecordingBinaryOpStub: public CodeStub {
ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
}
- TypeRecordingBinaryOpStub(int key,
+ TypeRecordingBinaryOpStub(
+ int key,
TRBinaryOpIC::TypeInfo operands_type,
TRBinaryOpIC::TypeInfo result_type = TRBinaryOpIC::UNINITIALIZED)
: op_(OpBits::decode(key)),
@@ -239,8 +240,7 @@ class TypeRecordingBinaryOpStub: public CodeStub {
use_sse3_(SSE3Bits::decode(key)),
operands_type_(operands_type),
result_type_(result_type),
- name_(NULL) {
- }
+ name_(NULL) { }
// Generate code to call the stub with the supplied arguments. This will add
// code at the call site to prepare arguments either in registers or on the
View
160 src/ia32/full-codegen-ia32.cc
@@ -41,6 +41,46 @@
namespace v8 {
namespace internal {
+
+class JumpPatchSite BASE_EMBEDDED {
+ public:
+ explicit JumpPatchSite(MacroAssembler* masm)
+ : masm_(masm) {
+#ifdef DEBUG
+ info_emitted_ = false;
+#endif
+ }
+
+ ~JumpPatchSite() {
+ ASSERT(patch_site_.is_bound() == info_emitted_);
+ }
+
+ void EmitJump(NearLabel* target) {
+ ASSERT(!patch_site_.is_bound() && !info_emitted_);
+ masm_->bind(&patch_site_);
+ masm_->jmp(target);
+ }
+
+ void EmitPatchInfo() {
+ int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(&patch_site_);
+ ASSERT(is_int8(delta_to_patch_site));
+ masm_->test(eax, Immediate(delta_to_patch_site));
+#ifdef DEBUG
+ info_emitted_ = true;
+#endif
+ }
+
+ bool is_bound() const { return patch_site_.is_bound(); }
+
+ private:
+ MacroAssembler* masm_;
+ Label patch_site_;
+#ifdef DEBUG
+ bool info_emitted_;
+#endif
+};
+
+
#define __ ACCESS_MASM(masm_)
// Generate code for a JS function. On entry to the function the receiver
@@ -715,12 +755,13 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Perform the comparison as if via '==='.
__ mov(edx, Operand(esp, 0)); // Switch value.
bool inline_smi_code = ShouldInlineSmiCase(Token::EQ_STRICT);
+ JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
NearLabel slow_case;
__ mov(ecx, edx);
__ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask));
- __ j(not_zero, &slow_case, not_taken);
+ patch_site.EmitJump(&slow_case);
__ cmp(edx, Operand(eax));
__ j(not_equal, &next_test);
__ Drop(1); // Switch value is no longer needed.
@@ -730,9 +771,8 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
-
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- __ call(ic, RelocInfo::CODE_TARGET);
+ EmitCallIC(ic, &patch_site);
__ test(eax, Operand(eax));
__ j(not_equal, &next_test);
@@ -1552,12 +1592,13 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
OverwriteMode mode,
bool left_is_constant_smi,
Smi* value) {
- NearLabel call_stub;
- Label done;
+ NearLabel call_stub, done;
__ add(Operand(eax), Immediate(value));
__ j(overflow, &call_stub);
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &done);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJump(&call_stub);
+ __ jmp(&done);
// Undo the optimistic add operation and call the shared stub.
__ bind(&call_stub);
@@ -1570,7 +1611,8 @@ void FullCodeGenerator::EmitConstantSmiAdd(Expression* expr,
__ mov(edx, eax);
__ mov(eax, Immediate(value));
}
- __ CallStub(&stub);
+ EmitCallIC(stub.GetCode(), &patch_site);
+
__ bind(&done);
context()->Plug(eax);
}
@@ -1580,7 +1622,7 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
OverwriteMode mode,
bool left_is_constant_smi,
Smi* value) {
- Label call_stub, done;
+ NearLabel call_stub, done;
if (left_is_constant_smi) {
__ mov(ecx, eax);
__ mov(eax, Immediate(value));
@@ -1590,7 +1632,9 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
}
__ j(overflow, &call_stub);
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &done);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJump(&call_stub);
+ __ jmp(&done);
__ bind(&call_stub);
if (left_is_constant_smi) {
@@ -1603,7 +1647,8 @@ void FullCodeGenerator::EmitConstantSmiSub(Expression* expr,
}
Token::Value op = Token::SUB;
TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
+ EmitCallIC(stub.GetCode(), &patch_site);
+
__ bind(&done);
context()->Plug(eax);
}
@@ -1613,20 +1658,15 @@ void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Smi* value) {
- Label call_stub, smi_case, done;
+ NearLabel call_stub, done;
int shift_value = value->value() & 0x1f;
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &smi_case);
-
- __ bind(&call_stub);
- __ mov(edx, eax);
- __ mov(eax, Immediate(value));
- TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
- __ jmp(&done);
+ // Patch site.
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJump(&call_stub);
- __ bind(&smi_case);
+ // Smi case.
switch (op) {
case Token::SHL:
if (shift_value != 0) {
@@ -1665,6 +1705,14 @@ void FullCodeGenerator::EmitConstantSmiShiftOp(Expression* expr,
default:
UNREACHABLE();
}
+ __ jmp(&done);
+
+ // Call stub.
+ __ bind(&call_stub);
+ __ mov(edx, eax);
+ __ mov(eax, Immediate(value));
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
context()->Plug(eax);
@@ -1675,18 +1723,14 @@ void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
Token::Value op,
OverwriteMode mode,
Smi* value) {
- Label smi_case, done;
+ NearLabel call_stub, done;
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &smi_case);
-
- // The order of the arguments does not matter for bit-ops with a
- // constant operand.
- __ mov(edx, Immediate(value));
- TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
- __ jmp(&done);
+ // Patch site. The first invocation of the stub will be patch the jmp with
+ // the required conditional jump.
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJump(&call_stub);
- __ bind(&smi_case);
+ // Smi case.
switch (op) {
case Token::BIT_OR:
__ or_(Operand(eax), Immediate(value));
@@ -1700,6 +1744,14 @@ void FullCodeGenerator::EmitConstantSmiBitOp(Expression* expr,
default:
UNREACHABLE();
}
+ __ jmp(&done);
+
+ // The order of the arguments does not matter for bit-ops with a
+ // constant operand.
+ __ bind(&call_stub);
+ __ mov(edx, Immediate(value));
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
context()->Plug(eax);
@@ -1753,20 +1805,15 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
// Do combined smi check of the operands. Left operand is on the
// stack. Right operand is in eax.
- Label done, stub_call, smi_case;
+ NearLabel done, stub_call;
__ pop(edx);
__ mov(ecx, eax);
__ or_(eax, Operand(edx));
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &smi_case);
+ JumpPatchSite patch_site(masm_);
+ patch_site.EmitJump(&stub_call);
- __ bind(&stub_call);
- __ mov(eax, ecx);
- TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
- __ jmp(&done);
-
- __ bind(&smi_case);
+ // Smi case.
__ mov(eax, edx); // Copy left operand in case of a stub call.
switch (op) {
@@ -1834,6 +1881,12 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
default:
UNREACHABLE();
}
+ __ jmp(&done);
+
+ __ bind(&stub_call);
+ __ mov(eax, ecx);
+ TypeRecordingBinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site);
__ bind(&done);
context()->Plug(eax);
@@ -1844,7 +1897,7 @@ void FullCodeGenerator::EmitBinaryOp(Token::Value op,
OverwriteMode mode) {
__ pop(edx);
TypeRecordingBinaryOpStub stub(op, mode);
- __ CallStub(&stub);
+ EmitCallIC(stub.GetCode(), NULL); // NULL signals no inlined smi code.
context()->Plug(eax);
}
@@ -3709,6 +3762,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Inline smi case if we are in a loop.
NearLabel stub_call;
+ JumpPatchSite patch_site(masm_);
Label done;
if (ShouldInlineSmiCase(expr->op())) {
if (expr->op() == Token::INC) {
@@ -3720,7 +3774,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// We could eliminate this smi check if we split the code at
// the first smi check before calling ToNumber.
__ test(eax, Immediate(kSmiTagMask));
- __ j(zero, &done);
+ patch_site.EmitJump(&stub_call);
+ __ jmp(&done);
+
__ bind(&stub_call);
// Call stub. Undo operation first.
if (expr->op() == Token::INC) {
@@ -3738,9 +3794,9 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ mov(eax, Immediate(Smi::FromInt(1)));
TypeRecordingBinaryOpStub stub(expr->binary_op(),
NO_OVERWRITE);
- __ CallStub(&stub);
- __ bind(&done);
+ EmitCallIC(stub.GetCode(), &patch_site);
+ __ bind(&done);
// Store the value returned in eax.
switch (assign_type) {
case VARIABLE:
@@ -4005,21 +4061,23 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
}
bool inline_smi_code = ShouldInlineSmiCase(op);
+ JumpPatchSite patch_site(masm_);
if (inline_smi_code) {
NearLabel slow_case;
__ mov(ecx, Operand(edx));
__ or_(ecx, Operand(eax));
__ test(ecx, Immediate(kSmiTagMask));
- __ j(not_zero, &slow_case, not_taken);
+ patch_site.EmitJump(&slow_case);
__ cmp(edx, Operand(eax));
Split(cc, if_true, if_false, NULL);
__ bind(&slow_case);
}
// Record position and call the compare IC.
- Handle<Code> ic = CompareIC::GetUninitialized(op);
SetSourcePosition(expr->position());
- __ call(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = CompareIC::GetUninitialized(op);
+ EmitCallIC(ic, &patch_site);
+
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ test(eax, Operand(eax));
Split(cc, if_true, if_false, fall_through);
@@ -4123,6 +4181,16 @@ void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
}
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+ __ call(ic, RelocInfo::CODE_TARGET);
+ if (patch_site != NULL && patch_site->is_bound()) {
+ patch_site->EmitPatchInfo();
+ } else {
+ __ nop(); // Signals no inlined code.
+ }
+}
+
+
void FullCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
__ mov(Operand(ebp, frame_offset), value);
View
39 src/ia32/ic-ia32.cc
@@ -2052,10 +2052,9 @@ Condition CompareIC::ComputeCondition(Token::Value op) {
void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
HandleScope scope;
Handle<Code> rewritten;
-#ifdef DEBUG
State previous_state = GetState();
-#endif
- State state = TargetState(x, y);
+
+ State state = TargetState(previous_state, x, y);
if (state == GENERIC) {
CompareStub stub(GetCondition(), strict(), NO_COMPARE_FLAGS);
rewritten = stub.GetCode();
@@ -2073,6 +2072,40 @@ void CompareIC::UpdateCaches(Handle<Object> x, Handle<Object> y) {
Token::Name(op_));
}
#endif
+
+ // Activate inlined smi code.
+ if (previous_state == UNINITIALIZED) {
+ PatchInlinedSmiCode(address());
+ }
+}
+
+
+void PatchInlinedSmiCode(Address address) {
+ // The address of the instruction following the call.
+ Address test_instruction_address =
+ address + Assembler::kCallTargetAddressOffset;
+
+ // If the instruction following the call is not a test al, nothing
+ // was inlined.
+ if (*test_instruction_address != Assembler::kTestAlByte) {
+ ASSERT(*test_instruction_address == Assembler::kNopByte);
+ return;
+ }
+
+ Address delta_address = test_instruction_address + 1;
+ // The delta to the start of the map check instruction and the
+ // condition code uses at the patched jump.
+ int8_t delta = *reinterpret_cast<int8_t*>(delta_address);
+ if (FLAG_trace_ic) {
+ PrintF("[ patching ic at %p, test=%p, delta=%d\n",
+ address, test_instruction_address, delta);
+ }
+
+ // Patch with a short conditional jump. There must be an unconditional
+ // short jump at this position.
+ Address jmp_address = test_instruction_address - delta;
+ ASSERT(*jmp_address == Assembler::kJmpShortOpcode);
+ *jmp_address = static_cast<byte>(Assembler::kJccShortPrefix | not_zero);
}
View
7 src/ia32/lithium-codegen-ia32.cc
@@ -315,6 +315,13 @@ void LCodeGen::CallCode(Handle<Code> code,
__ call(code, mode);
RecordSafepoint(&no_pointers, Safepoint::kNoDeoptimizationIndex);
}
+
+ // Signal that we don't inline smi code before these stubs in the
+ // optimizing code generator.
+ if (code->kind() == Code::TYPE_RECORDING_BINARY_OP_IC ||
+ code->kind() == Code::COMPARE_IC) {
+ __ nop();
+ }
}
View
12 src/ic.cc
@@ -1951,7 +1951,7 @@ TRBinaryOpIC::State TRBinaryOpIC::ToState(TypeInfo type_info) {
TRBinaryOpIC::TypeInfo TRBinaryOpIC::JoinTypes(TRBinaryOpIC::TypeInfo x,
- TRBinaryOpIC::TypeInfo y) {
+ TRBinaryOpIC::TypeInfo y) {
if (x == UNINITIALIZED) return y;
if (y == UNINITIALIZED) return x;
if (x == STRING && y == STRING) return STRING;
@@ -2041,6 +2041,11 @@ MaybeObject* TypeRecordingBinaryOp_Patch(Arguments args) {
TRBinaryOpIC::GetName(result_type),
Token::Name(op));
}
+
+ // Activate inlined smi code.
+ if (previous_type == TRBinaryOpIC::UNINITIALIZED) {
+ PatchInlinedSmiCode(ic.address());
+ }
}
Handle<JSBuiltinsObject> builtins = Top::builtins();
@@ -2127,8 +2132,9 @@ const char* CompareIC::GetStateName(State state) {
}
-CompareIC::State CompareIC::TargetState(Handle<Object> x, Handle<Object> y) {
- State state = GetState();
+CompareIC::State CompareIC::TargetState(State state,
+ Handle<Object> x,
+ Handle<Object> y) {
if (state != UNINITIALIZED) return GENERIC;
if (x->IsSmi() && y->IsSmi()) return SMIS;
if (x->IsNumber() && y->IsNumber()) return HEAP_NUMBERS;
View
4 src/ic.h
@@ -582,7 +582,7 @@ class CompareIC: public IC {
static const char* GetStateName(State state);
private:
- State TargetState(Handle<Object> x, Handle<Object> y);
+ State TargetState(State state, Handle<Object> x, Handle<Object> y);
bool strict() const { return op_ == Token::EQ_STRICT; }
Condition GetCondition() const { return ComputeCondition(op_); }
@@ -591,6 +591,8 @@ class CompareIC: public IC {
Token::Value op_;
};
+// Helper for TRBinaryOpIC and CompareIC.
+void PatchInlinedSmiCode(Address address);
} } // namespace v8::internal
View
6 src/type-info.cc
@@ -142,6 +142,8 @@ TypeInfo TypeFeedbackOracle::CompareType(CompareOperation* expr, Side side) {
CompareIC::State state = static_cast<CompareIC::State>(code->compare_state());
switch (state) {
case CompareIC::UNINITIALIZED:
+ // Uninitialized state means never executed.
+ return unknown;
case CompareIC::SMIS:
return TypeInfo::Smi();
case CompareIC::HEAP_NUMBERS:
@@ -184,6 +186,8 @@ TypeInfo TypeFeedbackOracle::BinaryType(BinaryOperation* expr, Side side) {
switch (type) {
case TRBinaryOpIC::UNINITIALIZED:
+ // Uninitialized state means never executed.
+ return unknown;
case TRBinaryOpIC::SMI:
switch (result_type) {
case TRBinaryOpIC::UNINITIALIZED:
@@ -224,6 +228,8 @@ TypeInfo TypeFeedbackOracle::SwitchType(CaseClause* clause) {
CompareIC::State state = static_cast<CompareIC::State>(code->compare_state());
switch (state) {
case CompareIC::UNINITIALIZED:
+ // Uninitialized state means never executed.
+ return unknown;
case CompareIC::SMIS:
return TypeInfo::Smi();
case CompareIC::HEAP_NUMBERS:
Please sign in to comment.
Something went wrong with that request. Please try again.