Skip to content

Commit

Permalink
[wasm] Introduce jump table index calculation helpers.
Browse files Browse the repository at this point in the history
This is in preparation for making sure that jump table slots don't cross
cache line boundaries. It only introduces helper functions for back
and forth conversion between "index" and "offset", and should not make
any functional changes yet.

R=ahaas@chromium.org
BUG=v8:8018

Change-Id: I6ab525f9b89a6a15414c043a54c9fffb527a1ab6
Reviewed-on: https://chromium-review.googlesource.com/1163517
Reviewed-by: Andreas Haas <ahaas@chromium.org>
Commit-Queue: Michael Starzinger <mstarzinger@chromium.org>
Cr-Commit-Position: refs/heads/master@{#54922}
  • Loading branch information
Michael Starzinger authored and Commit Bot committed Aug 6, 2018
1 parent a12cbd3 commit 3b3f2bb
Show file tree
Hide file tree
Showing 2 changed files with 55 additions and 31 deletions.
52 changes: 42 additions & 10 deletions src/wasm/jump-table-assembler.h
Expand Up @@ -27,6 +27,22 @@ class JumpTableAssembler : public TurboAssembler {
reinterpret_cast<void*>(slot_addr), size,
CodeObjectRequired::kNo) {}

// To allow concurrent patching of the jump table entries we need to ensure
// that slots do not cross cache-line boundaries. Hence translation between
// slot offsets and slot indexes is encapsulated in the following methods.
// Translate a byte offset within the jump table into the corresponding
// slot index. The offset must be slot-aligned.
static uint32_t SlotOffsetToIndex(uint32_t slot_offset) {
  DCHECK_EQ(0, slot_offset % kJumpTableSlotSize);
  const uint32_t index = slot_offset / kJumpTableSlotSize;
  return index;
}
// Translate a slot index into the byte offset of that slot within the
// jump table.
static uint32_t SlotIndexToOffset(uint32_t slot_index) {
  const uint32_t offset = slot_index * kJumpTableSlotSize;
  return offset;
}

// Determine the size of a jump table containing the given number of slots.
// Determine the size (in bytes) of a jump table containing the given
// number of slots. The multiplication is performed in size_t so that
// large slot counts cannot overflow 32-bit arithmetic before the result
// is widened to the return type.
static size_t SizeForNumberOfSlots(uint32_t slot_count) {
  return static_cast<size_t>(slot_count) * kJumpTableSlotSize;
}

#if V8_TARGET_ARCH_X64
static constexpr int kJumpTableSlotSize = 18;
#elif V8_TARGET_ARCH_IA32
Expand All @@ -51,22 +67,38 @@ class JumpTableAssembler : public TurboAssembler {
static constexpr int kJumpTableSlotSize = 1;
#endif

void EmitLazyCompileJumpSlot(uint32_t func_index,
Address lazy_compile_target);

void EmitJumpSlot(Address target);

void NopBytes(int bytes);
// Write a jump to the lazy-compile stub (carrying {func_index}) into the
// given slot of the jump table at {base}, pad the remainder of the slot
// with nops, and optionally flush the instruction cache for the slot.
static void EmitLazyCompileJumpSlot(Address base, uint32_t slot_index,
                                    uint32_t func_index,
                                    Address lazy_compile_target,
                                    WasmCode::FlushICache flush_i_cache) {
  const Address slot_start = base + SlotIndexToOffset(slot_index);
  JumpTableAssembler assembler(slot_start);
  assembler.EmitLazyCompileJumpSlot(func_index, lazy_compile_target);
  // Fill the rest of the slot so all slots occupy exactly
  // kJumpTableSlotSize bytes.
  assembler.NopBytes(kJumpTableSlotSize - assembler.pc_offset());
  if (flush_i_cache) {
    Assembler::FlushICache(slot_start, kJumpTableSlotSize);
  }
}

static void PatchJumpTableSlot(Address slot, Address new_target,
static void PatchJumpTableSlot(Address base, uint32_t slot_index,
Address new_target,
WasmCode::FlushICache flush_i_cache) {
JumpTableAssembler jsasm(slot);
jsasm.EmitJumpSlot(new_target);
jsasm.NopBytes(kJumpTableSlotSize - jsasm.pc_offset());
Address slot = base + SlotIndexToOffset(slot_index);
JumpTableAssembler jtasm(slot);
jtasm.EmitJumpSlot(new_target);
jtasm.NopBytes(kJumpTableSlotSize - jtasm.pc_offset());
if (flush_i_cache) {
Assembler::FlushICache(slot, kJumpTableSlotSize);
}
}

private:
// Instance-level emitters used by the static helpers above; each one
// appends machine code at the assembler's current position.

// Emit the code for one slot that jumps to the lazy-compile stub,
// passing along {func_index} so the stub knows which function to compile.
void EmitLazyCompileJumpSlot(uint32_t func_index,
                             Address lazy_compile_target);

// Emit the code for one slot that jumps directly to {target}.
void EmitJumpSlot(Address target);

// Emit {bytes} bytes of nop padding (used to fill a slot up to
// kJumpTableSlotSize).
void NopBytes(int bytes);
};

} // namespace wasm
Expand Down
34 changes: 13 additions & 21 deletions src/wasm/wasm-code-manager.cc
Expand Up @@ -417,16 +417,11 @@ void NativeModule::SetLazyBuiltin(Handle<Code> code) {
WasmCode* lazy_builtin = AddAnonymousCode(code, WasmCode::kLazyStub);
// Fill the jump table with jumps to the lazy compile stub.
Address lazy_compile_target = lazy_builtin->instruction_start();
JumpTableAssembler jtasm(
jump_table_->instruction_start(),
static_cast<int>(jump_table_->instructions().size()) + 256);
for (uint32_t i = 0; i < num_wasm_functions; ++i) {
// Check that the offset in the jump table increases as expected.
DCHECK_EQ(i * JumpTableAssembler::kJumpTableSlotSize, jtasm.pc_offset());
jtasm.EmitLazyCompileJumpSlot(i + module_->num_imported_functions,
lazy_compile_target);
jtasm.NopBytes((i + 1) * JumpTableAssembler::kJumpTableSlotSize -
jtasm.pc_offset());
JumpTableAssembler::EmitLazyCompileJumpSlot(
jump_table_->instruction_start(), i,
i + module_->num_imported_functions, lazy_compile_target,
WasmCode::kNoFlushICache);
}
Assembler::FlushICache(jump_table_->instructions().start(),
jump_table_->instructions().size());
Expand Down Expand Up @@ -600,7 +595,7 @@ WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t num_wasm_functions) {
// Only call this if we really need a jump table.
DCHECK_LT(0, num_wasm_functions);
OwnedVector<byte> instructions = OwnedVector<byte>::New(
num_wasm_functions * JumpTableAssembler::kJumpTableSlotSize);
JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
memset(instructions.start(), 0, instructions.size());
return AddOwnedCode(Nothing<uint32_t>(), // index
instructions.as_vector(), // instructions
Expand All @@ -619,9 +614,8 @@ void NativeModule::PatchJumpTable(uint32_t func_index, Address target,
WasmCode::FlushICache flush_icache) {
DCHECK_LE(module_->num_imported_functions, func_index);
uint32_t slot_idx = func_index - module_->num_imported_functions;
Address jump_table_slot = jump_table_->instruction_start() +
slot_idx * JumpTableAssembler::kJumpTableSlotSize;
JumpTableAssembler::PatchJumpTableSlot(jump_table_slot, target, flush_icache);
JumpTableAssembler::PatchJumpTableSlot(jump_table_->instruction_start(),
slot_idx, target, flush_icache);
}

Address NativeModule::AllocateForCode(size_t size) {
Expand Down Expand Up @@ -710,18 +704,17 @@ Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
// Return the jump table slot for that function index.
DCHECK_NOT_NULL(jump_table_);
uint32_t slot_idx = func_index - module_->num_imported_functions;
DCHECK_LT(slot_idx, jump_table_->instructions().size() /
JumpTableAssembler::kJumpTableSlotSize);
return jump_table_->instruction_start() +
slot_idx * JumpTableAssembler::kJumpTableSlotSize;
uint32_t slot_offset = JumpTableAssembler::SlotIndexToOffset(slot_idx);
DCHECK_LT(slot_offset, jump_table_->instructions().size());
return jump_table_->instruction_start() + slot_offset;
}

// Map an address of a jump-table slot back to the wasm function index
// whose slot it is.
uint32_t NativeModule::GetFunctionIndexFromJumpTableSlot(
    Address slot_address) const {
  DCHECK(is_jump_table_slot(slot_address));
  const uint32_t offset_in_table =
      static_cast<uint32_t>(slot_address - jump_table_->instruction_start());
  const uint32_t slot_index =
      JumpTableAssembler::SlotOffsetToIndex(offset_in_table);
  DCHECK_LT(slot_index, module_->num_declared_functions);
  return module_->num_imported_functions + slot_index;
}
Expand Down Expand Up @@ -839,8 +832,7 @@ size_t WasmCodeManager::EstimateNativeModuleSize(const WasmModule* module) {
(sizeof(WasmCode*) * num_wasm_functions /* code table size */) +
(sizeof(WasmCode) * num_wasm_functions /* code object size */) +
(kImportSize * module->num_imported_functions /* import size */) +
(JumpTableAssembler::kJumpTableSlotSize *
num_wasm_functions /* jump table size */);
(JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));

for (auto& function : module->functions) {
estimate += kCodeSizeMultiplier * function.code.length();
Expand Down

0 comments on commit 3b3f2bb

Please sign in to comment.