Diffstat (limited to 'deps/v8/src/wasm/jump-table-assembler.h')
-rw-r--r--  deps/v8/src/wasm/jump-table-assembler.h | 100
1 file changed, 68 insertions(+), 32 deletions(-)
diff --git a/deps/v8/src/wasm/jump-table-assembler.h b/deps/v8/src/wasm/jump-table-assembler.h
index eef9fea167..379a547b55 100644
--- a/deps/v8/src/wasm/jump-table-assembler.h
+++ b/deps/v8/src/wasm/jump-table-assembler.h
@@ -17,7 +17,16 @@ namespace wasm {
// each slot containing a dispatch to the currently published {WasmCode} that
// corresponds to the function.
//
-// Note that the table is split into lines of fixed size, with lines laid out
+// In addition to this main jump table, there exist special jump tables for
+// other purposes:
+// - the runtime stub table contains one entry per wasm runtime stub (see
+//   {WasmCode::RuntimeStubId}), which jumps to the corresponding embedded
+//   builtin.
+// - the lazy compile table contains one entry per wasm function, which jumps
+//   to the common {WasmCompileLazy} builtin and passes the function index
+//   that was invoked.
+//
+// The main jump table is split into lines of fixed size, with lines laid out
// consecutively within the executable memory of the {NativeModule}. The slots
// in turn are consecutive within a line, but do not cross line boundaries.
//
@@ -27,6 +36,7 @@ namespace wasm {
//
// The above illustrates jump table lines {Li} containing slots {Si} with each
// line containing {n} slots and some padding {x} for alignment purposes.
+// The other jump tables are not split into lines; their slots are simply
+// consecutive.
class V8_EXPORT_PRIVATE JumpTableAssembler : public MacroAssembler {
public:
// Translate an offset into the continuous jump table to a jump table index.
@@ -39,7 +49,7 @@ class V8_EXPORT_PRIVATE JumpTableAssembler : public MacroAssembler {
}
// Translate a jump table index to an offset into the continuous jump table.
- static uint32_t SlotIndexToOffset(uint32_t slot_index) {
+ static uint32_t JumpSlotIndexToOffset(uint32_t slot_index) {
uint32_t line_index = slot_index / kJumpTableSlotsPerLine;
uint32_t line_offset =
(slot_index % kJumpTableSlotsPerLine) * kJumpTableSlotSize;
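
To make the mapping concrete, here is a standalone sketch (not V8 code; all names are local to the sketch) using the x64 constants introduced further down in this patch: 64-byte lines and 5-byte slots give 12 slots plus 4 padding bytes per line. The final return statement falls outside the diff context; it presumably combines the line and in-line parts as shown.

    #include <cassert>
    #include <cstdint>

    // Standalone model of JumpSlotIndexToOffset with the x64 constants
    // from this patch.
    constexpr uint32_t kJumpTableLineSize = 64;
    constexpr uint32_t kJumpTableSlotSize = 5;
    constexpr uint32_t kJumpTableSlotsPerLine =
        kJumpTableLineSize / kJumpTableSlotSize;  // 12 slots, 4 padding bytes

    constexpr uint32_t JumpSlotIndexToOffset(uint32_t slot_index) {
      uint32_t line_index = slot_index / kJumpTableSlotsPerLine;
      uint32_t line_offset =
          (slot_index % kJumpTableSlotsPerLine) * kJumpTableSlotSize;
      return line_index * kJumpTableLineSize + line_offset;
    }

    int main() {
      assert(JumpSlotIndexToOffset(11) == 55);  // last slot of line 0
      assert(JumpSlotIndexToOffset(12) == 64);  // first slot of line 1
      assert(JumpSlotIndexToOffset(13) == 69);  // 64 + 1 * 5
      return 0;
    }
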
@@ -60,40 +70,56 @@ class V8_EXPORT_PRIVATE JumpTableAssembler : public MacroAssembler {
return slot_index * kJumpTableStubSlotSize;
}
+ // Translate a slot index to an offset into the lazy compile table.
+ static uint32_t LazyCompileSlotIndexToOffset(uint32_t slot_index) {
+ return slot_index * kLazyCompileTableSlotSize;
+ }
+
// Determine the size of a jump table containing only runtime stub slots.
static constexpr uint32_t SizeForNumberOfStubSlots(uint32_t slot_count) {
return slot_count * kJumpTableStubSlotSize;
}
- static void EmitLazyCompileJumpSlot(Address base, uint32_t slot_index,
- uint32_t func_index,
- Address lazy_compile_target,
- WasmCode::FlushICache flush_i_cache) {
- Address slot = base + SlotIndexToOffset(slot_index);
- JumpTableAssembler jtasm(slot);
- jtasm.EmitLazyCompileJumpSlot(func_index, lazy_compile_target);
- jtasm.NopBytes(kJumpTableSlotSize - jtasm.pc_offset());
- if (flush_i_cache) {
- FlushInstructionCache(slot, kJumpTableSlotSize);
+ // Determine the size of a lazy compile table.
+ static constexpr uint32_t SizeForNumberOfLazyFunctions(uint32_t slot_count) {
+ return slot_count * kLazyCompileTableSlotSize;
+ }
+
+ static void GenerateLazyCompileTable(Address base, uint32_t num_slots,
+ uint32_t num_imported_functions,
+ Address wasm_compile_lazy_target) {
+ uint32_t lazy_compile_table_size = num_slots * kLazyCompileTableSlotSize;
+ // Assume enough space, so the Assembler does not try to grow the buffer.
+ JumpTableAssembler jtasm(base, lazy_compile_table_size + 256);
+ for (uint32_t slot_index = 0; slot_index < num_slots; ++slot_index) {
+ DCHECK_EQ(slot_index * kLazyCompileTableSlotSize, jtasm.pc_offset());
+ jtasm.EmitLazyCompileJumpSlot(slot_index + num_imported_functions,
+ wasm_compile_lazy_target);
}
+ DCHECK_EQ(lazy_compile_table_size, jtasm.pc_offset());
+ FlushInstructionCache(base, lazy_compile_table_size);
}
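
The index shift in the loop is the interesting part: slot_index + num_imported_functions implies the table holds no slots for imported functions (which are never compiled lazily), so slot 0 stands for the first module-defined function. A standalone model of that bookkeeping (not V8 code; names are local to the sketch):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    // Each fixed-size slot jumps to the shared WasmCompileLazy builtin and
    // passes its function index; this models only the index/offset layout.
    constexpr uint32_t kLazyCompileTableSlotSize = 10;  // x64 value from this patch

    struct SlotModel {
      uint32_t offset;      // byte offset of the slot within the table
      uint32_t func_index;  // function index the slot would pass on
    };

    std::vector<SlotModel> LazyCompileTableModel(uint32_t num_slots,
                                                 uint32_t num_imported_functions) {
      std::vector<SlotModel> table;
      for (uint32_t slot_index = 0; slot_index < num_slots; ++slot_index) {
        table.push_back({slot_index * kLazyCompileTableSlotSize,
                         slot_index + num_imported_functions});
      }
      return table;
    }

    int main() {
      auto table = LazyCompileTableModel(/*num_slots=*/3,
                                         /*num_imported_functions=*/2);
      assert(table[0].func_index == 2);  // first non-imported function
      assert(table[2].offset == 20);     // slots are densely packed
      return 0;
    }
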
- static void EmitRuntimeStubSlot(Address base, uint32_t slot_index,
- Address builtin_target,
- WasmCode::FlushICache flush_i_cache) {
- Address slot = base + StubSlotIndexToOffset(slot_index);
- JumpTableAssembler jtasm(slot);
- jtasm.EmitRuntimeStubSlot(builtin_target);
- jtasm.NopBytes(kJumpTableStubSlotSize - jtasm.pc_offset());
- if (flush_i_cache) {
- FlushInstructionCache(slot, kJumpTableStubSlotSize);
+ static void GenerateRuntimeStubTable(Address base, Address* targets,
+ int num_stubs) {
+ uint32_t table_size = num_stubs * kJumpTableStubSlotSize;
+ // Assume enough space, so the Assembler does not try to grow the buffer.
+ JumpTableAssembler jtasm(base, table_size + 256);
+ int offset = 0;
+ for (int index = 0; index < num_stubs; ++index) {
+ DCHECK_EQ(offset, StubSlotIndexToOffset(index));
+ DCHECK_EQ(offset, jtasm.pc_offset());
+ jtasm.EmitRuntimeStubSlot(targets[index]);
+ offset += kJumpTableStubSlotSize;
+ jtasm.NopBytes(offset - jtasm.pc_offset());
}
+ FlushInstructionCache(base, table_size);
}
static void PatchJumpTableSlot(Address base, uint32_t slot_index,
Address new_target,
WasmCode::FlushICache flush_i_cache) {
- Address slot = base + SlotIndexToOffset(slot_index);
+ Address slot = base + JumpSlotIndexToOffset(slot_index);
JumpTableAssembler jtasm(slot);
jtasm.EmitJumpSlot(new_target);
jtasm.NopBytes(kJumpTableSlotSize - jtasm.pc_offset());
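
Patching updates one published slot in place while its neighbors stay live; the icache flush guarded by flush_i_cache falls outside the diff context shown here. A standalone model of the in-place overwrite (not V8 code; names are local to the sketch):

    #include <cassert>
    #include <cstdint>
    #include <vector>

    constexpr uint32_t kJumpTableLineSize = 64;  // x64 values from this patch
    constexpr uint32_t kJumpTableSlotSize = 5;
    constexpr uint32_t kJumpTableSlotsPerLine =
        kJumpTableLineSize / kJumpTableSlotSize;

    // Overwrite exactly one slot, leaving the rest of the table untouched.
    void PatchSlotModel(std::vector<uint8_t>& table, uint32_t slot_index,
                        uint8_t new_byte) {
      uint32_t offset =
          (slot_index / kJumpTableSlotsPerLine) * kJumpTableLineSize +
          (slot_index % kJumpTableSlotsPerLine) * kJumpTableSlotSize;
      for (uint32_t i = 0; i < kJumpTableSlotSize; ++i) {
        table[offset + i] = new_byte;  // stand-in for the re-emitted jump
      }
      // A real implementation would flush the instruction cache here.
    }

    int main() {
      std::vector<uint8_t> table(2 * kJumpTableLineSize, 0x00);
      PatchSlotModel(table, 13, 0xEE);  // second slot of line 1, offset 69
      assert(table[69] == 0xEE && table[73] == 0xEE);  // slot bytes rewritten
      assert(table[68] == 0x00 && table[74] == 0x00);  // neighbors untouched
      return 0;
    }
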
@@ -115,44 +141,54 @@ class V8_EXPORT_PRIVATE JumpTableAssembler : public MacroAssembler {
// boundaries. The jump table line size has been chosen to satisfy this.
#if V8_TARGET_ARCH_X64
static constexpr int kJumpTableLineSize = 64;
- static constexpr int kJumpTableSlotSize = 10;
+ static constexpr int kJumpTableSlotSize = 5;
+ static constexpr int kLazyCompileTableSlotSize = 10;
static constexpr int kJumpTableStubSlotSize = 18;
#elif V8_TARGET_ARCH_IA32
static constexpr int kJumpTableLineSize = 64;
- static constexpr int kJumpTableSlotSize = 10;
+ static constexpr int kJumpTableSlotSize = 5;
+ static constexpr int kLazyCompileTableSlotSize = 10;
static constexpr int kJumpTableStubSlotSize = 10;
#elif V8_TARGET_ARCH_ARM
- static constexpr int kJumpTableLineSize = 5 * kInstrSize;
- static constexpr int kJumpTableSlotSize = 5 * kInstrSize;
- static constexpr int kJumpTableStubSlotSize = 5 * kInstrSize;
-#elif V8_TARGET_ARCH_ARM64
static constexpr int kJumpTableLineSize = 3 * kInstrSize;
static constexpr int kJumpTableSlotSize = 3 * kInstrSize;
+ static constexpr int kLazyCompileTableSlotSize = 5 * kInstrSize;
+ static constexpr int kJumpTableStubSlotSize = 5 * kInstrSize;
+#elif V8_TARGET_ARCH_ARM64
+ static constexpr int kJumpTableLineSize = 1 * kInstrSize;
+ static constexpr int kJumpTableSlotSize = 1 * kInstrSize;
+ static constexpr int kLazyCompileTableSlotSize = 3 * kInstrSize;
static constexpr int kJumpTableStubSlotSize = 6 * kInstrSize;
#elif V8_TARGET_ARCH_S390X
static constexpr int kJumpTableLineSize = 128;
- static constexpr int kJumpTableSlotSize = 20;
+ static constexpr int kJumpTableSlotSize = 14;
+ static constexpr int kLazyCompileTableSlotSize = 20;
static constexpr int kJumpTableStubSlotSize = 14;
#elif V8_TARGET_ARCH_PPC64
static constexpr int kJumpTableLineSize = 64;
- static constexpr int kJumpTableSlotSize = 48;
+ static constexpr int kJumpTableSlotSize = 7 * kInstrSize;
+ static constexpr int kLazyCompileTableSlotSize = 12 * kInstrSize;
static constexpr int kJumpTableStubSlotSize = 7 * kInstrSize;
#elif V8_TARGET_ARCH_MIPS
static constexpr int kJumpTableLineSize = 6 * kInstrSize;
- static constexpr int kJumpTableSlotSize = 6 * kInstrSize;
+ static constexpr int kJumpTableSlotSize = 4 * kInstrSize;
+ static constexpr int kLazyCompileTableSlotSize = 6 * kInstrSize;
static constexpr int kJumpTableStubSlotSize = 4 * kInstrSize;
#elif V8_TARGET_ARCH_MIPS64
static constexpr int kJumpTableLineSize = 8 * kInstrSize;
- static constexpr int kJumpTableSlotSize = 8 * kInstrSize;
+ static constexpr int kJumpTableSlotSize = 6 * kInstrSize;
+ static constexpr int kLazyCompileTableSlotSize = 8 * kInstrSize;
static constexpr int kJumpTableStubSlotSize = 6 * kInstrSize;
#else
static constexpr int kJumpTableLineSize = 1;
static constexpr int kJumpTableSlotSize = 1;
+ static constexpr int kLazyCompileTableSlotSize = 1;
static constexpr int kJumpTableStubSlotSize = 1;
#endif
static constexpr int kJumpTableSlotsPerLine =
kJumpTableLineSize / kJumpTableSlotSize;
+ STATIC_ASSERT(kJumpTableSlotsPerLine >= 1);
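
On x64 the main-table slot shrinks from 10 to 5 bytes in this patch, plausibly because the function-index setup moved into the separate lazy compile table and a published slot now needs only a single jmp rel32. A compile-time sketch (not V8 code) of the invariant the new STATIC_ASSERT guards, with the new x64 constants:

    // Slots must not cross line boundaries; padding absorbs the remainder.
    constexpr int kJumpTableLineSize = 64;
    constexpr int kJumpTableSlotSize = 5;
    constexpr int kJumpTableSlotsPerLine =
        kJumpTableLineSize / kJumpTableSlotSize;

    static_assert(kJumpTableSlotsPerLine >= 1, "a line must hold a slot");
    static_assert(kJumpTableSlotsPerLine == 12, "64 / 5 leaves 12 full slots");
    // 12 * 5 == 60, so 4 bytes of {x} padding close out each 64-byte line.
    static_assert(kJumpTableSlotsPerLine * kJumpTableSlotSize <=
                      kJumpTableLineSize,
                  "slots fit within a line");
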
// {JumpTableAssembler} is never used during snapshot generation, and its code
// must be independent of the code range of any isolate anyway. Just ensure