Diffstat (limited to 'deps/v8/src/wasm/wasm-code-manager.cc')
 deps/v8/src/wasm/wasm-code-manager.cc | 388 ++++++++++++----------
 1 file changed, 214 insertions(+), 174 deletions(-)
diff --git a/deps/v8/src/wasm/wasm-code-manager.cc b/deps/v8/src/wasm/wasm-code-manager.cc
index f55508c7a6..0f9da37fa7 100644
--- a/deps/v8/src/wasm/wasm-code-manager.cc
+++ b/deps/v8/src/wasm/wasm-code-manager.cc
@@ -17,6 +17,8 @@
#include "src/macro-assembler.h"
#include "src/objects-inl.h"
#include "src/ostreams.h"
+#include "src/snapshot/embedded-data.h"
+#include "src/vector.h"
#include "src/wasm/compilation-environment.h"
#include "src/wasm/function-compiler.h"
#include "src/wasm/jump-table-assembler.h"
@@ -34,20 +36,6 @@ namespace v8 {
namespace internal {
namespace wasm {
-namespace {
-
-// Binary predicate to perform lookups in {NativeModule::owned_code_} with a
-// given address into a code object. Use with {std::upper_bound} for example.
-struct WasmCodeUniquePtrComparator {
- bool operator()(Address pc, const std::unique_ptr<WasmCode>& code) const {
- DCHECK_NE(kNullAddress, pc);
- DCHECK_NOT_NULL(code);
- return pc < code->instruction_start();
- }
-};
-
-} // namespace
-
void DisjointAllocationPool::Merge(base::AddressRegion region) {
auto dest_it = regions_.begin();
auto dest_end = regions_.end();
@@ -148,6 +136,9 @@ void WasmCode::RegisterTrapHandlerData() {
bool WasmCode::HasTrapHandlerIndex() const { return trap_handler_index_ >= 0; }
bool WasmCode::ShouldBeLogged(Isolate* isolate) {
+ // The return value is cached in {WasmEngine::IsolateData::log_codes}. Be
+ // sure to call {WasmEngine::EnableCodeLogging} if this return value would
+ // change for any isolate; otherwise we might lose code events.
return isolate->logger()->is_listening_to_code_events() ||
isolate->is_profiling();
}
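
The comment added above pins down a subtle contract: the predicate's result is cached per isolate, so any state change that would flip it must go through {WasmEngine::EnableCodeLogging}. A minimal, self-contained sketch of that caching pattern, using stand-in types rather than the actual V8 classes:

    // Stand-in types; a sketch of the per-isolate caching contract, not
    // the actual WasmEngine implementation.
    #include <unordered_map>

    struct IsolateStub {
      bool is_listening_to_code_events = false;
      bool is_profiling = false;
    };

    class EngineStub {
     public:
      // Mirrors WasmCode::ShouldBeLogged: a cheap predicate over isolate state.
      static bool ShouldLog(const IsolateStub& isolate) {
        return isolate.is_listening_to_code_events || isolate.is_profiling;
      }

      // Cached copy, analogous to WasmEngine::IsolateData::log_codes. The
      // cache is filled once and never re-evaluated on its own.
      bool LogCodes(const IsolateStub* isolate) {
        auto it = log_codes_.find(isolate);
        if (it == log_codes_.end()) {
          it = log_codes_.emplace(isolate, ShouldLog(*isolate)).first;
        }
        return it->second;
      }

      // Analogous to WasmEngine::EnableCodeLogging: this must be called
      // whenever the predicate would flip to true, or code events are lost.
      void EnableCodeLogging(const IsolateStub* isolate) {
        log_codes_[isolate] = true;
      }

     private:
      std::unordered_map<const IsolateStub*, bool> log_codes_;
    };
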
@@ -190,19 +181,6 @@ void WasmCode::LogCode(Isolate* isolate) const {
}
}
-const char* WasmCode::GetRuntimeStubName() const {
- DCHECK_EQ(WasmCode::kRuntimeStub, kind());
-#define RETURN_NAME(Name) \
- if (native_module_->runtime_stub_table_[WasmCode::k##Name] == this) { \
- return #Name; \
- }
-#define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
- WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
-#undef RETURN_NAME_TRAP
-#undef RETURN_NAME
- return "<unknown>";
-}
-
void WasmCode::Validate() const {
#ifdef DEBUG
// We expect certain relocation info modes to never appear in {WasmCode}
@@ -217,6 +195,7 @@ void WasmCode::Validate() const {
WasmCode* code = native_module_->Lookup(target);
CHECK_NOT_NULL(code);
CHECK_EQ(WasmCode::kJumpTable, code->kind());
+ CHECK_EQ(native_module()->jump_table_, code);
CHECK(code->contains(target));
break;
}
@@ -224,8 +203,14 @@ void WasmCode::Validate() const {
Address target = it.rinfo()->wasm_stub_call_address();
WasmCode* code = native_module_->Lookup(target);
CHECK_NOT_NULL(code);
+#ifdef V8_EMBEDDED_BUILTINS
+ CHECK_EQ(WasmCode::kJumpTable, code->kind());
+ CHECK_EQ(native_module()->runtime_stub_table_, code);
+ CHECK(code->contains(target));
+#else
CHECK_EQ(WasmCode::kRuntimeStub, code->kind());
CHECK_EQ(target, code->instruction_start());
+#endif
break;
}
case RelocInfo::INTERNAL_REFERENCE:
@@ -380,17 +365,18 @@ WasmCode::~WasmCode() {
}
}
-NativeModule::NativeModule(Isolate* isolate, const WasmFeatures& enabled,
+NativeModule::NativeModule(WasmEngine* engine, const WasmFeatures& enabled,
bool can_request_more, VirtualMemory code_space,
- WasmCodeManager* code_manager,
- std::shared_ptr<const WasmModule> module)
+ std::shared_ptr<const WasmModule> module,
+ std::shared_ptr<Counters> async_counters)
: enabled_features_(enabled),
module_(std::move(module)),
- compilation_state_(CompilationState::New(isolate, this)),
+ compilation_state_(
+ CompilationState::New(this, std::move(async_counters))),
import_wrapper_cache_(std::unique_ptr<WasmImportWrapperCache>(
new WasmImportWrapperCache(this))),
free_code_space_(code_space.region()),
- code_manager_(code_manager),
+ engine_(engine),
can_request_more_memory_(can_request_more),
use_trap_handler_(trap_handler::IsTrapHandlerEnabled() ? kUseTrapHandler
: kNoTrapHandler) {
@@ -403,7 +389,8 @@ NativeModule::NativeModule(Isolate* isolate, const WasmFeatures& enabled,
code_table_.reset(new WasmCode*[num_wasm_functions]);
memset(code_table_.get(), 0, num_wasm_functions * sizeof(WasmCode*));
- jump_table_ = CreateEmptyJumpTable(num_wasm_functions);
+ jump_table_ = CreateEmptyJumpTable(
+ JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
}
}
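
The constructor now passes a byte size rather than a slot count, which lets {CreateEmptyJumpTable} serve both the function jump table and, further down, the runtime-stub table. A simplified sketch of the size computation, assuming a fixed per-slot byte size (the real {JumpTableAssembler} value is architecture-specific):

    // Simplified sizing sketch; kJumpTableSlotSize is an assumed constant.
    #include <cstdint>

    constexpr uint32_t kJumpTableSlotSize = 16;

    constexpr uint32_t SizeForNumberOfSlots(uint32_t num_slots) {
      // One fixed-size, patchable slot per declared wasm function.
      return num_slots * kJumpTableSlotSize;
    }

    static_assert(SizeForNumberOfSlots(4) == 4 * kJumpTableSlotSize,
                  "table size scales linearly with the slot count");
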
@@ -411,12 +398,15 @@ void NativeModule::ReserveCodeTableForTesting(uint32_t max_functions) {
DCHECK_LE(num_functions(), max_functions);
WasmCode** new_table = new WasmCode*[max_functions];
memset(new_table, 0, max_functions * sizeof(*new_table));
- memcpy(new_table, code_table_.get(),
- module_->num_declared_functions * sizeof(*new_table));
+ if (module_->num_declared_functions > 0) {
+ memcpy(new_table, code_table_.get(),
+ module_->num_declared_functions * sizeof(*new_table));
+ }
code_table_.reset(new_table);
// Re-allocate jump table.
- jump_table_ = CreateEmptyJumpTable(max_functions);
+ jump_table_ = CreateEmptyJumpTable(
+ JumpTableAssembler::SizeForNumberOfSlots(max_functions));
}
void NativeModule::LogWasmCodes(Isolate* isolate) {
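
The new `num_declared_functions > 0` guard in {ReserveCodeTableForTesting} above is not just cosmetic: for a module with no declared functions, {code_table_} is null, and passing a null pointer to {memcpy} is undefined behavior even when the byte count is zero. The same pattern recurs in the guarded copies further down. As a sketch:

    // Sketch: guard zero-length copies so memcpy never sees a null pointer.
    #include <cstddef>
    #include <cstring>

    void SafeCopy(void* dst, const void* src, size_t bytes) {
      // memcpy requires valid pointers even for bytes == 0; an empty module
      // has a null code table, so skip the call entirely in that case.
      if (bytes > 0) memcpy(dst, src, bytes);
    }
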
@@ -436,13 +426,15 @@ CompilationEnv NativeModule::CreateCompilationEnv() const {
WasmCode* NativeModule::AddOwnedCode(
uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
- size_t safepoint_table_offset, size_t handler_table_offset,
- size_t constant_pool_offset, size_t code_comments_offset,
- size_t unpadded_binary_size,
+ uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
+ size_t handler_table_offset, size_t constant_pool_offset,
+ size_t code_comments_offset, size_t unpadded_binary_size,
OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
OwnedVector<const byte> reloc_info,
OwnedVector<const byte> source_position_table, WasmCode::Kind kind,
WasmCode::Tier tier) {
+ CHECK(!FLAG_jitless); // TODO(jgruber): Support wasm in jitless mode.
+
WasmCode* code;
{
// Both allocation and insertion in owned_code_ happen in the same critical
@@ -451,25 +443,13 @@ WasmCode* NativeModule::AddOwnedCode(
Vector<byte> executable_buffer = AllocateForCode(instructions.size());
// Ownership will be transferred to {owned_code_} below.
code = new WasmCode(
- this, index, executable_buffer, stack_slots, safepoint_table_offset,
- handler_table_offset, constant_pool_offset, code_comments_offset,
- unpadded_binary_size, std::move(protected_instructions),
- std::move(reloc_info), std::move(source_position_table), kind, tier);
-
- if (owned_code_.empty() ||
- code->instruction_start() > owned_code_.back()->instruction_start()) {
- // Common case.
- owned_code_.emplace_back(code);
- } else {
- // Slow but unlikely case.
- // TODO(mtrofin): We allocate in increasing address order, and
- // even if we end up with segmented memory, we may end up only with a few
- // large moves - if, for example, a new segment is below the current ones.
- auto insert_before = std::upper_bound(
- owned_code_.begin(), owned_code_.end(), code->instruction_start(),
- WasmCodeUniquePtrComparator{});
- owned_code_.emplace(insert_before, code);
- }
+ this, index, executable_buffer, stack_slots, tagged_parameter_slots,
+ safepoint_table_offset, handler_table_offset, constant_pool_offset,
+ code_comments_offset, unpadded_binary_size,
+ std::move(protected_instructions), std::move(reloc_info),
+ std::move(source_position_table), kind, tier);
+
+ owned_code_.emplace_back(code);
}
memcpy(reinterpret_cast<void*>(code->instruction_start()),
instructions.start(), instructions.size());
@@ -494,21 +474,53 @@ void NativeModule::SetLazyBuiltin(Handle<Code> code) {
i + module_->num_imported_functions, lazy_compile_target,
WasmCode::kNoFlushICache);
}
- Assembler::FlushICache(jump_table_->instructions().start(),
- jump_table_->instructions().size());
+ FlushInstructionCache(jump_table_->instructions().start(),
+ jump_table_->instructions().size());
}
+// TODO(mstarzinger): Remove the {Isolate} parameter once {V8_EMBEDDED_BUILTINS}
+// is removed and embedded builtins are no longer optional.
void NativeModule::SetRuntimeStubs(Isolate* isolate) {
+ DCHECK_EQ(kNullAddress, runtime_stub_entries_[0]); // Only called once.
+#ifdef V8_EMBEDDED_BUILTINS
+ WasmCode* jump_table =
+ CreateEmptyJumpTable(JumpTableAssembler::SizeForNumberOfStubSlots(
+ WasmCode::kRuntimeStubCount));
+ Address base = jump_table->instruction_start();
+ EmbeddedData embedded_data = EmbeddedData::FromBlob();
+#define RUNTIME_STUB(Name) {Builtins::k##Name, WasmCode::k##Name},
+#define RUNTIME_STUB_TRAP(Name) RUNTIME_STUB(ThrowWasm##Name)
+ std::pair<Builtins::Name, WasmCode::RuntimeStubId> wasm_runtime_stubs[] = {
+ WASM_RUNTIME_STUB_LIST(RUNTIME_STUB, RUNTIME_STUB_TRAP)};
+#undef RUNTIME_STUB
+#undef RUNTIME_STUB_TRAP
+ for (auto pair : wasm_runtime_stubs) {
+ CHECK(embedded_data.ContainsBuiltin(pair.first));
+ Address builtin = embedded_data.InstructionStartOfBuiltin(pair.first);
+ JumpTableAssembler::EmitRuntimeStubSlot(base, pair.second, builtin,
+ WasmCode::kNoFlushICache);
+ uint32_t slot_offset =
+ JumpTableAssembler::StubSlotIndexToOffset(pair.second);
+ runtime_stub_entries_[pair.second] = base + slot_offset;
+ }
+ FlushInstructionCache(jump_table->instructions().start(),
+ jump_table->instructions().size());
+ DCHECK_NULL(runtime_stub_table_);
+ runtime_stub_table_ = jump_table;
+#else // V8_EMBEDDED_BUILTINS
HandleScope scope(isolate);
- DCHECK_NULL(runtime_stub_table_[0]); // Only called once.
+ USE(runtime_stub_table_); // Actually unused, but avoids ifdefs in the header.
#define COPY_BUILTIN(Name) \
- runtime_stub_table_[WasmCode::k##Name] = \
+ runtime_stub_entries_[WasmCode::k##Name] = \
AddAnonymousCode(isolate->builtins()->builtin_handle(Builtins::k##Name), \
- WasmCode::kRuntimeStub, #Name);
+ WasmCode::kRuntimeStub, #Name) \
+ ->instruction_start();
#define COPY_BUILTIN_TRAP(Name) COPY_BUILTIN(ThrowWasm##Name)
WASM_RUNTIME_STUB_LIST(COPY_BUILTIN, COPY_BUILTIN_TRAP)
#undef COPY_BUILTIN_TRAP
#undef COPY_BUILTIN
+#endif // V8_EMBEDDED_BUILTINS
+ DCHECK_NE(kNullAddress, runtime_stub_entries_[0]);
}
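
Under {V8_EMBEDDED_BUILTINS}, each native module no longer copies every runtime stub into its own code space; instead it emits one extra jump table whose fixed-size slots jump to the embedded builtins, and records each slot's address in {runtime_stub_entries_}. A sketch of that address bookkeeping (the slot size is an assumption; the real slot emission is architecture-specific assembly):

    // Hypothetical constants; sketch of the slot/entry arithmetic only.
    #include <cstdint>

    constexpr uint32_t kStubSlotSize = 8;  // Assumed per-slot byte size.

    constexpr uint32_t StubSlotIndexToOffset(uint32_t stub_id) {
      return stub_id * kStubSlotSize;
    }

    // The recorded entry for a stub is a slot inside one shared jump table;
    // calls bound to this address reach the embedded builtin via the slot.
    constexpr uintptr_t StubEntry(uintptr_t jump_table_base, uint32_t stub_id) {
      return jump_table_base + StubSlotIndexToOffset(stub_id);
    }
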
WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code, WasmCode::Kind kind,
@@ -519,24 +531,37 @@ WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code, WasmCode::Kind kind,
const size_t relocation_size =
code->is_off_heap_trampoline() ? 0 : code->relocation_size();
OwnedVector<byte> reloc_info = OwnedVector<byte>::New(relocation_size);
- memcpy(reloc_info.start(), code->relocation_start(), relocation_size);
+ if (relocation_size > 0) {
+ memcpy(reloc_info.start(), code->relocation_start(), relocation_size);
+ }
Handle<ByteArray> source_pos_table(code->SourcePositionTable(),
code->GetIsolate());
OwnedVector<byte> source_pos =
OwnedVector<byte>::New(source_pos_table->length());
- source_pos_table->copy_out(0, source_pos.start(), source_pos_table->length());
+ if (source_pos_table->length() > 0) {
+ source_pos_table->copy_out(0, source_pos.start(),
+ source_pos_table->length());
+ }
Vector<const byte> instructions(
reinterpret_cast<byte*>(code->InstructionStart()),
static_cast<size_t>(code->InstructionSize()));
- int stack_slots = code->has_safepoint_info() ? code->stack_slots() : 0;
- int safepoint_table_offset =
- code->has_safepoint_info() ? code->safepoint_table_offset() : 0;
+ const int stack_slots = code->has_safepoint_info() ? code->stack_slots() : 0;
+
+ // TODO(jgruber,v8:8758): Remove this translation. It exists only because
+ // Code objects contain real offsets but WasmCode expects an offset of 0 to
+ // mean 'empty'.
+ const int safepoint_table_offset =
+ code->has_safepoint_table() ? code->safepoint_table_offset() : 0;
+ const int handler_table_offset =
+ code->has_handler_table() ? code->handler_table_offset() : 0;
+
WasmCode* ret =
AddOwnedCode(WasmCode::kAnonymousFuncIndex, // index
instructions, // instructions
stack_slots, // stack_slots
+ 0, // tagged_parameter_slots
safepoint_table_offset, // safepoint_table_offset
- code->handler_table_offset(), // handler_table_offset
+ handler_table_offset, // handler_table_offset
code->constant_pool_offset(), // constant_pool_offset
code->code_comments_offset(), // code_comments_offset
instructions.size(), // unpadded_binary_size
@@ -558,10 +583,9 @@ WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code, WasmCode::Kind kind,
if (RelocInfo::IsWasmStubCall(mode)) {
uint32_t stub_call_tag = orig_it.rinfo()->wasm_call_tag();
DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
- WasmCode* code =
- runtime_stub(static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
- it.rinfo()->set_wasm_stub_call_address(code->instruction_start(),
- SKIP_ICACHE_FLUSH);
+ Address entry = runtime_stub_entry(
+ static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
+ it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
} else {
it.rinfo()->apply(delta);
}
@@ -569,8 +593,8 @@ WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code, WasmCode::Kind kind,
// Flush the i-cache here instead of in AddOwnedCode, to include the changes
// made while iterating over the RelocInfo above.
- Assembler::FlushICache(ret->instructions().start(),
- ret->instructions().size());
+ FlushInstructionCache(ret->instructions().start(),
+ ret->instructions().size());
ret->MaybePrint(name);
ret->Validate();
return ret;
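
Both {AddAnonymousCode} and {AddCode} copy instructions into the module's code space and must then rebase relocated targets by the distance between the source buffer and the new location; wasm stub calls are additionally retargeted to {runtime_stub_entry} addresses. A conceptual sketch of the rebase step (the real {RelocInfo} machinery distinguishes many modes and patches encoded instructions, not raw words):

    // Conceptual rebase sketch; stand-in types, not the V8 RelocInfo API.
    #include <cstddef>
    #include <cstdint>
    #include <vector>

    struct RelocSite { size_t offset; };  // Site storing an absolute target.

    void ApplyDelta(uint8_t* new_start, const uint8_t* old_start,
                    const std::vector<RelocSite>& sites) {
      // Distance between the original buffer and the copied instructions,
      // matching {delta} in the diff above.
      intptr_t delta = new_start - old_start;
      for (const RelocSite& site : sites) {
        auto* slot = reinterpret_cast<uintptr_t*>(new_start + site.offset);
        *slot = static_cast<uintptr_t>(*slot + delta);
      }
    }
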
@@ -578,28 +602,38 @@ WasmCode* NativeModule::AddAnonymousCode(Handle<Code> code, WasmCode::Kind kind,
WasmCode* NativeModule::AddCode(
uint32_t index, const CodeDesc& desc, uint32_t stack_slots,
- size_t safepoint_table_offset, size_t handler_table_offset,
+ uint32_t tagged_parameter_slots,
OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
OwnedVector<const byte> source_pos_table, WasmCode::Kind kind,
WasmCode::Tier tier) {
OwnedVector<byte> reloc_info = OwnedVector<byte>::New(desc.reloc_size);
- memcpy(reloc_info.start(), desc.buffer + desc.buffer_size - desc.reloc_size,
- desc.reloc_size);
+ if (desc.reloc_size > 0) {
+ memcpy(reloc_info.start(), desc.buffer + desc.buffer_size - desc.reloc_size,
+ desc.reloc_size);
+ }
+
+ // TODO(jgruber,v8:8758): Remove this translation. It exists only because
+ // CodeDesc contains real offsets but WasmCode expects an offset of 0 to mean
+ // 'empty'.
+ const int safepoint_table_offset =
+ desc.safepoint_table_size == 0 ? 0 : desc.safepoint_table_offset;
+ const int handler_table_offset =
+ desc.handler_table_size == 0 ? 0 : desc.handler_table_offset;
- WasmCode* ret = AddOwnedCode(
+ WasmCode* code = AddOwnedCode(
index, {desc.buffer, static_cast<size_t>(desc.instr_size)}, stack_slots,
- safepoint_table_offset, handler_table_offset, desc.constant_pool_offset(),
- desc.code_comments_offset(), desc.instr_size,
+ tagged_parameter_slots, safepoint_table_offset, handler_table_offset,
+ desc.constant_pool_offset, desc.code_comments_offset, desc.instr_size,
std::move(protected_instructions), std::move(reloc_info),
std::move(source_pos_table), kind, tier);
// Apply the relocation delta by iterating over the RelocInfo.
- intptr_t delta = ret->instructions().start() - desc.buffer;
+ intptr_t delta = code->instructions().start() - desc.buffer;
int mode_mask = RelocInfo::kApplyMask |
RelocInfo::ModeMask(RelocInfo::WASM_CALL) |
RelocInfo::ModeMask(RelocInfo::WASM_STUB_CALL);
- for (RelocIterator it(ret->instructions(), ret->reloc_info(),
- ret->constant_pool(), mode_mask);
+ for (RelocIterator it(code->instructions(), code->reloc_info(),
+ code->constant_pool(), mode_mask);
!it.done(); it.next()) {
RelocInfo::Mode mode = it.rinfo()->rmode();
if (RelocInfo::IsWasmCall(mode)) {
@@ -609,10 +643,9 @@ WasmCode* NativeModule::AddCode(
} else if (RelocInfo::IsWasmStubCall(mode)) {
uint32_t stub_call_tag = it.rinfo()->wasm_call_tag();
DCHECK_LT(stub_call_tag, WasmCode::kRuntimeStubCount);
- WasmCode* code =
- runtime_stub(static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
- it.rinfo()->set_wasm_stub_call_address(code->instruction_start(),
- SKIP_ICACHE_FLUSH);
+ Address entry = runtime_stub_entry(
+ static_cast<WasmCode::RuntimeStubId>(stub_call_tag));
+ it.rinfo()->set_wasm_stub_call_address(entry, SKIP_ICACHE_FLUSH);
} else {
it.rinfo()->apply(delta);
}
@@ -620,27 +653,39 @@ WasmCode* NativeModule::AddCode(
// Flush the i-cache here instead of in AddOwnedCode, to include the changes
// made while iterating over the RelocInfo above.
- Assembler::FlushICache(ret->instructions().start(),
- ret->instructions().size());
- ret->MaybePrint();
- ret->Validate();
- return ret;
+ FlushInstructionCache(code->instructions().start(),
+ code->instructions().size());
+ code->MaybePrint();
+ code->Validate();
+
+ if (!code->protected_instructions_.is_empty()) {
+ code->RegisterTrapHandlerData();
+ }
+
+ base::MutexGuard lock(&allocation_mutex_);
+ // Skip publishing code if there is an active redirection to the interpreter
+ // for the given function index, in order to preserve the redirection.
+ if (!code->IsAnonymous() && !has_interpreter_redirection(code->index())) {
+ InstallCode(code);
+ }
+
+ return code;
}
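
Note that the former {PublishCode} helper (removed further down) is now inlined here: trap-handler data registration happens unconditionally, while installation into the code table is skipped for anonymous code and for functions currently redirected to the interpreter. A sketch of that guard, with stand-in names:

    // Stand-in names; sketch of the inlined publishing guard only.
    #include <cstdint>
    #include <mutex>

    class ModuleStub {
     public:
      void Publish(uint32_t func_index, bool is_anonymous) {
        std::lock_guard<std::mutex> lock(allocation_mutex_);
        // Preserve an active interpreter redirection: only install code for
        // real function indices that are not currently redirected.
        if (!is_anonymous && !HasInterpreterRedirection(func_index)) {
          InstallCode(func_index);
        }
      }

     private:
      bool HasInterpreterRedirection(uint32_t) const { return false; }
      void InstallCode(uint32_t) { /* patch jump table + code table */ }
      std::mutex allocation_mutex_;
    };
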
WasmCode* NativeModule::AddDeserializedCode(
uint32_t index, Vector<const byte> instructions, uint32_t stack_slots,
- size_t safepoint_table_offset, size_t handler_table_offset,
- size_t constant_pool_offset, size_t code_comments_offset,
- size_t unpadded_binary_size,
+ uint32_t tagged_parameter_slots, size_t safepoint_table_offset,
+ size_t handler_table_offset, size_t constant_pool_offset,
+ size_t code_comments_offset, size_t unpadded_binary_size,
OwnedVector<trap_handler::ProtectedInstructionData> protected_instructions,
OwnedVector<const byte> reloc_info,
OwnedVector<const byte> source_position_table, WasmCode::Tier tier) {
- WasmCode* code =
- AddOwnedCode(index, instructions, stack_slots, safepoint_table_offset,
- handler_table_offset, constant_pool_offset,
- code_comments_offset, unpadded_binary_size,
- std::move(protected_instructions), std::move(reloc_info),
- std::move(source_position_table), WasmCode::kFunction, tier);
+ WasmCode* code = AddOwnedCode(
+ index, instructions, stack_slots, tagged_parameter_slots,
+ safepoint_table_offset, handler_table_offset, constant_pool_offset,
+ code_comments_offset, unpadded_binary_size,
+ std::move(protected_instructions), std::move(reloc_info),
+ std::move(source_position_table), WasmCode::kFunction, tier);
if (!code->protected_instructions_.is_empty()) {
code->RegisterTrapHandlerData();
@@ -652,18 +697,6 @@ WasmCode* NativeModule::AddDeserializedCode(
return code;
}
-void NativeModule::PublishCode(WasmCode* code) {
- base::MutexGuard lock(&allocation_mutex_);
- // Skip publishing code if there is an active redirection to the interpreter
- // for the given function index, in order to preserve the redirection.
- if (has_interpreter_redirection(code->index())) return;
-
- if (!code->protected_instructions_.is_empty()) {
- code->RegisterTrapHandlerData();
- }
- InstallCode(code);
-}
-
void NativeModule::PublishInterpreterEntry(WasmCode* code,
uint32_t func_index) {
code->index_ = func_index;
@@ -680,17 +713,17 @@ std::vector<WasmCode*> NativeModule::SnapshotCodeTable() const {
return result;
}
-WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t num_wasm_functions) {
+WasmCode* NativeModule::CreateEmptyJumpTable(uint32_t jump_table_size) {
// Only call this if we really need a jump table.
- DCHECK_LT(0, num_wasm_functions);
- OwnedVector<byte> instructions = OwnedVector<byte>::New(
- JumpTableAssembler::SizeForNumberOfSlots(num_wasm_functions));
+ DCHECK_LT(0, jump_table_size);
+ OwnedVector<byte> instructions = OwnedVector<byte>::New(jump_table_size);
memset(instructions.start(), 0, instructions.size());
return AddOwnedCode(WasmCode::kAnonymousFuncIndex, // index
instructions.as_vector(), // instructions
0, // stack_slots
- instructions.size(), // safepoint_table_offset
- instructions.size(), // handler_table_offset
+ 0, // tagged_parameter_slots
+ 0, // safepoint_table_offset
+ 0, // handler_table_offset
instructions.size(), // constant_pool_offset
instructions.size(), // code_comments_offset
instructions.size(), // unpadded_binary_size
@@ -733,14 +766,15 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) {
Address hint = owned_code_space_.empty() ? kNullAddress
: owned_code_space_.back().end();
- VirtualMemory new_mem =
- code_manager_->TryAllocate(size, reinterpret_cast<void*>(hint));
+ VirtualMemory new_mem = engine_->code_manager()->TryAllocate(
+ size, reinterpret_cast<void*>(hint));
if (!new_mem.IsReserved()) {
V8::FatalProcessOutOfMemory(nullptr,
"NativeModule::AllocateForCode reservation");
UNREACHABLE();
}
- code_manager_->AssignRanges(new_mem.address(), new_mem.end(), this);
+ engine_->code_manager()->AssignRanges(new_mem.address(), new_mem.end(),
+ this);
free_code_space_.Merge(new_mem.region());
owned_code_space_.emplace_back(std::move(new_mem));
@@ -773,7 +807,7 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) {
Address start = std::max(commit_start, vmem.address());
Address end = std::min(commit_end, vmem.end());
size_t commit_size = static_cast<size_t>(end - start);
- if (!code_manager_->Commit(start, commit_size)) {
+ if (!engine_->code_manager()->Commit(start, commit_size)) {
V8::FatalProcessOutOfMemory(nullptr,
"NativeModule::AllocateForCode commit");
UNREACHABLE();
@@ -785,7 +819,8 @@ Vector<byte> NativeModule::AllocateForCode(size_t size) {
if (commit_start >= commit_end) break;
}
#else
- if (!code_manager_->Commit(commit_start, commit_end - commit_start)) {
+ if (!engine_->code_manager()->Commit(commit_start,
+ commit_end - commit_start)) {
V8::FatalProcessOutOfMemory(nullptr,
"NativeModule::AllocateForCode commit");
UNREACHABLE();
@@ -829,13 +864,38 @@ void NativeModule::SetWireBytes(OwnedVector<const uint8_t> wire_bytes) {
WasmCode* NativeModule::Lookup(Address pc) const {
base::MutexGuard lock(&allocation_mutex_);
if (owned_code_.empty()) return nullptr;
- auto iter = std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
- WasmCodeUniquePtrComparator());
- if (iter == owned_code_.begin()) return nullptr;
- --iter;
- WasmCode* candidate = iter->get();
- DCHECK_NOT_NULL(candidate);
- return candidate->contains(pc) ? candidate : nullptr;
+ // First update the sorted portion counter.
+ if (owned_code_sorted_portion_ == 0) ++owned_code_sorted_portion_;
+ while (owned_code_sorted_portion_ < owned_code_.size() &&
+ owned_code_[owned_code_sorted_portion_ - 1]->instruction_start() <=
+ owned_code_[owned_code_sorted_portion_]->instruction_start()) {
+ ++owned_code_sorted_portion_;
+ }
+ // Execute at most two rounds: First check whether the {pc} is within the
+ // sorted portion of {owned_code_}. If it's not, then sort the whole vector
+ // and retry.
+ while (true) {
+ auto iter =
+ std::upper_bound(owned_code_.begin(), owned_code_.end(), pc,
+ [](Address pc, const std::unique_ptr<WasmCode>& code) {
+ DCHECK_NE(kNullAddress, pc);
+ DCHECK_NOT_NULL(code);
+ return pc < code->instruction_start();
+ });
+ if (iter != owned_code_.begin()) {
+ --iter;
+ WasmCode* candidate = iter->get();
+ DCHECK_NOT_NULL(candidate);
+ if (candidate->contains(pc)) return candidate;
+ }
+ if (owned_code_sorted_portion_ == owned_code_.size()) return nullptr;
+ std::sort(owned_code_.begin(), owned_code_.end(),
+ [](const std::unique_ptr<WasmCode>& code1,
+ const std::unique_ptr<WasmCode>& code2) {
+ return code1->instruction_start() < code2->instruction_start();
+ });
+ owned_code_sorted_portion_ = owned_code_.size();
+ }
}
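
The rewritten {Lookup} exploits the fact that code is usually allocated at increasing addresses, so {owned_code_} stays mostly sorted and the old insertion logic in {AddOwnedCode} can become a plain append. It tracks the length of the sorted prefix, binary-searches, and only sorts the full vector, at most once per lookup, when the fast path misses. A self-contained sketch of the same strategy (for clarity it searches only the known-sorted prefix):

    #include <algorithm>
    #include <cstdint>
    #include <vector>

    struct Region {
      uintptr_t start;
      size_t size;
      bool contains(uintptr_t pc) const { return pc - start < size; }
    };

    class RegionIndex {
     public:
      void Add(Region region) { regions_.push_back(region); }  // Append only.

      const Region* Lookup(uintptr_t pc) {
        // Grow the sorted prefix as far as the ordering actually extends.
        while (sorted_ < regions_.size() &&
               (sorted_ == 0 ||
                regions_[sorted_ - 1].start <= regions_[sorted_].start)) {
          ++sorted_;
        }
        // At most two rounds: fast path over the sorted prefix, then a full
        // sort and one retry if the fast path missed.
        for (;;) {
          auto it = std::upper_bound(
              regions_.begin(), regions_.begin() + sorted_, pc,
              [](uintptr_t pc, const Region& r) { return pc < r.start; });
          if (it != regions_.begin() && (--it)->contains(pc)) return &*it;
          if (sorted_ == regions_.size()) return nullptr;
          std::sort(regions_.begin(), regions_.end(),
                    [](const Region& a, const Region& b) {
                      return a.start < b.start;
                    });
          sorted_ = regions_.size();
        }
      }

     private:
      std::vector<Region> regions_;
      size_t sorted_ = 0;
    };
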
Address NativeModule::GetCallTargetForFunction(uint32_t func_index) const {
@@ -875,12 +935,24 @@ void NativeModule::DisableTrapHandler() {
// recycled.
}
+const char* NativeModule::GetRuntimeStubName(Address runtime_stub_entry) const {
+#define RETURN_NAME(Name) \
+ if (runtime_stub_entries_[WasmCode::k##Name] == runtime_stub_entry) { \
+ return #Name; \
+ }
+#define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
+ WASM_RUNTIME_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
+#undef RETURN_NAME_TRAP
+#undef RETURN_NAME
+ return "<unknown>";
+}
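
{GetRuntimeStubName} relies on the {WASM_RUNTIME_STUB_LIST} X-macro to generate one comparison per stub. The pattern, demonstrated with a stand-in two-entry list (the real list is much longer):

    // Demo list standing in for WASM_RUNTIME_STUB_LIST; shows how both the
    // enum and the name lookup expand from the same macro.
    #define DEMO_STUB_LIST(V, VTRAP) \
      V(WasmStackGuard)              \
      VTRAP(Unreachable)

    enum DemoStubId {
    #define DEF_ENUM(Name) k##Name,
    #define DEF_ENUM_TRAP(Name) DEF_ENUM(ThrowWasm##Name)
      DEMO_STUB_LIST(DEF_ENUM, DEF_ENUM_TRAP)
    #undef DEF_ENUM_TRAP
    #undef DEF_ENUM
      kDemoStubCount
    };

    const char* DemoStubName(DemoStubId id) {
    #define RETURN_NAME(Name) \
      if (id == k##Name) return #Name;
    #define RETURN_NAME_TRAP(Name) RETURN_NAME(ThrowWasm##Name)
      DEMO_STUB_LIST(RETURN_NAME, RETURN_NAME_TRAP)
    #undef RETURN_NAME_TRAP
    #undef RETURN_NAME
      return "<unknown>";
    }
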
+
NativeModule::~NativeModule() {
TRACE_HEAP("Deleting native module: %p\n", reinterpret_cast<void*>(this));
// Cancel all background compilation before resetting any field of the
// NativeModule or freeing anything.
- compilation_state_->CancelAndWait();
- code_manager_->FreeNativeModule(this);
+ compilation_state_->AbortCompilation();
+ engine_->FreeNativeModule(this);
}
WasmCodeManager::WasmCodeManager(WasmMemoryTracker* memory_tracker,
@@ -931,13 +1003,6 @@ void WasmCodeManager::AssignRanges(Address start, Address end,
lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
}
-void WasmCodeManager::AssignRangesAndAddModule(Address start, Address end,
- NativeModule* native_module) {
- base::MutexGuard lock(&native_modules_mutex_);
- lookup_map_.insert(std::make_pair(start, std::make_pair(end, native_module)));
- native_modules_.emplace(native_module);
-}
-
VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
v8::PageAllocator* page_allocator = GetPlatformPageAllocator();
DCHECK_GT(size, 0);
@@ -966,36 +1031,11 @@ VirtualMemory WasmCodeManager::TryAllocate(size_t size, void* hint) {
return mem;
}
-void WasmCodeManager::SampleModuleSizes(Isolate* isolate) const {
- base::MutexGuard lock(&native_modules_mutex_);
- for (NativeModule* native_module : native_modules_) {
- int code_size =
- static_cast<int>(native_module->committed_code_space_.load() / MB);
- isolate->counters()->wasm_module_code_size_mb()->AddSample(code_size);
- }
-}
-
void WasmCodeManager::SetMaxCommittedMemoryForTesting(size_t limit) {
remaining_uncommitted_code_space_.store(limit);
critical_uncommitted_code_space_.store(limit / 2);
}
-namespace {
-
-void ModuleSamplingCallback(v8::Isolate* v8_isolate, v8::GCType type,
- v8::GCCallbackFlags flags, void* data) {
- Isolate* isolate = reinterpret_cast<Isolate*>(v8_isolate);
- isolate->wasm_engine()->code_manager()->SampleModuleSizes(isolate);
-}
-
-} // namespace
-
-// static
-void WasmCodeManager::InstallSamplingGCCallback(Isolate* isolate) {
- isolate->heap()->AddGCEpilogueCallback(ModuleSamplingCallback,
- v8::kGCTypeMarkSweepCompact, nullptr);
-}
-
// static
size_t WasmCodeManager::EstimateNativeModuleCodeSize(const WasmModule* module) {
constexpr size_t kCodeSizeMultiplier = 4;
@@ -1031,8 +1071,9 @@ size_t WasmCodeManager::EstimateNativeModuleNonCodeSize(
}
std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
- Isolate* isolate, const WasmFeatures& enabled, size_t code_size_estimate,
- bool can_request_more, std::shared_ptr<const WasmModule> module) {
+ WasmEngine* engine, Isolate* isolate, const WasmFeatures& enabled,
+ size_t code_size_estimate, bool can_request_more,
+ std::shared_ptr<const WasmModule> module) {
DCHECK_EQ(this, isolate->wasm_engine()->code_manager());
if (remaining_uncommitted_code_space_.load() <
critical_uncommitted_code_space_.load()) {
@@ -1065,12 +1106,13 @@ std::unique_ptr<NativeModule> WasmCodeManager::NewNativeModule(
Address start = code_space.address();
size_t size = code_space.size();
Address end = code_space.end();
- std::unique_ptr<NativeModule> ret(new NativeModule(
- isolate, enabled, can_request_more, std::move(code_space),
- isolate->wasm_engine()->code_manager(), std::move(module)));
+ std::unique_ptr<NativeModule> ret(
+ new NativeModule(engine, enabled, can_request_more, std::move(code_space),
+ std::move(module), isolate->async_counters()));
TRACE_HEAP("New NativeModule %p: Mem: %" PRIuPTR ",+%zu\n", ret.get(), start,
size);
- AssignRangesAndAddModule(start, end, ret.get());
+ base::MutexGuard lock(&native_modules_mutex_);
+ lookup_map_.insert(std::make_pair(start, std::make_pair(end, ret.get())));
return ret;
}
@@ -1125,8 +1167,6 @@ bool NativeModule::SetExecutable(bool executable) {
void WasmCodeManager::FreeNativeModule(NativeModule* native_module) {
base::MutexGuard lock(&native_modules_mutex_);
- DCHECK_EQ(1, native_modules_.count(native_module));
- native_modules_.erase(native_module);
TRACE_HEAP("Freeing NativeModule %p\n", native_module);
for (auto& code_space : native_module->owned_code_space_) {
DCHECK(code_space.IsReserved());