author    Michaël Zasso <targos@protonmail.com>  2017-12-05 16:41:55 +0100
committer Michaël Zasso <targos@protonmail.com>  2017-12-06 12:52:07 +0100
commit    1854ba04e9a68f062beb299dd6e1479279b26363 (patch)
tree      d5b2df9b8c1deb6388f7a728fca8e1c98c779abe /deps/v8/src/wasm/wasm-objects.cc
parent    b52c23b75f96e1c9d2c7b3a7e5619170d0a0d8e1 (diff)
deps: update V8 to 6.3.292.46
PR-URL: https://github.com/nodejs/node/pull/16271
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Myles Borins <myles.borins@gmail.com>
Diffstat (limited to 'deps/v8/src/wasm/wasm-objects.cc')
-rw-r--r--  deps/v8/src/wasm/wasm-objects.cc | 382
1 file changed, 228 insertions(+), 154 deletions(-)
diff --git a/deps/v8/src/wasm/wasm-objects.cc b/deps/v8/src/wasm/wasm-objects.cc
index 779a2d8430..012aa6644b 100644
--- a/deps/v8/src/wasm/wasm-objects.cc
+++ b/deps/v8/src/wasm/wasm-objects.cc
@@ -11,9 +11,12 @@
#include "src/debug/debug-interface.h"
#include "src/objects-inl.h"
#include "src/objects/debug-objects-inl.h"
+#include "src/wasm/module-compiler.h"
#include "src/wasm/module-decoder.h"
#include "src/wasm/wasm-code-specialization.h"
+#include "src/wasm/wasm-memory.h"
#include "src/wasm/wasm-module.h"
+#include "src/wasm/wasm-objects-inl.h"
#include "src/wasm/wasm-text.h"
#define TRACE(...) \
@@ -21,24 +24,13 @@
if (FLAG_trace_wasm_instances) PrintF(__VA_ARGS__); \
} while (false)
-#define TRACE_CHAIN(instance) \
- do { \
- instance->PrintInstancesChain(); \
- } while (false)
-
-#if __clang__
-// TODO(mostynb@opera.com): remove the using statements and these pragmas.
-#pragma clang diagnostic push
-#pragma clang diagnostic ignored "-Wheader-hygiene"
-#endif
+namespace v8 {
+namespace internal {
-using namespace v8::internal;
-using namespace v8::internal::wasm;
-
-#if __clang__
-// TODO(mostynb@opera.com): remove the using statements and these pragmas.
-#pragma clang diagnostic pop
-#endif
+// Import a few often used types from the wasm namespace.
+using GlobalHandleAddress = wasm::GlobalHandleAddress;
+using WasmFunction = wasm::WasmFunction;
+using WasmModule = wasm::WasmModule;
namespace {
@@ -151,11 +143,12 @@ bool IsBreakablePosition(Handle<WasmCompiledModule> compiled_module,
DisallowHeapAllocation no_gc;
AccountingAllocator alloc;
Zone tmp(&alloc, ZONE_NAME);
- BodyLocalDecls locals(&tmp);
+ wasm::BodyLocalDecls locals(&tmp);
const byte* module_start = compiled_module->module_bytes()->GetChars();
WasmFunction& func = compiled_module->module()->functions[func_index];
- BytecodeIterator iterator(module_start + func.code.offset(),
- module_start + func.code.end_offset(), &locals);
+ wasm::BytecodeIterator iterator(module_start + func.code.offset(),
+ module_start + func.code.end_offset(),
+ &locals);
DCHECK_LT(0, locals.encoded_size);
for (uint32_t offset : iterator.offsets()) {
if (offset > static_cast<uint32_t>(offset_in_func)) break;
@@ -180,6 +173,17 @@ Handle<WasmModuleObject> WasmModuleObject::New(
return module_object;
}
+void WasmModuleObject::ValidateStateForTesting(
+ Isolate* isolate, Handle<WasmModuleObject> module_obj) {
+ DisallowHeapAllocation no_gc;
+ WasmCompiledModule* compiled_module = module_obj->compiled_module();
+ CHECK(compiled_module->has_weak_wasm_module());
+ CHECK_EQ(compiled_module->ptr_to_weak_wasm_module()->value(), *module_obj);
+ CHECK(!compiled_module->has_weak_prev_instance());
+ CHECK(!compiled_module->has_weak_next_instance());
+ CHECK(!compiled_module->has_weak_owning_instance());
+}
+
Handle<WasmTableObject> WasmTableObject::New(Isolate* isolate, uint32_t initial,
int64_t maximum,
Handle<FixedArray>* js_functions) {
@@ -228,7 +232,7 @@ Handle<FixedArray> WasmTableObject::AddDispatchTable(
return new_dispatch_tables;
}
-void WasmTableObject::grow(Isolate* isolate, uint32_t count) {
+void WasmTableObject::Grow(Isolate* isolate, uint32_t count) {
Handle<FixedArray> dispatch_tables(this->dispatch_tables());
DCHECK_EQ(0, dispatch_tables->length() % 4);
uint32_t old_size = functions()->length();
@@ -256,7 +260,8 @@ void WasmTableObject::grow(Isolate* isolate, uint32_t count) {
// Patch the code of the respective instance.
{
DisallowHeapAllocation no_gc;
- CodeSpecialization code_specialization(isolate, &specialization_zone);
+ wasm::CodeSpecialization code_specialization(isolate,
+ &specialization_zone);
WasmInstanceObject* instance =
WasmInstanceObject::cast(dispatch_tables->get(i));
WasmCompiledModule* compiled_module = instance->compiled_module();
@@ -282,15 +287,38 @@ void WasmTableObject::grow(Isolate* isolate, uint32_t count) {
}
}
+void WasmTableObject::Set(Isolate* isolate, Handle<WasmTableObject> table,
+ int32_t index, Handle<JSFunction> function) {
+ Handle<FixedArray> array(table->functions(), isolate);
+
+ Handle<FixedArray> dispatch_tables(table->dispatch_tables(), isolate);
+
+ WasmFunction* wasm_function = nullptr;
+ Handle<Code> code = Handle<Code>::null();
+ Handle<Object> value = isolate->factory()->null_value();
+
+ if (!function.is_null()) {
+ wasm_function = wasm::GetWasmFunctionForExport(isolate, function);
+ // The verification that {function} is an export was done
+ // by the caller.
+ DCHECK_NOT_NULL(wasm_function);
+ code = wasm::UnwrapExportWrapper(function);
+ value = Handle<Object>::cast(function);
+ }
+
+ UpdateDispatchTables(isolate, dispatch_tables, index, wasm_function, code);
+ array->set(index, *value);
+}
+
namespace {
Handle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate,
Handle<JSArrayBuffer> old_buffer,
uint32_t pages, uint32_t maximum_pages) {
+ if (!old_buffer->is_growable()) return Handle<JSArrayBuffer>::null();
Address old_mem_start = nullptr;
uint32_t old_size = 0;
if (!old_buffer.is_null()) {
- if (!old_buffer->is_growable()) return Handle<JSArrayBuffer>::null();
old_mem_start = static_cast<Address>(old_buffer->backing_store());
CHECK(old_buffer->byte_length()->ToUint32(&old_size));
}
@@ -301,46 +329,51 @@ Handle<JSArrayBuffer> GrowMemoryBuffer(Isolate* isolate,
if (old_pages > maximum_pages || pages > maximum_pages - old_pages) {
return Handle<JSArrayBuffer>::null();
}
-
- // TODO(gdeepti): Change the protection here instead of allocating a new
- // buffer before guard regions are turned on, see issue #5886.
const bool enable_guard_regions = old_buffer.is_null()
- ? EnableGuardRegions()
+ ? trap_handler::UseTrapHandler()
: old_buffer->has_guard_region();
size_t new_size =
static_cast<size_t>(old_pages + pages) * WasmModule::kPageSize;
- Handle<JSArrayBuffer> new_buffer =
- NewArrayBuffer(isolate, new_size, enable_guard_regions);
- if (new_buffer.is_null()) return new_buffer;
- Address new_mem_start = static_cast<Address>(new_buffer->backing_store());
- memcpy(new_mem_start, old_mem_start, old_size);
- return new_buffer;
+ if (enable_guard_regions && old_size != 0) {
+ DCHECK_NOT_NULL(old_buffer->backing_store());
+ if (new_size > FLAG_wasm_max_mem_pages * WasmModule::kPageSize ||
+ new_size > kMaxInt) {
+ return Handle<JSArrayBuffer>::null();
+ }
+ isolate->array_buffer_allocator()->SetProtection(
+ old_mem_start, new_size,
+ v8::ArrayBuffer::Allocator::Protection::kReadWrite);
+ reinterpret_cast<v8::Isolate*>(isolate)
+ ->AdjustAmountOfExternalAllocatedMemory(pages * WasmModule::kPageSize);
+ Handle<Object> length_obj = isolate->factory()->NewNumberFromSize(new_size);
+ old_buffer->set_byte_length(*length_obj);
+ return old_buffer;
+ } else {
+ Handle<JSArrayBuffer> new_buffer;
+ new_buffer = wasm::NewArrayBuffer(isolate, new_size, enable_guard_regions);
+ if (new_buffer.is_null() || old_size == 0) return new_buffer;
+ Address new_mem_start = static_cast<Address>(new_buffer->backing_store());
+ memcpy(new_mem_start, old_mem_start, old_size);
+ return new_buffer;
+ }
}
// May GC, because SetSpecializationMemInfoFrom may GC
void SetInstanceMemory(Isolate* isolate, Handle<WasmInstanceObject> instance,
Handle<JSArrayBuffer> buffer) {
instance->set_memory_buffer(*buffer);
- WasmCompiledModule::SetSpecializationMemInfoFrom(
- isolate->factory(), handle(instance->compiled_module()), buffer);
if (instance->has_debug_info()) {
instance->debug_info()->UpdateMemory(*buffer);
}
}
-void UncheckedUpdateInstanceMemory(Isolate* isolate,
- Handle<WasmInstanceObject> instance,
- Address old_mem_start, uint32_t old_size) {
- DCHECK(instance->has_memory_buffer());
- Handle<JSArrayBuffer> mem_buffer(instance->memory_buffer());
- uint32_t new_size = mem_buffer->byte_length()->Number();
- Address new_mem_start = static_cast<Address>(mem_buffer->backing_store());
+void UpdateWasmContext(WasmContext* wasm_context,
+ Handle<JSArrayBuffer> buffer) {
+ uint32_t new_mem_size = buffer->byte_length()->Number();
+ Address new_mem_start = static_cast<Address>(buffer->backing_store());
DCHECK_NOT_NULL(new_mem_start);
- Zone specialization_zone(isolate->allocator(), ZONE_NAME);
- CodeSpecialization code_specialization(isolate, &specialization_zone);
- code_specialization.RelocateMemoryReferences(old_mem_start, old_size,
- new_mem_start, new_size);
- code_specialization.ApplyToWholeInstance(*instance);
+ wasm_context->mem_start = new_mem_start;
+ wasm_context->mem_size = new_mem_size;
}
} // namespace
@@ -352,20 +385,28 @@ Handle<WasmMemoryObject> WasmMemoryObject::New(Isolate* isolate,
isolate->native_context()->wasm_memory_constructor());
auto memory_obj = Handle<WasmMemoryObject>::cast(
isolate->factory()->NewJSObject(memory_ctor, TENURED));
+ auto wasm_context = Managed<WasmContext>::Allocate(isolate);
if (buffer.is_null()) {
- const bool enable_guard_regions = EnableGuardRegions();
- buffer = SetupArrayBuffer(isolate, nullptr, 0, nullptr, 0, false,
- enable_guard_regions);
+ const bool enable_guard_regions = trap_handler::UseTrapHandler();
+ buffer = wasm::SetupArrayBuffer(isolate, nullptr, 0, nullptr, 0, false,
+ enable_guard_regions);
+ wasm_context->get()->mem_size = 0;
+ wasm_context->get()->mem_start = nullptr;
+ } else {
+ CHECK(buffer->byte_length()->ToUint32(&wasm_context->get()->mem_size));
+ wasm_context->get()->mem_start =
+ static_cast<Address>(buffer->backing_store());
}
memory_obj->set_array_buffer(*buffer);
memory_obj->set_maximum_pages(maximum);
+ memory_obj->set_wasm_context(*wasm_context);
return memory_obj;
}
uint32_t WasmMemoryObject::current_pages() {
uint32_t byte_length;
CHECK(array_buffer()->byte_length()->ToUint32(&byte_length));
- return byte_length / wasm::WasmModule::kPageSize;
+ return byte_length / WasmModule::kPageSize;
}
void WasmMemoryObject::AddInstance(Isolate* isolate,
@@ -388,6 +429,32 @@ void WasmMemoryObject::RemoveInstance(Isolate* isolate,
}
}
+void WasmMemoryObject::SetupNewBufferWithSameBackingStore(
+ Isolate* isolate, Handle<WasmMemoryObject> memory_object, uint32_t size) {
+ // In case of Memory.Grow(0), or Memory.Grow(delta) with guard pages enabled,
+ // Setup a new buffer, update memory object, and instances associated with the
+ // memory object, as the current buffer will be detached.
+ Handle<JSArrayBuffer> old_buffer(memory_object->array_buffer());
+ Handle<JSArrayBuffer> new_buffer;
+
+ constexpr bool is_external = false;
+ new_buffer = wasm::SetupArrayBuffer(
+ isolate, old_buffer->allocation_base(), old_buffer->allocation_length(),
+ old_buffer->backing_store(), size * WasmModule::kPageSize, is_external,
+ old_buffer->has_guard_region());
+ if (memory_object->has_instances()) {
+ Handle<WeakFixedArray> instances(memory_object->instances(), isolate);
+ for (int i = 0; i < instances->Length(); i++) {
+ Object* elem = instances->Get(i);
+ if (!elem->IsWasmInstanceObject()) continue;
+ Handle<WasmInstanceObject> instance(WasmInstanceObject::cast(elem),
+ isolate);
+ SetInstanceMemory(isolate, instance, new_buffer);
+ }
+ }
+ memory_object->set_array_buffer(*new_buffer);
+}
+
// static
int32_t WasmMemoryObject::Grow(Isolate* isolate,
Handle<WasmMemoryObject> memory_object,
@@ -399,13 +466,6 @@ int32_t WasmMemoryObject::Grow(Isolate* isolate,
Handle<JSArrayBuffer> new_buffer;
// Return current size if grow by 0.
if (pages == 0) {
- // Even for pages == 0, we need to attach a new JSArrayBuffer with the same
- // backing store and neuter the old one to be spec compliant.
- new_buffer = SetupArrayBuffer(
- isolate, old_buffer->allocation_base(),
- old_buffer->allocation_length(), old_buffer->backing_store(),
- old_size, old_buffer->is_external(), old_buffer->has_guard_region());
- memory_object->set_array_buffer(*new_buffer);
DCHECK_EQ(0, old_size % WasmModule::kPageSize);
return old_size / WasmModule::kPageSize;
}
@@ -420,8 +480,13 @@ int32_t WasmMemoryObject::Grow(Isolate* isolate,
new_buffer = GrowMemoryBuffer(isolate, old_buffer, pages, maximum_pages);
if (new_buffer.is_null()) return -1;
+ // Verify that the values we will change are actually the ones we expect.
+ DCHECK_EQ(memory_object->wasm_context()->get()->mem_size, old_size);
+ DCHECK_EQ(memory_object->wasm_context()->get()->mem_start,
+ static_cast<Address>(old_buffer->backing_store()));
+ UpdateWasmContext(memory_object->wasm_context()->get(), new_buffer);
+
if (memory_object->has_instances()) {
- Address old_mem_start = static_cast<Address>(old_buffer->backing_store());
Handle<WeakFixedArray> instances(memory_object->instances(), isolate);
for (int i = 0; i < instances->Length(); i++) {
Object* elem = instances->Get(i);
@@ -429,10 +494,8 @@ int32_t WasmMemoryObject::Grow(Isolate* isolate,
Handle<WasmInstanceObject> instance(WasmInstanceObject::cast(elem),
isolate);
SetInstanceMemory(isolate, instance, new_buffer);
- UncheckedUpdateInstanceMemory(isolate, instance, old_mem_start, old_size);
}
}
-
memory_object->set_array_buffer(*new_buffer);
DCHECK_EQ(0, old_size % WasmModule::kPageSize);
return old_size / WasmModule::kPageSize;
@@ -442,6 +505,11 @@ WasmModuleObject* WasmInstanceObject::module_object() {
return *compiled_module()->wasm_module();
}
+WasmContext* WasmInstanceObject::wasm_context() {
+ DCHECK(has_memory_object());
+ return memory_object()->wasm_context()->get();
+}
+
WasmModule* WasmInstanceObject::module() { return compiled_module()->module(); }
Handle<WasmDebugInfo> WasmInstanceObject::GetOrCreateDebugInfo(
@@ -477,28 +545,9 @@ int32_t WasmInstanceObject::GrowMemory(Isolate* isolate,
Handle<WasmInstanceObject> instance,
uint32_t pages) {
if (pages == 0) return instance->GetMemorySize();
- if (instance->has_memory_object()) {
- return WasmMemoryObject::Grow(
- isolate, handle(instance->memory_object(), isolate), pages);
- }
-
- // No other instances to grow, grow just the one.
- uint32_t old_size = 0;
- Address old_mem_start = nullptr;
- Handle<JSArrayBuffer> old_buffer;
- if (instance->has_memory_buffer()) {
- old_buffer = handle(instance->memory_buffer(), isolate);
- old_size = old_buffer->byte_length()->Number();
- old_mem_start = static_cast<Address>(old_buffer->backing_store());
- }
- uint32_t maximum_pages = instance->GetMaxMemoryPages();
- Handle<JSArrayBuffer> buffer =
- GrowMemoryBuffer(isolate, old_buffer, pages, maximum_pages);
- if (buffer.is_null()) return -1;
- SetInstanceMemory(isolate, instance, buffer);
- UncheckedUpdateInstanceMemory(isolate, instance, old_mem_start, old_size);
- DCHECK_EQ(0, old_size % WasmModule::kPageSize);
- return old_size / WasmModule::kPageSize;
+ DCHECK(instance->has_memory_object());
+ return WasmMemoryObject::Grow(
+ isolate, handle(instance->memory_object(), isolate), pages);
}
uint32_t WasmInstanceObject::GetMaxMemoryPages() {
@@ -518,6 +567,54 @@ uint32_t WasmInstanceObject::GetMaxMemoryPages() {
return FLAG_wasm_max_mem_pages;
}
+WasmInstanceObject* WasmInstanceObject::GetOwningInstance(Code* code) {
+ DisallowHeapAllocation no_gc;
+ DCHECK(code->kind() == Code::WASM_FUNCTION ||
+ code->kind() == Code::WASM_INTERPRETER_ENTRY);
+ FixedArray* deopt_data = code->deoptimization_data();
+ DCHECK_EQ(code->kind() == Code::WASM_INTERPRETER_ENTRY ? 1 : 2,
+ deopt_data->length());
+ Object* weak_link = deopt_data->get(0);
+ DCHECK(weak_link->IsWeakCell());
+ WeakCell* cell = WeakCell::cast(weak_link);
+ if (cell->cleared()) return nullptr;
+ return WasmInstanceObject::cast(cell->value());
+}
+
+void WasmInstanceObject::ValidateInstancesChainForTesting(
+ Isolate* isolate, Handle<WasmModuleObject> module_obj, int instance_count) {
+ CHECK_GE(instance_count, 0);
+ DisallowHeapAllocation no_gc;
+ WasmCompiledModule* compiled_module = module_obj->compiled_module();
+ CHECK_EQ(JSObject::cast(compiled_module->ptr_to_weak_wasm_module()->value()),
+ *module_obj);
+ Object* prev = nullptr;
+ int found_instances = compiled_module->has_weak_owning_instance() ? 1 : 0;
+ WasmCompiledModule* current_instance = compiled_module;
+ while (current_instance->has_weak_next_instance()) {
+ CHECK((prev == nullptr && !current_instance->has_weak_prev_instance()) ||
+ current_instance->ptr_to_weak_prev_instance()->value() == prev);
+ CHECK_EQ(current_instance->ptr_to_weak_wasm_module()->value(), *module_obj);
+ CHECK(current_instance->ptr_to_weak_owning_instance()
+ ->value()
+ ->IsWasmInstanceObject());
+ prev = current_instance;
+ current_instance = WasmCompiledModule::cast(
+ current_instance->ptr_to_weak_next_instance()->value());
+ ++found_instances;
+ CHECK_LE(found_instances, instance_count);
+ }
+ CHECK_EQ(found_instances, instance_count);
+}
+
+void WasmInstanceObject::ValidateOrphanedInstanceForTesting(
+ Isolate* isolate, Handle<WasmInstanceObject> instance) {
+ DisallowHeapAllocation no_gc;
+ WasmCompiledModule* compiled_module = instance->compiled_module();
+ CHECK(compiled_module->has_weak_wasm_module());
+ CHECK(compiled_module->ptr_to_weak_wasm_module()->cleared());
+}
+
bool WasmExportedFunction::IsWasmExportedFunction(Object* object) {
if (!object->IsJSFunction()) return false;
Handle<JSFunction> js_function(JSFunction::cast(object));
@@ -610,13 +707,13 @@ WasmSharedModuleData* WasmSharedModuleData::cast(Object* object) {
return reinterpret_cast<WasmSharedModuleData*>(object);
}
-wasm::WasmModule* WasmSharedModuleData::module() {
+WasmModule* WasmSharedModuleData::module() {
// We populate the kModuleWrapper field with a Foreign holding the
// address to the address of a WasmModule. This is because we can
// handle both cases when the WasmModule's lifetime is managed through
// a Managed<WasmModule> object, as well as cases when it's managed
// by the embedder. CcTests fall into the latter case.
- return *(reinterpret_cast<wasm::WasmModule**>(
+ return *(reinterpret_cast<WasmModule**>(
Foreign::cast(get(kModuleWrapperIndex))->foreign_address()));
}
@@ -673,8 +770,8 @@ void WasmSharedModuleData::ReinitializeAfterDeserialization(
const byte* end = start + module_bytes->length();
// TODO(titzer): remember the module origin in the compiled_module
// For now, we assume serialized modules did not originate from asm.js.
- ModuleResult result =
- SyncDecodeWasmModule(isolate, start, end, false, kWasmOrigin);
+ wasm::ModuleResult result =
+ SyncDecodeWasmModule(isolate, start, end, false, wasm::kWasmOrigin);
CHECK(result.ok());
CHECK_NOT_NULL(result.val);
// Take ownership of the WasmModule and immediately transfer it to the
@@ -682,8 +779,8 @@ void WasmSharedModuleData::ReinitializeAfterDeserialization(
module = result.val.release();
}
- Handle<WasmModuleWrapper> module_wrapper =
- WasmModuleWrapper::New(isolate, module);
+ Handle<wasm::WasmModuleWrapper> module_wrapper =
+ wasm::WasmModuleWrapper::From(isolate, module);
shared->set(kModuleWrapperIndex, *module_wrapper);
DCHECK(WasmSharedModuleData::IsWasmSharedModuleData(*shared));
@@ -813,17 +910,16 @@ void WasmSharedModuleData::PrepareForLazyCompilation(
Handle<WasmSharedModuleData> shared) {
if (shared->has_lazy_compilation_orchestrator()) return;
Isolate* isolate = shared->GetIsolate();
- LazyCompilationOrchestrator* orch = new LazyCompilationOrchestrator();
- Handle<Managed<LazyCompilationOrchestrator>> orch_handle =
- Managed<LazyCompilationOrchestrator>::New(isolate, orch);
+ auto orch_handle =
+ Managed<wasm::LazyCompilationOrchestrator>::Allocate(isolate);
shared->set_lazy_compilation_orchestrator(*orch_handle);
}
Handle<WasmCompiledModule> WasmCompiledModule::New(
Isolate* isolate, Handle<WasmSharedModuleData> shared,
Handle<FixedArray> code_table, Handle<FixedArray> export_wrappers,
- const std::vector<wasm::GlobalHandleAddress>& function_tables,
- const std::vector<wasm::GlobalHandleAddress>& signature_tables) {
+ const std::vector<GlobalHandleAddress>& function_tables,
+ const std::vector<GlobalHandleAddress>& signature_tables) {
DCHECK_EQ(function_tables.size(), signature_tables.size());
Handle<FixedArray> ret =
isolate->factory()->NewFixedArray(PropertyIndices::Count, TENURED);
@@ -881,18 +977,10 @@ Handle<WasmCompiledModule> WasmCompiledModule::Clone(
ret->reset_weak_next_instance();
ret->reset_weak_prev_instance();
ret->reset_weak_exported_functions();
- if (ret->has_embedded_mem_start()) {
- WasmCompiledModule::recreate_embedded_mem_start(ret, isolate->factory(),
- ret->embedded_mem_start());
- }
if (ret->has_globals_start()) {
WasmCompiledModule::recreate_globals_start(ret, isolate->factory(),
ret->globals_start());
}
- if (ret->has_embedded_mem_size()) {
- WasmCompiledModule::recreate_embedded_mem_size(ret, isolate->factory(),
- ret->embedded_mem_size());
- }
return ret;
}
@@ -924,20 +1012,10 @@ void WasmCompiledModule::Reset(Isolate* isolate,
Object* undefined = *isolate->factory()->undefined_value();
Object* fct_obj = compiled_module->ptr_to_code_table();
if (fct_obj != nullptr && fct_obj != undefined) {
- uint32_t old_mem_size = compiled_module->GetEmbeddedMemSizeOrZero();
- // We use default_mem_size throughout, as the mem size of an uninstantiated
- // module, because if we can statically prove a memory access is over
- // bounds, we'll codegen a trap. See {WasmGraphBuilder::BoundsCheckMem}
- uint32_t default_mem_size = compiled_module->default_mem_size();
- Address old_mem_start = compiled_module->GetEmbeddedMemStartOrNull();
-
// Patch code to update memory references, global references, and function
// table references.
Zone specialization_zone(isolate->allocator(), ZONE_NAME);
- CodeSpecialization code_specialization(isolate, &specialization_zone);
-
- code_specialization.RelocateMemoryReferences(old_mem_start, old_mem_size,
- nullptr, default_mem_size);
+ wasm::CodeSpecialization code_specialization(isolate, &specialization_zone);
if (compiled_module->has_globals_start()) {
Address globals_start =
@@ -998,7 +1076,6 @@ void WasmCompiledModule::Reset(Isolate* isolate,
}
}
}
- compiled_module->ResetSpecializationMemInfoIfNeeded();
}
void WasmCompiledModule::InitId() {
@@ -1009,32 +1086,6 @@ void WasmCompiledModule::InitId() {
#endif
}
-void WasmCompiledModule::ResetSpecializationMemInfoIfNeeded() {
- DisallowHeapAllocation no_gc;
- if (has_embedded_mem_start()) {
- set_embedded_mem_size(default_mem_size());
- set_embedded_mem_start(0);
- }
-}
-
-void WasmCompiledModule::SetSpecializationMemInfoFrom(
- Factory* factory, Handle<WasmCompiledModule> compiled_module,
- Handle<JSArrayBuffer> buffer) {
- DCHECK(!buffer.is_null());
- size_t start_address = reinterpret_cast<size_t>(buffer->backing_store());
- uint32_t size = static_cast<uint32_t>(buffer->byte_length()->Number());
- if (!compiled_module->has_embedded_mem_start()) {
- DCHECK(!compiled_module->has_embedded_mem_size());
- WasmCompiledModule::recreate_embedded_mem_start(compiled_module, factory,
- start_address);
- WasmCompiledModule::recreate_embedded_mem_size(compiled_module, factory,
- size);
- } else {
- compiled_module->set_embedded_mem_start(start_address);
- compiled_module->set_embedded_mem_size(size);
- }
-}
-
void WasmCompiledModule::SetGlobalsStartAddressFrom(
Factory* factory, Handle<WasmCompiledModule> compiled_module,
Handle<JSArrayBuffer> buffer) {
@@ -1050,7 +1101,7 @@ void WasmCompiledModule::SetGlobalsStartAddressFrom(
MaybeHandle<String> WasmCompiledModule::ExtractUtf8StringFromModuleBytes(
Isolate* isolate, Handle<WasmCompiledModule> compiled_module,
- WireBytesRef ref) {
+ wasm::WireBytesRef ref) {
// TODO(wasm): cache strings from modules if it's a performance win.
Handle<SeqOneByteString> module_bytes(compiled_module->module_bytes(),
isolate);
@@ -1104,7 +1155,15 @@ bool WasmCompiledModule::IsWasmCompiledModule(Object* obj) {
#define WCM_CHECK_LARGE_NUMBER(TYPE, NAME) \
WCM_CHECK_TYPE(NAME, obj->IsUndefined(isolate) || obj->IsMutableHeapNumber())
WCM_PROPERTY_TABLE(WCM_CHECK)
+#undef WCM_CHECK_TYPE
+#undef WCM_CHECK_OBJECT
+#undef WCM_CHECK_CONST_OBJECT
+#undef WCM_CHECK_WASM_OBJECT
+#undef WCM_CHECK_WEAK_LINK
+#undef WCM_CHECK_SMALL_NUMBER
#undef WCM_CHECK
+#undef WCM_CHECK_SMALL_CONST_NUMBER
+#undef WCM_CHECK_LARGE_NUMBER
// All checks passed.
return true;
@@ -1116,7 +1175,7 @@ void WasmCompiledModule::PrintInstancesChain() {
for (WasmCompiledModule* current = this; current != nullptr;) {
PrintF("->%d", current->instance_id());
if (!current->has_weak_next_instance()) break;
- CHECK(!current->ptr_to_weak_next_instance()->cleared());
+ DCHECK(!current->ptr_to_weak_next_instance()->cleared());
current =
WasmCompiledModule::cast(current->ptr_to_weak_next_instance()->value());
}
@@ -1276,7 +1335,7 @@ Handle<ByteArray> GetDecodedAsmJsOffsetTable(
DCHECK(table_type == Encoded || table_type == Decoded);
if (table_type == Decoded) return offset_table;
- AsmJsOffsetsResult asm_offsets;
+ wasm::AsmJsOffsetsResult asm_offsets;
{
DisallowHeapAllocation no_gc;
const byte* bytes_start = offset_table->GetDataStartAddress();
@@ -1309,10 +1368,11 @@ Handle<ByteArray> GetDecodedAsmJsOffsetTable(
int idx = 0;
std::vector<WasmFunction>& wasm_funs = compiled_module->module()->functions;
for (int func = 0; func < num_functions; ++func) {
- std::vector<AsmJsOffsetEntry>& func_asm_offsets = asm_offsets.val[func];
+ std::vector<wasm::AsmJsOffsetEntry>& func_asm_offsets =
+ asm_offsets.val[func];
if (func_asm_offsets.empty()) continue;
int func_offset = wasm_funs[num_imported_functions + func].code.offset();
- for (AsmJsOffsetEntry& e : func_asm_offsets) {
+ for (wasm::AsmJsOffsetEntry& e : func_asm_offsets) {
// Byte offsets must be strictly monotonously increasing:
DCHECK_IMPLIES(idx > 0, func_offset + e.byte_offset >
decoded_table->get_int(idx - kOTESize));
@@ -1329,16 +1389,24 @@ Handle<ByteArray> GetDecodedAsmJsOffsetTable(
} // namespace
-int WasmCompiledModule::GetAsmJsSourcePosition(
+int WasmCompiledModule::GetSourcePosition(
Handle<WasmCompiledModule> compiled_module, uint32_t func_index,
uint32_t byte_offset, bool is_at_number_conversion) {
Isolate* isolate = compiled_module->GetIsolate();
+ const WasmModule* module = compiled_module->module();
+
+ if (!module->is_asm_js()) {
+ // for non-asm.js modules, we just add the function's start offset
+ // to make a module-relative position.
+ return byte_offset + compiled_module->GetFunctionOffset(func_index);
+ }
+
+ // asm.js modules have an additional offset table that must be searched.
Handle<ByteArray> offset_table =
GetDecodedAsmJsOffsetTable(compiled_module, isolate);
- DCHECK_LT(func_index, compiled_module->module()->functions.size());
- uint32_t func_code_offset =
- compiled_module->module()->functions[func_index].code.offset();
+ DCHECK_LT(func_index, module->functions.size());
+ uint32_t func_code_offset = module->functions[func_index].code.offset();
uint32_t total_offset = func_code_offset + byte_offset;
// Binary search for the total byte offset.
@@ -1437,9 +1505,10 @@ bool WasmCompiledModule::GetPossibleBreakpoints(
WasmFunction& func = functions[func_idx];
if (func.code.length() == 0) continue;
- BodyLocalDecls locals(&tmp);
- BytecodeIterator iterator(module_start + func.code.offset(),
- module_start + func.code.end_offset(), &locals);
+ wasm::BodyLocalDecls locals(&tmp);
+ wasm::BytecodeIterator iterator(module_start + func.code.offset(),
+ module_start + func.code.end_offset(),
+ &locals);
DCHECK_LT(0u, locals.encoded_size);
for (uint32_t offset : iterator.offsets()) {
uint32_t total_offset = func.code.offset() + offset;
@@ -1512,8 +1581,13 @@ Handle<Code> WasmCompiledModule::CompileLazy(
isolate->set_context(*instance->compiled_module()->native_context());
Object* orch_obj =
instance->compiled_module()->shared()->lazy_compilation_orchestrator();
- LazyCompilationOrchestrator* orch =
- Managed<LazyCompilationOrchestrator>::cast(orch_obj)->get();
+ auto* orch =
+ Managed<wasm::LazyCompilationOrchestrator>::cast(orch_obj)->get();
return orch->CompileLazy(isolate, instance, caller, offset, func_index,
patch_caller);
}
+
+#undef TRACE
+
+} // namespace internal
+} // namespace v8