author    Myles Borins <mylesborins@google.com>  2019-09-24 11:56:38 -0400
committer Myles Borins <myles.borins@gmail.com>  2019-10-07 03:19:23 -0400
commit    f7f6c928c1c9c136b7926f892b8a2fda11d8b4b2
tree      f5edbccb3ffda2573d70a6e291e7157f290e0ae0  /deps/v8/src/execution/isolate.cc
parent    ffd22e81983056d09c064c59343a0e488236272d
deps: update V8 to 7.8.279.9
PR-URL: https://github.com/nodejs/node/pull/29694
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Gus Caplan <me@gus.host>
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Reviewed-By: Michaël Zasso <targos@protonmail.com>
Reviewed-By: Tobias Nießen <tniessen@tnie.de>
Reviewed-By: Ujjwal Sharma <usharma1998@gmail.com>
Diffstat (limited to 'deps/v8/src/execution/isolate.cc')
-rw-r--r--  deps/v8/src/execution/isolate.cc  170
1 file changed, 98 insertions(+), 72 deletions(-)
diff --git a/deps/v8/src/execution/isolate.cc b/deps/v8/src/execution/isolate.cc
index 2b3551cdfb..d090ed5260 100644
--- a/deps/v8/src/execution/isolate.cc
+++ b/deps/v8/src/execution/isolate.cc
@@ -56,6 +56,7 @@
#include "src/objects/hash-table-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-generator-inl.h"
+#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/module-inl.h"
#include "src/objects/promise-inl.h"
#include "src/objects/prototype.h"
@@ -85,9 +86,9 @@
#include "unicode/uobject.h"
#endif // V8_INTL_SUPPORT
-#if defined(V8_OS_WIN_X64)
+#if defined(V8_OS_WIN64)
#include "src/diagnostics/unwinding-info-win64.h"
-#endif
+#endif // V8_OS_WIN64
extern "C" const uint8_t* v8_Default_embedded_blob_;
extern "C" uint32_t v8_Default_embedded_blob_size_;
@@ -2022,7 +2023,7 @@ void Isolate::CancelScheduledExceptionFromTryCatch(v8::TryCatch* handler) {
DCHECK_EQ(scheduled_exception(),
ReadOnlyRoots(heap()).termination_exception());
// Clear termination once we returned from all V8 frames.
- if (handle_scope_implementer()->CallDepthIsZero()) {
+ if (thread_local_top()->CallDepthIsZero()) {
thread_local_top()->external_caught_exception_ = false;
clear_scheduled_exception();
}
@@ -2648,21 +2649,12 @@ Handle<Context> Isolate::GetIncumbentContext() {
char* Isolate::ArchiveThread(char* to) {
MemCopy(to, reinterpret_cast<char*>(thread_local_top()),
sizeof(ThreadLocalTop));
- InitializeThreadLocal();
- clear_pending_exception();
- clear_pending_message();
- clear_scheduled_exception();
return to + sizeof(ThreadLocalTop);
}
char* Isolate::RestoreThread(char* from) {
MemCopy(reinterpret_cast<char*>(thread_local_top()), from,
sizeof(ThreadLocalTop));
-// This might be just paranoia, but it seems to be needed in case a
-// thread_local_top_ is restored on a separate OS thread.
-#ifdef USE_SIMULATOR
- thread_local_top()->simulator_ = Simulator::current(this);
-#endif
DCHECK(context().is_null() || context().IsContext());
return from + sizeof(ThreadLocalTop);
}
@@ -2884,9 +2876,9 @@ v8::PageAllocator* Isolate::page_allocator() {
}
Isolate::Isolate(std::unique_ptr<i::IsolateAllocator> isolate_allocator)
- : isolate_allocator_(std::move(isolate_allocator)),
+ : isolate_data_(this),
+ isolate_allocator_(std::move(isolate_allocator)),
id_(isolate_counter.fetch_add(1, std::memory_order_relaxed)),
- stack_guard_(this),
allocator_(FLAG_trace_zone_stats
? new VerboseAccountingAllocator(&heap_, 256 * KB)
: new AccountingAllocator()),
@@ -2925,6 +2917,14 @@ void Isolate::CheckIsolateLayout() {
CHECK_EQ(OFFSET_OF(Isolate, isolate_data_), 0);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.embedder_data_)),
Internals::kIsolateEmbedderDataOffset);
+ CHECK_EQ(static_cast<int>(
+ OFFSET_OF(Isolate, isolate_data_.fast_c_call_caller_fp_)),
+ Internals::kIsolateFastCCallCallerFpOffset);
+ CHECK_EQ(static_cast<int>(
+ OFFSET_OF(Isolate, isolate_data_.fast_c_call_caller_pc_)),
+ Internals::kIsolateFastCCallCallerPcOffset);
+ CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.stack_guard_)),
+ Internals::kIsolateStackGuardOffset);
CHECK_EQ(static_cast<int>(OFFSET_OF(Isolate, isolate_data_.roots_)),
Internals::kIsolateRootsOffset);
CHECK_EQ(Internals::kExternalMemoryOffset % 8, 0);
@@ -2961,7 +2961,7 @@ void Isolate::Deinit() {
heap_profiler()->StopSamplingHeapProfiler();
}
-#if defined(V8_OS_WIN_X64)
+#if defined(V8_OS_WIN64)
if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange() &&
heap()->memory_allocator()) {
const base::AddressRegion& code_range =
@@ -2969,7 +2969,7 @@ void Isolate::Deinit() {
void* start = reinterpret_cast<void*>(code_range.begin());
win64_unwindinfo::UnregisterNonABICompliantCodeRange(start);
}
-#endif
+#endif // V8_OS_WIN64
debug()->Unload();
@@ -3139,7 +3139,12 @@ Isolate::~Isolate() {
default_microtask_queue_ = nullptr;
}
-void Isolate::InitializeThreadLocal() { thread_local_top()->Initialize(this); }
+void Isolate::InitializeThreadLocal() {
+ thread_local_top()->Initialize(this);
+ clear_pending_exception();
+ clear_pending_message();
+ clear_scheduled_exception();
+}
void Isolate::SetTerminationOnExternalTryCatch() {
if (try_catch_handler() == nullptr) return;
@@ -3308,19 +3313,31 @@ bool Isolate::InitWithSnapshot(ReadOnlyDeserializer* read_only_deserializer,
return Init(read_only_deserializer, startup_deserializer);
}
-static void AddCrashKeysForIsolateAndHeapPointers(Isolate* isolate) {
- v8::Platform* platform = V8::GetCurrentPlatform();
+static std::string AddressToString(uintptr_t address) {
+ std::stringstream stream_address;
+ stream_address << "0x" << std::hex << address;
+ return stream_address.str();
+}
+
+void Isolate::AddCrashKeysForIsolateAndHeapPointers() {
+ DCHECK_NOT_NULL(add_crash_key_callback_);
- const int id = isolate->id();
- platform->AddCrashKey(id, "isolate", reinterpret_cast<uintptr_t>(isolate));
+ const uintptr_t isolate_address = reinterpret_cast<uintptr_t>(this);
+ add_crash_key_callback_(v8::CrashKeyId::kIsolateAddress,
+ AddressToString(isolate_address));
- auto heap = isolate->heap();
- platform->AddCrashKey(id, "ro_space",
- reinterpret_cast<uintptr_t>(heap->read_only_space()->first_page()));
- platform->AddCrashKey(id, "map_space",
- reinterpret_cast<uintptr_t>(heap->map_space()->first_page()));
- platform->AddCrashKey(id, "code_space",
- reinterpret_cast<uintptr_t>(heap->code_space()->first_page()));
+ const uintptr_t ro_space_firstpage_address =
+ reinterpret_cast<uintptr_t>(heap()->read_only_space()->first_page());
+ add_crash_key_callback_(v8::CrashKeyId::kReadonlySpaceFirstPageAddress,
+ AddressToString(ro_space_firstpage_address));
+ const uintptr_t map_space_firstpage_address =
+ reinterpret_cast<uintptr_t>(heap()->map_space()->first_page());
+ add_crash_key_callback_(v8::CrashKeyId::kMapSpaceFirstPageAddress,
+ AddressToString(map_space_firstpage_address));
+ const uintptr_t code_space_firstpage_address =
+ reinterpret_cast<uintptr_t>(heap()->code_space()->first_page());
+ add_crash_key_callback_(v8::CrashKeyId::kCodeSpaceFirstPageAddress,
+ AddressToString(code_space_firstpage_address));
}
bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
@@ -3343,9 +3360,6 @@ bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
// The initialization process does not handle memory exhaustion.
AlwaysAllocateScope always_allocate(this);
- // Safe after setting Heap::isolate_, and initializing StackGuard
- heap_.SetStackLimits();
-
#define ASSIGN_ELEMENT(CamelName, hacker_name) \
isolate_addresses_[IsolateAddressId::k##CamelName##Address] = \
reinterpret_cast<Address>(hacker_name##_address());
@@ -3379,7 +3393,7 @@ bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
// will ensure this too, but we don't have to use lockers if we are only
// using one thread.
ExecutionAccess lock(this);
- stack_guard_.InitThread(lock);
+ stack_guard()->InitThread(lock);
}
// SetUp the object heap.
@@ -3524,10 +3538,6 @@ bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
clear_pending_message();
clear_scheduled_exception();
- // Deserializing may put strange things in the root array's copy of the
- // stack guard.
- heap_.SetStackLimits();
-
// Quiet the heap NaN if needed on target platform.
if (!create_heap_objects)
Assembler::QuietNaN(ReadOnlyRoots(this).nan_value());
@@ -3553,7 +3563,7 @@ bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
sampling_flags);
}
-#if defined(V8_OS_WIN_X64)
+#if defined(V8_OS_WIN64)
if (win64_unwindinfo::CanRegisterUnwindInfoForNonABICompliantCodeRange()) {
const base::AddressRegion& code_range =
heap()->memory_allocator()->code_range();
@@ -3561,14 +3571,13 @@ bool Isolate::Init(ReadOnlyDeserializer* read_only_deserializer,
size_t size_in_bytes = code_range.size();
win64_unwindinfo::RegisterNonABICompliantCodeRange(start, size_in_bytes);
}
-#endif
+#endif // V8_OS_WIN64
if (create_heap_objects && FLAG_profile_deserialization) {
double ms = timer.Elapsed().InMillisecondsF();
PrintF("[Initializing isolate from scratch took %0.3f ms]\n", ms);
}
- AddCrashKeysForIsolateAndHeapPointers(this);
return true;
}
@@ -3990,15 +3999,6 @@ void Isolate::InvalidateArrayConstructorProtector() {
DCHECK(!IsArrayConstructorIntact());
}
-void Isolate::InvalidateArraySpeciesProtector() {
- DCHECK(factory()->array_species_protector()->value().IsSmi());
- DCHECK(IsArraySpeciesLookupChainIntact());
- PropertyCell::SetValueWithInvalidation(
- this, "array_species_protector", factory()->array_species_protector(),
- handle(Smi::FromInt(kProtectorInvalid), this));
- DCHECK(!IsArraySpeciesLookupChainIntact());
-}
-
void Isolate::InvalidateTypedArraySpeciesProtector() {
DCHECK(factory()->typed_array_species_protector()->value().IsSmi());
DCHECK(IsTypedArraySpeciesLookupChainIntact());
@@ -4009,19 +4009,6 @@ void Isolate::InvalidateTypedArraySpeciesProtector() {
DCHECK(!IsTypedArraySpeciesLookupChainIntact());
}
-void Isolate::InvalidateRegExpSpeciesProtector(
- Handle<NativeContext> native_context) {
- DCHECK_EQ(*native_context, this->raw_native_context());
- DCHECK(native_context->regexp_species_protector().value().IsSmi());
- DCHECK(IsRegExpSpeciesLookupChainIntact(native_context));
- Handle<PropertyCell> species_cell(native_context->regexp_species_protector(),
- this);
- PropertyCell::SetValueWithInvalidation(
- this, "regexp_species_protector", species_cell,
- handle(Smi::FromInt(kProtectorInvalid), this));
- DCHECK(!IsRegExpSpeciesLookupChainIntact(native_context));
-}
-
void Isolate::InvalidatePromiseSpeciesProtector() {
DCHECK(factory()->promise_species_protector()->value().IsSmi());
DCHECK(IsPromiseSpeciesLookupChainIntact());
@@ -4189,7 +4176,7 @@ Handle<Symbol> Isolate::SymbolFor(RootIndex dictionary_index,
PropertyDetails::Empty(), &entry);
switch (dictionary_index) {
case RootIndex::kPublicSymbolTable:
- symbol->set_is_public(true);
+ symbol->set_is_in_public_symbol_table(true);
heap()->set_public_symbol_table(*dictionary);
break;
case RootIndex::kApiSymbolTable:
@@ -4237,7 +4224,7 @@ void Isolate::RemoveCallCompletedCallback(CallCompletedCallback callback) {
}
void Isolate::FireCallCompletedCallback(MicrotaskQueue* microtask_queue) {
- if (!handle_scope_implementer()->CallDepthIsZero()) return;
+ if (!thread_local_top()->CallDepthIsZero()) return;
bool run_microtasks =
microtask_queue && microtask_queue->size() &&
@@ -4246,12 +4233,6 @@ void Isolate::FireCallCompletedCallback(MicrotaskQueue* microtask_queue) {
if (run_microtasks) {
microtask_queue->RunMicrotasks(this);
- } else {
- // TODO(marja): (spec) The discussion about when to clear the KeepDuringJob
- // set is still open (whether to clear it after every microtask or once
- // during a microtask checkpoint). See also
- // https://github.com/tc39/proposal-weakrefs/issues/39 .
- heap()->ClearKeptObjects();
}
if (call_completed_callbacks_.empty()) return;
@@ -4330,6 +4311,23 @@ MaybeHandle<JSPromise> Isolate::RunHostImportModuleDynamicallyCallback(
return v8::Utils::OpenHandle(*promise);
}
+void Isolate::ClearKeptObjects() { heap()->ClearKeptObjects(); }
+
+void Isolate::SetHostCleanupFinalizationGroupCallback(
+ HostCleanupFinalizationGroupCallback callback) {
+ host_cleanup_finalization_group_callback_ = callback;
+}
+
+void Isolate::RunHostCleanupFinalizationGroupCallback(
+ Handle<JSFinalizationGroup> fg) {
+ if (host_cleanup_finalization_group_callback_ != nullptr) {
+ v8::Local<v8::Context> api_context =
+ v8::Utils::ToLocal(handle(Context::cast(fg->native_context()), this));
+ host_cleanup_finalization_group_callback_(api_context,
+ v8::Utils::ToLocal(fg));
+ }
+}
+
void Isolate::SetHostImportModuleDynamicallyCallback(
HostImportModuleDynamicallyCallback callback) {
host_import_module_dynamically_callback_ = callback;
@@ -4337,7 +4335,7 @@ void Isolate::SetHostImportModuleDynamicallyCallback(
Handle<JSObject> Isolate::RunHostInitializeImportMetaObjectCallback(
Handle<SourceTextModule> module) {
- Handle<Object> host_meta(module->import_meta(), this);
+ Handle<HeapObject> host_meta(module->import_meta(), this);
if (host_meta->IsTheHole(this)) {
host_meta = factory()->NewJSObjectWithNullProto();
if (host_initialize_import_meta_object_callback_ != nullptr) {
@@ -4399,7 +4397,7 @@ void Isolate::PrepareBuiltinSourcePositionMap() {
}
}
-#if defined(V8_OS_WIN_X64)
+#if defined(V8_OS_WIN64)
void Isolate::SetBuiltinUnwindData(
int builtin_index,
const win64_unwindinfo::BuiltinUnwindInfo& unwinding_info) {
@@ -4407,7 +4405,7 @@ void Isolate::SetBuiltinUnwindData(
embedded_file_writer_->SetBuiltinUnwindData(builtin_index, unwinding_info);
}
}
-#endif
+#endif // V8_OS_WIN64
void Isolate::SetPrepareStackTraceCallback(PrepareStackTraceCallback callback) {
prepare_stack_trace_callback_ = callback;
@@ -4417,6 +4415,13 @@ bool Isolate::HasPrepareStackTraceCallback() const {
return prepare_stack_trace_callback_ != nullptr;
}
+void Isolate::SetAddCrashKeyCallback(AddCrashKeyCallback callback) {
+ add_crash_key_callback_ = callback;
+
+ // Log the initial set of data.
+ AddCrashKeysForIsolateAndHeapPointers();
+}
+
void Isolate::SetAtomicsWaitCallback(v8::Isolate::AtomicsWaitCallback callback,
void* data) {
atomics_wait_callback_ = callback;
@@ -4663,6 +4668,27 @@ void Isolate::SetIdle(bool is_idle) {
}
}
+void Isolate::CollectSourcePositionsForAllBytecodeArrays() {
+ HandleScope scope(this);
+ std::vector<Handle<SharedFunctionInfo>> sfis;
+ {
+ DisallowHeapAllocation no_gc;
+ HeapObjectIterator iterator(heap());
+ for (HeapObject obj = iterator.Next(); !obj.is_null();
+ obj = iterator.Next()) {
+ if (obj.IsSharedFunctionInfo()) {
+ SharedFunctionInfo sfi = SharedFunctionInfo::cast(obj);
+ if (sfi.HasBytecodeArray()) {
+ sfis.push_back(Handle<SharedFunctionInfo>(sfi, this));
+ }
+ }
+ }
+ }
+ for (auto sfi : sfis) {
+ SharedFunctionInfo::EnsureSourcePositionsAvailable(this, sfi);
+ }
+}
+
#ifdef V8_INTL_SUPPORT
icu::UMemory* Isolate::get_cached_icu_object(ICUObjectCacheType cache_type) {
return icu_object_cache_[cache_type].get();