// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/execution/isolate.h"

#include <stdlib.h>

#include <atomic>
#include <fstream>  // NOLINT(readability/streams)
#include <memory>
#include <sstream>

#include "src/api/api-inl.h"
#include "src/ast/ast-value-factory.h"
#include "src/ast/scopes.h"
#include "src/base/adapters.h"
#include "src/base/hashmap.h"
#include "src/base/platform/platform.h"
#include "src/base/sys-info.h"
#include "src/base/utils/random-number-generator.h"
#include "src/builtins/builtins-promise.h"
#include "src/builtins/constants-table-builder.h"
#include "src/codegen/assembler-inl.h"
#include "src/codegen/compilation-cache.h"
#include "src/common/ptr-compr.h"
#include "src/compiler-dispatcher/compiler-dispatcher.h"
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
#include "src/date/date.h"
#include "src/debug/debug-frames.h"
#include "src/debug/debug.h"
#include "src/deoptimizer/deoptimizer.h"
#include "src/diagnostics/compilation-statistics.h"
#include "src/execution/frames-inl.h"
#include "src/execution/isolate-inl.h"
#include "src/execution/messages.h"
#include "src/execution/microtask-queue.h"
#include "src/execution/runtime-profiler.h"
#include "src/execution/simulator.h"
#include "src/execution/v8threads.h"
#include "src/execution/vm-state-inl.h"
#include "src/heap/heap-inl.h"
#include "src/heap/read-only-heap.h"
#include "src/ic/stub-cache.h"
#include "src/init/bootstrapper.h"
#include "src/init/setup-isolate.h"
#include "src/init/v8.h"
#include "src/interpreter/interpreter.h"
#include "src/libsampler/sampler.h"
#include "src/logging/counters.h"
#include "src/logging/log.h"
#include "src/numbers/hash-seed-inl.h"
#include "src/objects/elements.h"
#include "src/objects/frame-array-inl.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-generator-inl.h"
#include "src/objects/js-weak-refs-inl.h"
#include "src/objects/module-inl.h"
#include "src/objects/promise-inl.h"
#include "src/objects/prototype.h"
#include "src/objects/slots.h"
#include "src/objects/smi.h"
#include "src/objects/stack-frame-info-inl.h"
#include "src/objects/visitors.h"
#include "src/profiler/heap-profiler.h"
#include "src/profiler/tracing-cpu-profiler.h"
#include "src/regexp/regexp-stack.h"
#include "src/snapshot/embedded/embedded-data.h"
#include "src/snapshot/embedded/embedded-file-writer.h"
#include "src/snapshot/read-only-deserializer.h"
#include "src/snapshot/startup-deserializer.h"
#include "src/strings/string-builder-inl.h"
#include "src/strings/string-stream.h"
#include "src/tasks/cancelable-task.h"
#include "src/tracing/tracing-category-observer.h"
#include "src/trap-handler/trap-handler.h"
#include "src/utils/ostreams.h"
#include "src/utils/version.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-objects.h"
#include "src/zone/accounting-allocator.h"

#ifdef V8_INTL_SUPPORT
#include "unicode/uobject.h"
#endif  // V8_INTL_SUPPORT

#if defined(V8_OS_WIN64)
#include "src/diagnostics/unwinding-info-win64.h"
#endif  // V8_OS_WIN64

extern "C" const uint8_t* v8_Default_embedded_blob_;
extern "C" uint32_t v8_Default_embedded_blob_size_;

namespace v8 {
namespace internal {

#ifdef DEBUG
#define TRACE_ISOLATE(tag)                                                  \
  do {                                                                      \
    if (FLAG_trace_isolates) {                                              \
      PrintF("Isolate %p (id %d)" #tag "\n", reinterpret_cast<void*>(this), \
             id());                                                         \
    }                                                                       \
  } while (false)
#else
#define TRACE_ISOLATE(tag)
#endif

const uint8_t* DefaultEmbeddedBlob() { return v8_Default_embedded_blob_; }
uint32_t DefaultEmbeddedBlobSize() { return v8_Default_embedded_blob_size_; }

#ifdef V8_MULTI_SNAPSHOTS
extern "C" const uint8_t* v8_Trusted_embedded_blob_;
extern "C" uint32_t v8_Trusted_embedded_blob_size_;

const uint8_t* TrustedEmbeddedBlob() { return v8_Trusted_embedded_blob_; }
uint32_t TrustedEmbeddedBlobSize() { return v8_Trusted_embedded_blob_size_; }
#endif

namespace {
// These variables provide access to the current embedded blob without
// requiring an isolate instance. This is needed e.g. by
// Code::InstructionStart, which may not have access to an isolate but still
// needs to access the embedded blob. The variables are initialized by each
// isolate in Init(). Writes and reads are relaxed since we can guarantee
// that the current thread has initialized these variables before accessing
// them. Different threads may race, but this is fine since they all attempt
// to set the same values of the blob pointer and size.
std::atomic<const uint8_t*> current_embedded_blob_(nullptr);
std::atomic<uint32_t> current_embedded_blob_size_(0);

// The various workflows around embedded snapshots are fairly complex. We
// need to support plain old snapshot builds, nosnap builds, and the
// requirements of subtly different serialization tests. There are two
// related knobs to twiddle:
//
// - The default embedded blob may be overridden by setting the sticky
//   embedded blob. This is set automatically whenever we create a new
//   embedded blob.
//
// - Lifecycle management can be either manual or set to refcounting.
//
// A few situations to demonstrate their use:
//
// - A plain old snapshot build neither overrides the default blob nor
//   refcounts.
//
// - mksnapshot sets the sticky blob and manually frees the embedded
//   blob once done.
//
// - Most serializer tests do the same.
//
// - Nosnapshot builds set the sticky blob and enable refcounting.
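//
// A minimal sketch of the manual-lifetime flow described above, as an
// embedder such as mksnapshot might drive it (illustrative only; the actual
// call sites live in the snapshot tooling, not in this file):
//
//   DisableEmbeddedBlobRefcounting();  // switch from refcounting to manual
//   ...create isolates, build and serialize the new embedded blob...
//   FreeCurrentEmbeddedBlob();         // release the sticky blob when done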
// This mutex protects access to the following variables:
// - sticky_embedded_blob_
// - sticky_embedded_blob_size_
// - enable_embedded_blob_refcounting_
// - current_embedded_blob_refs_
base::LazyMutex current_embedded_blob_refcount_mutex_ = LAZY_MUTEX_INITIALIZER;

const uint8_t* sticky_embedded_blob_ = nullptr;
uint32_t sticky_embedded_blob_size_ = 0;

bool enable_embedded_blob_refcounting_ = true;
int current_embedded_blob_refs_ = 0;

const uint8_t* StickyEmbeddedBlob() { return sticky_embedded_blob_; }
uint32_t StickyEmbeddedBlobSize() { return sticky_embedded_blob_size_; }

void SetStickyEmbeddedBlob(const uint8_t* blob, uint32_t blob_size) {
  sticky_embedded_blob_ = blob;
  sticky_embedded_blob_size_ = blob_size;
}

}  // namespace

void DisableEmbeddedBlobRefcounting() {
  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());
  enable_embedded_blob_refcounting_ = false;
}

void FreeCurrentEmbeddedBlob() {
  CHECK(!enable_embedded_blob_refcounting_);
  base::MutexGuard guard(current_embedded_blob_refcount_mutex_.Pointer());

  if (StickyEmbeddedBlob() == nullptr) return;

  CHECK_EQ(StickyEmbeddedBlob(), Isolate::CurrentEmbeddedBlob());

  InstructionStream::FreeOffHeapInstructionStream(
      const_cast<uint8_t*>(Isolate::CurrentEmbeddedBlob()),
      Isolate::CurrentEmbeddedBlobSize());

  current_embedded_blob_.store(nullptr, std::memory_order_relaxed);
  current_embedded_blob_size_.store(0, std::memory_order_relaxed);
  sticky_embedded_blob_ = nullptr;
  sticky_embedded_blob_size_ = 0;
}

// static
bool Isolate::CurrentEmbeddedBlobIsBinaryEmbedded() {
  // In some situations, we must be able to rely on the embedded blob being
  // immortal immovable. This is the case if the blob is binary-embedded.
  // See blob lifecycle controls above for descriptions of when the current
  // embedded blob may change (e.g. in tests or mksnapshot). If the blob is
  // binary-embedded, it is immortal immovable.
  const uint8_t* blob =
      current_embedded_blob_.load(std::memory_order::memory_order_relaxed);
  if (blob == nullptr) return false;
#ifdef V8_MULTI_SNAPSHOTS
  if (blob == TrustedEmbeddedBlob()) return true;
#endif
  return blob == DefaultEmbeddedBlob();
}

void Isolate::SetEmbeddedBlob(const uint8_t* blob, uint32_t blob_size) {
  CHECK_NOT_NULL(blob);

  embedded_blob_ = blob;
  embedded_blob_size_ = blob_size;
  current_embedded_blob_.store(blob, std::memory_order_relaxed);
  current_embedded_blob_size_.store(blob_size, std::memory_order_relaxed);

#ifdef DEBUG
  // Verify that the contents of the embedded blob are unchanged from
  // serialization-time, just to ensure the compiler isn't messing with us.
  EmbeddedData d = EmbeddedData::FromBlob();
  if (d.EmbeddedBlobHash() != d.CreateEmbeddedBlobHash()) {
    FATAL(
        "Embedded blob checksum verification failed. This indicates that the "
        "embedded blob has been modified since compilation time. A common "
        "cause is a debugging breakpoint set within builtin code.");
  }
#endif  // DEBUG
}

void Isolate::ClearEmbeddedBlob() {
  CHECK(enable_embedded_blob_refcounting_);
  CHECK_EQ(embedded_blob_, CurrentEmbeddedBlob());
  CHECK_EQ(embedded_blob_, StickyEmbeddedBlob());

  embedded_blob_ = nullptr;
  embedded_blob_size_ = 0;
  current_embedded_blob_.store(nullptr, std::memory_order_relaxed);
  current_embedded_blob_size_.store(0, std::memory_order_relaxed);
  sticky_embedded_blob_ = nullptr;
  sticky_embedded_blob_size_ = 0;
}

const uint8_t* Isolate::embedded_blob() const { return embedded_blob_; }
uint32_t Isolate::embedded_blob_size() const { return embedded_blob_size_; }

// static
const uint8_t* Isolate::CurrentEmbeddedBlob() {
  return current_embedded_blob_.load(std::memory_order::memory_order_relaxed);
}

// static
uint32_t Isolate::CurrentEmbeddedBlobSize() {
  return current_embedded_blob_size_.load(
      std::memory_order::memory_order_relaxed);
}

size_t Isolate::HashIsolateForEmbeddedBlob() {
  DCHECK(builtins_.is_initialized());
  DCHECK(FLAG_embedded_builtins);
  DCHECK(Builtins::AllBuiltinsAreIsolateIndependent());

  DisallowHeapAllocation no_gc;

  static constexpr size_t kSeed = 0;
  size_t hash = kSeed;

  // Hash data sections of builtin code objects.
  for (int i = 0; i < Builtins::builtin_count; i++) {
    Code code = heap_.builtin(i);

    DCHECK(Internals::HasHeapObjectTag(code.ptr()));
    uint8_t* const code_ptr =
        reinterpret_cast<uint8_t*>(code.ptr() - kHeapObjectTag);

    // These static asserts ensure we don't miss relevant fields. We don't
    // hash instruction size and flags since they change when creating the
    // off-heap trampolines. Other data fields must remain the same.
    STATIC_ASSERT(Code::kInstructionSizeOffset == Code::kDataStart);
    STATIC_ASSERT(Code::kFlagsOffset == Code::kInstructionSizeOffsetEnd + 1);
    STATIC_ASSERT(Code::kSafepointTableOffsetOffset ==
                  Code::kFlagsOffsetEnd + 1);
    static constexpr int kStartOffset = Code::kSafepointTableOffsetOffset;

    for (int j = kStartOffset; j < Code::kUnalignedHeaderSize; j++) {
      hash = base::hash_combine(hash, size_t{code_ptr[j]});
    }
  }

  // The builtins constants table is also tightly tied to embedded builtins.
  hash = base::hash_combine(
      hash, static_cast<size_t>(heap_.builtins_constants_table().length()));

  return hash;
}

base::Thread::LocalStorageKey Isolate::isolate_key_;
base::Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
#if DEBUG
std::atomic<bool> Isolate::isolate_key_created_{false};
#endif
namespace {
// A global counter for all generated Isolates, might overflow.
std::atomic<size_t> isolate_counter{0};
}  // namespace

Isolate::PerIsolateThreadData*
Isolate::FindOrAllocatePerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  PerIsolateThreadData* per_thread = nullptr;
  {
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread == nullptr) {
      base::OS::AdjustSchedulingParams();
      per_thread = new PerIsolateThreadData(this, thread_id);
      thread_data_table_.Insert(per_thread);
    }
    DCHECK(thread_data_table_.Lookup(thread_id) == per_thread);
  }
  return per_thread;
}

void Isolate::DiscardPerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::TryGetCurrent();
  if (thread_id.IsValid()) {
    DCHECK_NE(thread_manager_->mutex_owner_.load(std::memory_order_relaxed),
              thread_id);
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    PerIsolateThreadData* per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread) {
      DCHECK(!per_thread->thread_state_);
      thread_data_table_.Remove(per_thread);
    }
  }
}

Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  return FindPerThreadDataForThread(thread_id);
}

Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
    ThreadId thread_id) {
  PerIsolateThreadData* per_thread = nullptr;
  {
    base::MutexGuard lock_guard(&thread_data_table_mutex_);
    per_thread = thread_data_table_.Lookup(thread_id);
  }
  return per_thread;
}

void Isolate::InitializeOncePerProcess() {
  isolate_key_ = base::Thread::CreateThreadLocalKey();
#if DEBUG
  bool expected = false;
  DCHECK_EQ(true, isolate_key_created_.compare_exchange_strong(
                      expected, true, std::memory_order_relaxed));
#endif
  per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey();
}

Address Isolate::get_address_from_id(IsolateAddressId id) {
  return isolate_addresses_[id];
}

char* Isolate::Iterate(RootVisitor* v, char* thread_storage) {
  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
  Iterate(v, thread);
  return thread_storage + sizeof(ThreadLocalTop);
}

void Isolate::IterateThread(ThreadVisitor* v, char* t) {
  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
  v->VisitThread(this, thread);
}

void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) {
  // Visit the roots from the top for a given thread.
  v->VisitRootPointer(Root::kTop, nullptr,
                      FullObjectSlot(&thread->pending_exception_));
  v->VisitRootPointer(Root::kTop, nullptr,
                      FullObjectSlot(&thread->pending_message_obj_));
  v->VisitRootPointer(Root::kTop, nullptr, FullObjectSlot(&thread->context_));
  v->VisitRootPointer(Root::kTop, nullptr,
                      FullObjectSlot(&thread->scheduled_exception_));

  for (v8::TryCatch* block = thread->try_catch_handler_; block != nullptr;
       block = block->next_) {
    // TODO(3770): Make TryCatch::exception_ an Address (and message_obj_ too).
    v->VisitRootPointer(
        Root::kTop, nullptr,
        FullObjectSlot(reinterpret_cast<Address>(&(block->exception_))));
    v->VisitRootPointer(
        Root::kTop, nullptr,
        FullObjectSlot(reinterpret_cast<Address>(&(block->message_obj_))));
  }

  // Iterate over pointers on native execution stack.
  wasm::WasmCodeRefScope wasm_code_ref_scope;
  for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
    it.frame()->Iterate(v);
  }
}

void Isolate::Iterate(RootVisitor* v) {
  ThreadLocalTop* current_t = thread_local_top();
  Iterate(v, current_t);
}

void Isolate::IterateDeferredHandles(RootVisitor* visitor) {
  for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
       deferred = deferred->next_) {
    deferred->Iterate(visitor);
  }
}

#ifdef DEBUG
bool Isolate::IsDeferredHandle(Address* handle) {
  // Comparing unrelated pointers (not from the same array) is undefined
  // behavior, so cast to Address before making arbitrary comparisons.
  Address handle_as_address = reinterpret_cast<Address>(handle);
  // Each DeferredHandles instance keeps the handles to one job in the
  // concurrent recompilation queue, containing a list of blocks. Each block
  // contains kHandleBlockSize handles except for the first block, which may
  // not be fully filled.
  // We iterate through all the blocks to see whether the argument handle
  // belongs to one of the blocks. If so, it is deferred.
  for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
       deferred = deferred->next_) {
    std::vector<Address*>* blocks = &deferred->blocks_;
    for (size_t i = 0; i < blocks->size(); i++) {
      Address* block_limit = (i == 0) ? deferred->first_block_limit_
                                      : blocks->at(i) + kHandleBlockSize;
      if (reinterpret_cast<Address>(blocks->at(i)) <= handle_as_address &&
          handle_as_address < reinterpret_cast<Address>(block_limit)) {
        return true;
      }
    }
  }
  return false;
}
#endif  // DEBUG

void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
  thread_local_top()->try_catch_handler_ = that;
}

void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
  DCHECK(thread_local_top()->try_catch_handler_ == that);
  thread_local_top()->try_catch_handler_ = that->next_;
}

Handle<String> Isolate::StackTraceString() {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    HeapStringAllocator allocator;
    StringStream::ClearMentionedObjectCache(this);
    StringStream accumulator(&allocator);
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator);
    Handle<String> stack_trace = accumulator.ToString(this);
    incomplete_message_ = nullptr;
    stack_trace_nesting_level_ = 0;
    return stack_trace;
  } else if (stack_trace_nesting_level_ == 1) {
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToStdOut();
    return factory()->empty_string();
  } else {
    base::OS::Abort();
    // Unreachable
    return factory()->empty_string();
  }
}

void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3,
                                   void* ptr4) {
  StackTraceFailureMessage message(this, ptr1, ptr2, ptr3, ptr4);
  message.Print();
  base::OS::Abort();
}

void StackTraceFailureMessage::Print() volatile {
  // Print the details of this failure message object, including its own
  // address to force stack allocation.
  base::OS::PrintError(
      "Stacktrace:\n ptr1=%p\n ptr2=%p\n ptr3=%p\n ptr4=%p\n "
      "failure_message_object=%p\n%s",
      ptr1_, ptr2_, ptr3_, ptr4_, this, &js_stack_trace_[0]);
}

StackTraceFailureMessage::StackTraceFailureMessage(Isolate* isolate, void* ptr1,
                                                   void* ptr2, void* ptr3,
                                                   void* ptr4) {
  isolate_ = isolate;
  ptr1_ = ptr1;
  ptr2_ = ptr2;
  ptr3_ = ptr3;
  ptr4_ = ptr4;
  // Write a stack trace into the {js_stack_trace_} buffer.
  const size_t buffer_length = arraysize(js_stack_trace_);
  memset(&js_stack_trace_, 0, buffer_length);
  FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1);
  StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
  isolate->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
  // Keeping a reference to the last code objects to increase likelihood that
  // they get included in the minidump.
  const size_t code_objects_length = arraysize(code_objects_);
  size_t i = 0;
  StackFrameIterator it(isolate);
  for (; !it.done() && i < code_objects_length; it.Advance()) {
    code_objects_[i++] =
        reinterpret_cast<void*>(it.frame()->unchecked_code().ptr());
  }
}
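// Illustrative usage (hypothetical call site, not from this file): code that
// detects an impossible state can route up to four pointers of interest
// through PushStackTraceAndDie() so they survive into crash dumps alongside
// the concise JS stack written by StackTraceFailureMessage above:
//
//   isolate->PushStackTraceAndDie(reinterpret_cast<void*>(obj.ptr()),
//                                 nullptr, nullptr, nullptr);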
namespace {

class StackFrameCacheHelper : public AllStatic {
 public:
  static MaybeHandle<StackTraceFrame> LookupCachedFrame(
      Isolate* isolate, Handle<AbstractCode> code, int code_offset) {
    if (FLAG_optimize_for_size) return MaybeHandle<StackTraceFrame>();

    const auto maybe_cache = handle(code->stack_frame_cache(), isolate);
    if (!maybe_cache->IsSimpleNumberDictionary())
      return MaybeHandle<StackTraceFrame>();

    const auto cache = Handle<SimpleNumberDictionary>::cast(maybe_cache);
    const int entry = cache->FindEntry(isolate, code_offset);
    if (entry != NumberDictionary::kNotFound) {
      return handle(StackTraceFrame::cast(cache->ValueAt(entry)), isolate);
    }
    return MaybeHandle<StackTraceFrame>();
  }

  static void CacheFrameAndUpdateCache(Isolate* isolate,
                                       Handle<AbstractCode> code,
                                       int code_offset,
                                       Handle<StackTraceFrame> frame) {
    if (FLAG_optimize_for_size) return;

    const auto maybe_cache = handle(code->stack_frame_cache(), isolate);
    const auto cache = maybe_cache->IsSimpleNumberDictionary()
                           ? Handle<SimpleNumberDictionary>::cast(maybe_cache)
                           : SimpleNumberDictionary::New(isolate, 1);
    Handle<SimpleNumberDictionary> new_cache =
        SimpleNumberDictionary::Set(isolate, cache, code_offset, frame);
    if (*new_cache != *cache || !maybe_cache->IsSimpleNumberDictionary()) {
      AbstractCode::SetStackFrameCache(code, new_cache);
    }
  }
};

}  // anonymous namespace

class FrameArrayBuilder {
 public:
  enum FrameFilterMode { ALL, CURRENT_SECURITY_CONTEXT };

  FrameArrayBuilder(Isolate* isolate, FrameSkipMode mode, int limit,
                    Handle<Object> caller, FrameFilterMode filter_mode)
      : isolate_(isolate),
        mode_(mode),
        limit_(limit),
        caller_(caller),
        check_security_context_(filter_mode == CURRENT_SECURITY_CONTEXT) {
    switch (mode_) {
      case SKIP_FIRST:
        skip_next_frame_ = true;
        break;
      case SKIP_UNTIL_SEEN:
        DCHECK(caller_->IsJSFunction());
        skip_next_frame_ = true;
        break;
      case SKIP_NONE:
        skip_next_frame_ = false;
        break;
    }

    elements_ = isolate->factory()->NewFrameArray(Min(limit, 10));
  }

  void AppendAsyncFrame(Handle<JSGeneratorObject> generator_object) {
    if (full()) return;
    Handle<JSFunction> function(generator_object->function(), isolate_);
    if (!IsVisibleInStackTrace(function)) return;
    int flags = FrameArray::kIsAsync;
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;

    Handle<Object> receiver(generator_object->receiver(), isolate_);
    Handle<AbstractCode> code(
        AbstractCode::cast(function->shared().GetBytecodeArray()), isolate_);
    int offset = Smi::ToInt(generator_object->input_or_debug_pos());
    // The stored bytecode offset is relative to a different base than what
    // is used in the source position table, hence the subtraction.
    offset -= BytecodeArray::kHeaderSize - kHeapObjectTag;

    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
    if (V8_UNLIKELY(FLAG_detailed_error_stack_trace)) {
      int param_count = function->shared().internal_formal_parameter_count();
      parameters = isolate_->factory()->NewFixedArray(param_count);
      for (int i = 0; i < param_count; i++) {
        parameters->set(i,
                        generator_object->parameters_and_registers().get(i));
      }
    }

    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function, code,
                                          offset, flags, parameters);
  }

  void AppendPromiseAllFrame(Handle<Context> context, int offset) {
    if (full()) return;
    int flags = FrameArray::kIsAsync | FrameArray::kIsPromiseAll;

    Handle<Context> native_context(context->native_context(), isolate_);
    Handle<JSFunction> function(native_context->promise_all(), isolate_);
    if (!IsVisibleInStackTrace(function)) return;

    Handle<Object> receiver(native_context->promise_function(), isolate_);
    Handle<AbstractCode> code(AbstractCode::cast(function->code()), isolate_);

    // TODO(mmarchini) save Promises list from Promise.all()
    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();

    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function, code,
                                          offset, flags, parameters);
  }

  void AppendJavaScriptFrame(
      FrameSummary::JavaScriptFrameSummary const& summary) {
    // Filter out internal frames that we do not want to show.
    if (!IsVisibleInStackTrace(summary.function())) return;

    Handle<AbstractCode> abstract_code = summary.abstract_code();
    const int offset = summary.code_offset();

    const bool is_constructor = summary.is_constructor();

    int flags = 0;
    Handle<JSFunction> function = summary.function();
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
    if (is_constructor) flags |= FrameArray::kIsConstructor;

    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
    if (V8_UNLIKELY(FLAG_detailed_error_stack_trace))
      parameters = summary.parameters();

    elements_ = FrameArray::AppendJSFrame(
        elements_, TheHoleToUndefined(isolate_, summary.receiver()), function,
        abstract_code, offset, flags, parameters);
  }

  void AppendWasmCompiledFrame(
      FrameSummary::WasmCompiledFrameSummary const& summary) {
    if (summary.code()->kind() != wasm::WasmCode::kFunction) return;
    Handle<WasmInstanceObject> instance = summary.wasm_instance();
    int flags = 0;
    if (instance->module_object().is_asm_js()) {
      flags |= FrameArray::kIsAsmJsWasmFrame;
      if (summary.at_to_number_conversion()) {
        flags |= FrameArray::kAsmJsAtNumberConversion;
      }
    } else {
      flags |= FrameArray::kIsWasmFrame;
    }

    elements_ = FrameArray::AppendWasmFrame(
        elements_, instance, summary.function_index(), summary.code(),
        summary.code_offset(), flags);
  }

  void AppendWasmInterpretedFrame(
      FrameSummary::WasmInterpretedFrameSummary const& summary) {
    Handle<WasmInstanceObject> instance = summary.wasm_instance();
    int flags = FrameArray::kIsWasmInterpretedFrame;
    DCHECK(!instance->module_object().is_asm_js());
    elements_ = FrameArray::AppendWasmFrame(elements_, instance,
                                            summary.function_index(), {},
                                            summary.byte_offset(), flags);
  }

  void AppendBuiltinExitFrame(BuiltinExitFrame* exit_frame) {
    Handle<JSFunction> function = handle(exit_frame->function(), isolate_);

    // Filter out internal frames that we do not want to show.
    if (!IsVisibleInStackTrace(function)) return;

    // TODO(szuend): Remove this check once the flag is enabled
    //               by default.
    if (!FLAG_experimental_stack_trace_frames &&
        function->shared().IsApiFunction()) {
      return;
    }

    Handle<Object> receiver(exit_frame->receiver(), isolate_);
    Handle<Code> code(exit_frame->LookupCode(), isolate_);
    const int offset =
        static_cast<int>(exit_frame->pc() - code->InstructionStart());

    int flags = 0;
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
    if (exit_frame->IsConstructor()) flags |= FrameArray::kIsConstructor;

    Handle<FixedArray> parameters = isolate_->factory()->empty_fixed_array();
    if (V8_UNLIKELY(FLAG_detailed_error_stack_trace)) {
      int param_count = exit_frame->ComputeParametersCount();
      parameters = isolate_->factory()->NewFixedArray(param_count);
      for (int i = 0; i < param_count; i++) {
        parameters->set(i, exit_frame->GetParameter(i));
      }
    }

    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function,
                                          Handle<AbstractCode>::cast(code),
                                          offset, flags, parameters);
  }

  bool full() { return elements_->FrameCount() >= limit_; }

  Handle<FrameArray> GetElements() {
    elements_->ShrinkToFit(isolate_);
    return elements_;
  }

  // Creates a StackTraceFrame object for each frame in the FrameArray.
  Handle<FixedArray> GetElementsAsStackTraceFrameArray(
      bool enable_frame_caching) {
    elements_->ShrinkToFit(isolate_);
    const int frame_count = elements_->FrameCount();
    Handle<FixedArray> stack_trace =
        isolate_->factory()->NewFixedArray(frame_count);

    for (int i = 0; i < frame_count; ++i) {
      // Caching stack frames only happens for user JS frames.
      const bool cache_frame =
          enable_frame_caching && !elements_->IsAnyWasmFrame(i) &&
          elements_->Function(i).shared().IsUserJavaScript();
      if (cache_frame) {
        MaybeHandle<StackTraceFrame> maybe_frame =
            StackFrameCacheHelper::LookupCachedFrame(
                isolate_, handle(elements_->Code(i), isolate_),
                Smi::ToInt(elements_->Offset(i)));
        if (!maybe_frame.is_null()) {
          Handle<StackTraceFrame> frame = maybe_frame.ToHandleChecked();
          stack_trace->set(i, *frame);
          continue;
        }
      }

      Handle<StackTraceFrame> frame =
          isolate_->factory()->NewStackTraceFrame(elements_, i);
      stack_trace->set(i, *frame);

      if (cache_frame) {
        StackFrameCacheHelper::CacheFrameAndUpdateCache(
            isolate_, handle(elements_->Code(i), isolate_),
            Smi::ToInt(elements_->Offset(i)), frame);
      }
    }
    return stack_trace;
  }

 private:
  // Poison stack frames below the first strict mode frame.
  // The stack trace API should not expose receivers and function
  // objects on frames deeper than the top-most one with a strict mode
  // function.
  bool IsStrictFrame(Handle<JSFunction> function) {
    if (!encountered_strict_function_) {
      encountered_strict_function_ =
          is_strict(function->shared().language_mode());
    }
    return encountered_strict_function_;
  }

  // Determines whether the given stack frame should be displayed in a stack
  // trace.
  bool IsVisibleInStackTrace(Handle<JSFunction> function) {
    return ShouldIncludeFrame(function) && IsNotHidden(function) &&
           IsInSameSecurityContext(function);
  }

  // This mechanism excludes a number of uninteresting frames from the stack
  // trace. This can be the first frame (which will be a builtin-exit frame
  // for the error constructor builtin) or every frame until encountering a
  // user-specified function.
  bool ShouldIncludeFrame(Handle<JSFunction> function) {
    switch (mode_) {
      case SKIP_NONE:
        return true;
      case SKIP_FIRST:
        if (!skip_next_frame_) return true;
        skip_next_frame_ = false;
        return false;
      case SKIP_UNTIL_SEEN:
        if (skip_next_frame_ && (*function == *caller_)) {
          skip_next_frame_ = false;
          return false;
        }
        return !skip_next_frame_;
    }
    UNREACHABLE();
  }

  bool IsNotHidden(Handle<JSFunction> function) {
    // Functions defined not in user scripts are not visible unless directly
    // exposed, in which case the native flag is set.
    // The --builtins-in-stack-traces command line flag allows including
    // internal call sites in the stack trace for debugging purposes.
    if (!FLAG_builtins_in_stack_traces &&
        !function->shared().IsUserJavaScript()) {
      return function->shared().native() ||
             function->shared().IsApiFunction();
    }
    return true;
  }

  bool IsInSameSecurityContext(Handle<JSFunction> function) {
    if (!check_security_context_) return true;
    return isolate_->context().HasSameSecurityTokenAs(function->context());
  }

  // TODO(jgruber): Fix all cases in which frames give us a hole value
  // (e.g. the receiver in RegExp constructor frames).
  Handle<Object> TheHoleToUndefined(Isolate* isolate, Handle<Object> in) {
    return (in->IsTheHole(isolate))
               ? Handle<Object>::cast(isolate->factory()->undefined_value())
               : in;
  }

  Isolate* isolate_;
  const FrameSkipMode mode_;
  int limit_;
  const Handle<Object> caller_;
  bool skip_next_frame_ = true;
  bool encountered_strict_function_ = false;
  const bool check_security_context_;

  Handle<FrameArray> elements_;
};
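// For illustration: SKIP_UNTIL_SEEN is what backs the optional second
// argument of Error.captureStackTrace(). In a hypothetical snippet such as
//
//   function helper(err) { Error.captureStackTrace(err, helper); }
//
// every frame up to and including {helper} is dropped from the collected
// trace, so callers can hide their own plumbing.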
bool GetStackTraceLimit(Isolate* isolate, int* result) {
  Handle<JSObject> error = isolate->error_function();

  Handle<String> key = isolate->factory()->stackTraceLimit_string();
  Handle<Object> stack_trace_limit = JSReceiver::GetDataProperty(error, key);
  if (!stack_trace_limit->IsNumber()) return false;

  // Ensure that limit is not negative.
  *result = Max(FastD2IChecked(stack_trace_limit->Number()), 0);

  if (*result != FLAG_stack_trace_limit) {
    isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit);
  }

  return true;
}

bool NoExtension(const v8::FunctionCallbackInfo<v8::Value>&) { return false; }

bool IsBuiltinFunction(Isolate* isolate, HeapObject object,
                       Builtins::Name builtin_index) {
  if (!object.IsJSFunction()) return false;
  JSFunction const function = JSFunction::cast(object);
  return function.code() == isolate->builtins()->builtin(builtin_index);
}
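// A sketch of the chain the walker below follows (illustrative JS, not from
// this file): given
//
//   async function outer() { await inner(); }
//   async function inner() { await Promise.resolve(); }
//
// the builder starts from the promise that {inner} is awaiting and hops from
// each suspended generator to the promise of its awaiting caller, appending
// one async frame per hop.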
void CaptureAsyncStackTrace(Isolate* isolate, Handle<JSPromise> promise,
                            FrameArrayBuilder* builder) {
  while (!builder->full()) {
    // Check that the {promise} is not settled.
    if (promise->status() != Promise::kPending) return;

    // Check that we have exactly one PromiseReaction on the {promise}.
    if (!promise->reactions().IsPromiseReaction()) return;
    Handle<PromiseReaction> reaction(
        PromiseReaction::cast(promise->reactions()), isolate);
    if (!reaction->next().IsSmi()) return;

    // Check if the {reaction} has one of the known async function or
    // async generator continuations as its fulfill handler.
    if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncFunctionAwaitResolveClosure) ||
        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncGeneratorAwaitResolveClosure) ||
        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncGeneratorYieldResolveClosure)) {
      // Now peek into the handlers' AwaitContext to get to
      // the JSGeneratorObject for the async function.
      Handle<Context> context(
          JSFunction::cast(reaction->fulfill_handler()).context(), isolate);
      Handle<JSGeneratorObject> generator_object(
          JSGeneratorObject::cast(context->extension()), isolate);
      CHECK(generator_object->is_suspended());

      // Append async frame corresponding to the {generator_object}.
      builder->AppendAsyncFrame(generator_object);

      // Try to continue from here.
      if (generator_object->IsJSAsyncFunctionObject()) {
        Handle<JSAsyncFunctionObject> async_function_object =
            Handle<JSAsyncFunctionObject>::cast(generator_object);
        promise = handle(async_function_object->promise(), isolate);
      } else {
        Handle<JSAsyncGeneratorObject> async_generator_object =
            Handle<JSAsyncGeneratorObject>::cast(generator_object);
        if (async_generator_object->queue().IsUndefined(isolate)) return;
        Handle<AsyncGeneratorRequest> async_generator_request(
            AsyncGeneratorRequest::cast(async_generator_object->queue()),
            isolate);
        promise = handle(JSPromise::cast(async_generator_request->promise()),
                         isolate);
      }
    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                                 Builtins::kPromiseAllResolveElementClosure)) {
      Handle<JSFunction> function(
          JSFunction::cast(reaction->fulfill_handler()), isolate);
      Handle<Context> context(function->context(), isolate);

      // We store the offset of the promise into the {function}'s
      // hash field for promise resolve element callbacks.
      int const offset =
          Smi::ToInt(Smi::cast(function->GetIdentityHash())) - 1;
      builder->AppendPromiseAllFrame(context, offset);

      // Now peek into the Promise.all() resolve element context to
      // find the promise capability that's being resolved when all
      // the concurrent promises resolve.
      int const index =
          PromiseBuiltins::kPromiseAllResolveElementCapabilitySlot;
      Handle<PromiseCapability> capability(
          PromiseCapability::cast(context->get(index)), isolate);
      if (!capability->promise().IsJSPromise()) return;
      promise = handle(JSPromise::cast(capability->promise()), isolate);
    } else if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                                 Builtins::kPromiseCapabilityDefaultResolve)) {
      Handle<JSFunction> function(
          JSFunction::cast(reaction->fulfill_handler()), isolate);
      Handle<Context> context(function->context(), isolate);
      promise =
          handle(JSPromise::cast(context->get(PromiseBuiltins::kPromiseSlot)),
                 isolate);
    } else {
      // We have some generic promise chain here, so try to
      // continue with the chained promise on the reaction
      // (only works for native promise chains).
      Handle<HeapObject> promise_or_capability(
          reaction->promise_or_capability(), isolate);
      if (promise_or_capability->IsJSPromise()) {
        promise = Handle<JSPromise>::cast(promise_or_capability);
      } else if (promise_or_capability->IsPromiseCapability()) {
        Handle<PromiseCapability> capability =
            Handle<PromiseCapability>::cast(promise_or_capability);
        if (!capability->promise().IsJSPromise()) return;
        promise = handle(JSPromise::cast(capability->promise()), isolate);
      } else {
        // Otherwise the {promise_or_capability} must be undefined here.
        CHECK(promise_or_capability->IsUndefined(isolate));
        return;
      }
    }
  }
}

namespace {

struct CaptureStackTraceOptions {
  int limit;
  // 'filter_mode' and 'skip_mode' are somewhat orthogonal. 'filter_mode'
  // specifies whether to capture all frames, or just frames in the same
  // security context. While 'skip_mode' allows skipping the first frame.
  FrameSkipMode skip_mode;
  FrameArrayBuilder::FrameFilterMode filter_mode;

  bool capture_builtin_exit_frames;
  bool capture_only_frames_subject_to_debugging;
  bool async_stack_trace;

  bool enable_frame_caching;
};
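// For orientation, the combination used for Error.stack-style traces (see
// Isolate::CaptureSimpleStackTrace below) looks roughly like this:
//
//   CaptureStackTraceOptions options;
//   options.limit = limit;  // derived from Error.stackTraceLimit
//   options.skip_mode = mode;
//   options.capture_builtin_exit_frames = true;
//   options.async_stack_trace = FLAG_async_stack_traces;
//   options.filter_mode = FrameArrayBuilder::CURRENT_SECURITY_CONTEXT;
//   options.capture_only_frames_subject_to_debugging = false;
//   options.enable_frame_caching = false;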
Handle<FixedArray> CaptureStackTrace(Isolate* isolate, Handle<Object> caller,
                                     CaptureStackTraceOptions options) {
  DisallowJavascriptExecution no_js(isolate);

  wasm::WasmCodeRefScope code_ref_scope;
  FrameArrayBuilder builder(isolate, options.skip_mode, options.limit, caller,
                            options.filter_mode);

  // Build the regular stack trace, and remember the last relevant
  // frame ID and inlined index (for the async stack trace handling
  // below, which starts from this last frame).
  for (StackFrameIterator it(isolate); !it.done() && !builder.full();
       it.Advance()) {
    StackFrame* const frame = it.frame();
    switch (frame->type()) {
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION:
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BUILTIN:
      case StackFrame::WASM_COMPILED:
      case StackFrame::WASM_INTERPRETER_ENTRY: {
        // A standard frame may include many summarized frames (due to
        // inlining).
        std::vector<FrameSummary> frames;
        StandardFrame::cast(frame)->Summarize(&frames);
        for (size_t i = frames.size(); i-- != 0 && !builder.full();) {
          auto& summary = frames[i];
          if (options.capture_only_frames_subject_to_debugging &&
              !summary.is_subject_to_debugging()) {
            continue;
          }

          if (summary.IsJavaScript()) {
            //=========================================================
            // Handle a JavaScript frame.
            //=========================================================
            auto const& java_script = summary.AsJavaScript();
            builder.AppendJavaScriptFrame(java_script);
          } else if (summary.IsWasmCompiled()) {
            //=========================================================
            // Handle a WASM compiled frame.
            //=========================================================
            auto const& wasm_compiled = summary.AsWasmCompiled();
            builder.AppendWasmCompiledFrame(wasm_compiled);
          } else if (summary.IsWasmInterpreted()) {
            //=========================================================
            // Handle a WASM interpreted frame.
            //=========================================================
            auto const& wasm_interpreted = summary.AsWasmInterpreted();
            builder.AppendWasmInterpretedFrame(wasm_interpreted);
          }
        }
        break;
      }

      case StackFrame::BUILTIN_EXIT:
        if (!options.capture_builtin_exit_frames) continue;

        // BuiltinExitFrames are not standard frames, so they do not have
        // Summarize(). However, they may have one JS frame worth showing.
        builder.AppendBuiltinExitFrame(BuiltinExitFrame::cast(frame));
        break;

      default:
        break;
    }
  }

  // If --async-stack-traces are enabled and the "current microtask" is a
  // PromiseReactionJobTask, we try to enrich the stack trace with async
  // frames.
  if (options.async_stack_trace) {
    Handle<Object> current_microtask =
        isolate->factory()->current_microtask();
    if (current_microtask->IsPromiseReactionJobTask()) {
      Handle<PromiseReactionJobTask> promise_reaction_job_task =
          Handle<PromiseReactionJobTask>::cast(current_microtask);
      // Check if the {reaction} has one of the known async function or
      // async generator continuations as its fulfill handler.
      if (IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
                            Builtins::kAsyncFunctionAwaitResolveClosure) ||
          IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
                            Builtins::kAsyncGeneratorAwaitResolveClosure) ||
          IsBuiltinFunction(isolate, promise_reaction_job_task->handler(),
                            Builtins::kAsyncGeneratorYieldResolveClosure)) {
        // Now peek into the handlers' AwaitContext to get to
        // the JSGeneratorObject for the async function.
        Handle<Context> context(
            JSFunction::cast(promise_reaction_job_task->handler()).context(),
            isolate);
        Handle<JSGeneratorObject> generator_object(
            JSGeneratorObject::cast(context->extension()), isolate);
        if (generator_object->is_executing()) {
          if (generator_object->IsJSAsyncFunctionObject()) {
            Handle<JSAsyncFunctionObject> async_function_object =
                Handle<JSAsyncFunctionObject>::cast(generator_object);
            Handle<JSPromise> promise(async_function_object->promise(),
                                      isolate);
            CaptureAsyncStackTrace(isolate, promise, &builder);
          } else {
            Handle<JSAsyncGeneratorObject> async_generator_object =
                Handle<JSAsyncGeneratorObject>::cast(generator_object);
            Handle<AsyncGeneratorRequest> async_generator_request(
                AsyncGeneratorRequest::cast(async_generator_object->queue()),
                isolate);
            Handle<JSPromise> promise(
                JSPromise::cast(async_generator_request->promise()), isolate);
            CaptureAsyncStackTrace(isolate, promise, &builder);
          }
        }
      } else {
        // The {promise_reaction_job_task} doesn't belong to an await (or
        // yield inside an async generator), but we might still be able to
        // find an async frame if we follow along the chain of promises on
        // the {promise_reaction_job_task}.
        Handle<HeapObject> promise_or_capability(
            promise_reaction_job_task->promise_or_capability(), isolate);
        if (promise_or_capability->IsJSPromise()) {
          Handle<JSPromise> promise =
              Handle<JSPromise>::cast(promise_or_capability);
          CaptureAsyncStackTrace(isolate, promise, &builder);
        }
      }
    }
  }

  // TODO(yangguo): Queue this structured stack trace for preprocessing on GC.
  return builder.GetElementsAsStackTraceFrameArray(
      options.enable_frame_caching);
}

}  // namespace

Handle<Object> Isolate::CaptureSimpleStackTrace(
    Handle<JSReceiver> error_object, FrameSkipMode mode,
    Handle<Object> caller) {
  int limit;
  if (!GetStackTraceLimit(this, &limit)) return factory()->undefined_value();

  CaptureStackTraceOptions options;
  options.limit = limit;
  options.skip_mode = mode;
  options.capture_builtin_exit_frames = true;
  options.async_stack_trace = FLAG_async_stack_traces;
  options.filter_mode = FrameArrayBuilder::CURRENT_SECURITY_CONTEXT;
  options.capture_only_frames_subject_to_debugging = false;
  options.enable_frame_caching = false;

  return CaptureStackTrace(this, caller, options);
}

MaybeHandle<JSReceiver> Isolate::CaptureAndSetDetailedStackTrace(
    Handle<JSReceiver> error_object) {
  if (capture_stack_trace_for_uncaught_exceptions_) {
    // Capture stack trace for a detailed exception message.
    Handle<Name> key = factory()->detailed_stack_trace_symbol();
    Handle<FixedArray> stack_trace = CaptureCurrentStackTrace(
        stack_trace_for_uncaught_exceptions_frame_limit_,
        stack_trace_for_uncaught_exceptions_options_);
    RETURN_ON_EXCEPTION(
        this,
        Object::SetProperty(this, error_object, key, stack_trace,
                            StoreOrigin::kMaybeKeyed,
                            Just(ShouldThrow::kThrowOnError)),
        JSReceiver);
  }
  return error_object;
}

MaybeHandle<JSReceiver> Isolate::CaptureAndSetSimpleStackTrace(
    Handle<JSReceiver> error_object, FrameSkipMode mode,
    Handle<Object> caller) {
  // Capture stack trace for simple stack trace string formatting.
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      CaptureSimpleStackTrace(error_object, mode, caller);
  RETURN_ON_EXCEPTION(
      this,
      Object::SetProperty(this, error_object, key, stack_trace,
                          StoreOrigin::kMaybeKeyed,
                          Just(ShouldThrow::kThrowOnError)),
      JSReceiver);
  return error_object;
}

Handle<FixedArray> Isolate::GetDetailedStackTrace(
    Handle<JSObject> error_object) {
  Handle<Name> key_detailed = factory()->detailed_stack_trace_symbol();
  Handle<Object> stack_trace =
      JSReceiver::GetDataProperty(error_object, key_detailed);
  if (stack_trace->IsFixedArray())
    return Handle<FixedArray>::cast(stack_trace);
  return Handle<FixedArray>();
}

Address Isolate::GetAbstractPC(int* line, int* column) {
  JavaScriptFrameIterator it(this);

  if (it.done()) {
    *line = -1;
    *column = -1;
    return kNullAddress;
  }
  JavaScriptFrame* frame = it.frame();
  DCHECK(!frame->is_builtin());

  Handle<SharedFunctionInfo> shared =
      handle(frame->function().shared(), this);
  SharedFunctionInfo::EnsureSourcePositionsAvailable(this, shared);
  int position = frame->position();

  Object maybe_script = frame->function().shared().script();
  if (maybe_script.IsScript()) {
    Handle