// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include "src/isolate.h"

#include <stdlib.h>

#include <atomic>
#include <fstream>  // NOLINT(readability/streams)
#include <memory>

#include "src/api-inl.h"
#include "src/assembler-inl.h"
#include "src/ast/ast-value-factory.h"
#include "src/ast/context-slot-cache.h"
#include "src/ast/scopes.h"
#include "src/base/adapters.h"
#include "src/base/hashmap.h"
#include "src/base/platform/platform.h"
#include "src/base/sys-info.h"
#include "src/base/utils/random-number-generator.h"
#include "src/bootstrapper.h"
#include "src/builtins/constants-table-builder.h"
#include "src/cancelable-task.h"
#include "src/code-stubs.h"
#include "src/compilation-cache.h"
#include "src/compilation-statistics.h"
#include "src/compiler-dispatcher/compiler-dispatcher.h"
#include "src/compiler-dispatcher/optimizing-compile-dispatcher.h"
#include "src/debug/debug-frames.h"
#include "src/debug/debug.h"
#include "src/deoptimizer.h"
#include "src/elements.h"
#include "src/frames-inl.h"
#include "src/ic/stub-cache.h"
#include "src/instruction-stream.h"
#include "src/interpreter/interpreter.h"
#include "src/isolate-inl.h"
#include "src/libsampler/sampler.h"
#include "src/log.h"
#include "src/messages.h"
#include "src/objects/frame-array-inl.h"
#include "src/objects/hash-table-inl.h"
#include "src/objects/js-array-inl.h"
#include "src/objects/js-generator-inl.h"
#include "src/objects/module-inl.h"
#include "src/objects/promise-inl.h"
#include "src/objects/stack-frame-info-inl.h"
#include "src/profiler/tracing-cpu-profiler.h"
#include "src/prototype.h"
#include "src/regexp/regexp-stack.h"
#include "src/runtime-profiler.h"
#include "src/setup-isolate.h"
#include "src/simulator.h"
#include "src/snapshot/startup-deserializer.h"
#include "src/tracing/tracing-category-observer.h"
#include "src/trap-handler/trap-handler.h"
#include "src/unicode-cache.h"
#include "src/v8.h"
#include "src/version.h"
#include "src/visitors.h"
#include "src/vm-state-inl.h"
#include "src/wasm/wasm-code-manager.h"
#include "src/wasm/wasm-engine.h"
#include "src/wasm/wasm-objects.h"
#include "src/zone/accounting-allocator.h"

#ifdef V8_INTL_SUPPORT
#include "unicode/regex.h"
#endif  // V8_INTL_SUPPORT

namespace v8 {
namespace internal {

#ifdef DEBUG
#define TRACE_ISOLATE(tag)                                                  \
  do {                                                                      \
    if (FLAG_trace_isolates) {                                              \
      PrintF("Isolate %p (id %d)" #tag "\n", reinterpret_cast<void*>(this), \
             id());                                                         \
    }                                                                       \
  } while (false)
#else
#define TRACE_ISOLATE(tag)
#endif

base::Atomic32 ThreadId::highest_thread_id_ = 0;

extern const uint8_t* DefaultEmbeddedBlob();
extern uint32_t DefaultEmbeddedBlobSize();

#ifdef V8_MULTI_SNAPSHOTS
extern const uint8_t* TrustedEmbeddedBlob();
extern uint32_t TrustedEmbeddedBlobSize();
#endif

namespace {
// These variables provide access to the current embedded blob without
// requiring an isolate instance. This is needed e.g. by
// Code::InstructionStart, which may not have access to an isolate but still
// needs to access the embedded blob. The variables are initialized by each
// isolate in Init(). Writes and reads are relaxed since we can guarantee
// that the current thread has initialized these variables before accessing
// them. Different threads may race, but this is fine since they all attempt
// to set the same values of the blob pointer and size.
std::atomic<const uint8_t*> current_embedded_blob_(nullptr);
std::atomic<uint32_t> current_embedded_blob_size_(0);
}  // namespace
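// Publishes {blob} both on this isolate and in the process-wide atomics
// above, so that code without an isolate pointer (e.g. Code::InstructionStart)
// can still locate the embedded blob.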
void Isolate::SetEmbeddedBlob(const uint8_t* blob, uint32_t blob_size) {
  embedded_blob_ = blob;
  embedded_blob_size_ = blob_size;
  current_embedded_blob_.store(blob, std::memory_order_relaxed);
  current_embedded_blob_size_.store(blob_size, std::memory_order_relaxed);

#ifdef DEBUG
  if (blob != nullptr) {
    // Verify that the contents of the embedded blob are unchanged from
    // serialization-time, just to ensure the compiler isn't messing with us.
    EmbeddedData d = EmbeddedData::FromBlob();
    CHECK_EQ(d.Hash(), d.CreateHash());
  }
#endif  // DEBUG
}

const uint8_t* Isolate::embedded_blob() const { return embedded_blob_; }
uint32_t Isolate::embedded_blob_size() const { return embedded_blob_size_; }

// static
const uint8_t* Isolate::CurrentEmbeddedBlob() {
  return current_embedded_blob_.load(std::memory_order::memory_order_relaxed);
}

// static
uint32_t Isolate::CurrentEmbeddedBlobSize() {
  return current_embedded_blob_size_.load(
      std::memory_order::memory_order_relaxed);
}

int ThreadId::AllocateThreadId() {
  int new_id = base::Relaxed_AtomicIncrement(&highest_thread_id_, 1);
  return new_id;
}

int ThreadId::GetCurrentThreadId() {
  int thread_id = base::Thread::GetThreadLocalInt(Isolate::thread_id_key_);
  if (thread_id == 0) {
    thread_id = AllocateThreadId();
    base::Thread::SetThreadLocalInt(Isolate::thread_id_key_, thread_id);
  }
  return thread_id;
}

void ThreadLocalTop::Initialize(Isolate* isolate) {
  *this = ThreadLocalTop();
  isolate_ = isolate;
#ifdef USE_SIMULATOR
  simulator_ = Simulator::current(isolate);
#endif
  thread_id_ = ThreadId::Current();
  thread_in_wasm_flag_address_ = reinterpret_cast<Address>(
      trap_handler::GetThreadInWasmThreadLocalAddress());
}
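// Called when a thread detaches from the isolate; drains any promise-stack
// entries left behind by PushPromise calls without a matching PopPromise.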
void ThreadLocalTop::Free() {
  // Match unmatched PopPromise calls.
  while (promise_on_stack_) isolate_->PopPromise();
}

base::Thread::LocalStorageKey Isolate::isolate_key_;
base::Thread::LocalStorageKey Isolate::thread_id_key_;
base::Thread::LocalStorageKey Isolate::per_isolate_thread_data_key_;
base::Atomic32 Isolate::isolate_counter_ = 0;
#if DEBUG
base::Atomic32 Isolate::isolate_key_created_ = 0;
#endif

Isolate::PerIsolateThreadData*
Isolate::FindOrAllocatePerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  PerIsolateThreadData* per_thread = nullptr;
  {
    base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
    per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread == nullptr) {
      per_thread = new PerIsolateThreadData(this, thread_id);
      thread_data_table_.Insert(per_thread);
    }
    DCHECK(thread_data_table_.Lookup(thread_id) == per_thread);
  }
  return per_thread;
}

void Isolate::DiscardPerThreadDataForThisThread() {
  int thread_id_int = base::Thread::GetThreadLocalInt(Isolate::thread_id_key_);
  if (thread_id_int) {
    ThreadId thread_id = ThreadId(thread_id_int);
    DCHECK(!thread_manager_->mutex_owner_.Equals(thread_id));
    base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
    PerIsolateThreadData* per_thread = thread_data_table_.Lookup(thread_id);
    if (per_thread) {
      DCHECK(!per_thread->thread_state_);
      thread_data_table_.Remove(per_thread);
    }
  }
}

Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThisThread() {
  ThreadId thread_id = ThreadId::Current();
  return FindPerThreadDataForThread(thread_id);
}

Isolate::PerIsolateThreadData* Isolate::FindPerThreadDataForThread(
    ThreadId thread_id) {
  PerIsolateThreadData* per_thread = nullptr;
  {
    base::LockGuard<base::Mutex> lock_guard(&thread_data_table_mutex_);
    per_thread = thread_data_table_.Lookup(thread_id);
  }
  return per_thread;
}

void Isolate::InitializeOncePerProcess() {
  isolate_key_ = base::Thread::CreateThreadLocalKey();
#if DEBUG
  base::Relaxed_Store(&isolate_key_created_, 1);
#endif
  thread_id_key_ = base::Thread::CreateThreadLocalKey();
  per_isolate_thread_data_key_ = base::Thread::CreateThreadLocalKey();
}

Address Isolate::get_address_from_id(IsolateAddressId id) {
  return isolate_addresses_[id];
}

char* Isolate::Iterate(RootVisitor* v, char* thread_storage) {
  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(thread_storage);
  Iterate(v, thread);
  return thread_storage + sizeof(ThreadLocalTop);
}

void Isolate::IterateThread(ThreadVisitor* v, char* t) {
  ThreadLocalTop* thread = reinterpret_cast<ThreadLocalTop*>(t);
  v->VisitThread(this, thread);
}

void Isolate::Iterate(RootVisitor* v, ThreadLocalTop* thread) {
  // Visit the roots from the top for a given thread.
  v->VisitRootPointer(Root::kTop, nullptr, &thread->pending_exception_);
  v->VisitRootPointer(Root::kTop, nullptr, &thread->pending_message_obj_);
  v->VisitRootPointer(Root::kTop, nullptr,
                      bit_cast<Object**>(&(thread->context_)));
  v->VisitRootPointer(Root::kTop, nullptr, &thread->scheduled_exception_);

  for (v8::TryCatch* block = thread->try_catch_handler(); block != nullptr;
       block = block->next_) {
    v->VisitRootPointer(Root::kTop, nullptr,
                        bit_cast<Object**>(&(block->exception_)));
    v->VisitRootPointer(Root::kTop, nullptr,
                        bit_cast<Object**>(&(block->message_obj_)));
  }

  // Iterate over pointers on native execution stack.
  for (StackFrameIterator it(this, thread); !it.done(); it.Advance()) {
    it.frame()->Iterate(v);
  }
}
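// A minimal sketch of a custom RootVisitor, assuming the VisitRootPointers
// interface declared in src/visitors.h; a hypothetical helper for
// illustration only, not part of this file:
//
//   class CountingRootVisitor final : public RootVisitor {
//    public:
//     void VisitRootPointers(Root root, const char* description,
//                            Object** start, Object** end) override {
//       count_ += static_cast<int>(end - start);  // count visited root slots
//     }
//     int count_ = 0;
//   };
//
// Passing such a visitor to Isolate::Iterate(RootVisitor*) below walks only
// the roots of the current thread.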
void Isolate::Iterate(RootVisitor* v) {
  ThreadLocalTop* current_t = thread_local_top();
  Iterate(v, current_t);
}

void Isolate::IterateDeferredHandles(RootVisitor* visitor) {
  for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
       deferred = deferred->next_) {
    deferred->Iterate(visitor);
  }
}

#ifdef DEBUG
bool Isolate::IsDeferredHandle(Object** handle) {
  // Each DeferredHandles instance keeps the handles to one job in the
  // concurrent recompilation queue, containing a list of blocks. Each block
  // contains kHandleBlockSize handles except for the first block, which may
  // not be fully filled.
  // We iterate through all the blocks to see whether the argument handle
  // belongs to one of the blocks. If so, it is deferred.
  for (DeferredHandles* deferred = deferred_handles_head_; deferred != nullptr;
       deferred = deferred->next_) {
    std::vector<Object**>* blocks = &deferred->blocks_;
    for (size_t i = 0; i < blocks->size(); i++) {
      Object** block_limit = (i == 0) ? deferred->first_block_limit_
                                      : blocks->at(i) + kHandleBlockSize;
      if (blocks->at(i) <= handle && handle < block_limit) return true;
    }
  }
  return false;
}
#endif  // DEBUG

void Isolate::RegisterTryCatchHandler(v8::TryCatch* that) {
  thread_local_top()->set_try_catch_handler(that);
}

void Isolate::UnregisterTryCatchHandler(v8::TryCatch* that) {
  DCHECK(thread_local_top()->try_catch_handler() == that);
  thread_local_top()->set_try_catch_handler(that->next_);
}

Handle<String> Isolate::StackTraceString() {
  if (stack_trace_nesting_level_ == 0) {
    stack_trace_nesting_level_++;
    HeapStringAllocator allocator;
    StringStream::ClearMentionedObjectCache(this);
    StringStream accumulator(&allocator);
    incomplete_message_ = &accumulator;
    PrintStack(&accumulator);
    Handle<String> stack_trace = accumulator.ToString(this);
    incomplete_message_ = nullptr;
    stack_trace_nesting_level_ = 0;
    return stack_trace;
  } else if (stack_trace_nesting_level_ == 1) {
    stack_trace_nesting_level_++;
    base::OS::PrintError(
        "\n\nAttempt to print stack while printing stack (double fault)\n");
    base::OS::PrintError(
        "If you are lucky you may find a partial stack dump on stdout.\n\n");
    incomplete_message_->OutputToStdOut();
    return factory()->empty_string();
  } else {
    base::OS::Abort();
    // Unreachable
    return factory()->empty_string();
  }
}

void Isolate::PushStackTraceAndDie(void* ptr1, void* ptr2, void* ptr3,
                                   void* ptr4) {
  StackTraceFailureMessage message(this, ptr1, ptr2, ptr3, ptr4);
  message.Print();
  base::OS::Abort();
}

void StackTraceFailureMessage::Print() volatile {
  // Print the details of this failure message object, including its own
  // address, to force stack allocation.
  base::OS::PrintError(
      "Stacktrace:\n ptr1=%p\n ptr2=%p\n ptr3=%p\n ptr4=%p\n "
      "failure_message_object=%p\n%s",
      ptr1_, ptr2_, ptr3_, ptr4_, this, &js_stack_trace_[0]);
}

StackTraceFailureMessage::StackTraceFailureMessage(Isolate* isolate, void* ptr1,
                                                   void* ptr2, void* ptr3,
                                                   void* ptr4) {
  isolate_ = isolate;
  ptr1_ = ptr1;
  ptr2_ = ptr2;
  ptr3_ = ptr3;
  ptr4_ = ptr4;
  // Write a stacktrace into the {js_stack_trace_} buffer.
  const size_t buffer_length = arraysize(js_stack_trace_);
  memset(&js_stack_trace_, 0, buffer_length);
  FixedStringAllocator fixed(&js_stack_trace_[0], buffer_length - 1);
  StringStream accumulator(&fixed, StringStream::kPrintObjectConcise);
  isolate->PrintStack(&accumulator, Isolate::kPrintStackVerbose);
  // Keeping a reference to the last code objects to increase likelihood that
  // they get included in the minidump.
  const size_t code_objects_length = arraysize(code_objects_);
  size_t i = 0;
  StackFrameIterator it(isolate);
  for (; !it.done() && i < code_objects_length; it.Advance()) {
    if (it.frame()->type() == StackFrame::INTERNAL) continue;
    code_objects_[i++] = it.frame()->unchecked_code();
  }
}

namespace {
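// Collects the stack frames of an Error.captureStackTrace-style trace into a
// FrameArray, honoring the requested skip mode and frame limit.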
class FrameArrayBuilder {
 public:
  FrameArrayBuilder(Isolate* isolate, FrameSkipMode mode, int limit,
                    Handle<Object> caller)
      : isolate_(isolate), mode_(mode), limit_(limit), caller_(caller) {
    switch (mode_) {
      case SKIP_FIRST:
        skip_next_frame_ = true;
        break;
      case SKIP_UNTIL_SEEN:
        DCHECK(caller_->IsJSFunction());
        skip_next_frame_ = true;
        break;
      case SKIP_NONE:
        skip_next_frame_ = false;
        break;
    }

    elements_ = isolate->factory()->NewFrameArray(Min(limit, 10));
  }

  void AppendAsyncFrame(Handle<JSGeneratorObject> generator_object) {
    if (full()) return;
    Handle<JSFunction> function(generator_object->function(), isolate_);
    if (!IsVisibleInStackTrace(function)) return;
    int flags = FrameArray::kIsAsync;
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;

    Handle<Object> receiver(generator_object->receiver(), isolate_);
    Handle<AbstractCode> code(
        AbstractCode::cast(function->shared()->GetBytecodeArray()), isolate_);
    int offset = Smi::ToInt(generator_object->input_or_debug_pos());
    // The stored bytecode offset is relative to a different base than what
    // is used in the source position table, hence the subtraction.
    offset -= BytecodeArray::kHeaderSize - kHeapObjectTag;
    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function, code,
                                          offset, flags);
  }

  bool AppendJavaScriptFrame(
      FrameSummary::JavaScriptFrameSummary const& summary) {
    // Filter out internal frames that we do not want to show.
    if (!IsVisibleInStackTrace(summary.function())) return false;

    Handle<AbstractCode> abstract_code = summary.abstract_code();
    const int offset = summary.code_offset();

    bool is_constructor = summary.is_constructor();
    // Help CallSite::IsConstructor correctly detect hand-written
    // construct stubs.
    if (abstract_code->IsCode() &&
        Code::cast(*abstract_code)->is_construct_stub()) {
      is_constructor = true;
    }

    int flags = 0;
    Handle<JSFunction> function = summary.function();
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
    if (is_constructor) flags |= FrameArray::kIsConstructor;

    elements_ = FrameArray::AppendJSFrame(
        elements_, TheHoleToUndefined(isolate_, summary.receiver()), function,
        abstract_code, offset, flags);
    return true;
  }

  bool AppendWasmCompiledFrame(
      FrameSummary::WasmCompiledFrameSummary const& summary) {
    if (summary.code()->kind() != wasm::WasmCode::kFunction) return false;
    Handle<WasmInstanceObject> instance = summary.wasm_instance();
    int flags = 0;
    if (instance->module_object()->is_asm_js()) {
      flags |= FrameArray::kIsAsmJsWasmFrame;
      if (summary.at_to_number_conversion()) {
        flags |= FrameArray::kAsmJsAtNumberConversion;
      }
    } else {
      flags |= FrameArray::kIsWasmFrame;
    }

    elements_ = FrameArray::AppendWasmFrame(
        elements_, instance, summary.function_index(), summary.code(),
        summary.code_offset(), flags);
    return true;
  }

  bool AppendWasmInterpretedFrame(
      FrameSummary::WasmInterpretedFrameSummary const& summary) {
    Handle<WasmInstanceObject> instance = summary.wasm_instance();
    int flags = FrameArray::kIsWasmInterpretedFrame;
    DCHECK(!instance->module_object()->is_asm_js());
    elements_ = FrameArray::AppendWasmFrame(elements_, instance,
                                            summary.function_index(), {},
                                            summary.byte_offset(), flags);
    return true;
  }
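  // Builtin exit frames (calls from JS into C++ builtins) are not standard
  // frames, but they still correspond to a single JS function that may be
  // worth showing; append it if it passes the visibility filters.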
  bool AppendBuiltinExitFrame(BuiltinExitFrame* exit_frame) {
    Handle<JSFunction> function = handle(exit_frame->function(), isolate_);

    // Filter out internal frames that we do not want to show.
    if (!IsVisibleInStackTrace(function)) return false;

    Handle<Object> receiver(exit_frame->receiver(), isolate_);
    Handle<Code> code(exit_frame->LookupCode(), isolate_);
    const int offset =
        static_cast<int>(exit_frame->pc() - code->InstructionStart());

    int flags = 0;
    if (IsStrictFrame(function)) flags |= FrameArray::kIsStrict;
    if (exit_frame->IsConstructor()) flags |= FrameArray::kIsConstructor;

    elements_ = FrameArray::AppendJSFrame(elements_, receiver, function,
                                          Handle<AbstractCode>::cast(code),
                                          offset, flags);
    return true;
  }

  bool full() { return elements_->FrameCount() >= limit_; }

  Handle<FrameArray> GetElements() {
    elements_->ShrinkToFit(isolate_);
    return elements_;
  }

 private:
  // Poison stack frames below the first strict mode frame.
  // The stack trace API should not expose receivers and function
  // objects on frames deeper than the top-most one with a strict mode
  // function.
  bool IsStrictFrame(Handle<JSFunction> function) {
    if (!encountered_strict_function_) {
      encountered_strict_function_ =
          is_strict(function->shared()->language_mode());
    }
    return encountered_strict_function_;
  }

  // Determines whether the given stack frame should be displayed in a stack
  // trace.
  bool IsVisibleInStackTrace(Handle<JSFunction> function) {
    return ShouldIncludeFrame(function) && IsNotHidden(function) &&
           IsInSameSecurityContext(function);
  }

  // This mechanism excludes a number of uninteresting frames from the stack
  // trace. This can be the first frame (which will be a builtin-exit frame
  // for the error constructor builtin) or every frame until encountering a
  // user-specified function.
  bool ShouldIncludeFrame(Handle<JSFunction> function) {
    switch (mode_) {
      case SKIP_NONE:
        return true;
      case SKIP_FIRST:
        if (!skip_next_frame_) return true;
        skip_next_frame_ = false;
        return false;
      case SKIP_UNTIL_SEEN:
        if (skip_next_frame_ && (*function == *caller_)) {
          skip_next_frame_ = false;
          return false;
        }
        return !skip_next_frame_;
    }
    UNREACHABLE();
  }

  bool IsNotHidden(Handle<JSFunction> function) {
    // Functions defined not in user scripts are not visible unless directly
    // exposed, in which case the native flag is set.
    // The --builtins-in-stack-traces command line flag allows including
    // internal call sites in the stack trace for debugging purposes.
    if (!FLAG_builtins_in_stack_traces &&
        !function->shared()->IsUserJavaScript()) {
      return function->shared()->native();
    }
    return true;
  }

  bool IsInSameSecurityContext(Handle<JSFunction> function) {
    return isolate_->context()->HasSameSecurityTokenAs(function->context());
  }

  // TODO(jgruber): Fix all cases in which frames give us a hole value (e.g.
  // the receiver in RegExp constructor frames).
  Handle<Object> TheHoleToUndefined(Isolate* isolate, Handle<Object> in) {
    return (in->IsTheHole(isolate))
               ? Handle<Object>::cast(isolate->factory()->undefined_value())
               : in;
  }

  Isolate* isolate_;
  const FrameSkipMode mode_;
  int limit_;
  const Handle<Object> caller_;
  bool skip_next_frame_ = true;
  bool encountered_strict_function_ = false;
  Handle<FrameArray> elements_;
};
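// Reads the stackTraceLimit property off the Error constructor (settable
// from JS as Error.stackTraceLimit) and clamps it to a non-negative frame
// count. Returns false if the property is not a number, in which case the
// caller collects no stack trace at all.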
bool GetStackTraceLimit(Isolate* isolate, int* result) {
  Handle<JSObject> error = isolate->error_function();

  Handle<String> key = isolate->factory()->stackTraceLimit_string();
  Handle<Object> stack_trace_limit = JSReceiver::GetDataProperty(error, key);
  if (!stack_trace_limit->IsNumber()) return false;

  // Ensure that limit is not negative.
  *result = Max(FastD2IChecked(stack_trace_limit->Number()), 0);

  if (*result != FLAG_stack_trace_limit) {
    isolate->CountUsage(v8::Isolate::kErrorStackTraceLimit);
  }

  return true;
}

bool NoExtension(const v8::FunctionCallbackInfo<v8::Value>&) { return false; }

bool IsBuiltinFunction(Isolate* isolate, HeapObject* object,
                       Builtins::Name builtin_index) {
  if (!object->IsJSFunction()) return false;
  JSFunction* const function = JSFunction::cast(object);
  return function->code() == isolate->builtins()->builtin(builtin_index);
}

void CaptureAsyncStackTrace(Isolate* isolate, Handle<JSPromise> promise,
                            FrameArrayBuilder* builder) {
  CHECK_EQ(Promise::kPending, promise->status());

  while (!builder->full()) {
    // Check that we have exactly one PromiseReaction on the {promise}.
    if (!promise->reactions()->IsPromiseReaction()) return;
    Handle<PromiseReaction> reaction(
        PromiseReaction::cast(promise->reactions()), isolate);
    if (!reaction->next()->IsSmi()) return;

    // Check if the {reaction} has one of the known async function or
    // async generator continuations as its fulfill handler.
    if (IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncFunctionAwaitResolveClosure) ||
        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncGeneratorAwaitResolveClosure) ||
        IsBuiltinFunction(isolate, reaction->fulfill_handler(),
                          Builtins::kAsyncGeneratorYieldResolveClosure)) {
      // Now peek into the handlers' AwaitContext to get to
      // the JSGeneratorObject for the async function.
      Handle<Context> context(
          JSFunction::cast(reaction->fulfill_handler())->context(), isolate);
      Handle<JSGeneratorObject> generator_object(
          JSGeneratorObject::cast(context->extension()), isolate);
      CHECK(generator_object->is_suspended());

      // Append async frame corresponding to the {generator_object}.
      builder->AppendAsyncFrame(generator_object);

      // Try to continue from here.
      Handle<JSFunction> function(generator_object->function(), isolate);
      Handle<SharedFunctionInfo> shared(function->shared(), isolate);
      if (IsAsyncGeneratorFunction(shared->kind())) {
        Handle<Object> dot_generator_object(
            generator_object->parameters_and_registers()->get(
                DeclarationScope::kGeneratorObjectVarIndex +
                shared->scope_info()->ParameterCount()),
            isolate);
        if (!dot_generator_object->IsJSAsyncGeneratorObject()) return;
        Handle<JSAsyncGeneratorObject> async_generator_object =
            Handle<JSAsyncGeneratorObject>::cast(dot_generator_object);
        Handle<AsyncGeneratorRequest> async_generator_request(
            AsyncGeneratorRequest::cast(async_generator_object->queue()),
            isolate);
        promise = handle(JSPromise::cast(async_generator_request->promise()),
                         isolate);
      } else {
        CHECK(IsAsyncFunction(shared->kind()));
        Handle<Object> dot_promise(
            generator_object->parameters_and_registers()->get(
                DeclarationScope::kPromiseVarIndex +
                shared->scope_info()->ParameterCount()),
            isolate);
        if (!dot_promise->IsJSPromise()) return;
        promise = Handle<JSPromise>::cast(dot_promise);
      }
    } else {
      // We have some generic promise chain here, so try to
      // continue with the chained promise on the reaction
      // (only works for native promise chains).
      Handle<HeapObject> promise_or_capability(
          reaction->promise_or_capability(), isolate);
      if (promise_or_capability->IsJSPromise()) {
        promise = Handle<JSPromise>::cast(promise_or_capability);
      } else {
        Handle<PromiseCapability> capability =
            Handle<PromiseCapability>::cast(promise_or_capability);
        if (!capability->promise()->IsJSPromise()) return;
        promise = handle(JSPromise::cast(capability->promise()), isolate);
      }
    }
  }
}

}  // namespace
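// Captures the synchronous portion of the stack for {error_object} and, when
// --async-stack-traces is enabled and the walk bottoms out in a suspended
// async function, continues through the promise reaction chain via
// CaptureAsyncStackTrace above. Returns undefined when Error.stackTraceLimit
// is not a number.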
Handle<Object> Isolate::CaptureSimpleStackTrace(Handle<JSReceiver> error_object,
                                                FrameSkipMode mode,
                                                Handle<Object> caller) {
  DisallowJavascriptExecution no_js(this);

  int limit;
  if (!GetStackTraceLimit(this, &limit)) return factory()->undefined_value();

  FrameArrayBuilder builder(this, mode, limit, caller);

  // Build the regular stack trace, and remember the last relevant
  // frame ID and inlined index (for the async stack trace handling
  // below, which starts from this last frame).
  int last_frame_index = 0;
  StackFrame::Id last_frame_id = StackFrame::NO_ID;
  for (StackFrameIterator it(this); !it.done() && !builder.full();
       it.Advance()) {
    StackFrame* const frame = it.frame();
    switch (frame->type()) {
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION:
      case StackFrame::JAVA_SCRIPT_BUILTIN_CONTINUATION_WITH_CATCH:
      case StackFrame::OPTIMIZED:
      case StackFrame::INTERPRETED:
      case StackFrame::BUILTIN:
      case StackFrame::WASM_COMPILED:
      case StackFrame::WASM_INTERPRETER_ENTRY: {
        // A standard frame may include many summarized frames (due to
        // inlining).
        std::vector<FrameSummary> frames;
        StandardFrame::cast(frame)->Summarize(&frames);
        for (size_t i = frames.size(); i-- != 0 && !builder.full();) {
          const auto& summary = frames[i];
          if (summary.IsJavaScript()) {
            //================================================================
            // Handle a JavaScript frame.
            //================================================================
            auto const& java_script = summary.AsJavaScript();
            if (builder.AppendJavaScriptFrame(java_script)) {
              if (IsAsyncFunction(java_script.function()->shared()->kind())) {
                last_frame_id = frame->id();
                last_frame_index = static_cast<int>(i);
              } else {
                last_frame_id = StackFrame::NO_ID;
                last_frame_index = 0;
              }
            }
          } else if (summary.IsWasmCompiled()) {
            //================================================================
            // Handle a WASM compiled frame.
            //================================================================
            auto const& wasm_compiled = summary.AsWasmCompiled();
            if (builder.AppendWasmCompiledFrame(wasm_compiled)) {
              last_frame_id = StackFrame::NO_ID;
              last_frame_index = 0;
            }
          } else if (summary.IsWasmInterpreted()) {
            //================================================================
            // Handle a WASM interpreted frame.
            //================================================================
            auto const& wasm_interpreted = summary.AsWasmInterpreted();
            if (builder.AppendWasmInterpretedFrame(wasm_interpreted)) {
              last_frame_id = StackFrame::NO_ID;
              last_frame_index = 0;
            }
          }
        }
        break;
      }

      case StackFrame::BUILTIN_EXIT:
        // BuiltinExitFrames are not standard frames, so they do not have
        // Summarize(). However, they may have one JS frame worth showing.
        if (builder.AppendBuiltinExitFrame(BuiltinExitFrame::cast(frame))) {
          last_frame_id = StackFrame::NO_ID;
          last_frame_index = 0;
        }
        break;

      default:
        break;
    }
  }

  // If --async-stack-traces is enabled, and we ended on a regular JavaScript
  // frame above, we can enrich the stack trace with async frames (if this
  // last frame corresponds to an async function).
  if (FLAG_async_stack_traces && last_frame_id != StackFrame::NO_ID) {
    StackFrameIterator it(this);
    while (it.frame()->id() != last_frame_id) it.Advance();
    FrameInspector inspector(StandardFrame::cast(it.frame()),
                             last_frame_index, this);
    FunctionKind const kind = inspector.GetFunction()->shared()->kind();
    if (IsAsyncGeneratorFunction(kind)) {
      Handle<Object> const dot_generator_object =
          inspector.GetExpression(DeclarationScope::kGeneratorObjectVarIndex);
      if (dot_generator_object->IsUndefined(this)) {
        // The .generator_object was not yet initialized (i.e. we see a
        // really early exception in the setup of the async generator).
      } else {
        // Check if there's a pending async request on the generator object.
        Handle<JSAsyncGeneratorObject> async_generator_object =
            Handle<JSAsyncGeneratorObject>::cast(dot_generator_object);
        if (!async_generator_object->queue()->IsUndefined(this)) {
          // Take the promise from the first async generator request.
          Handle<AsyncGeneratorRequest> request(
              AsyncGeneratorRequest::cast(async_generator_object->queue()),
              this);

          // We can start collecting an async stack trace from the
          // promise on the {request}.
          Handle<JSPromise> promise(JSPromise::cast(request->promise()), this);
          CaptureAsyncStackTrace(this, promise, &builder);
        }
      }
    } else {
      DCHECK(IsAsyncFunction(kind));
      Handle<Object> const dot_promise =
          inspector.GetExpression(DeclarationScope::kPromiseVarIndex);
      if (dot_promise->IsJSPromise()) {
        // We can start collecting an async stack trace from .promise here.
        CaptureAsyncStackTrace(this, Handle<JSPromise>::cast(dot_promise),
                               &builder);
      } else {
        // If .promise was not yet initialized (i.e. we see a really
        // early exception in the setup of the function), it holds
        // the value undefined. Sanity check here to make sure that
        // we're not peeking into the completely wrong stack slot.
        CHECK(dot_promise->IsUndefined(this));
      }
    }
  }

  // TODO(yangguo): Queue this structured stack trace for preprocessing on GC.
  return factory()->NewJSArrayWithElements(builder.GetElements());
}

MaybeHandle<JSReceiver> Isolate::CaptureAndSetDetailedStackTrace(
    Handle<JSReceiver> error_object) {
  if (capture_stack_trace_for_uncaught_exceptions_) {
    // Capture stack trace for a detailed exception message.
    Handle<Name> key = factory()->detailed_stack_trace_symbol();
    Handle<FixedArray> stack_trace = CaptureCurrentStackTrace(
        stack_trace_for_uncaught_exceptions_frame_limit_,
        stack_trace_for_uncaught_exceptions_options_);
    RETURN_ON_EXCEPTION(
        this,
        JSReceiver::SetProperty(this, error_object, key, stack_trace,
                                LanguageMode::kStrict),
        JSReceiver);
  }
  return error_object;
}

MaybeHandle<JSReceiver> Isolate::CaptureAndSetSimpleStackTrace(
    Handle<JSReceiver> error_object, FrameSkipMode mode,
    Handle<Object> caller) {
  // Capture stack trace for simple stack trace string formatting.
  Handle<Name> key = factory()->stack_trace_symbol();
  Handle<Object> stack_trace =
      CaptureSimpleStackTrace(error_object, mode, caller);
  RETURN_ON_EXCEPTION(
      this,
      JSReceiver::SetProperty(this, error_object, key, stack_trace,
                              LanguageMode::kStrict),
      JSReceiver);
  return error_object;
}

Handle<FixedArray> Isolate::GetDetailedStackTrace(
    Handle<JSObject> error_object) {
  Handle<Name> key_detailed = factory()->detailed_stack_trace_symbol();
  Handle<Object> stack_trace =
      JSReceiver::GetDataProperty(error_object, key_detailed);
  if (stack_trace->IsFixedArray()) {
    return Handle<FixedArray>::cast(stack_trace);
  }
  return Handle<FixedArray>();
}

Address Isolate::GetAbstractPC(int* line, int* column) {
  JavaScriptFrameIterator it(this);

  if (it.done()) {
    *line = -1;
    *column = -1;
    return kNullAddress;
  }
  JavaScriptFrame* frame = it.frame();
  DCHECK(!frame->is_builtin());

  int position = frame->position();

  Object* maybe_script = frame->function()->shared()->script();
  if (maybe_script->IsScript()) {
    Handle<Script>