aboutsummaryrefslogtreecommitdiff
path: root/deps/v8/src/builtins/builtins-internal-gen.cc
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/builtins/builtins-internal-gen.cc')
-rw-r--r--deps/v8/src/builtins/builtins-internal-gen.cc210
1 file changed, 120 insertions, 90 deletions
diff --git a/deps/v8/src/builtins/builtins-internal-gen.cc b/deps/v8/src/builtins/builtins-internal-gen.cc
index 7ff88c5a53..44a18099bf 100644
--- a/deps/v8/src/builtins/builtins-internal-gen.cc
+++ b/deps/v8/src/builtins/builtins-internal-gen.cc
@@ -24,10 +24,16 @@ using TNode = compiler::TNode<T>;
// Interrupt and stack checks.
void Builtins::Generate_InterruptCheck(MacroAssembler* masm) {
+#ifdef V8_TARGET_ARCH_IA32
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+#endif
masm->TailCallRuntime(Runtime::kInterrupt);
}
void Builtins::Generate_StackCheck(MacroAssembler* masm) {
+#ifdef V8_TARGET_ARCH_IA32
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+#endif
masm->TailCallRuntime(Runtime::kStackGuard);
}
@@ -350,18 +356,11 @@ class RecordWriteCodeStubAssembler : public CodeStubAssembler {
};
TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
- Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
- Node* slot = Parameter(Descriptor::kSlot);
- Node* isolate = Parameter(Descriptor::kIsolate);
- Node* remembered_set = Parameter(Descriptor::kRememberedSet);
- Node* fp_mode = Parameter(Descriptor::kFPMode);
-
- Node* value = Load(MachineType::Pointer(), slot);
-
Label generational_wb(this);
Label incremental_wb(this);
Label exit(this);
+ Node* remembered_set = Parameter(Descriptor::kRememberedSet);
Branch(ShouldEmitRememberSet(remembered_set), &generational_wb,
&incremental_wb);
@@ -369,40 +368,58 @@ TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
{
Label test_old_to_new_flags(this);
Label store_buffer_exit(this), store_buffer_incremental_wb(this);
+
// When incremental marking is not on, we skip cross generation pointer
// checking here, because there are checks for
// `kPointersFromHereAreInterestingMask` and
// `kPointersToHereAreInterestingMask` in
// `src/compiler/<arch>/code-generator-<arch>.cc` before calling this stub,
// which serves as the cross generation checking.
+ Node* slot = Parameter(Descriptor::kSlot);
Branch(IsMarking(), &test_old_to_new_flags, &store_buffer_exit);
BIND(&test_old_to_new_flags);
{
+ Node* value = Load(MachineType::Pointer(), slot);
+
// TODO(albertnetymk): Try to cache the page flag for value and object,
// instead of calling IsPageFlagSet each time.
Node* value_in_new_space =
IsPageFlagSet(value, MemoryChunk::kIsInNewSpaceMask);
GotoIfNot(value_in_new_space, &incremental_wb);
+ Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
Node* object_in_new_space =
IsPageFlagSet(object, MemoryChunk::kIsInNewSpaceMask);
- GotoIf(object_in_new_space, &incremental_wb);
-
- Goto(&store_buffer_incremental_wb);
+ Branch(object_in_new_space, &incremental_wb,
+ &store_buffer_incremental_wb);
}
BIND(&store_buffer_exit);
- { InsertToStoreBufferAndGoto(isolate, slot, fp_mode, &exit); }
+ {
+ Node* isolate_constant =
+ ExternalConstant(ExternalReference::isolate_address(isolate()));
+ Node* fp_mode = Parameter(Descriptor::kFPMode);
+ InsertToStoreBufferAndGoto(isolate_constant, slot, fp_mode, &exit);
+ }
BIND(&store_buffer_incremental_wb);
- { InsertToStoreBufferAndGoto(isolate, slot, fp_mode, &incremental_wb); }
+ {
+ Node* isolate_constant =
+ ExternalConstant(ExternalReference::isolate_address(isolate()));
+ Node* fp_mode = Parameter(Descriptor::kFPMode);
+ InsertToStoreBufferAndGoto(isolate_constant, slot, fp_mode,
+ &incremental_wb);
+ }
}
BIND(&incremental_wb);
{
Label call_incremental_wb(this);
+ Node* slot = Parameter(Descriptor::kSlot);
+ Node* value = Load(MachineType::Pointer(), slot);
+
// There are two cases we need to call incremental write barrier.
// 1) value_is_white
GotoIf(IsWhite(value), &call_incremental_wb);
@@ -411,20 +428,23 @@ TF_BUILTIN(RecordWrite, RecordWriteCodeStubAssembler) {
// is_compacting = true when is_marking = true
GotoIfNot(IsPageFlagSet(value, MemoryChunk::kEvacuationCandidateMask),
&exit);
- GotoIf(
- IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
- &exit);
- Goto(&call_incremental_wb);
+ Node* object = BitcastTaggedToWord(Parameter(Descriptor::kObject));
+ Branch(
+ IsPageFlagSet(object, MemoryChunk::kSkipEvacuationSlotsRecordingMask),
+ &exit, &call_incremental_wb);
BIND(&call_incremental_wb);
{
Node* function = ExternalConstant(
ExternalReference::incremental_marking_record_write_function());
+ Node* isolate_constant =
+ ExternalConstant(ExternalReference::isolate_address(isolate()));
+ Node* fp_mode = Parameter(Descriptor::kFPMode);
CallCFunction3WithCallerSavedRegistersMode(
MachineType::Int32(), MachineType::Pointer(), MachineType::Pointer(),
- MachineType::Pointer(), function, object, slot, isolate, fp_mode,
- &exit);
+ MachineType::Pointer(), function, object, slot, isolate_constant,
+ fp_mode, &exit);
}
}
@@ -454,7 +474,7 @@ class DeletePropertyBaseAssembler : public AccessorAssembler {
dont_delete);
// Overwrite the entry itself (see NameDictionary::SetEntry).
TNode<HeapObject> filler = TheHoleConstant();
- DCHECK(Heap::RootIsImmortalImmovable(Heap::kTheHoleValueRootIndex));
+ DCHECK(Heap::RootIsImmortalImmovable(RootIndex::kTheHoleValue));
StoreFixedArrayElement(properties, key_index, filler, SKIP_WRITE_BARRIER);
StoreValueByKeyIndex<NameDictionary>(properties, key_index, filler,
SKIP_WRITE_BARRIER);
@@ -609,11 +629,14 @@ class InternalBuiltinsAssembler : public CodeStubAssembler {
explicit InternalBuiltinsAssembler(compiler::CodeAssemblerState* state)
: CodeStubAssembler(state) {}
- TNode<IntPtrT> GetPendingMicrotaskCount();
- void SetPendingMicrotaskCount(TNode<IntPtrT> count);
-
- TNode<FixedArray> GetMicrotaskQueue();
- void SetMicrotaskQueue(TNode<FixedArray> queue);
+ TNode<MicrotaskQueue> GetDefaultMicrotaskQueue();
+ TNode<IntPtrT> GetPendingMicrotaskCount(
+ TNode<MicrotaskQueue> microtask_queue);
+ void SetPendingMicrotaskCount(TNode<MicrotaskQueue> microtask_queue,
+ TNode<IntPtrT> new_num_tasks);
+ TNode<FixedArray> GetQueuedMicrotasks(TNode<MicrotaskQueue> microtask_queue);
+ void SetQueuedMicrotasks(TNode<MicrotaskQueue> microtask_queue,
+ TNode<FixedArray> new_queue);
TNode<Context> GetCurrentContext();
void SetCurrentContext(TNode<Context> context);
@@ -700,37 +723,34 @@ TF_BUILTIN(AdaptorWithBuiltinExitFrame, InternalBuiltinsAssembler) {
GenerateAdaptorWithExitFrameType<Descriptor>(Builtins::BUILTIN_EXIT);
}
-TNode<IntPtrT> InternalBuiltinsAssembler::GetPendingMicrotaskCount() {
- auto ref = ExternalReference::pending_microtask_count_address(isolate());
- if (kIntSize == 8) {
- return TNode<IntPtrT>::UncheckedCast(
- Load(MachineType::Int64(), ExternalConstant(ref)));
- } else {
- Node* const value = Load(MachineType::Int32(), ExternalConstant(ref));
- return ChangeInt32ToIntPtr(value);
- }
+TNode<MicrotaskQueue> InternalBuiltinsAssembler::GetDefaultMicrotaskQueue() {
+ return TNode<MicrotaskQueue>::UncheckedCast(
+ LoadRoot(RootIndex::kDefaultMicrotaskQueue));
}
-void InternalBuiltinsAssembler::SetPendingMicrotaskCount(TNode<IntPtrT> count) {
- auto ref = ExternalReference::pending_microtask_count_address(isolate());
- auto rep = kIntSize == 8 ? MachineRepresentation::kWord64
- : MachineRepresentation::kWord32;
- if (kIntSize == 4 && kPointerSize == 8) {
- Node* const truncated_count =
- TruncateInt64ToInt32(TNode<Int64T>::UncheckedCast(count));
- StoreNoWriteBarrier(rep, ExternalConstant(ref), truncated_count);
- } else {
- StoreNoWriteBarrier(rep, ExternalConstant(ref), count);
- }
+TNode<IntPtrT> InternalBuiltinsAssembler::GetPendingMicrotaskCount(
+ TNode<MicrotaskQueue> microtask_queue) {
+ TNode<IntPtrT> result = LoadAndUntagObjectField(
+ microtask_queue, MicrotaskQueue::kPendingMicrotaskCountOffset);
+ return result;
}
-TNode<FixedArray> InternalBuiltinsAssembler::GetMicrotaskQueue() {
- return TNode<FixedArray>::UncheckedCast(
- LoadRoot(Heap::kMicrotaskQueueRootIndex));
+void InternalBuiltinsAssembler::SetPendingMicrotaskCount(
+ TNode<MicrotaskQueue> microtask_queue, TNode<IntPtrT> new_num_tasks) {
+ StoreObjectField(microtask_queue,
+ MicrotaskQueue::kPendingMicrotaskCountOffset,
+ SmiFromIntPtr(new_num_tasks));
}
-void InternalBuiltinsAssembler::SetMicrotaskQueue(TNode<FixedArray> queue) {
- StoreRoot(Heap::kMicrotaskQueueRootIndex, queue);
+TNode<FixedArray> InternalBuiltinsAssembler::GetQueuedMicrotasks(
+ TNode<MicrotaskQueue> microtask_queue) {
+ return LoadObjectField<FixedArray>(microtask_queue,
+ MicrotaskQueue::kQueueOffset);
+}
+
+void InternalBuiltinsAssembler::SetQueuedMicrotasks(
+ TNode<MicrotaskQueue> microtask_queue, TNode<FixedArray> new_queue) {
+ StoreObjectField(microtask_queue, MicrotaskQueue::kQueueOffset, new_queue);
}
TNode<Context> InternalBuiltinsAssembler::GetCurrentContext() {
@@ -819,9 +839,10 @@ void InternalBuiltinsAssembler::RunPromiseHook(
TF_BUILTIN(EnqueueMicrotask, InternalBuiltinsAssembler) {
Node* microtask = Parameter(Descriptor::kMicrotask);
- TNode<IntPtrT> num_tasks = GetPendingMicrotaskCount();
+ TNode<MicrotaskQueue> microtask_queue = GetDefaultMicrotaskQueue();
+ TNode<IntPtrT> num_tasks = GetPendingMicrotaskCount(microtask_queue);
TNode<IntPtrT> new_num_tasks = IntPtrAdd(num_tasks, IntPtrConstant(1));
- TNode<FixedArray> queue = GetMicrotaskQueue();
+ TNode<FixedArray> queue = GetQueuedMicrotasks(microtask_queue);
TNode<IntPtrT> queue_length = LoadAndUntagFixedArrayBaseLength(queue);
Label if_append(this), if_grow(this), done(this);
@@ -851,8 +872,8 @@ TF_BUILTIN(EnqueueMicrotask, InternalBuiltinsAssembler) {
StoreFixedArrayElement(new_queue, num_tasks, microtask,
SKIP_WRITE_BARRIER);
FillFixedArrayWithValue(PACKED_ELEMENTS, new_queue, new_num_tasks,
- new_queue_length, Heap::kUndefinedValueRootIndex);
- SetMicrotaskQueue(new_queue);
+ new_queue_length, RootIndex::kUndefinedValue);
+ SetQueuedMicrotasks(microtask_queue, new_queue);
Goto(&done);
}
@@ -865,8 +886,8 @@ TF_BUILTIN(EnqueueMicrotask, InternalBuiltinsAssembler) {
CopyFixedArrayElements(PACKED_ELEMENTS, queue, new_queue, num_tasks);
StoreFixedArrayElement(new_queue, num_tasks, microtask);
FillFixedArrayWithValue(PACKED_ELEMENTS, new_queue, new_num_tasks,
- new_queue_length, Heap::kUndefinedValueRootIndex);
- SetMicrotaskQueue(new_queue);
+ new_queue_length, RootIndex::kUndefinedValue);
+ SetQueuedMicrotasks(microtask_queue, new_queue);
Goto(&done);
}
}
@@ -878,13 +899,14 @@ TF_BUILTIN(EnqueueMicrotask, InternalBuiltinsAssembler) {
}
BIND(&done);
- SetPendingMicrotaskCount(new_num_tasks);
+ SetPendingMicrotaskCount(microtask_queue, new_num_tasks);
Return(UndefinedConstant());
}
TF_BUILTIN(RunMicrotasks, InternalBuiltinsAssembler) {
// Load the current context from the isolate.
TNode<Context> current_context = GetCurrentContext();
+ TNode<MicrotaskQueue> microtask_queue = GetDefaultMicrotaskQueue();
Label init_queue_loop(this);
Goto(&init_queue_loop);
@@ -893,17 +915,17 @@ TF_BUILTIN(RunMicrotasks, InternalBuiltinsAssembler) {
TVARIABLE(IntPtrT, index, IntPtrConstant(0));
Label loop(this, &index), loop_next(this);
- TNode<IntPtrT> num_tasks = GetPendingMicrotaskCount();
+ TNode<IntPtrT> num_tasks = GetPendingMicrotaskCount(microtask_queue);
ReturnIf(IntPtrEqual(num_tasks, IntPtrConstant(0)), UndefinedConstant());
- TNode<FixedArray> queue = GetMicrotaskQueue();
+ TNode<FixedArray> queue = GetQueuedMicrotasks(microtask_queue);
CSA_ASSERT(this, IntPtrGreaterThanOrEqual(
LoadAndUntagFixedArrayBaseLength(queue), num_tasks));
CSA_ASSERT(this, IntPtrGreaterThan(num_tasks, IntPtrConstant(0)));
- SetPendingMicrotaskCount(IntPtrConstant(0));
- SetMicrotaskQueue(EmptyFixedArrayConstant());
+ SetQueuedMicrotasks(microtask_queue, EmptyFixedArrayConstant());
+ SetPendingMicrotaskCount(microtask_queue, IntPtrConstant(0));
Goto(&loop);
BIND(&loop);
@@ -1099,20 +1121,20 @@ TF_BUILTIN(RunMicrotasks, InternalBuiltinsAssembler) {
}
TF_BUILTIN(AllocateInNewSpace, CodeStubAssembler) {
- TNode<Int32T> requested_size =
- UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));
+ TNode<IntPtrT> requested_size =
+ UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));
TailCallRuntime(Runtime::kAllocateInNewSpace, NoContextConstant(),
- SmiFromInt32(requested_size));
+ SmiFromIntPtr(requested_size));
}
TF_BUILTIN(AllocateInOldSpace, CodeStubAssembler) {
- TNode<Int32T> requested_size =
- UncheckedCast<Int32T>(Parameter(Descriptor::kRequestedSize));
+ TNode<IntPtrT> requested_size =
+ UncheckedCast<IntPtrT>(Parameter(Descriptor::kRequestedSize));
int flags = AllocateTargetSpace::encode(OLD_SPACE);
TailCallRuntime(Runtime::kAllocateInTargetSpace, NoContextConstant(),
- SmiFromInt32(requested_size), SmiConstant(flags));
+ SmiFromIntPtr(requested_size), SmiConstant(flags));
}
TF_BUILTIN(Abort, CodeStubAssembler) {
@@ -1178,6 +1200,9 @@ void Builtins::Generate_CEntry_Return2_SaveFPRegs_ArgvOnStack_BuiltinExit(
}
void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
+#ifdef V8_TARGET_ARCH_IA32
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+#endif
// CallApiGetterStub only exists as a stub to avoid duplicating code between
// here and code-stubs-<arch>.cc. For example, see CallApiFunctionAndReturn.
// Here we abuse the instantiated stub to generate code.
@@ -1186,6 +1211,9 @@ void Builtins::Generate_CallApiGetter(MacroAssembler* masm) {
}
void Builtins::Generate_CallApiCallback_Argc0(MacroAssembler* masm) {
+#ifdef V8_TARGET_ARCH_IA32
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+#endif
// The common variants of CallApiCallbackStub (i.e. all that are embedded into
// the snapshot) are generated as builtins. The rest remain available as code
// stubs. Here we abuse the instantiated stub to generate code and avoid
@@ -1196,6 +1224,9 @@ void Builtins::Generate_CallApiCallback_Argc0(MacroAssembler* masm) {
}
void Builtins::Generate_CallApiCallback_Argc1(MacroAssembler* masm) {
+#ifdef V8_TARGET_ARCH_IA32
+ Assembler::SupportsRootRegisterScope supports_root_register(masm);
+#endif
// The common variants of CallApiCallbackStub (i.e. all that are embedded into
// the snapshot) are generated as builtins. The rest remain available as code
// stubs. Here we abuse the instantiated stub to generate code and avoid
@@ -1207,27 +1238,23 @@ void Builtins::Generate_CallApiCallback_Argc1(MacroAssembler* masm) {
// ES6 [[Get]] operation.
TF_BUILTIN(GetProperty, CodeStubAssembler) {
- Label call_runtime(this, Label::kDeferred), return_undefined(this), end(this);
-
Node* object = Parameter(Descriptor::kObject);
Node* key = Parameter(Descriptor::kKey);
Node* context = Parameter(Descriptor::kContext);
- VARIABLE(var_result, MachineRepresentation::kTagged);
+ Label if_notfound(this), if_proxy(this, Label::kDeferred),
+ if_slow(this, Label::kDeferred);
CodeStubAssembler::LookupInHolder lookup_property_in_holder =
- [=, &var_result, &end](Node* receiver, Node* holder, Node* holder_map,
- Node* holder_instance_type, Node* unique_name,
- Label* next_holder, Label* if_bailout) {
+ [=](Node* receiver, Node* holder, Node* holder_map,
+ Node* holder_instance_type, Node* unique_name, Label* next_holder,
+ Label* if_bailout) {
VARIABLE(var_value, MachineRepresentation::kTagged);
Label if_found(this);
TryGetOwnProperty(context, receiver, holder, holder_map,
holder_instance_type, unique_name, &if_found,
&var_value, next_holder, if_bailout);
BIND(&if_found);
- {
- var_result.Bind(var_value.value());
- Goto(&end);
- }
+ Return(var_value.value());
};
CodeStubAssembler::LookupInHolder lookup_element_in_holder =
@@ -1240,23 +1267,26 @@ TF_BUILTIN(GetProperty, CodeStubAssembler) {
};
TryPrototypeChainLookup(object, key, lookup_property_in_holder,
- lookup_element_in_holder, &return_undefined,
- &call_runtime);
+ lookup_element_in_holder, &if_notfound, &if_slow,
+ &if_proxy);
- BIND(&return_undefined);
- {
- var_result.Bind(UndefinedConstant());
- Goto(&end);
- }
+ BIND(&if_notfound);
+ Return(UndefinedConstant());
+
+ BIND(&if_slow);
+ TailCallRuntime(Runtime::kGetProperty, context, object, key);
- BIND(&call_runtime);
+ BIND(&if_proxy);
{
- var_result.Bind(CallRuntime(Runtime::kGetProperty, context, object, key));
- Goto(&end);
+ // Convert the {key} to a Name first.
+ Node* name = CallBuiltin(Builtins::kToName, context, key);
+
+ // The {object} is a JSProxy instance, look up the {name} on it, passing
+ // {object} both as receiver and holder. If {name} is absent we can safely
+ // return undefined from here.
+ TailCallBuiltin(Builtins::kProxyGetProperty, context, object, name, object,
+ SmiConstant(OnNonExistent::kReturnUndefined));
}
-
- BIND(&end);
- Return(var_result.value());
}
// ES6 [[Set]] operation.