summary | refs | log | tree | commit | diff
path: root/deps/v8/src/builtins/ia32/builtins-ia32.cc
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/builtins/ia32/builtins-ia32.cc')
-rw-r--r--  deps/v8/src/builtins/ia32/builtins-ia32.cc | 600
1 files changed, 170 insertions, 430 deletions
diff --git a/deps/v8/src/builtins/ia32/builtins-ia32.cc b/deps/v8/src/builtins/ia32/builtins-ia32.cc
index dfeec1339c..d1c0a5d5fb 100644
--- a/deps/v8/src/builtins/ia32/builtins-ia32.cc
+++ b/deps/v8/src/builtins/ia32/builtins-ia32.cc
@@ -20,7 +20,8 @@ namespace internal {
void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
ExitFrameType exit_frame_type) {
- __ mov(ebx, Immediate(ExternalReference::Create(address)));
+ __ mov(kJavaScriptCallExtraArg1Register,
+ Immediate(ExternalReference::Create(address)));
if (exit_frame_type == BUILTIN_EXIT) {
__ Jump(BUILTIN_CODE(masm->isolate(), AdaptorWithBuiltinExitFrame),
RelocInfo::CODE_TARGET);
@@ -31,60 +32,6 @@ void Builtins::Generate_Adaptor(MacroAssembler* masm, Address address,
}
}
-namespace {
-
-void AdaptorWithExitFrameType(MacroAssembler* masm,
- Builtins::ExitFrameType exit_frame_type) {
- // ----------- S t a t e -------------
- // -- eax : number of arguments excluding receiver
- // -- ebx : entry point
- // -- edi : target
- // -- edx : new.target
- // -- esp[0] : return address
- // -- esp[4] : last argument
- // -- ...
- // -- esp[4 * argc] : first argument
- // -- esp[4 * (argc +1)] : receiver
- // -----------------------------------
- __ AssertFunction(edi);
-
- // Make sure we operate in the context of the called function (for example
- // ConstructStubs implemented in C++ will be run in the context of the caller
- // instead of the callee, due to the way that [[Construct]] is defined for
- // ordinary functions).
- __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
-
- // CEntry expects eax to contain the number of arguments including the
- // receiver and the extra arguments.
- __ add(eax, Immediate(BuiltinExitFrameConstants::kNumExtraArgsWithReceiver));
-
- // Insert extra arguments.
- __ PopReturnAddressTo(ecx);
- __ SmiTag(eax);
- __ PushRoot(Heap::kTheHoleValueRootIndex); // Padding.
- __ Push(eax);
- __ SmiUntag(eax);
- __ Push(edi);
- __ Push(edx);
- __ PushReturnAddressFrom(ecx);
-
- // Jump to the C entry runtime stub directly here instead of using
- // JumpToExternalReference because ebx is loaded by Generate_adaptor.
- Handle<Code> code =
- CodeFactory::CEntry(masm->isolate(), 1, kDontSaveFPRegs, kArgvOnStack,
- exit_frame_type == Builtins::BUILTIN_EXIT);
- __ Jump(code, RelocInfo::CODE_TARGET);
-}
-} // namespace
-
-void Builtins::Generate_AdaptorWithExitFrame(MacroAssembler* masm) {
- AdaptorWithExitFrameType(masm, EXIT);
-}
-
-void Builtins::Generate_AdaptorWithBuiltinExitFrame(MacroAssembler* masm) {
- AdaptorWithExitFrameType(masm, BUILTIN_EXIT);
-}
-
static void GenerateTailCallToReturnedCode(MacroAssembler* masm,
Runtime::FunctionId function_id) {
// ----------- S t a t e -------------
@@ -539,21 +486,26 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// -- esp[0] : generator receiver
// -----------------------------------
- // Push holes for arguments to generator function. Since the parser forced
- // context allocation for any variables in generators, the actual argument
- // values have already been copied into the context and these dummy values
- // will never be used.
+ // Copy the function arguments from the generator object's register file.
__ mov(ecx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
- __ mov(ecx,
- FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ movzx_w(
+ ecx, FieldOperand(ecx, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ mov(ebx,
+ FieldOperand(edx, JSGeneratorObject::kParametersAndRegistersOffset));
{
Label done_loop, loop;
+ __ Set(edi, 0);
+
__ bind(&loop);
- __ sub(ecx, Immediate(1));
- __ j(carry, &done_loop, Label::kNear);
- __ PushRoot(Heap::kTheHoleValueRootIndex);
+ __ cmp(edi, ecx);
+ __ j(greater_equal, &done_loop);
+ __ Push(
+ FieldOperand(ebx, edi, times_pointer_size, FixedArray::kHeaderSize));
+ __ add(edi, Immediate(1));
__ jmp(&loop);
+
__ bind(&done_loop);
+ __ mov(edi, FieldOperand(edx, JSGeneratorObject::kFunctionOffset));
}
// Underlying function needs to have bytecode available.
@@ -571,8 +523,8 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
{
__ PushReturnAddressFrom(eax);
__ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
- __ mov(eax,
- FieldOperand(eax, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ movzx_w(eax, FieldOperand(
+ eax, SharedFunctionInfo::kFormalParameterCountOffset));
// We abuse new.target both to indicate that this is a resume call and to
// pass in the generator object. In ordinary calls, new.target is always
// undefined because generator functions are non-constructable.
@@ -848,19 +800,14 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ push(esi); // Callee's context.
__ push(edi); // Callee's JS function.
- // Get the bytecode array from the function object (or from the DebugInfo if
- // it is present) and load it into kInterpreterBytecodeArrayRegister.
- Label maybe_load_debug_bytecode_array, bytecode_array_loaded,
- apply_instrumentation;
+ // Get the bytecode array from the function object and load it into
+ // kInterpreterBytecodeArrayRegister.
__ mov(eax, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
__ mov(kInterpreterBytecodeArrayRegister,
FieldOperand(eax, SharedFunctionInfo::kFunctionDataOffset));
__ Push(eax);
GetSharedFunctionInfoBytecode(masm, kInterpreterBytecodeArrayRegister, eax);
__ Pop(eax);
- __ JumpIfNotSmi(FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset),
- &maybe_load_debug_bytecode_array);
- __ bind(&bytecode_array_loaded);
__ inc(FieldOperand(feedback_vector, FeedbackVector::kInvocationCountOffset));
@@ -969,35 +916,6 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
// The return value is in eax.
LeaveInterpreterFrame(masm, ebx, ecx);
__ ret(0);
-
- // Load debug copy of the bytecode array if it exists.
- // kInterpreterBytecodeArrayRegister is already loaded with
- // SharedFunctionInfo::kFunctionDataOffset.
- __ bind(&maybe_load_debug_bytecode_array);
- __ mov(eax, FieldOperand(eax, SharedFunctionInfo::kDebugInfoOffset));
- __ mov(ecx, FieldOperand(eax, DebugInfo::kDebugBytecodeArrayOffset));
- __ JumpIfRoot(ecx, Heap::kUndefinedValueRootIndex, &bytecode_array_loaded);
-
- __ mov(kInterpreterBytecodeArrayRegister, ecx);
- __ mov(ecx, FieldOperand(eax, DebugInfo::kFlagsOffset));
- __ SmiUntag(ecx);
- __ and_(ecx, Immediate(DebugInfo::kDebugExecutionMode));
- STATIC_ASSERT(static_cast<int>(DebugInfo::kDebugExecutionMode) ==
- static_cast<int>(DebugInfo::kSideEffects));
- ExternalReference debug_execution_mode =
- ExternalReference::debug_execution_mode_address(masm->isolate());
- __ cmp(ecx, Operand::StaticVariable(debug_execution_mode));
- __ j(equal, &bytecode_array_loaded);
-
- __ pop(ecx); // get JSFunction from stack
- __ push(ecx);
- __ push(ebx); // preserve feedback_vector and bytecode array register
- __ push(kInterpreterBytecodeArrayRegister);
- __ push(ecx); // pass function as argument
- __ CallRuntime(Runtime::kDebugApplyInstrumentation);
- __ pop(kInterpreterBytecodeArrayRegister);
- __ pop(ebx);
- __ jmp(&bytecode_array_loaded);
}
@@ -1115,30 +1033,11 @@ void Generate_InterpreterPushZeroAndArgsAndReturnAddress(
Generate_StackOverflowCheck(masm, num_args, scratch1, scratch2,
stack_overflow, true);
-// Step 1 - Update the stack pointer. scratch1 already contains the required
-// increment to the stack. i.e. num_args + 1 stack slots. This is computed in
-// the Generate_StackOverflowCheck.
-
-#ifdef _MSC_VER
- // TODO(mythria): Move it to macro assembler.
- // In windows, we cannot increment the stack size by more than one page
- // (mimimum page size is 4KB) without accessing at least one byte on the
- // page. Check this:
- // https://msdn.microsoft.com/en-us/library/aa227153(v=vs.60).aspx.
- const int page_size = 4 * 1024;
- Label check_offset, update_stack_pointer;
- __ bind(&check_offset);
- __ cmp(scratch1, page_size);
- __ j(less, &update_stack_pointer);
- __ sub(esp, Immediate(page_size));
- // Just to touch the page, before we increment further.
- __ mov(Operand(esp, 0), Immediate(0));
- __ sub(scratch1, Immediate(page_size));
- __ jmp(&check_offset);
- __ bind(&update_stack_pointer);
-#endif
-
- __ sub(esp, scratch1);
+ // Step 1 - Update the stack pointer. scratch1 already contains the required
+ // increment to the stack. i.e. num_args + 1 stack slots. This is computed in
+ // Generate_StackOverflowCheck.
+
+ __ AllocateStackFrame(scratch1);
// Step 2 move return_address and slots above it to the correct locations.
// Move from top to bottom, otherwise we may overwrite when num_args = 0 or 1,
@@ -1214,8 +1113,8 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
// Tail call to the array construct stub (still in the caller
// context at this point).
__ AssertFunction(edi);
- ArrayConstructorStub array_constructor_stub(masm->isolate());
- __ Jump(array_constructor_stub.GetCode(), RelocInfo::CODE_TARGET);
+ Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayConstructorImpl);
+ __ Jump(code, RelocInfo::CODE_TARGET);
} else if (mode == InterpreterPushArgsMode::kWithFinalSpread) {
// Call the constructor with unmodified eax, edi, edx values.
__ Jump(BUILTIN_CODE(masm->isolate(), ConstructWithSpread),
@@ -1335,208 +1234,6 @@ void Builtins::Generate_InterpreterEnterBytecodeDispatch(MacroAssembler* masm) {
Generate_InterpreterEnterBytecode(masm);
}
-void Builtins::Generate_CompileLazyDeoptimizedCode(MacroAssembler* masm) {
- // Set the code slot inside the JSFunction to CompileLazy.
- __ Move(ecx, BUILTIN_CODE(masm->isolate(), CompileLazy));
- __ mov(FieldOperand(edi, JSFunction::kCodeOffset), ecx);
- __ RecordWriteField(edi, JSFunction::kCodeOffset, ecx, ebx, kDontSaveFPRegs,
- OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- // Jump to compile lazy.
- Generate_CompileLazy(masm);
-}
-
-static void GetSharedFunctionInfoCode(MacroAssembler* masm, Register sfi_data,
- Register scratch1) {
- // Figure out the SFI's code object.
- Label done;
- Label check_is_bytecode_array;
- Label check_is_exported_function_data;
- Label check_is_fixed_array;
- Label check_is_pre_parsed_scope_data;
- Label check_is_function_template_info;
- Label check_is_interpreter_data;
-
- Register data_type = scratch1;
-
- // IsSmi: Is builtin
- __ JumpIfNotSmi(sfi_data, &check_is_bytecode_array);
- __ mov(scratch1,
- Immediate(ExternalReference::builtins_address(masm->isolate())));
- // Avoid untagging the Smi unnecessarily.
- STATIC_ASSERT(times_2 == times_pointer_size - kSmiTagSize);
- __ mov(sfi_data, Operand(scratch1, sfi_data, times_2, 0));
- __ jmp(&done);
-
- // Get map for subsequent checks.
- __ bind(&check_is_bytecode_array);
- __ mov(data_type, FieldOperand(sfi_data, HeapObject::kMapOffset));
- __ mov(data_type, FieldOperand(data_type, Map::kInstanceTypeOffset));
-
- // IsBytecodeArray: Interpret bytecode
- __ cmpw(data_type, Immediate(BYTECODE_ARRAY_TYPE));
- __ j(not_equal, &check_is_exported_function_data);
- __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InterpreterEntryTrampoline));
- __ jmp(&done);
-
- // IsWasmExportedFunctionData: Use the wrapper code
- __ bind(&check_is_exported_function_data);
- __ cmpw(data_type, Immediate(WASM_EXPORTED_FUNCTION_DATA_TYPE));
- __ j(not_equal, &check_is_fixed_array);
- __ mov(sfi_data,
- FieldOperand(sfi_data, WasmExportedFunctionData::kWrapperCodeOffset));
- __ jmp(&done);
-
- // IsFixedArray: Instantiate using AsmWasmData
- __ bind(&check_is_fixed_array);
- __ cmpw(data_type, Immediate(FIXED_ARRAY_TYPE));
- __ j(not_equal, &check_is_pre_parsed_scope_data);
- __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), InstantiateAsmJs));
- __ jmp(&done);
-
- // IsPreParsedScopeData: Compile lazy
- __ bind(&check_is_pre_parsed_scope_data);
- __ cmpw(data_type, Immediate(TUPLE2_TYPE));
- __ j(not_equal, &check_is_function_template_info);
- __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), CompileLazy));
- __ jmp(&done);
-
- // IsFunctionTemplateInfo: API call
- __ bind(&check_is_function_template_info);
- __ cmpw(data_type, Immediate(FUNCTION_TEMPLATE_INFO_TYPE));
- __ j(not_equal, &check_is_interpreter_data);
- __ Move(sfi_data, BUILTIN_CODE(masm->isolate(), HandleApiCall));
- __ jmp(&done);
-
- // IsInterpreterData: Interpret bytecode
- __ bind(&check_is_interpreter_data);
- if (FLAG_debug_code) {
- __ cmpw(data_type, Immediate(INTERPRETER_DATA_TYPE));
- __ Check(equal, AbortReason::kInvalidSharedFunctionInfoData);
- }
- __ mov(sfi_data,
- FieldOperand(sfi_data, InterpreterData::kInterpreterTrampolineOffset));
-
- __ bind(&done);
-}
-
-void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- eax : argument count (preserved for callee)
- // -- edx : new target (preserved for callee)
- // -- edi : target function (preserved for callee)
- // -----------------------------------
- // First lookup code, maybe we don't need to compile!
- Label gotta_call_runtime;
-
- Register closure = edi;
- Register feedback_vector = ebx;
-
- // Do we have a valid feedback vector?
- __ mov(feedback_vector,
- FieldOperand(closure, JSFunction::kFeedbackCellOffset));
- __ mov(feedback_vector, FieldOperand(feedback_vector, Cell::kValueOffset));
- __ JumpIfRoot(feedback_vector, Heap::kUndefinedValueRootIndex,
- &gotta_call_runtime);
-
- // Is there an optimization marker or optimized code in the feedback vector?
- MaybeTailCallOptimizedCodeSlot(masm, feedback_vector, ecx);
-
- // We found no optimized code. Infer the code object needed for the SFI.
- Register entry = ecx;
- __ mov(entry, FieldOperand(closure, JSFunction::kSharedFunctionInfoOffset));
- __ mov(entry, FieldOperand(entry, SharedFunctionInfo::kFunctionDataOffset));
- GetSharedFunctionInfoCode(masm, entry, ebx);
-
- // If code entry points to anything other than CompileLazy, install that.
- __ Move(ebx, masm->CodeObject());
- __ cmp(entry, ebx);
- __ j(equal, &gotta_call_runtime);
-
- // Install the SFI's code entry.
- __ mov(FieldOperand(closure, JSFunction::kCodeOffset), entry);
- __ RecordWriteField(closure, JSFunction::kCodeOffset, entry, ebx,
- kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ mov(entry, FieldOperand(closure, JSFunction::kCodeOffset));
- __ lea(entry, FieldOperand(entry, Code::kHeaderSize));
- __ jmp(entry);
-
- __ bind(&gotta_call_runtime);
- GenerateTailCallToReturnedCode(masm, Runtime::kCompileLazy);
-}
-
-// Lazy deserialization design doc: http://goo.gl/dxkYDZ.
-void Builtins::Generate_DeserializeLazy(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- eax : argument count (preserved for callee)
- // -- edx : new target (preserved for callee)
- // -- edi : target function (preserved for callee)
- // -----------------------------------
-
- Label deserialize_in_runtime;
-
- Register target = edi; // Must be preserved
- Register scratch0 = ebx;
- Register scratch1 = ecx;
-
- CHECK(scratch0 != eax && scratch0 != edx && scratch0 != edi);
- CHECK(scratch1 != eax && scratch1 != edx && scratch1 != edi);
- CHECK(scratch0 != scratch1);
-
- // Load the builtin id for lazy deserialization from SharedFunctionInfo.
-
- __ AssertFunction(target);
- __ mov(scratch0, FieldOperand(target, JSFunction::kSharedFunctionInfoOffset));
-
- __ mov(scratch1,
- FieldOperand(scratch0, SharedFunctionInfo::kFunctionDataOffset));
- __ AssertSmi(scratch1);
-
- // The builtin may already have been deserialized. If that is the case, it is
- // stored in the builtins table, and we can copy to correct code object to
- // both the shared function info and function without calling into runtime.
- //
- // Otherwise, we need to call into runtime to deserialize.
-
- {
- // Load the code object at builtins_table[builtin_id] into scratch1.
-
- __ SmiUntag(scratch1);
- __ mov(scratch0,
- Immediate(ExternalReference::builtins_address(masm->isolate())));
- __ mov(scratch1, Operand(scratch0, scratch1, times_pointer_size, 0));
-
- // Check if the loaded code object has already been deserialized. This is
- // the case iff it does not equal DeserializeLazy.
-
- __ Move(scratch0, masm->CodeObject());
- __ cmp(scratch1, scratch0);
- __ j(equal, &deserialize_in_runtime);
- }
-
- {
- // If we've reached this spot, the target builtin has been deserialized and
- // we simply need to copy it over to the target function.
-
- Register target_builtin = scratch1;
-
- __ mov(FieldOperand(target, JSFunction::kCodeOffset), target_builtin);
- __ push(eax); // Write barrier clobbers these below.
- __ push(target_builtin);
- __ RecordWriteField(target, JSFunction::kCodeOffset, target_builtin, eax,
- kDontSaveFPRegs, OMIT_REMEMBERED_SET, OMIT_SMI_CHECK);
- __ pop(target_builtin);
- __ pop(eax);
-
- // All copying is done. Jump to the deserialized code object.
-
- __ lea(target_builtin, FieldOperand(target_builtin, Code::kHeaderSize));
- __ jmp(target_builtin);
- }
-
- __ bind(&deserialize_in_runtime);
- GenerateTailCallToReturnedCode(masm, Runtime::kDeserializeLazy);
-}
-
void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : argument count (preserved for callee)
@@ -1910,44 +1607,9 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
// Run the native code for the InternalArray function called as a normal
// function.
- // tail call a stub
- __ mov(ebx, masm->isolate()->factory()->undefined_value());
- InternalArrayConstructorStub stub(masm->isolate());
- __ TailCallStub(&stub);
-}
-
-void Builtins::Generate_ArrayConstructor(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- eax : argc
- // -- edi : array function
- // -- esp[0] : return address
- // -- esp[4] : last argument
- // -----------------------------------
- Label generic_array_code;
-
- if (FLAG_debug_code) {
- // Initial map for the builtin Array function should be a map.
- __ mov(ebx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
- // Will both indicate a nullptr and a Smi.
- __ test(ebx, Immediate(kSmiTagMask));
- __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
- __ CmpObjectType(ebx, MAP_TYPE, ecx);
- __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
- }
-
- // ebx is the AllocationSite - here undefined.
__ mov(ebx, masm->isolate()->factory()->undefined_value());
- // If edx (new target) is undefined, then this is the 'Call' case, so move
- // edi (the constructor) to rdx.
- Label call;
- __ cmp(edx, ebx);
- __ j(not_equal, &call);
- __ mov(edx, edi);
-
- // Run the native code for the Array function called as a normal function.
- __ bind(&call);
- ArrayConstructorStub stub(masm->isolate());
- __ TailCallStub(&stub);
+ __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
+ RelocInfo::CODE_TARGET);
}
static void EnterArgumentsAdaptorFrame(MacroAssembler* masm) {
@@ -1995,13 +1657,30 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
// -- edx : new.target (checked to be constructor or undefined)
// -- esp[0] : return address.
// -----------------------------------
- __ AssertFixedArray(ebx);
// We need to preserve eax, edi and ebx.
__ movd(xmm0, edx);
__ movd(xmm1, edi);
__ movd(xmm2, eax);
+ if (masm->emit_debug_code()) {
+ // Allow ebx to be a FixedArray, or a FixedDoubleArray if ecx == 0.
+ Label ok, fail;
+ __ AssertNotSmi(ebx);
+ __ mov(edx, FieldOperand(ebx, HeapObject::kMapOffset));
+ __ CmpInstanceType(edx, FIXED_ARRAY_TYPE);
+ __ j(equal, &ok);
+ __ CmpInstanceType(edx, FIXED_DOUBLE_ARRAY_TYPE);
+ __ j(not_equal, &fail);
+ __ cmp(ecx, 0);
+ __ j(equal, &ok);
+ // Fall through.
+ __ bind(&fail);
+ __ Abort(AbortReason::kOperandIsNotAFixedArray);
+
+ __ bind(&ok);
+ }
+
// Check for stack overflow.
{
// Check the stack for overflow. We are not trying to catch interruptions
@@ -2097,8 +1776,8 @@ void Builtins::Generate_CallOrConstructForwardVarargs(MacroAssembler* masm,
{
__ mov(edx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
__ mov(edx, FieldOperand(edx, JSFunction::kSharedFunctionInfoOffset));
- __ mov(edx,
- FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ movzx_w(edx, FieldOperand(
+ edx, SharedFunctionInfo::kFormalParameterCountOffset));
__ mov(ebx, ebp);
}
__ jmp(&arguments_done, Label::kNear);
@@ -2247,8 +1926,8 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
// -- esi : the function context.
// -----------------------------------
- __ mov(ebx,
- FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
+ __ movzx_w(
+ ebx, FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
ParameterCount actual(eax);
ParameterCount expected(ebx);
__ InvokeFunctionCode(edi, no_reg, expected, actual, JUMP_FUNCTION);
@@ -2526,48 +2205,6 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
RelocInfo::CODE_TARGET);
}
-// static
-void Builtins::Generate_AllocateInNewSpace(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- edx : requested object size (untagged)
- // -- esp[0] : return address
- // -----------------------------------
- __ SmiTag(edx);
- __ PopReturnAddressTo(ecx);
- __ Push(edx);
- __ PushReturnAddressFrom(ecx);
- __ Move(esi, Smi::kZero);
- __ TailCallRuntime(Runtime::kAllocateInNewSpace);
-}
-
-// static
-void Builtins::Generate_AllocateInOldSpace(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- edx : requested object size (untagged)
- // -- esp[0] : return address
- // -----------------------------------
- __ SmiTag(edx);
- __ PopReturnAddressTo(ecx);
- __ Push(edx);
- __ Push(Smi::FromInt(AllocateTargetSpace::encode(OLD_SPACE)));
- __ PushReturnAddressFrom(ecx);
- __ Move(esi, Smi::kZero);
- __ TailCallRuntime(Runtime::kAllocateInTargetSpace);
-}
-
-// static
-void Builtins::Generate_Abort(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- edx : message_id as Smi
- // -- esp[0] : return address
- // -----------------------------------
- __ PopReturnAddressTo(ecx);
- __ Push(edx);
- __ PushReturnAddressFrom(ecx);
- __ Move(esi, Smi::kZero);
- __ TailCallRuntime(Runtime::kAbort);
-}
-
void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- eax : actual number of arguments
@@ -2580,10 +2217,10 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
__ IncrementCounter(masm->isolate()->counters()->arguments_adaptors(), 1);
Label enough, too_few;
- __ cmp(eax, ebx);
- __ j(less, &too_few);
__ cmp(ebx, SharedFunctionInfo::kDontAdaptArgumentsSentinel);
__ j(equal, &dont_adapt_arguments);
+ __ cmp(eax, ebx);
+ __ j(less, &too_few);
{ // Enough parameters: Actual >= expected.
__ bind(&enough);
@@ -2742,43 +2379,54 @@ void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
}
void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
+ // The function index was put in edi by the jump table trampoline.
+ // Convert to Smi for the runtime call.
+ __ SmiTag(edi);
{
- FrameScope scope(masm, StackFrame::INTERNAL);
+ TrapOnAbortScope trap_on_abort_scope(masm); // Avoid calls to Abort.
+ FrameScope scope(masm, StackFrame::WASM_COMPILE_LAZY);
// Save all parameter registers (see wasm-linkage.cc). They might be
// overwritten in the runtime call below. We don't have any callee-saved
// registers in wasm, so no need to store anything else.
+ static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedGpParamRegs ==
+ arraysize(wasm::kGpParamRegisters),
+ "frame size mismatch");
for (Register reg : wasm::kGpParamRegisters) {
- if (reg == kWasmInstanceRegister) continue;
__ Push(reg);
}
- __ sub(esp, Immediate(16 * arraysize(wasm::kFpParamRegisters)));
+ static_assert(WasmCompileLazyFrameConstants::kNumberOfSavedFpParamRegs ==
+ arraysize(wasm::kFpParamRegisters),
+ "frame size mismatch");
+ __ sub(esp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
int offset = 0;
for (DoubleRegister reg : wasm::kFpParamRegisters) {
__ movdqu(Operand(esp, offset), reg);
- offset += 16;
+ offset += kSimd128Size;
}
- // Pass the WASM instance as an explicit argument to WasmCompileLazy.
+ // Push the WASM instance as an explicit argument to WasmCompileLazy.
__ Push(kWasmInstanceRegister);
+ // Push the function index as second argument.
+ __ Push(edi);
+ // Load the correct CEntry builtin from the instance object.
+ __ mov(ecx, FieldOperand(kWasmInstanceRegister,
+ WasmInstanceObject::kCEntryStubOffset));
// Initialize the JavaScript context with 0. CEntry will use it to
// set the current context on the isolate.
__ Move(kContextRegister, Smi::kZero);
- __ CallRuntime(Runtime::kWasmCompileLazy);
- // The entrypoint address is the first return value.
+ __ CallRuntimeWithCEntry(Runtime::kWasmCompileLazy, ecx);
+ // The entrypoint address is the return value.
__ mov(edi, kReturnRegister0);
- // The WASM instance is the second return value.
- __ mov(kWasmInstanceRegister, kReturnRegister1);
// Restore registers.
for (DoubleRegister reg : base::Reversed(wasm::kFpParamRegisters)) {
- offset -= 16;
+ offset -= kSimd128Size;
__ movdqu(reg, Operand(esp, offset));
}
DCHECK_EQ(0, offset);
- __ add(esp, Immediate(16 * arraysize(wasm::kFpParamRegisters)));
+ __ add(esp, Immediate(kSimd128Size * arraysize(wasm::kFpParamRegisters)));
for (Register reg : base::Reversed(wasm::kGpParamRegisters)) {
- if (reg == kWasmInstanceRegister) continue;
__ Pop(reg);
}
}
@@ -3013,8 +2661,7 @@ void Builtins::Generate_DoubleToI(MacroAssembler* masm) {
}
void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
- const Register exponent = MathPowTaggedDescriptor::exponent();
- DCHECK(exponent == eax);
+ const Register exponent = eax;
const Register scratch = ecx;
const XMMRegister double_result = xmm3;
const XMMRegister double_base = xmm2;
@@ -3144,6 +2791,99 @@ void Builtins::Generate_MathPowInternal(MacroAssembler* masm) {
__ ret(0);
}
+namespace {
+
+void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
+ ElementsKind kind) {
+ Label not_zero_case, not_one_case;
+ Label normal_sequence;
+
+ __ test(eax, eax);
+ __ j(not_zero, &not_zero_case);
+ __ Jump(CodeFactory::InternalArrayNoArgumentConstructor(masm->isolate(), kind)
+ .code(),
+ RelocInfo::CODE_TARGET);
+
+ __ bind(&not_zero_case);
+ __ cmp(eax, 1);
+ __ j(greater, &not_one_case);
+
+ if (IsFastPackedElementsKind(kind)) {
+ // We might need to create a holey array
+ // look at the first argument
+ __ mov(ecx, Operand(esp, kPointerSize));
+ __ test(ecx, ecx);
+ __ j(zero, &normal_sequence);
+
+ __ Jump(CodeFactory::InternalArraySingleArgumentConstructor(
+ masm->isolate(), GetHoleyElementsKind(kind))
+ .code(),
+ RelocInfo::CODE_TARGET);
+ }
+
+ __ bind(&normal_sequence);
+ __ Jump(
+ CodeFactory::InternalArraySingleArgumentConstructor(masm->isolate(), kind)
+ .code(),
+ RelocInfo::CODE_TARGET);
+
+ __ bind(&not_one_case);
+ Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
+ __ Jump(code, RelocInfo::CODE_TARGET);
+}
+
+} // namespace
+
+void Builtins::Generate_InternalArrayConstructorImpl(MacroAssembler* masm) {
+ // ----------- S t a t e -------------
+ // -- eax : argc
+ // -- edi : constructor
+ // -- esp[0] : return address
+ // -- esp[4] : last argument
+ // -----------------------------------
+
+ if (FLAG_debug_code) {
+ // The array construct code is only set for the global and natives
+ // builtin Array functions which always have maps.
+
+ // Initial map for the builtin Array function should be a map.
+ __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
+ // Will both indicate a nullptr and a Smi.
+ __ test(ecx, Immediate(kSmiTagMask));
+ __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
+ __ CmpObjectType(ecx, MAP_TYPE, ecx);
+ __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
+ }
+
+ // Figure out the right elements kind
+ __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
+
+ // Load the map's "bit field 2" into |result|. We only need the first byte,
+ // but the following masking takes care of that anyway.
+ __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
+ // Retrieve elements_kind from bit field 2.
+ __ DecodeField<Map::ElementsKindBits>(ecx);
+
+ if (FLAG_debug_code) {
+ Label done;
+ __ cmp(ecx, Immediate(PACKED_ELEMENTS));
+ __ j(equal, &done);
+ __ cmp(ecx, Immediate(HOLEY_ELEMENTS));
+ __ Assert(
+ equal,
+ AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
+ __ bind(&done);
+ }
+
+ Label fast_elements_case;
+ __ cmp(ecx, Immediate(PACKED_ELEMENTS));
+ __ j(equal, &fast_elements_case);
+ GenerateInternalArrayConstructorCase(masm, HOLEY_ELEMENTS);
+
+ __ bind(&fast_elements_case);
+ GenerateInternalArrayConstructorCase(masm, PACKED_ELEMENTS);
+}
+
#undef __
} // namespace internal