// Copyright 2012 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#if V8_TARGET_ARCH_IA32

#include "src/api-arguments.h"
#include "src/assembler-inl.h"
#include "src/base/bits.h"
#include "src/bootstrapper.h"
#include "src/code-stubs.h"
#include "src/frame-constants.h"
#include "src/frames.h"
#include "src/heap/heap-inl.h"
#include "src/ic/ic.h"
#include "src/ic/stub-cache.h"
#include "src/isolate.h"
#include "src/regexp/jsregexp.h"
#include "src/regexp/regexp-macro-assembler.h"
#include "src/runtime/runtime.h"

namespace v8 {
namespace internal {

#define __ ACCESS_MASM(masm)

void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
  __ pop(ecx);
  __ mov(MemOperand(esp, eax, times_4, 0), edi);
  __ push(edi);
  __ push(ebx);
  __ push(ecx);
  __ add(eax, Immediate(3));
  __ TailCallRuntime(Runtime::kNewArray);
}

void DoubleToIStub::Generate(MacroAssembler* masm) {
  Register final_result_reg = this->destination();

  Label check_negative, process_64_bits, done;

  // Account for return address and saved regs.
  const int kArgumentOffset = 3 * kPointerSize;

  MemOperand mantissa_operand(MemOperand(esp, kArgumentOffset));
  MemOperand exponent_operand(
      MemOperand(esp, kArgumentOffset + kDoubleSize / 2));

  Register scratch1 = no_reg;
  {
    Register scratch_candidates[3] = {ebx, edx, edi};
    for (int i = 0; i < 3; i++) {
      scratch1 = scratch_candidates[i];
      if (final_result_reg != scratch1) break;
    }
  }
  // Since we must use ecx for shifts below, use some other register (eax)
  // to calculate the result if ecx is the requested return register.
  Register result_reg = final_result_reg == ecx ? eax : final_result_reg;
  // Save ecx if it isn't the return register and therefore volatile, or if it
  // is the return register, then save the temp register we use in its stead
  // for the result.
  Register save_reg = final_result_reg == ecx ? eax : ecx;
  __ push(scratch1);
  __ push(save_reg);

  __ mov(scratch1, mantissa_operand);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Load x87 register with heap number.
    __ fld_d(mantissa_operand);
  }
  __ mov(ecx, exponent_operand);

  __ and_(ecx, HeapNumber::kExponentMask);
  __ shr(ecx, HeapNumber::kExponentShift);
  __ lea(result_reg, MemOperand(ecx, -HeapNumber::kExponentBias));
  __ cmp(result_reg, Immediate(HeapNumber::kMantissaBits));
  __ j(below, &process_64_bits);

  // Result is entirely in lower 32-bits of mantissa
  int delta = HeapNumber::kExponentBias + Double::kPhysicalSignificandSize;
  if (CpuFeatures::IsSupported(SSE3)) {
    __ fstp(0);
  }
  __ sub(ecx, Immediate(delta));
  __ xor_(result_reg, result_reg);
  __ cmp(ecx, Immediate(31));
  __ j(above, &done);
  __ shl_cl(scratch1);
  __ jmp(&check_negative);

  __ bind(&process_64_bits);
  if (CpuFeatures::IsSupported(SSE3)) {
    CpuFeatureScope scope(masm, SSE3);
    // Reserve space for 64 bit answer.
    __ sub(esp, Immediate(kDoubleSize));  // Nolint.
    // Do conversion, which cannot fail because we checked the exponent.
    __ fisttp_d(Operand(esp, 0));
    __ mov(result_reg, Operand(esp, 0));  // Load low word of answer as result
    __ add(esp, Immediate(kDoubleSize));
    __ jmp(&done);
  } else {
    // Result must be extracted from shifted 32-bit mantissa
    __ sub(ecx, Immediate(delta));
    __ neg(ecx);
    __ mov(result_reg, exponent_operand);
    __ and_(result_reg,
            Immediate(static_cast<uint32_t>(Double::kSignificandMask >> 32)));
    __ add(result_reg,
           Immediate(static_cast<uint32_t>(Double::kHiddenBit >> 32)));
    __ shrd_cl(scratch1, result_reg);
    __ shr_cl(result_reg);
    __ test(ecx, Immediate(32));
    __ cmov(not_equal, scratch1, result_reg);
  }

  // If the double was negative, negate the integer result.
  __ bind(&check_negative);
  __ mov(result_reg, scratch1);
  __ neg(result_reg);
  __ cmp(exponent_operand, Immediate(0));
  __ cmov(greater, result_reg, scratch1);

  // Restore registers
  __ bind(&done);
  if (final_result_reg != result_reg) {
    DCHECK(final_result_reg == ecx);
    __ mov(final_result_reg, result_reg);
  }
  __ pop(save_reg);
  __ pop(scratch1);
  __ ret(0);
}
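// A rough C++ model of the slow (non-SSE3) path above, for reference only.
// This is a sketch and is not compiled: the helper name is illustrative, the
// IEEE-754 constants are spelled out instead of taken from HeapNumber, and
// <cstdint>/<cstring> would be needed stand-alone.
#if 0
static int32_t DoubleToInt32Sketch(double input) {
  uint64_t bits;
  std::memcpy(&bits, &input, sizeof(bits));
  int exponent = static_cast<int>((bits >> 52) & 0x7FF) - 1023;
  if (exponent < 0) return 0;   // |input| < 1 truncates to 0.
  if (exponent > 83) return 0;  // Every significand bit lands above bit 31.
  // Significand with the hidden bit restored (the kHiddenBit add above).
  uint64_t significand = (bits & 0xFFFFFFFFFFFFFull) | (1ull << 52);
  int shift = exponent - 52;  // Mirrors the shrd/shl dance in the stub.
  uint64_t value = shift >= 0 ? significand << shift : significand >> -shift;
  int32_t result = static_cast<int32_t>(static_cast<uint32_t>(value));
  return (bits >> 63) ? -result : result;  // The check_negative step.
}
#endif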
void MathPowStub::Generate(MacroAssembler* masm) {
  const Register exponent = MathPowTaggedDescriptor::exponent();
  DCHECK(exponent == eax);
  const Register scratch = ecx;
  const XMMRegister double_result = xmm3;
  const XMMRegister double_base = xmm2;
  const XMMRegister double_exponent = xmm1;
  const XMMRegister double_scratch = xmm4;

  Label call_runtime, done, exponent_not_smi, int_exponent;

  // Save 1 in double_result - we need this several times later on.
  __ mov(scratch, Immediate(1));
  __ Cvtsi2sd(double_result, scratch);

  Label fast_power, try_arithmetic_simplification;
  __ DoubleToI(exponent, double_exponent, double_scratch,
               &try_arithmetic_simplification,
               &try_arithmetic_simplification);
  __ jmp(&int_exponent);

  __ bind(&try_arithmetic_simplification);
  // Skip to runtime if possibly NaN (indicated by the indefinite integer).
  __ cvttsd2si(exponent, Operand(double_exponent));
  __ cmp(exponent, Immediate(0x1));
  __ j(overflow, &call_runtime);

  // Using FPU instructions to calculate power.
  Label fast_power_failed;
  __ bind(&fast_power);
  __ fnclex();  // Clear flags to catch exceptions later.
  // Transfer (B)ase and (E)xponent onto the FPU register stack.
  __ sub(esp, Immediate(kDoubleSize));
  __ movsd(Operand(esp, 0), double_exponent);
  __ fld_d(Operand(esp, 0));  // E
  __ movsd(Operand(esp, 0), double_base);
  __ fld_d(Operand(esp, 0));  // B, E

  // Exponent is in st(1) and base is in st(0)
  // B ^ E = (2^(E * log2(B)) - 1) + 1 = (2^X - 1) + 1 for X = E * log2(B)
  // FYL2X calculates st(1) * log2(st(0))
  __ fyl2x();    // X
  __ fld(0);     // X, X
  __ frndint();  // rnd(X), X
  __ fsub(1);    // rnd(X), X-rnd(X)
  __ fxch(1);    // X - rnd(X), rnd(X)
  // F2XM1 calculates 2^st(0) - 1 for -1 < st(0) < 1
  __ f2xm1();   // 2^(X-rnd(X)) - 1, rnd(X)
  __ fld1();    // 1, 2^(X-rnd(X)) - 1, rnd(X)
  __ faddp(1);  // 2^(X-rnd(X)), rnd(X)
  // FSCALE calculates st(0) * 2^st(1)
  __ fscale();  // 2^X, rnd(X)
  __ fstp(1);   // 2^X
  // Bail out to runtime in case of exceptions in the status word.
  __ fnstsw_ax();
  __ test_b(eax, Immediate(0x5F));  // We check for all but precision exception.
  __ j(not_zero, &fast_power_failed, Label::kNear);
  __ fstp_d(Operand(esp, 0));
  __ movsd(double_result, Operand(esp, 0));
  __ add(esp, Immediate(kDoubleSize));
  __ jmp(&done);

  __ bind(&fast_power_failed);
  __ fninit();
  __ add(esp, Immediate(kDoubleSize));
  __ jmp(&call_runtime);

  // Calculate power with integer exponent.
  __ bind(&int_exponent);
  const XMMRegister double_scratch2 = double_exponent;
  __ mov(scratch, exponent);                 // Back up exponent.
  __ movsd(double_scratch, double_base);     // Back up base.
  __ movsd(double_scratch2, double_result);  // Load double_exponent with 1.

  // Get absolute value of exponent.
  Label no_neg, while_true, while_false;
  __ test(scratch, scratch);
  __ j(positive, &no_neg, Label::kNear);
  __ neg(scratch);
  __ bind(&no_neg);

  __ j(zero, &while_false, Label::kNear);
  __ shr(scratch, 1);
  // Above condition means CF==0 && ZF==0.  This means that the
  // bit that has been shifted out is 0 and the result is not 0.
  __ j(above, &while_true, Label::kNear);
  __ movsd(double_result, double_scratch);
  __ j(zero, &while_false, Label::kNear);

  __ bind(&while_true);
  __ shr(scratch, 1);
  __ mulsd(double_scratch, double_scratch);
  __ j(above, &while_true, Label::kNear);
  __ mulsd(double_result, double_scratch);
  __ j(not_zero, &while_true);

  __ bind(&while_false);
  // scratch has the original value of the exponent - if the exponent is
  // negative, return 1/result.
  __ test(exponent, exponent);
  __ j(positive, &done);
  __ divsd(double_scratch2, double_result);
  __ movsd(double_result, double_scratch2);
  // Test whether result is zero.  Bail out to check for subnormal result.
  // Due to subnormals, x^-y == (1/x)^y does not hold in all cases.
  __ xorps(double_scratch2, double_scratch2);
  __ ucomisd(double_scratch2, double_result);  // Result cannot be NaN.
  // double_exponent aliased as double_scratch2 has already been overwritten
  // and may not have contained the exponent value in the first place when the
  // exponent is a smi.  We reset it with exponent value before bailing out.
  __ j(not_equal, &done);
  __ Cvtsi2sd(double_exponent, exponent);

  // Returning or bailing out.
  __ bind(&call_runtime);
  {
    AllowExternalCallThatCantCauseGC scope(masm);
    __ PrepareCallCFunction(4, scratch);
    __ movsd(Operand(esp, 0 * kDoubleSize), double_base);
    __ movsd(Operand(esp, 1 * kDoubleSize), double_exponent);
    __ CallCFunction(
        ExternalReference::power_double_double_function(isolate()), 4);
  }
  // Return value is in st(0) on ia32.
  // Store it into the (fixed) result register.
  __ sub(esp, Immediate(kDoubleSize));
  __ fstp_d(Operand(esp, 0));
  __ movsd(double_result, Operand(esp, 0));
  __ add(esp, Immediate(kDoubleSize));

  __ bind(&done);
  __ ret(0);
}
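// The integer-exponent path above is binary exponentiation (square and
// multiply), driven by shifting bits out of the exponent. A C++ sketch of the
// same computation, for reference only (not compiled; the name is
// illustrative):
#if 0
static double PowIntSketch(double base, int exponent) {
  double result = 1.0;    // double_result starts at 1.
  double scratch = base;  // double_scratch backs up the base.
  uint32_t bits = exponent < 0 ? -static_cast<uint32_t>(exponent)
                               : static_cast<uint32_t>(exponent);
  while (bits != 0) {
    if (bits & 1) result *= scratch;  // Low bit set: multiply into result.
    scratch *= scratch;               // Square for the next bit.
    bits >>= 1;
  }
  // Negative exponents return 1/result; the stub then re-checks for zero
  // because x^-y == (1/x)^y does not hold for subnormal results.
  return exponent < 0 ? 1.0 / result : result;
}
#endif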
Movability CEntryStub::NeedsImmovableCode() { return kMovable; }

void CodeStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  CEntryStub::GenerateAheadOfTime(isolate);
  // It is important that the store buffer overflow stubs are generated first.
  CommonArrayConstructorStub::GenerateStubsAheadOfTime(isolate);
  StoreFastElementStub::GenerateAheadOfTime(isolate);
}

void CodeStub::GenerateFPStubs(Isolate* isolate) {
  // Generate if not already in cache.
  CEntryStub(isolate, 1, kSaveFPRegs).GetCode();
}

void CEntryStub::GenerateAheadOfTime(Isolate* isolate) {
  CEntryStub stub(isolate, 1, kDontSaveFPRegs);
  stub.GetCode();
}
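// CEntryStub (below) is the trampoline from JS into C++. The three stack
// slots it fills (argc, argv, isolate) correspond to V8's runtime calling
// convention; a target expanded from the RUNTIME_FUNCTION macro looks roughly
// like this sketch (illustrative only, not compiled here):
#if 0
Object* Runtime_Example(int args_length, Object** args_object,
                        Isolate* isolate) {
  // args_object points into the caller's argument area; the result, or the
  // exception sentinel, comes back to the stub in eax.
  return isolate->heap()->undefined_value();
}
#endif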
void CEntryStub::Generate(MacroAssembler* masm) {
  // eax: number of arguments including receiver
  // ebx: pointer to C function  (C callee-saved)
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // esi: current context (C callee-saved)
  // edi: JS function of the caller (C callee-saved)
  //
  // If argv_in_register():
  // ecx: pointer to the first argument

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Reserve space on the stack for the three arguments passed to the call. If
  // result size is greater than can be returned in registers, also reserve
  // space for the hidden argument for the result location, and space for the
  // result itself.
  int arg_stack_space = 3;

  // Enter the exit frame that transitions from JavaScript to C++.
  if (argv_in_register()) {
    DCHECK(!save_doubles());
    DCHECK(!is_builtin_exit());
    __ EnterApiExitFrame(arg_stack_space);

    // Move argc and argv into the correct registers.
    __ mov(esi, ecx);
    __ mov(edi, eax);
  } else {
    __ EnterExitFrame(
        arg_stack_space, save_doubles(),
        is_builtin_exit() ? StackFrame::BUILTIN_EXIT : StackFrame::EXIT);
  }

  // ebx: pointer to C function  (C callee-saved)
  // ebp: frame pointer  (restored after C call)
  // esp: stack pointer  (restored after C call)
  // edi: number of arguments including receiver  (C callee-saved)
  // esi: pointer to the first argument (C callee-saved)

  // Result returned in eax, or eax+edx if result size is 2.

  // Check stack alignment.
  if (FLAG_debug_code) {
    __ CheckStackAlignment();
  }
  // Call C function.
  __ mov(Operand(esp, 0 * kPointerSize), edi);  // argc.
  __ mov(Operand(esp, 1 * kPointerSize), esi);  // argv.
  __ mov(Operand(esp, 2 * kPointerSize),
         Immediate(ExternalReference::isolate_address(isolate())));
  __ call(ebx);
  // Result is in eax or edx:eax - do not destroy these registers!

  // Check result for exception sentinel.
  Label exception_returned;
  __ cmp(eax, isolate()->factory()->exception());
  __ j(equal, &exception_returned);

  // Check that there is no pending exception, otherwise we
  // should have returned the exception sentinel.
  if (FLAG_debug_code) {
    __ push(edx);
    __ mov(edx, Immediate(isolate()->factory()->the_hole_value()));
    Label okay;
    ExternalReference pending_exception_address(
        IsolateAddressId::kPendingExceptionAddress, isolate());
    __ cmp(edx, Operand::StaticVariable(pending_exception_address));
    // Cannot use check here as it attempts to generate call into runtime.
    __ j(equal, &okay, Label::kNear);
    __ int3();
    __ bind(&okay);
    __ pop(edx);
  }

  // Exit the JavaScript to C++ exit frame.
  __ LeaveExitFrame(save_doubles(), !argv_in_register());
  __ ret(0);

  // Handling of exception.
  __ bind(&exception_returned);

  ExternalReference pending_handler_context_address(
      IsolateAddressId::kPendingHandlerContextAddress, isolate());
  ExternalReference pending_handler_entrypoint_address(
      IsolateAddressId::kPendingHandlerEntrypointAddress, isolate());
  ExternalReference pending_handler_fp_address(
      IsolateAddressId::kPendingHandlerFPAddress, isolate());
  ExternalReference pending_handler_sp_address(
      IsolateAddressId::kPendingHandlerSPAddress, isolate());

  // Ask the runtime for help to determine the handler. This will set eax to
  // contain the current pending exception, don't clobber it.
  ExternalReference find_handler(Runtime::kUnwindAndFindExceptionHandler,
                                 isolate());
  {
    FrameScope scope(masm, StackFrame::MANUAL);
    __ PrepareCallCFunction(3, eax);
    __ mov(Operand(esp, 0 * kPointerSize), Immediate(0));  // argc.
    __ mov(Operand(esp, 1 * kPointerSize), Immediate(0));  // argv.
    __ mov(Operand(esp, 2 * kPointerSize),
           Immediate(ExternalReference::isolate_address(isolate())));
    __ CallCFunction(find_handler, 3);
  }

  // Retrieve the handler context, SP and FP.
  __ mov(esi, Operand::StaticVariable(pending_handler_context_address));
  __ mov(esp, Operand::StaticVariable(pending_handler_sp_address));
  __ mov(ebp, Operand::StaticVariable(pending_handler_fp_address));

  // If the handler is a JS frame, restore the context to the frame. Note that
  // the context will be set to (esi == 0) for non-JS frames.
  Label skip;
  __ test(esi, esi);
  __ j(zero, &skip, Label::kNear);
  __ mov(Operand(ebp, StandardFrameConstants::kContextOffset), esi);
  __ bind(&skip);

  // Reset the masking register. This is done independent of the underlying
  // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
  // both configurations. It is safe to always do this, because the underlying
  // register is caller-saved and can be arbitrarily clobbered.
  __ ResetSpeculationPoisonRegister();

  // Compute the handler entry address and jump to it.
  __ mov(edi, Operand::StaticVariable(pending_handler_entrypoint_address));
  __ jmp(edi);
}
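// When the runtime call above returns the exception sentinel, the unwinder
// (Runtime::kUnwindAndFindExceptionHandler) has already stored where to
// resume into per-isolate "pending handler" slots. A sketch of that contract
// (struct and field names are illustrative, not V8's actual layout):
#if 0
struct PendingHandlerSketch {
  Object* context;     // Restored into esi (0 for non-JS handler frames).
  Address entrypoint;  // Handler code address; the stub jumps here.
  Address fp;          // Frame pointer to restore into ebp.
  Address sp;          // Stack pointer to restore into esp.
};
#endif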
void JSEntryStub::Generate(MacroAssembler* masm) {
  Label invoke, handler_entry, exit;
  Label not_outermost_js, not_outermost_js_2;

  ProfileEntryHookStub::MaybeCallEntryHook(masm);

  // Set up frame.
  __ push(ebp);
  __ mov(ebp, esp);

  // Push marker in two places.
  StackFrame::Type marker = type();
  __ push(Immediate(StackFrame::TypeToMarker(marker)));  // marker
  ExternalReference context_address(IsolateAddressId::kContextAddress,
                                    isolate());
  __ push(Operand::StaticVariable(context_address));  // context
  // Save callee-saved registers (C calling conventions).
  __ push(edi);
  __ push(esi);
  __ push(ebx);

  // Save copies of the top frame descriptor on the stack.
  ExternalReference c_entry_fp(IsolateAddressId::kCEntryFPAddress, isolate());
  __ push(Operand::StaticVariable(c_entry_fp));

  // If this is the outermost JS call, set js_entry_sp value.
  ExternalReference js_entry_sp(IsolateAddressId::kJSEntrySPAddress,
                                isolate());
  __ cmp(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ j(not_equal, &not_outermost_js, Label::kNear);
  __ mov(Operand::StaticVariable(js_entry_sp), ebp);
  __ push(Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ jmp(&invoke, Label::kNear);
  __ bind(&not_outermost_js);
  __ push(Immediate(StackFrame::INNER_JSENTRY_FRAME));

  // Jump to a faked try block that does the invoke, with a faked catch
  // block that sets the pending exception.
  __ jmp(&invoke);
  __ bind(&handler_entry);
  handler_offset_ = handler_entry.pos();
  // Caught exception: Store result (exception) in the pending exception
  // field in the JSEnv and return a failure sentinel.
  ExternalReference pending_exception(
      IsolateAddressId::kPendingExceptionAddress, isolate());
  __ mov(Operand::StaticVariable(pending_exception), eax);
  __ mov(eax, Immediate(isolate()->factory()->exception()));
  __ jmp(&exit);

  // Invoke: Link this frame into the handler chain.
  __ bind(&invoke);
  __ PushStackHandler();

  // Invoke the function by calling through JS entry trampoline builtin and
  // pop the faked function when we return. Notice that we cannot store a
  // reference to the trampoline code directly in this stub, because the
  // builtin stubs may not have been generated yet.
  __ Call(EntryTrampoline(), RelocInfo::CODE_TARGET);

  // Unlink this frame from the handler chain.
  __ PopStackHandler();

  __ bind(&exit);
  // Check if the current stack frame is marked as the outermost JS frame.
  __ pop(ebx);
  __ cmp(ebx, Immediate(StackFrame::OUTERMOST_JSENTRY_FRAME));
  __ j(not_equal, &not_outermost_js_2);
  __ mov(Operand::StaticVariable(js_entry_sp), Immediate(0));
  __ bind(&not_outermost_js_2);

  // Restore the top frame descriptor from the stack.
  __ pop(Operand::StaticVariable(
      ExternalReference(IsolateAddressId::kCEntryFPAddress, isolate())));

  // Restore callee-saved registers (C calling conventions).
  __ pop(ebx);
  __ pop(esi);
  __ pop(edi);
  __ add(esp, Immediate(2 * kPointerSize));  // remove markers

  // Restore frame pointer and return.
  __ pop(ebp);
  __ ret(0);
}
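// For orientation, the frame JSEntryStub builds before pushing the stack
// handler looks roughly like this (a sketch derived from the pushes above,
// not an authoritative frame-constants description):
//
//   ebp- 0: saved ebp
//   ebp- 4: frame type marker (from type())
//   ebp- 8: saved context
//   ebp-12: saved edi
//   ebp-16: saved esi
//   ebp-20: saved ebx
//   ebp-24: saved c_entry_fp
//   ebp-28: OUTERMOST_JSENTRY_FRAME / INNER_JSENTRY_FRAME marker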
void ProfileEntryHookStub::MaybeCallEntryHookDelayed(TurboAssembler* tasm,
                                                     Zone* zone) {
  if (tasm->isolate()->function_entry_hook() != nullptr) {
    tasm->CallStubDelayed(new (zone) ProfileEntryHookStub(nullptr));
  }
}

void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
  if (masm->isolate()->function_entry_hook() != nullptr) {
    ProfileEntryHookStub stub(masm->isolate());
    masm->CallStub(&stub);
  }
}

void ProfileEntryHookStub::Generate(MacroAssembler* masm) {
  // Save volatile registers.
  const int kNumSavedRegisters = 3;
  __ push(eax);
  __ push(ecx);
  __ push(edx);

  // Calculate and push the original stack pointer.
  __ lea(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ push(eax);

  // Retrieve our return address and use it to calculate the calling
  // function's address.
  __ mov(eax, Operand(esp, (kNumSavedRegisters + 1) * kPointerSize));
  __ sub(eax, Immediate(Assembler::kCallInstructionLength));
  __ push(eax);

  // Call the entry hook.
  DCHECK_NOT_NULL(isolate()->function_entry_hook());
  __ call(FUNCTION_ADDR(isolate()->function_entry_hook()),
          RelocInfo::RUNTIME_ENTRY);
  __ add(esp, Immediate(2 * kPointerSize));

  // Restore the saved volatile registers.
  __ pop(edx);
  __ pop(ecx);
  __ pop(eax);

  __ ret(0);
}

template <class T>
static void CreateArrayDispatch(MacroAssembler* masm,
                                AllocationSiteOverrideMode mode) {
  if (mode == DISABLE_ALLOCATION_SITES) {
    T stub(masm->isolate(), GetInitialFastElementsKind(), mode);
    __ TailCallStub(&stub);
  } else if (mode == DONT_OVERRIDE) {
    int last_index =
        GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      T stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}

static void CreateArrayDispatchOneArgument(MacroAssembler* masm,
                                           AllocationSiteOverrideMode mode) {
  // ebx - allocation site (if mode != DISABLE_ALLOCATION_SITES)
  // edx - kind (if mode != DISABLE_ALLOCATION_SITES)
  // eax - number of arguments
  // edi - constructor?
  // esp[0] - return address
  // esp[4] - last argument

  STATIC_ASSERT(PACKED_SMI_ELEMENTS == 0);
  STATIC_ASSERT(HOLEY_SMI_ELEMENTS == 1);
  STATIC_ASSERT(PACKED_ELEMENTS == 2);
  STATIC_ASSERT(HOLEY_ELEMENTS == 3);
  STATIC_ASSERT(PACKED_DOUBLE_ELEMENTS == 4);
  STATIC_ASSERT(HOLEY_DOUBLE_ELEMENTS == 5);

  if (mode == DISABLE_ALLOCATION_SITES) {
    ElementsKind initial = GetInitialFastElementsKind();
    ElementsKind holey_initial = GetHoleyElementsKind(initial);

    ArraySingleArgumentConstructorStub stub_holey(
        masm->isolate(), holey_initial, DISABLE_ALLOCATION_SITES);
    __ TailCallStub(&stub_holey);
  } else if (mode == DONT_OVERRIDE) {
    // Is the low bit set? If so, we are holey and that is good.
    Label normal_sequence;
    __ test_b(edx, Immediate(1));
    __ j(not_zero, &normal_sequence);

    // We are going to create a holey array, but our kind is non-holey.
    // Fix kind and retry.
    __ inc(edx);

    if (FLAG_debug_code) {
      Handle<Map> allocation_site_map =
          masm->isolate()->factory()->allocation_site_map();
      __ cmp(FieldOperand(ebx, 0), Immediate(allocation_site_map));
      __ Assert(equal, AbortReason::kExpectedAllocationSite);
    }

    // Save the resulting elements kind in type info. We can't just store the
    // kind in the AllocationSite::transition_info field because elements kind
    // is restricted to a portion of the field; upper bits need to be left
    // alone.
    STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
    __ add(
        FieldOperand(ebx, AllocationSite::kTransitionInfoOrBoilerplateOffset),
        Immediate(Smi::FromInt(kFastElementsKindPackedToHoley)));

    __ bind(&normal_sequence);
    int last_index =
        GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
    for (int i = 0; i <= last_index; ++i) {
      Label next;
      ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
      __ cmp(edx, kind);
      __ j(not_equal, &next);
      ArraySingleArgumentConstructorStub stub(masm->isolate(), kind);
      __ TailCallStub(&stub);
      __ bind(&next);
    }

    // If we reached this point there is a problem.
    __ Abort(AbortReason::kUnexpectedElementsKindInArrayConstructor);
  } else {
    UNREACHABLE();
  }
}
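// The STATIC_ASSERTs above pin the ElementsKind numbering so that each packed
// kind becomes its holey counterpart by setting the low bit, which is why the
// dispatch above can "fix kind and retry" with a single inc. A sketch (not
// compiled; the helper name is illustrative):
#if 0
static ElementsKind ToHoleySketch(ElementsKind packed) {
  // PACKED_SMI_ELEMENTS (0) -> HOLEY_SMI_ELEMENTS (1), and so on.
  return static_cast<ElementsKind>(packed | 1);
}
#endif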
template <class T>
static void ArrayConstructorStubAheadOfTimeHelper(Isolate* isolate) {
  int to_index =
      GetSequenceIndexFromFastElementsKind(TERMINAL_FAST_ELEMENTS_KIND);
  for (int i = 0; i <= to_index; ++i) {
    ElementsKind kind = GetFastElementsKindFromSequenceIndex(i);
    T stub(isolate, kind);
    stub.GetCode();
    if (AllocationSite::ShouldTrack(kind)) {
      T stub1(isolate, kind, DISABLE_ALLOCATION_SITES);
      stub1.GetCode();
    }
  }
}

void CommonArrayConstructorStub::GenerateStubsAheadOfTime(Isolate* isolate) {
  ArrayConstructorStubAheadOfTimeHelper<ArrayNoArgumentConstructorStub>(
      isolate);
  ArrayConstructorStubAheadOfTimeHelper<ArraySingleArgumentConstructorStub>(
      isolate);
  ArrayNArgumentsConstructorStub stub(isolate);
  stub.GetCode();

  ElementsKind kinds[2] = {PACKED_ELEMENTS, HOLEY_ELEMENTS};
  for (int i = 0; i < 2; i++) {
    // For internal arrays we only need a few things
    InternalArrayNoArgumentConstructorStub stubh1(isolate, kinds[i]);
    stubh1.GetCode();
    InternalArraySingleArgumentConstructorStub stubh2(isolate, kinds[i]);
    stubh2.GetCode();
  }
}

void ArrayConstructorStub::GenerateDispatchToArrayStub(
    MacroAssembler* masm, AllocationSiteOverrideMode mode) {
  Label not_zero_case, not_one_case;
  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  CreateArrayDispatch<ArrayNoArgumentConstructorStub>(masm, mode);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);
  CreateArrayDispatchOneArgument(masm, mode);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stub(masm->isolate());
  __ TailCallStub(&stub);
}

void ArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc (only if argument_count() is ANY or MORE_THAN_ONE)
  //  -- ebx : AllocationSite or undefined
  //  -- edi : constructor
  //  -- edx : Original constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);

    // We should either have undefined in ebx or a valid AllocationSite
    __ AssertUndefinedOrAllocationSite(ebx);
  }

  Label subclassing;

  // Enter the context of the Array function.
  __ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));

  __ cmp(edx, edi);
  __ j(not_equal, &subclassing);

  Label no_info;
  // If the feedback vector is the undefined value call an array constructor
  // that doesn't use AllocationSites.
  __ cmp(ebx, isolate()->factory()->undefined_value());
  __ j(equal, &no_info);

  // Only look at the lower 16 bits of the transition info.
  __ mov(edx,
         FieldOperand(ebx, AllocationSite::kTransitionInfoOrBoilerplateOffset));
  __ SmiUntag(edx);
  STATIC_ASSERT(AllocationSite::ElementsKindBits::kShift == 0);
  __ and_(edx, Immediate(AllocationSite::ElementsKindBits::kMask));
  GenerateDispatchToArrayStub(masm, DONT_OVERRIDE);

  __ bind(&no_info);
  GenerateDispatchToArrayStub(masm, DISABLE_ALLOCATION_SITES);

  // Subclassing.
  __ bind(&subclassing);
  __ mov(Operand(esp, eax, times_pointer_size, kPointerSize), edi);
  __ add(eax, Immediate(3));
  __ PopReturnAddressTo(ecx);
  __ Push(edx);
  __ Push(ebx);
  __ PushReturnAddressFrom(ecx);
  __ JumpToExternalReference(ExternalReference(Runtime::kNewArray, isolate()));
}
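// The subclassing path above is taken when new.target differs from the Array
// constructor itself, e.g. (illustrative JS):
//
//   class MyArray extends Array {}
//   new MyArray(1, 2, 3);  // edx (new.target) != edi (Array) -> kNewArray.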
void InternalArrayConstructorStub::GenerateCase(MacroAssembler* masm,
                                                ElementsKind kind) {
  Label not_zero_case, not_one_case;
  Label normal_sequence;

  __ test(eax, eax);
  __ j(not_zero, &not_zero_case);
  InternalArrayNoArgumentConstructorStub stub0(isolate(), kind);
  __ TailCallStub(&stub0);

  __ bind(&not_zero_case);
  __ cmp(eax, 1);
  __ j(greater, &not_one_case);

  if (IsFastPackedElementsKind(kind)) {
    // We might need to create a holey array; look at the first argument.
    __ mov(ecx, Operand(esp, kPointerSize));
    __ test(ecx, ecx);
    __ j(zero, &normal_sequence);

    InternalArraySingleArgumentConstructorStub stub1_holey(
        isolate(), GetHoleyElementsKind(kind));
    __ TailCallStub(&stub1_holey);
  }

  __ bind(&normal_sequence);
  InternalArraySingleArgumentConstructorStub stub1(isolate(), kind);
  __ TailCallStub(&stub1);

  __ bind(&not_one_case);
  ArrayNArgumentsConstructorStub stubN(isolate());
  __ TailCallStub(&stubN);
}

void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- eax : argc
  //  -- edi : constructor
  //  -- esp[0] : return address
  //  -- esp[4] : last argument
  // -----------------------------------

  if (FLAG_debug_code) {
    // The array construct code is only set for the global and natives
    // builtin Array functions which always have maps.

    // Initial map for the builtin Array function should be a map.
    __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));
    // Will both indicate a nullptr and a Smi.
    __ test(ecx, Immediate(kSmiTagMask));
    __ Assert(not_zero, AbortReason::kUnexpectedInitialMapForArrayFunction);
    __ CmpObjectType(ecx, MAP_TYPE, ecx);
    __ Assert(equal, AbortReason::kUnexpectedInitialMapForArrayFunction);
  }

  // Figure out the right elements kind
  __ mov(ecx, FieldOperand(edi, JSFunction::kPrototypeOrInitialMapOffset));

  // Load the map's "bit field 2" into |result|. We only need the first byte,
  // but the following masking takes care of that anyway.
  __ mov(ecx, FieldOperand(ecx, Map::kBitField2Offset));
  // Retrieve elements_kind from bit field 2.
  __ DecodeField<Map::ElementsKindBits>(ecx);

  if (FLAG_debug_code) {
    Label done;
    __ cmp(ecx, Immediate(PACKED_ELEMENTS));
    __ j(equal, &done);
    __ cmp(ecx, Immediate(HOLEY_ELEMENTS));
    __ Assert(
        equal,
        AbortReason::kInvalidElementsKindForInternalArrayOrInternalPackedArray);
    __ bind(&done);
  }

  Label fast_elements_case;
  __ cmp(ecx, Immediate(PACKED_ELEMENTS));
  __ j(equal, &fast_elements_case);
  GenerateCase(masm, HOLEY_ELEMENTS);

  __ bind(&fast_elements_case);
  GenerateCase(masm, PACKED_ELEMENTS);
}

// Generates an Operand for saving parameters after PrepareCallApiFunction.
static Operand ApiParameterOperand(int index) {
  return Operand(esp, index * kPointerSize);
}
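// After PrepareCallApiFunction(masm, n) below, ApiParameterOperand(i) simply
// addresses the i-th outgoing stack slot. Illustrative usage:
//
//   PrepareCallApiFunction(masm, 2);
//   __ mov(ApiParameterOperand(0), eax);  // First C argument, at esp+0.
//   __ mov(ApiParameterOperand(1), ebx);  // Second C argument, at esp+4.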
// Prepares stack to put arguments (aligns and so on). Reserves
// space for return value if needed (assumes the return value is a handle).
// Arguments must be stored in ApiParameterOperand(0), ApiParameterOperand(1)
// etc. Saves context (esi). If space was reserved for return value then
// stores the pointer to the reserved slot into esi.
static void PrepareCallApiFunction(MacroAssembler* masm, int argc) {
  __ EnterApiExitFrame(argc);
  if (__ emit_debug_code()) {
    __ mov(esi, Immediate(bit_cast<int32_t>(kZapValue)));
  }
}

// Calls an API function. Allocates HandleScope, extracts returned value
// from handle and propagates exceptions. Clobbers ebx, edi and
// caller-save registers. Restores context. On return removes
// stack_space * kPointerSize (GCed).
static void CallApiFunctionAndReturn(MacroAssembler* masm,
                                     Register function_address,
                                     ExternalReference thunk_ref,
                                     Operand thunk_last_arg, int stack_space,
                                     Operand* stack_space_operand,
                                     Operand return_value_operand) {
  Isolate* isolate = masm->isolate();

  ExternalReference next_address =
      ExternalReference::handle_scope_next_address(isolate);
  ExternalReference limit_address =
      ExternalReference::handle_scope_limit_address(isolate);
  ExternalReference level_address =
      ExternalReference::handle_scope_level_address(isolate);
  DCHECK(edx == function_address);
  // Allocate HandleScope in callee-save registers.
  __ mov(ebx, Operand::StaticVariable(next_address));
  __ mov(edi, Operand::StaticVariable(limit_address));
  __ add(Operand::StaticVariable(level_address), Immediate(1));

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_enter_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label profiler_disabled;
  Label end_profiler_check;
  __ mov(eax, Immediate(ExternalReference::is_profiling_address(isolate)));
  __ cmpb(Operand(eax, 0), Immediate(0));
  __ j(zero, &profiler_disabled);

  // Additional parameter is the address of the actual getter function.
  __ mov(thunk_last_arg, function_address);
  // Call the api function.
  __ mov(eax, Immediate(thunk_ref));
  __ call(eax);
  __ jmp(&end_profiler_check);

  __ bind(&profiler_disabled);
  // Call the api function.
  __ call(function_address);
  __ bind(&end_profiler_check);

  if (FLAG_log_timer_events) {
    FrameScope frame(masm, StackFrame::MANUAL);
    __ PushSafepointRegisters();
    __ PrepareCallCFunction(1, eax);
    __ mov(Operand(esp, 0),
           Immediate(ExternalReference::isolate_address(isolate)));
    __ CallCFunction(ExternalReference::log_leave_external_function(isolate),
                     1);
    __ PopSafepointRegisters();
  }

  Label prologue;
  // Load the value from ReturnValue
  __ mov(eax, return_value_operand);

  Label promote_scheduled_exception;
  Label delete_allocated_handles;
  Label leave_exit_frame;

  __ bind(&prologue);
  // No more valid handles (the result handle was the last one). Restore
  // previous handle scope.
  __ mov(Operand::StaticVariable(next_address), ebx);
  __ sub(Operand::StaticVariable(level_address), Immediate(1));
  __ Assert(above_equal, AbortReason::kInvalidHandleScopeLevel);
  __ cmp(edi, Operand::StaticVariable(limit_address));
  __ j(not_equal, &delete_allocated_handles);

  // Leave the API exit frame.
  __ bind(&leave_exit_frame);
  if (stack_space_operand != nullptr) {
    __ mov(ebx, *stack_space_operand);
  }
  __ LeaveApiExitFrame();

  // Check if the function scheduled an exception.
  ExternalReference scheduled_exception_address =
      ExternalReference::scheduled_exception_address(isolate);
  __ cmp(Operand::StaticVariable(scheduled_exception_address),
         Immediate(isolate->factory()->the_hole_value()));
  __ j(not_equal, &promote_scheduled_exception);

#if DEBUG
  // Check if the function returned a valid JavaScript value.
  Label ok;
  Register return_value = eax;
  Register map = ecx;

  __ JumpIfSmi(return_value, &ok, Label::kNear);
  __ mov(map, FieldOperand(return_value, HeapObject::kMapOffset));

  __ CmpInstanceType(map, LAST_NAME_TYPE);
  __ j(below_equal, &ok, Label::kNear);

  __ CmpInstanceType(map, FIRST_JS_RECEIVER_TYPE);
  __ j(above_equal, &ok, Label::kNear);

  __ cmp(map, isolate->factory()->heap_number_map());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->undefined_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->true_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->false_value());
  __ j(equal, &ok, Label::kNear);

  __ cmp(return_value, isolate->factory()->null_value());
  __ j(equal, &ok, Label::kNear);

  __ Abort(AbortReason::kAPICallReturnedInvalidObject);

  __ bind(&ok);
#endif

  if (stack_space_operand != nullptr) {
    DCHECK_EQ(0, stack_space);
    __ pop(ecx);
    __ add(esp, ebx);
    __ jmp(ecx);
  } else {
    __ ret(stack_space * kPointerSize);
  }

  // Re-throw by promoting a scheduled exception.
  __ bind(&promote_scheduled_exception);
  __ TailCallRuntime(Runtime::kPromoteScheduledException);

  // HandleScope limit has changed. Delete allocated extensions.
  ExternalReference delete_extensions =
      ExternalReference::delete_handle_scope_extensions(isolate);
  __ bind(&delete_allocated_handles);
  __ mov(Operand::StaticVariable(limit_address), edi);
  __ mov(edi, eax);
  __ mov(Operand(esp, 0),
         Immediate(ExternalReference::isolate_address(isolate)));
  __ mov(eax, Immediate(delete_extensions));
  __ call(eax);
  __ mov(eax, edi);
  __ jmp(&leave_exit_frame);
}
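// CallApiFunctionAndReturn above hand-inlines roughly the following C++,
// minus the profiler thunk and the exit-frame handling. This is a sketch of
// the intent only (names are approximations, not the real implementation),
// and it is not compiled:
#if 0
Object* CallApiFunctionSketch(Isolate* isolate, Object* (*function)()) {
  HandleScopeData* data = isolate->handle_scope_data();
  Object** prev_next = data->next;    // Kept in ebx.
  Object** prev_limit = data->limit;  // Kept in edi.
  data->level++;
  Object* result = function();  // The __ call(function_address) above.
  data->next = prev_next;       // Restore the previous handle scope.
  data->level--;
  if (data->limit != prev_limit) {
    // The delete_allocated_handles path.
    HandleScope::DeleteExtensions(isolate);
  }
  if (isolate->has_scheduled_exception()) {
    // The promote_scheduled_exception path -> kPromoteScheduledException.
  }
  return result;
}
#endif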
void CallApiCallbackStub::Generate(MacroAssembler* masm) {
  // ----------- S t a t e -------------
  //  -- ebx                 : call_data
  //  -- ecx                 : holder
  //  -- edx                 : api_function_address
  //  -- esi                 : context
  //  --
  //  -- esp[0]              : return address
  //  -- esp[4]              : last argument
  //  -- ...
  //  -- esp[argc * 4]       : first argument
  //  -- esp[(argc + 1) * 4] : receiver
  // -----------------------------------

  Register call_data = ebx;
  Register holder = ecx;
  Register api_function_address = edx;
  Register return_address = eax;

  typedef FunctionCallbackArguments FCA;

  STATIC_ASSERT(FCA::kArgsLength == 6);
  STATIC_ASSERT(FCA::kNewTargetIndex == 5);
  STATIC_ASSERT(FCA::kDataIndex == 4);
  STATIC_ASSERT(FCA::kReturnValueOffset == 3);
  STATIC_ASSERT(FCA::kReturnValueDefaultValueIndex == 2);
  STATIC_ASSERT(FCA::kIsolateIndex == 1);
  STATIC_ASSERT(FCA::kHolderIndex == 0);

  __ pop(return_address);

  // new target
  __ PushRoot(Heap::kUndefinedValueRootIndex);

  // call data
  __ push(call_data);

  // return value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  // return value default
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  // isolate
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  // holder
  __ push(holder);

  Register scratch = call_data;
  __ mov(scratch, esp);

  // push return address
  __ push(return_address);

  // API function gets reference to the v8::Arguments. If CPU profiler
  // is enabled wrapper function will be called and we need to pass
  // address of the callback as additional parameter, always allocate
  // space for it.
  const int kApiArgc = 1 + 1;

  // Allocate the v8::Arguments structure in the arguments' space since
  // it's not controlled by GC.
  const int kApiStackSpace = 3;

  PrepareCallApiFunction(masm, kApiArgc + kApiStackSpace);

  // FunctionCallbackInfo::implicit_args_.
  __ mov(ApiParameterOperand(2), scratch);
  __ add(scratch, Immediate((argc() + FCA::kArgsLength - 1) * kPointerSize));
  // FunctionCallbackInfo::values_.
  __ mov(ApiParameterOperand(3), scratch);
  // FunctionCallbackInfo::length_.
  __ Move(ApiParameterOperand(4), Immediate(argc()));

  // v8::InvocationCallback's argument.
  __ lea(scratch, ApiParameterOperand(2));
  __ mov(ApiParameterOperand(0), scratch);

  ExternalReference thunk_ref =
      ExternalReference::invoke_function_callback(masm->isolate());

  // Stores return the first js argument
  int return_value_offset = 2 + FCA::kReturnValueOffset;
  Operand return_value_operand(ebp, return_value_offset * kPointerSize);
  const int stack_space = argc() + FCA::kArgsLength + 1;
  Operand* stack_space_operand = nullptr;
  CallApiFunctionAndReturn(masm, api_function_address, thunk_ref,
                           ApiParameterOperand(1), stack_space,
                           stack_space_operand, return_value_operand);
}
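// The FunctionCallbackInfo assembled above is what an embedder-facing API
// callback receives. For orientation, a typical callback on the public API
// side looks like this (illustrative, using public v8.h types; not compiled
// here):
#if 0
void ExampleCallback(const v8::FunctionCallbackInfo<v8::Value>& info) {
  // info.Length() reads length_, info[0] indexes values_, and
  // info.GetReturnValue() writes the kReturnValueOffset slot pushed above.
  info.GetReturnValue().Set(info.Length());
}
#endif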
void CallApiGetterStub::Generate(MacroAssembler* masm) {
  // Build v8::PropertyCallbackInfo::args_ array on the stack and push property
  // name below the exit frame to make GC aware of them.
  STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0);
  STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1);
  STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3);
  STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4);
  STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5);
  STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6);
  STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7);

  Register receiver = ApiGetterDescriptor::ReceiverRegister();
  Register holder = ApiGetterDescriptor::HolderRegister();
  Register callback = ApiGetterDescriptor::CallbackRegister();
  Register scratch = ebx;
  DCHECK(!AreAliased(receiver, holder, callback, scratch));

  __ pop(scratch);  // Pop return address to extend the frame.
  __ push(receiver);
  __ push(FieldOperand(callback, AccessorInfo::kDataOffset));
  __ PushRoot(Heap::kUndefinedValueRootIndex);  // ReturnValue
  // ReturnValue default value
  __ PushRoot(Heap::kUndefinedValueRootIndex);
  __ push(Immediate(ExternalReference::isolate_address(isolate())));
  __ push(holder);
  __ push(Immediate(Smi::kZero));  // should_throw_on_error -> false
  __ push(FieldOperand(callback, AccessorInfo::kNameOffset));
  __ push(scratch);  // Restore return address.

  // v8::PropertyCallbackInfo::args_ array and name handle.
  const int kStackUnwindSpace = PropertyCallbackArguments::kArgsLength + 1;

  // Allocate v8::PropertyCallbackInfo object, arguments for callback and
  // space for optional callback address parameter (in case CPU profiler is
  // active) in non-GCed stack space.
  const int kApiArgc = 3 + 1;

  // Load address of v8::PropertyAccessorInfo::args_ array.
  __ lea(scratch, Operand(esp, 2 * kPointerSize));

  PrepareCallApiFunction(masm, kApiArgc);
  // Create v8::PropertyCallbackInfo object on the stack and initialize
  // its args_ field.
  Operand info_object = ApiParameterOperand(3);
  __ mov(info_object, scratch);

  // Name as handle.
  __ sub(scratch, Immediate(kPointerSize));
  __ mov(ApiParameterOperand(0), scratch);
  // Arguments pointer.
  __ lea(scratch, info_object);
  __ mov(ApiParameterOperand(1), scratch);
  // Reserve space for optional callback address parameter.
  Operand thunk_last_arg = ApiParameterOperand(2);

  ExternalReference thunk_ref =
      ExternalReference::invoke_accessor_getter_callback(isolate());

  __ mov(scratch, FieldOperand(callback, AccessorInfo::kJsGetterOffset));
  Register function_address = edx;
  __ mov(function_address,
         FieldOperand(scratch, Foreign::kForeignAddressOffset));
  // +3 is to skip prolog, return address and name handle.
  Operand return_value_operand(
      ebp, (PropertyCallbackArguments::kReturnValueOffset + 3) * kPointerSize);
  CallApiFunctionAndReturn(masm, function_address, thunk_ref, thunk_last_arg,
                           kStackUnwindSpace, nullptr, return_value_operand);
}
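// The target loaded into edx above is an AccessorNameGetterCallback from the
// public API; its shape, for orientation (illustrative, not compiled here):
#if 0
void ExampleGetter(v8::Local<v8::Name> name,
                   const v8::PropertyCallbackInfo<v8::Value>& info) {
  // |name| is the handle pushed just below the exit frame, and |info| wraps
  // the seven-slot args_ array built above.
  info.GetReturnValue().SetUndefined();
}
#endif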
#undef __

}  // namespace internal
}  // namespace v8

#endif  // V8_TARGET_ARCH_IA32