Diffstat (limited to 'deps/v8/src/builtins/x64/builtins-x64.cc')
-rw-r--r--  deps/v8/src/builtins/x64/builtins-x64.cc  173
1 file changed, 81 insertions(+), 92 deletions(-)
diff --git a/deps/v8/src/builtins/x64/builtins-x64.cc b/deps/v8/src/builtins/x64/builtins-x64.cc
index 5c2094105c..2bc7768417 100644
--- a/deps/v8/src/builtins/x64/builtins-x64.cc
+++ b/deps/v8/src/builtins/x64/builtins-x64.cc
@@ -87,7 +87,7 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
     __ Push(rcx);
 
     // The receiver for the builtin/api call.
-    __ PushRoot(Heap::kTheHoleValueRootIndex);
+    __ PushRoot(RootIndex::kTheHoleValue);
 
     // Set up pointer to last argument.
     __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
@@ -135,6 +135,26 @@ void Generate_JSBuiltinsConstructStubHelper(MacroAssembler* masm) {
   __ ret(0);
 }
 
+void Generate_StackOverflowCheck(
+    MacroAssembler* masm, Register num_args, Register scratch,
+    Label* stack_overflow,
+    Label::Distance stack_overflow_distance = Label::kFar) {
+  // Check the stack for overflow. We are not trying to catch
+  // interruptions (e.g. debug break and preemption) here, so the "real stack
+  // limit" is checked.
+  __ LoadRoot(kScratchRegister, RootIndex::kRealStackLimit);
+  __ movp(scratch, rsp);
+  // Make scratch the space we have left. The stack might already be overflowed
+  // here which will cause scratch to become negative.
+  __ subp(scratch, kScratchRegister);
+  __ sarp(scratch, Immediate(kPointerSizeLog2));
+  // Check if the arguments will overflow the stack.
+  __ cmpp(scratch, num_args);
+  // Signed comparison.
+  __ j(less_equal, stack_overflow, stack_overflow_distance);
+}
+
 }  // namespace
 
 // The construct stub for ES5 constructor functions and ES6 class constructors.
@@ -157,7 +177,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     __ Push(rsi);
     __ Push(rcx);
     __ Push(rdi);
-    __ PushRoot(Heap::kTheHoleValueRootIndex);
+    __ PushRoot(RootIndex::kTheHoleValue);
     __ Push(rdx);
 
     // ----------- S t a t e -------------
@@ -181,7 +201,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
 
     // Else: use TheHoleValue as receiver for constructor call
     __ bind(&not_create_implicit_receiver);
-    __ LoadRoot(rax, Heap::kTheHoleValueRootIndex);
+    __ LoadRoot(rax, RootIndex::kTheHoleValue);
 
     // ----------- S t a t e -------------
     //  -- rax                          implicit receiver
@@ -221,6 +241,21 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     // Set up pointer to last argument.
     __ leap(rbx, Operand(rbp, StandardFrameConstants::kCallerSPOffset));
 
+    // Check if we have enough stack space to push all arguments.
+    // Argument count in rax. Clobbers rcx.
+    Label enough_stack_space, stack_overflow;
+    Generate_StackOverflowCheck(masm, rax, rcx, &stack_overflow, Label::kNear);
+    __ jmp(&enough_stack_space, Label::kNear);
+
+    __ bind(&stack_overflow);
+    // Restore context from the frame.
+    __ movp(rsi, Operand(rbp, ConstructFrameConstants::kContextOffset));
+    __ CallRuntime(Runtime::kThrowStackOverflow);
+    // This should be unreachable.
+    __ int3();
+
+    __ bind(&enough_stack_space);
+
     // Copy arguments and receiver to the expression stack.
     Label loop, entry;
     __ movp(rcx, rax);
@@ -269,8 +304,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     Label use_receiver, do_throw, leave_frame;
 
     // If the result is undefined, we jump out to using the implicit receiver.
-    __ JumpIfRoot(rax, Heap::kUndefinedValueRootIndex, &use_receiver,
-                  Label::kNear);
+    __ JumpIfRoot(rax, RootIndex::kUndefinedValue, &use_receiver, Label::kNear);
 
     // Otherwise we do a smi check and fall through to check if the return value
     // is a valid receiver.
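Annotation: the helper hoisted into the anonymous namespace above is the one piece of reusable logic in these hunks, so its arithmetic is worth spelling out. Below is a minimal standalone C++ sketch of the same computation; the inputs are hypothetical stand-ins for the rsp register and the RealStackLimit root, and kPointerSizeLog2 is 3 on x64 (8-byte slots).

```cpp
#include <cstdint>

// Standalone model of Generate_StackOverflowCheck's arithmetic.
constexpr int kPointerSizeLog2 = 3;

bool StackWouldOverflow(uintptr_t rsp, uintptr_t real_stack_limit,
                        int64_t num_args) {
  // "Make scratch the space we have left": subp may go negative if the
  // stack is already overflowed, which is why everything stays signed.
  int64_t scratch = static_cast<int64_t>(rsp) -
                    static_cast<int64_t>(real_stack_limit);
  scratch >>= kPointerSizeLog2;  // sarp: bytes -> pointer-sized slots
  return scratch <= num_args;    // cmpp + j(less_equal, stack_overflow)
}
```

The signed comparison is the point: if rsp is already below the limit, the slot count comes out negative and still compares less-equal to any argument count, so the overflow path is taken.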
@@ -292,7 +326,7 @@ void Builtins::Generate_JSConstructStubGeneric(MacroAssembler* masm) {
     // on-stack receiver as the result.
     __ bind(&use_receiver);
     __ movp(rax, Operand(rsp, 0 * kPointerSize));
-    __ JumpIfRoot(rax, Heap::kTheHoleValueRootIndex, &do_throw, Label::kNear);
+    __ JumpIfRoot(rax, RootIndex::kTheHoleValue, &do_throw, Label::kNear);
 
     __ bind(&leave_frame);
     // Restore the arguments count.
@@ -317,25 +351,6 @@ void Builtins::Generate_ConstructedNonConstructable(MacroAssembler* masm) {
   __ CallRuntime(Runtime::kThrowConstructedNonConstructable);
 }
 
-static void Generate_StackOverflowCheck(
-    MacroAssembler* masm, Register num_args, Register scratch,
-    Label* stack_overflow,
-    Label::Distance stack_overflow_distance = Label::kFar) {
-  // Check the stack for overflow. We are not trying to catch
-  // interruptions (e.g. debug break and preemption) here, so the "real stack
-  // limit" is checked.
-  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
-  __ movp(scratch, rsp);
-  // Make scratch the space we have left. The stack might already be overflowed
-  // here which will cause scratch to become negative.
-  __ subp(scratch, kScratchRegister);
-  __ sarp(scratch, Immediate(kPointerSizeLog2));
-  // Check if the arguments will overflow the stack.
-  __ cmpp(scratch, num_args);
-  // Signed comparison.
-  __ j(less_equal, stack_overflow, stack_overflow_distance);
-}
-
 static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
                                              bool is_construct) {
   ProfileEntryHookStub::MaybeCallEntryHook(masm);
@@ -533,7 +548,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   // Check the stack for overflow. We are not trying to catch interruptions
   // (i.e. debug break and preemption) here, so check the "real stack limit".
   Label stack_overflow;
-  __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
+  __ CompareRoot(rsp, RootIndex::kRealStackLimit);
   __ j(below, &stack_overflow);
 
   // Pop return address.
@@ -602,7 +617,7 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
     __ Push(rdx);
     __ Push(rdi);
     // Push hole as receiver since we do not use it for stepping.
-    __ PushRoot(Heap::kTheHoleValueRootIndex);
+    __ PushRoot(RootIndex::kTheHoleValue);
     __ CallRuntime(Runtime::kDebugOnFunctionCall);
     __ Pop(rdx);
    __ movp(rdi, FieldOperand(rdx, JSGeneratorObject::kFunctionOffset));
@@ -905,7 +920,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
     Label ok;
     __ movp(rax, rsp);
     __ subp(rax, rcx);
-    __ CompareRoot(rax, Heap::kRealStackLimitRootIndex);
+    __ CompareRoot(rax, RootIndex::kRealStackLimit);
     __ j(above_equal, &ok, Label::kNear);
     __ CallRuntime(Runtime::kThrowStackOverflow);
     __ bind(&ok);
@@ -913,7 +928,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   // If ok, push undefined as the initial value for all register file entries.
   Label loop_header;
   Label loop_check;
-  __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(rax, RootIndex::kUndefinedValue);
   __ j(always, &loop_check, Label::kNear);
   __ bind(&loop_header);
   // TODO(rmcilroy): Consider doing more than one push per loop iteration.
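Annotation: most of the remaining hunks are the same mechanical substitution, so one note covers them all: the unscoped Heap::k*RootIndex enumerators become a scoped RootIndex enum class. A schematic sketch of the two styles (abbreviated to three roots; the real enum is much larger and lives elsewhere in the V8 sources):

```cpp
// Old style: unscoped enumerators hanging off Heap, one per root, with
// the "RootIndex" suffix repeated in every name.
struct HeapOldStyle {
  enum RootListIndex {
    kTheHoleValueRootIndex,
    kUndefinedValueRootIndex,
    kRealStackLimitRootIndex,
  };
};

// New style: a scoped enum class. Call sites gain type safety (no
// implicit conversion to or from int) and lose the per-name suffix.
enum class RootIndex {
  kTheHoleValue,
  kUndefinedValue,
  kRealStackLimit,
};
```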
@@ -937,7 +952,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
   __ bind(&no_incoming_new_target_or_generator_register);
 
   // Load accumulator with undefined.
-  __ LoadRoot(kInterpreterAccumulatorRegister, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(kInterpreterAccumulatorRegister, RootIndex::kUndefinedValue);
 
   // Load the dispatch table into a register and dispatch to the bytecode
   // handler at the current bytecode offset.
@@ -1026,7 +1041,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
 
   // Push "undefined" as the receiver arg if we need to.
   if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
-    __ PushRoot(Heap::kUndefinedValueRootIndex);
+    __ PushRoot(RootIndex::kUndefinedValue);
     __ decl(rcx);  // Subtract one for receiver.
   }
 
@@ -1251,7 +1266,7 @@ void Builtins::Generate_InstantiateAsmJs(MacroAssembler* masm) {
             rbp, StandardFrameConstants::kCallerSPOffset + i * kPointerSize));
       }
       for (int i = 0; i < 3 - j; ++i) {
-        __ PushRoot(Heap::kUndefinedValueRootIndex);
+        __ PushRoot(RootIndex::kUndefinedValue);
       }
       if (j < 3) {
         __ jmp(&args_done, Label::kNear);
@@ -1370,7 +1385,7 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
   {
     Label no_arg_array, no_this_arg;
     StackArgumentsAccessor args(rsp, rax);
-    __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(rdx, RootIndex::kUndefinedValue);
     __ movp(rbx, rdx);
     __ movp(rdi, args.GetReceiverOperand());
     __ testp(rax, rax);
@@ -1402,9 +1417,8 @@ void Builtins::Generate_FunctionPrototypeApply(MacroAssembler* masm) {
 
   // 3. Tail call with no arguments if argArray is null or undefined.
   Label no_arguments;
-  __ JumpIfRoot(rbx, Heap::kNullValueRootIndex, &no_arguments, Label::kNear);
-  __ JumpIfRoot(rbx, Heap::kUndefinedValueRootIndex, &no_arguments,
-                Label::kNear);
+  __ JumpIfRoot(rbx, RootIndex::kNullValue, &no_arguments, Label::kNear);
+  __ JumpIfRoot(rbx, RootIndex::kUndefinedValue, &no_arguments, Label::kNear);
 
   // 4a. Apply the receiver to the given argArray.
   __ Jump(BUILTIN_CODE(masm->isolate(), CallWithArrayLike),
@@ -1438,7 +1452,7 @@ void Builtins::Generate_FunctionPrototypeCall(MacroAssembler* masm) {
     __ testp(rax, rax);
     __ j(not_zero, &done, Label::kNear);
     __ PopReturnAddressTo(rbx);
-    __ PushRoot(Heap::kUndefinedValueRootIndex);
+    __ PushRoot(RootIndex::kUndefinedValue);
     __ PushReturnAddressFrom(rbx);
     __ incp(rax);
     __ bind(&done);
@@ -1488,7 +1502,7 @@ void Builtins::Generate_ReflectApply(MacroAssembler* masm) {
   {
     Label done;
     StackArgumentsAccessor args(rsp, rax);
-    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(rdi, RootIndex::kUndefinedValue);
     __ movp(rdx, rdi);
     __ movp(rbx, rdi);
     __ cmpp(rax, Immediate(1));
@@ -1539,7 +1553,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
   {
     Label done;
     StackArgumentsAccessor args(rsp, rax);
-    __ LoadRoot(rdi, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(rdi, RootIndex::kUndefinedValue);
     __ movp(rdx, rdi);
     __ movp(rbx, rdi);
     __ cmpp(rax, Immediate(1));
@@ -1554,7 +1568,7 @@ void Builtins::Generate_ReflectConstruct(MacroAssembler* masm) {
     __ bind(&done);
     __ PopReturnAddressTo(rcx);
     __ leap(rsp, Operand(rsp, rax, times_pointer_size, kPointerSize));
-    __ PushRoot(Heap::kUndefinedValueRootIndex);
+    __ PushRoot(RootIndex::kUndefinedValue);
     __ PushReturnAddressFrom(rcx);
   }
 
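Annotation: FunctionPrototypeApply, ReflectApply, and ReflectConstruct above all share one defaulting pattern: pre-load undefined into the registers holding optional arguments, then overwrite only the ones the caller actually supplied. A schematic of what the apply stub computes, using a hypothetical tagged-value type:

```cpp
#include <cstddef>

using Object = void*;  // hypothetical stand-in for a tagged V8 value

// Models Generate_FunctionPrototypeApply's defaulting: rdx (thisArg) and
// rbx (argArray) start as undefined and are overwritten only if present.
void GetApplyArguments(const Object* args, std::size_t argc, Object undefined,
                       Object* this_arg, Object* arg_array) {
  *this_arg = undefined;   // __ LoadRoot(rdx, RootIndex::kUndefinedValue)
  *arg_array = undefined;  // __ movp(rbx, rdx)
  if (argc >= 1) *this_arg = args[0];
  if (argc >= 2) *arg_array = args[1];
}
```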
@@ -1601,7 +1615,6 @@ void Builtins::Generate_InternalArrayConstructor(MacroAssembler* masm) {
 
   // Run the native code for the InternalArray function called as a normal
   // function.
-  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
   __ Jump(BUILTIN_CODE(masm->isolate(), InternalArrayConstructorImpl),
           RelocInfo::CODE_TARGET);
 }
@@ -1701,7 +1714,7 @@ void Builtins::Generate_ArgumentsAdaptorTrampoline(MacroAssembler* masm) {
 
     // Fill remaining expected arguments with undefined values.
     Label fill;
-    __ LoadRoot(kScratchRegister, Heap::kUndefinedValueRootIndex);
+    __ LoadRoot(kScratchRegister, RootIndex::kUndefinedValue);
     __ bind(&fill);
     __ incp(r8);
     __ Push(kScratchRegister);
@@ -1777,23 +1790,8 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
     __ bind(&ok);
   }
 
-  // Check for stack overflow.
-  {
-    // Check the stack for overflow. We are not trying to catch interruptions
-    // (i.e. debug break and preemption) here, so check the "real stack limit".
-    Label done;
-    __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
-    __ movp(r8, rsp);
-    // Make r8 the space we have left. The stack might already be overflowed
-    // here which will cause r8 to become negative.
-    __ subp(r8, kScratchRegister);
-    __ sarp(r8, Immediate(kPointerSizeLog2));
-    // Check if the arguments will overflow the stack.
-    __ cmpp(r8, rcx);
-    __ j(greater, &done, Label::kNear);  // Signed comparison.
-    __ TailCallRuntime(Runtime::kThrowStackOverflow);
-    __ bind(&done);
-  }
+  Label stack_overflow;
+  Generate_StackOverflowCheck(masm, rcx, r8, &stack_overflow, Label::kNear);
 
   // Push additional arguments onto the stack.
   {
@@ -1806,9 +1804,9 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
       // Turn the hole into undefined as we go.
      __ movp(r11, FieldOperand(rbx, r9, times_pointer_size,
                                 FixedArray::kHeaderSize));
-      __ CompareRoot(r11, Heap::kTheHoleValueRootIndex);
+      __ CompareRoot(r11, RootIndex::kTheHoleValue);
       __ j(not_equal, &push, Label::kNear);
-      __ LoadRoot(r11, Heap::kUndefinedValueRootIndex);
+      __ LoadRoot(r11, RootIndex::kUndefinedValue);
       __ bind(&push);
       __ Push(r11);
       __ incl(r9);
@@ -1820,6 +1818,9 @@ void Builtins::Generate_CallOrConstructVarargs(MacroAssembler* masm,
 
   // Tail-call to the actual Call or Construct builtin.
   __ Jump(code, RelocInfo::CODE_TARGET);
+
+  __ bind(&stack_overflow);
+  __ TailCallRuntime(Runtime::kThrowStackOverflow);
 }
 
 // static
@@ -1957,9 +1958,9 @@ void Builtins::Generate_CallFunction(MacroAssembler* masm,
       __ j(above_equal, &done_convert);
       if (mode != ConvertReceiverMode::kNotNullOrUndefined) {
         Label convert_global_proxy;
-        __ JumpIfRoot(rcx, Heap::kUndefinedValueRootIndex,
-                      &convert_global_proxy, Label::kNear);
-        __ JumpIfNotRoot(rcx, Heap::kNullValueRootIndex, &convert_to_object,
+        __ JumpIfRoot(rcx, RootIndex::kUndefinedValue, &convert_global_proxy,
+                      Label::kNear);
+        __ JumpIfNotRoot(rcx, RootIndex::kNullValue, &convert_to_object,
                       Label::kNear);
         __ bind(&convert_global_proxy);
         {
@@ -2049,8 +2050,8 @@ void Generate_PushBoundArguments(MacroAssembler* masm) {
       // Check the stack for overflow. We are not trying to catch interruptions
       // (i.e. debug break and preemption) here, so check the "real stack
       // limit".
-      __ CompareRoot(rsp, Heap::kRealStackLimitRootIndex);
-      __ j(greater, &done, Label::kNear);  // Signed comparison.
+      __ CompareRoot(rsp, RootIndex::kRealStackLimit);
+      __ j(above_equal, &done, Label::kNear);
       // Restore the stack pointer.
       __ leap(rsp, Operand(rsp, rbx, times_pointer_size, 0));
       {
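Annotation: two things happen in the Generate_PushBoundArguments hunk above: the root rename, and a quiet condition-code fix from j(greater) to j(above_equal). Stack addresses are unsigned quantities, and a signed comparison inverts whenever the two pointers straddle the sign boundary. A runnable illustration with hypothetical addresses:

```cpp
#include <cstdint>
#include <cstdio>

int main() {
  // Hypothetical values: rsp has the high bit set, the limit does not.
  uint64_t rsp = 0x8000000000001000ull;
  uint64_t limit = 0x7ffffffffffff000ull;

  // Old check, j(greater): signed, so rsp looks negative here.
  bool signed_has_space =
      static_cast<int64_t>(rsp) > static_cast<int64_t>(limit);

  // New check, j(above_equal): unsigned, matching pointer semantics.
  bool unsigned_has_space = rsp >= limit;

  std::printf("signed: %d, unsigned: %d\n", signed_has_space,
              unsigned_has_space);  // prints "signed: 0, unsigned: 1"
}
```

Whether a given OS ever hands out stack addresses that straddle the boundary is beside the point; unsigned is simply the correct ordering for raw pointers.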
@@ -2183,7 +2184,7 @@ void Builtins::Generate_ConstructFunction(MacroAssembler* masm) {
 
   // Calling convention for function specific ConstructStubs require
   // rbx to contain either an AllocationSite or undefined.
-  __ LoadRoot(rbx, Heap::kUndefinedValueRootIndex);
+  __ LoadRoot(rbx, RootIndex::kUndefinedValue);
 
   // Jump to JSBuiltinsConstructStub or JSConstructStubGeneric.
   __ movp(rcx, FieldOperand(rdi, JSFunction::kSharedFunctionInfoOffset));
@@ -2277,15 +2278,10 @@ void Builtins::Generate_Construct(MacroAssembler* masm) {
           RelocInfo::CODE_TARGET);
 }
 
-static void Generate_OnStackReplacementHelper(MacroAssembler* masm,
-                                              bool has_handler_frame) {
+void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
   // Lookup the function in the JavaScript frame.
-  if (has_handler_frame) {
-    __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
-    __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
-  } else {
-    __ movp(rax, Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
-  }
+  __ movp(rax, Operand(rbp, StandardFrameConstants::kCallerFPOffset));
+  __ movp(rax, Operand(rax, JavaScriptFrameConstants::kFunctionOffset));
 
   {
     FrameScope scope(masm, StackFrame::INTERNAL);
@@ -2302,11 +2298,9 @@ void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
 
   __ bind(&skip);
 
-  // Drop any potential handler frame that is be sitting on top of the actual
+  // Drop the handler frame that may be sitting on top of the actual
   // JavaScript frame. This is the case then OSR is triggered from bytecode.
-  if (has_handler_frame) {
-    __ leave();
-  }
+  __ leave();
 
   // Load deoptimization data from the code object.
   __ movp(rbx, Operand(rax, Code::kDeoptimizationDataOffset - kHeapObjectTag));
@@ -2326,14 +2320,6 @@ void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
   __ ret(0);
 }
 
-void Builtins::Generate_OnStackReplacement(MacroAssembler* masm) {
-  Generate_OnStackReplacementHelper(masm, false);
-}
-
-void Builtins::Generate_InterpreterOnStackReplacement(MacroAssembler* masm) {
-  Generate_OnStackReplacementHelper(masm, true);
-}
-
 void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
   // The function index was pushed to the stack by the caller as int32.
   __ Pop(r11);
@@ -2486,14 +2472,14 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
 
   // Check result for exception sentinel.
   Label exception_returned;
-  __ CompareRoot(rax, Heap::kExceptionRootIndex);
+  __ CompareRoot(rax, RootIndex::kException);
   __ j(equal, &exception_returned);
 
   // Check that there is no pending exception, otherwise we
   // should have returned the exception sentinel.
   if (FLAG_debug_code) {
     Label okay;
-    __ LoadRoot(r14, Heap::kTheHoleValueRootIndex);
+    __ LoadRoot(r14, RootIndex::kTheHoleValue);
     ExternalReference pending_exception_address = ExternalReference::Create(
         IsolateAddressId::kPendingExceptionAddress, masm->isolate());
     Operand pending_exception_operand =
@@ -2547,9 +2533,9 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
   __ bind(&skip);
 
   // Reset the masking register. This is done independent of the underlying
-  // feature flag {FLAG_branch_load_poisoning} to make the snapshot work with
-  // both configurations. It is safe to always do this, because the underlying
-  // register is caller-saved and can be arbitrarily clobbered.
+  // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
+  // with both configurations. It is safe to always do this, because the
+  // underlying register is caller-saved and can be arbitrarily clobbered.
   __ ResetSpeculationPoisonRegister();
 
   // Compute the handler entry address and jump to it.
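Annotation: with Generate_OnStackReplacement deleted, OSR is only ever entered from bytecode, where a handler frame always sits on top of the JavaScript frame, so the has_handler_frame parameter folds away. A toy model of the remaining frame walk; the struct layout is illustrative and not V8's real frame constants:

```cpp
#include <cstdint>

// Illustrative frame record: one field per offset the builtin reads.
struct Frame {
  Frame* caller_fp;    // read via StandardFrameConstants::kCallerFPOffset
  uintptr_t function;  // read via JavaScriptFrameConstants::kFunctionOffset
};

// Models the two movp instructions at the top of
// Generate_InterpreterOnStackReplacement: hop over the handler frame,
// then load the JSFunction from the JavaScript frame beneath it.
uintptr_t LookupOsrFunction(const Frame* rbp) {
  const Frame* js_frame = rbp->caller_fp;  // skip the handler frame
  return js_frame->function;
}
```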
@@ -2789,6 +2775,9 @@ void GenerateInternalArrayConstructorCase(MacroAssembler* masm,
           RelocInfo::CODE_TARGET);
 
   __ bind(&not_one_case);
+  // Load undefined into the allocation site parameter as required by
+  // ArrayNArgumentsConstructor.
+  __ LoadRoot(kJavaScriptCallExtraArg1Register, RootIndex::kUndefinedValue);
   Handle<Code> code = BUILTIN_CODE(masm->isolate(), ArrayNArgumentsConstructor);
   __ Jump(code, RelocInfo::CODE_TARGET);
 }
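Annotation: this final hunk is a correctness fix rather than a rename. Per its own comment, ArrayNArgumentsConstructor reads an allocation-site argument from kJavaScriptCallExtraArg1Register unconditionally, so this caller must populate the register, using undefined as the "no site" sentinel, instead of leaving whatever it last held. A hypothetical analogue of the convention:

```cpp
// Hypothetical analogue: the callee always reads its extra parameter,
// so callers without a real allocation site must pass a sentinel
// (undefined in the builtin, nullptr here) rather than stale data.
struct AllocationSite;

int ArrayNArgumentsConstructorModel(int argc, const AllocationSite* site) {
  return site != nullptr ? argc + 1 : argc;  // dispatch on the sentinel
}

int CallWithoutSite(int argc) {
  return ArrayNArgumentsConstructorModel(argc, /*site=*/nullptr);
}
```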