author     Michaël Zasso <targos@protonmail.com>  2021-08-29 14:20:49 +0200
committer  Michaël Zasso <targos@protonmail.com>  2021-08-30 21:02:51 +0200
commit     50930a0fa08297d0ce7e67fa6594fe47937b99ff (patch)
tree       96bd30c0c63790bc1992a2f241a3df94d563b283 /deps/v8/src/builtins/s390
parent     b63e449b2eade1111b52f6559669400a4e855903 (diff)
download   node-new-50930a0fa08297d0ce7e67fa6594fe47937b99ff.tar.gz
deps: update V8 to 9.3.345.16
PR-URL: https://github.com/nodejs/node/pull/39469
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Diffstat (limited to 'deps/v8/src/builtins/s390')
-rw-r--r--  deps/v8/src/builtins/s390/builtins-s390.cc  117
1 file changed, 52 insertions(+), 65 deletions(-)
diff --git a/deps/v8/src/builtins/s390/builtins-s390.cc b/deps/v8/src/builtins/s390/builtins-s390.cc
index 0272621ac0..2370f5ed57 100644
--- a/deps/v8/src/builtins/s390/builtins-s390.cc
+++ b/deps/v8/src/builtins/s390/builtins-s390.cc
@@ -334,13 +334,13 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
// -- r3 : the JSGeneratorObject to resume
// -- lr : return address
// -----------------------------------
- __ AssertGeneratorObject(r3);
-
// Store input value into generator object.
__ StoreTaggedField(
r2, FieldMemOperand(r3, JSGeneratorObject::kInputOrDebugPosOffset), r0);
__ RecordWriteField(r3, JSGeneratorObject::kInputOrDebugPosOffset, r2, r5,
kLRHasNotBeenSaved, SaveFPRegsMode::kIgnore);
+ // Check that r3 is still valid, RecordWrite might have clobbered it.
+ __ AssertGeneratorObject(r3);
// Load suspended function and context.
__ LoadTaggedPointerField(
@@ -496,7 +496,7 @@ constexpr int kPushedStackSpace =
// using JSEntryFunction = GeneratedCode<Address(
// Address root_register_value, MicrotaskQueue* microtask_queue)>;
void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
- Builtins::Name entry_trampoline) {
+ Builtin entry_trampoline) {
// The register state is either:
// r2: root register value
// r3: code entry
@@ -647,7 +647,7 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
// Invoke the function by calling through JS entry trampoline builtin and
// pop the faked function when we return.
Handle<Code> trampoline_code =
- masm->isolate()->builtins()->builtin_handle(entry_trampoline);
+ masm->isolate()->builtins()->code_handle(entry_trampoline);
DCHECK_EQ(kPushedStackSpace, pushed_stack_space);
__ Call(trampoline_code, RelocInfo::CODE_TARGET);
@@ -704,18 +704,17 @@ void Generate_JSEntryVariant(MacroAssembler* masm, StackFrame::Type type,
} // namespace
void Builtins::Generate_JSEntry(MacroAssembler* masm) {
- Generate_JSEntryVariant(masm, StackFrame::ENTRY,
- Builtins::kJSEntryTrampoline);
+ Generate_JSEntryVariant(masm, StackFrame::ENTRY, Builtin::kJSEntryTrampoline);
}
void Builtins::Generate_JSConstructEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::CONSTRUCT_ENTRY,
- Builtins::kJSConstructEntryTrampoline);
+ Builtin::kJSConstructEntryTrampoline);
}
void Builtins::Generate_JSRunMicrotasksEntry(MacroAssembler* masm) {
Generate_JSEntryVariant(masm, StackFrame::ENTRY,
- Builtins::kRunMicrotasksTrampoline);
+ Builtin::kRunMicrotasksTrampoline);
}
static void Generate_JSEntryTrampolineHelper(MacroAssembler* masm,
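Editor's note: the hunks above track a V8-wide rename of the builtin identifier from the nested Builtins::Name constants to the standalone Builtin enum, with the lookup helper changing from builtin_handle() to code_handle(). The following is a minimal standalone sketch of that pattern only; the enumerators shown, HypotheticalCodeTable, and its string-based lookup are illustrative assumptions, not V8's actual definitions.

#include <cassert>
#include <cstdio>
#include <unordered_map>

// Illustrative stand-in for the scoped enum that replaces the old
// Builtins::kFoo constants; the real V8 enum is generated from the
// builtin list.
enum class Builtin : int {
  kJSEntryTrampoline,
  kJSConstructEntryTrampoline,
  kRunMicrotasksTrampoline,
};

// Hypothetical code table keyed by the scoped enum, standing in for the
// code_handle() lookup; only the lookup pattern matters here.
class HypotheticalCodeTable {
 public:
  void Register(Builtin b, const char* label) {
    table_[static_cast<int>(b)] = label;
  }
  const char* code_handle(Builtin b) const {
    auto it = table_.find(static_cast<int>(b));
    assert(it != table_.end() && "unknown builtin");
    return it->second;
  }

 private:
  std::unordered_map<int, const char*> table_;
};

// Mirrors the Generate_JSEntryVariant signature change: the trampoline is
// now identified by the scoped Builtin enum instead of Builtins::Name.
void GenerateEntryVariant(const HypotheticalCodeTable& builtins,
                          Builtin entry_trampoline) {
  std::printf("calling %s\n", builtins.code_handle(entry_trampoline));
}

int main() {
  HypotheticalCodeTable builtins;
  builtins.Register(Builtin::kJSEntryTrampoline, "JSEntryTrampoline");
  builtins.Register(Builtin::kJSConstructEntryTrampoline,
                    "JSConstructEntryTrampoline");
  builtins.Register(Builtin::kRunMicrotasksTrampoline,
                    "RunMicrotasksTrampoline");
  GenerateEntryVariant(builtins, Builtin::kJSEntryTrampoline);
  return 0;
}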
@@ -848,13 +847,18 @@ static void ReplaceClosureCodeWithOptimizedCode(MacroAssembler* masm,
Register optimized_code,
Register closure,
Register scratch1,
- Register scratch2) {
+ Register slot_address) {
+ DCHECK(!AreAliased(optimized_code, closure, scratch1, slot_address));
+ DCHECK_EQ(closure, kJSFunctionRegister);
+ DCHECK(!AreAliased(optimized_code, closure));
// Store code entry in the closure.
__ StoreTaggedField(optimized_code,
FieldMemOperand(closure, JSFunction::kCodeOffset), r0);
- __ mov(scratch1,
- optimized_code); // Write barrier clobbers scratch1 below.
- __ RecordWriteField(closure, JSFunction::kCodeOffset, scratch1, scratch2,
+ // Write barrier clobbers scratch1 below.
+ Register value = scratch1;
+ __ mov(value, optimized_code);
+
+ __ RecordWriteField(closure, JSFunction::kCodeOffset, value, slot_address,
kLRHasNotBeenSaved, SaveFPRegsMode::kIgnore,
RememberedSetAction::kOmit, SmiCheck::kOmit);
}
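Editor's note: the renamed slot_address parameter is handed straight to the write barrier, so the new DCHECKs insist that none of the registers alias one another before the store. Below is a small standalone model of that pairwise-distinctness check; the toy Register type and this AreAliased are hypothetical reimplementations for illustration, not V8's.

#include <cassert>
#include <initializer_list>

// Toy register handle; the real V8 Register wraps an architecture code.
struct Register {
  int code;
};
inline bool operator==(Register a, Register b) { return a.code == b.code; }

// Hypothetical stand-in for AreAliased(): true if any two registers in the
// list share the same code.
inline bool AreAliased(std::initializer_list<Register> regs) {
  for (auto i = regs.begin(); i != regs.end(); ++i)
    for (auto j = i + 1; j != regs.end(); ++j)
      if (*i == *j) return true;
  return false;
}

int main() {
  Register optimized_code{2}, closure{3}, scratch1{5}, slot_address{6};
  // Mirrors the added DCHECK: the write barrier clobbers scratch1 and uses
  // slot_address as its own operand, so none of these may overlap.
  assert(!AreAliased({optimized_code, closure, scratch1, slot_address}));
  return 0;
}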
@@ -1062,6 +1066,7 @@ static void AdvanceBytecodeOffsetOrReturn(MacroAssembler* masm,
static void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(
MacroAssembler* masm, Register optimization_state,
Register feedback_vector) {
+ DCHECK(!AreAliased(optimization_state, feedback_vector));
Label maybe_has_optimized_code;
// Check if optimized code is available
__ TestBitMask(optimization_state,
@@ -1097,7 +1102,7 @@ static void MaybeOptimizeCodeOrTailCallOptimizedCodeSlot(
// o lr: return address
//
// The function builds an interpreter frame. See InterpreterFrameConstants in
-// frames.h for its layout.
+// frame-constants.h for its layout.
void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
Register closure = r3;
Register feedback_vector = r4;
@@ -1313,10 +1318,10 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
__ bkpt(0); // Should not return.
}
-static void Generate_InterpreterPushArgs(MacroAssembler* masm,
- Register num_args,
- Register start_address,
- Register scratch) {
+static void GenerateInterpreterPushArgs(MacroAssembler* masm, Register num_args,
+ Register start_address,
+ Register scratch) {
+ ASM_CODE_COMMENT(masm);
__ SubS64(scratch, num_args, Operand(1));
__ ShiftLeftU64(scratch, scratch, Operand(kSystemPointerSizeLog2));
__ SubS64(start_address, start_address, scratch);
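Editor's note: the renamed GenerateInterpreterPushArgs rewinds start_address by (num_args - 1) slots so the subsequent copy can walk the whole argument span. The sketch below models only that address arithmetic; RewindToLastArg is a made-up name and the push loop itself is not modeled.

#include <cassert>
#include <cstddef>
#include <cstdint>

constexpr std::size_t kSystemPointerSizeLog2 = 3;  // 8-byte slots, as on s390x

// Hypothetical model of the helper's address arithmetic: given one end of
// the argument span and the argument count, compute the other end, exactly
// as the builtin does with
//   scratch = (num_args - 1) << kSystemPointerSizeLog2;
//   start_address -= scratch;
std::uintptr_t RewindToLastArg(std::uintptr_t start_address,
                               std::uint64_t num_args) {
  std::uint64_t scratch = (num_args - 1) << kSystemPointerSizeLog2;
  return start_address - scratch;
}

int main() {
  // Three 8-byte argument slots ending at 0x1000: the span covers
  // 0x0FF0, 0x0FF8 and 0x1000.
  assert(RewindToLastArg(0x1000, 3) == 0x0FF0);
  return 0;
}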
@@ -1353,7 +1358,7 @@ void Builtins::Generate_InterpreterPushArgsThenCallImpl(
}
// Push the arguments.
- Generate_InterpreterPushArgs(masm, r5, r4, r6);
+ GenerateInterpreterPushArgs(masm, r5, r4, r6);
if (receiver_mode == ConvertReceiverMode::kNullOrUndefined) {
__ PushRoot(RootIndex::kUndefinedValue);
@@ -1403,7 +1408,7 @@ void Builtins::Generate_InterpreterPushArgsThenConstructImpl(
}
// Push the arguments. r4 and r5 will be modified.
- Generate_InterpreterPushArgs(masm, r2, r6, r7);
+ GenerateInterpreterPushArgs(masm, r2, r6, r7);
// Push a slot for the receiver to be constructed.
__ mov(r0, Operand::Zero());
@@ -2473,24 +2478,7 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
NumRegs(fp_regs));
__ MultiPush(gp_regs);
- // Check if machine has simd enabled, if so push vector registers. If not
- // then only push double registers.
- Label push_doubles, simd_pushed;
- __ Move(r1, ExternalReference::supports_wasm_simd_128_address());
- __ LoadU8(r1, MemOperand(r1));
- __ LoadAndTestP(r1, r1); // If > 0 then simd is available.
- __ ble(&push_doubles, Label::kNear);
- // Save vector registers, don't save double registers anymore.
- __ MultiPushV128(fp_regs);
- __ b(&simd_pushed);
- __ bind(&push_doubles);
- // Simd not supported, only save double registers.
- __ MultiPushDoubles(fp_regs);
- // kFixedFrameSizeFromFp is hard coded to include space for Simd
- // registers, so we still need to allocate extra (unused) space on the stack
- // as if they were saved.
- __ lay(sp, MemOperand(sp, -(NumRegs(fp_regs) * kDoubleSize)));
- __ bind(&simd_pushed);
+ __ MultiPushF64OrV128(fp_regs);
// Pass instance and function index as explicit arguments to the runtime
// function.
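Editor's note: the removed block and its twin in the restore path are what MultiPushF64OrV128 / MultiPopF64OrV128 now encapsulate: probe the supports_wasm_simd_128 flag, save full vector registers when it is set, otherwise save only doubles but still reserve the vector-sized slots so the fixed frame layout is unchanged. A standalone model of that decision follows, with a byte vector standing in for the stack; ToyStack and PushF64OrV128 are illustrative, not V8 types.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

constexpr std::size_t kDoubleSize = 8;
constexpr std::size_t kSimd128Size = 16;

// Toy downward-growing stack: "allocating" just appends zero bytes.
struct ToyStack {
  std::vector<std::uint8_t> bytes;
  void Allocate(std::size_t n) { bytes.resize(bytes.size() + n); }
  std::size_t size() const { return bytes.size(); }
};

// Hypothetical model of MultiPushF64OrV128: with SIMD support, save each
// register as a full 128-bit value; without it, save only the 64-bit double
// halves and still reserve the remaining space, because the frame layout
// (kFixedFrameSizeFromFp in the removed comment) assumes the vector-sized
// slots are always present.
void PushF64OrV128(ToyStack* sp, std::size_t num_fp_regs,
                   bool supports_simd128) {
  if (supports_simd128) {
    sp->Allocate(num_fp_regs * kSimd128Size);  // like MultiPushV128
  } else {
    sp->Allocate(num_fp_regs * kDoubleSize);   // like MultiPushDoubles
    // lay(sp, -(NumRegs(fp_regs) * kDoubleSize)): unused padding.
    sp->Allocate(num_fp_regs * kDoubleSize);
  }
}

int main() {
  ToyStack with_simd, without_simd;
  PushF64OrV128(&with_simd, 8, /*supports_simd128=*/true);
  PushF64OrV128(&without_simd, 8, /*supports_simd128=*/false);
  // Either way the frame consumes the same number of bytes.
  std::printf("%zu == %zu\n", with_simd.size(), without_simd.size());
  return 0;
}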
@@ -2503,19 +2491,7 @@ void Builtins::Generate_WasmCompileLazy(MacroAssembler* masm) {
__ mov(ip, r2);
// Restore registers.
- __ Move(r1, ExternalReference::supports_wasm_simd_128_address());
- __ LoadU8(r1, MemOperand(r1));
- Label pop_doubles, simd_popped;
- __ LoadAndTestP(r1, r1); // If > 0 then simd is available.
- __ ble(&pop_doubles, Label::kNear);
- // Pop vector registers, don't pop double registers anymore.
- __ MultiPopV128(fp_regs);
- __ b(&simd_popped);
- __ bind(&pop_doubles);
- // Simd not supported, only pop double registers.
- __ lay(sp, MemOperand(sp, NumRegs(fp_regs) * kDoubleSize));
- __ MultiPopDoubles(fp_regs);
- __ bind(&simd_popped);
+ __ MultiPopF64OrV128(fp_regs);
__ MultiPop(gp_regs);
}
// Finally, jump to the entrypoint.
@@ -2530,7 +2506,7 @@ void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
// Save all parameter registers. They might hold live values, we restore
// them after the runtime call.
__ MultiPush(WasmDebugBreakFrameConstants::kPushedGpRegs);
- __ MultiPushDoubles(WasmDebugBreakFrameConstants::kPushedFpRegs);
+ __ MultiPushF64OrV128(WasmDebugBreakFrameConstants::kPushedFpRegs);
// Initialize the JavaScript context with 0. CEntry will use it to
// set the current context on the isolate.
@@ -2538,7 +2514,7 @@ void Builtins::Generate_WasmDebugBreak(MacroAssembler* masm) {
__ CallRuntime(Runtime::kWasmDebugBreak, 0);
// Restore registers.
- __ MultiPopDoubles(WasmDebugBreakFrameConstants::kPushedFpRegs);
+ __ MultiPopF64OrV128(WasmDebugBreakFrameConstants::kPushedFpRegs);
__ MultiPop(WasmDebugBreakFrameConstants::kPushedGpRegs);
}
__ Ret();
@@ -3452,22 +3428,34 @@ void Builtins::Generate_InterpreterOnStackReplacement_ToBaseline(
}
void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
+ Generate_DynamicCheckMapsTrampoline<DynamicCheckMapsDescriptor>(
+ masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMaps));
+}
+
+void Builtins::Generate_DynamicCheckMapsWithFeedbackVectorTrampoline(
+ MacroAssembler* masm) {
+ Generate_DynamicCheckMapsTrampoline<
+ DynamicCheckMapsWithFeedbackVectorDescriptor>(
+ masm, BUILTIN_CODE(masm->isolate(), DynamicCheckMapsWithFeedbackVector));
+}
+
+template <class Descriptor>
+void Builtins::Generate_DynamicCheckMapsTrampoline(
+ MacroAssembler* masm, Handle<Code> builtin_target) {
FrameScope scope(masm, StackFrame::MANUAL);
__ EnterFrame(StackFrame::INTERNAL);
// Only save the registers that the DynamicCheckMaps builtin can clobber.
- DynamicCheckMapsDescriptor descriptor;
+ Descriptor descriptor;
RegList registers = descriptor.allocatable_registers();
// FLAG_debug_code is enabled CSA checks will call C function and so we need
// to save all CallerSaved registers too.
if (FLAG_debug_code) registers |= kJSCallerSaved;
- __ SaveRegisters(registers);
+ __ MaybeSaveRegisters(registers);
// Load the immediate arguments from the deopt exit to pass to the builtin.
- Register slot_arg =
- descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kSlot);
- Register handler_arg =
- descriptor.GetRegisterParameter(DynamicCheckMapsDescriptor::kHandler);
+ Register slot_arg = descriptor.GetRegisterParameter(Descriptor::kSlot);
+ Register handler_arg = descriptor.GetRegisterParameter(Descriptor::kHandler);
__ LoadU64(handler_arg,
MemOperand(fp, CommonFrameConstants::kCallerPCOffset));
__ LoadU64(
@@ -3477,14 +3465,13 @@ void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
handler_arg,
MemOperand(handler_arg, Deoptimizer::kEagerWithResumeImmedArgs2PcOffset));
- __ Call(BUILTIN_CODE(masm->isolate(), DynamicCheckMaps),
- RelocInfo::CODE_TARGET);
+ __ Call(builtin_target, RelocInfo::CODE_TARGET);
Label deopt, bailout;
__ CmpS64(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kSuccess)));
__ bne(&deopt);
- __ RestoreRegisters(registers);
+ __ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
__ Ret();
@@ -3496,16 +3483,16 @@ void Builtins::Generate_DynamicCheckMapsTrampoline(MacroAssembler* masm) {
__ CmpS64(r2, Operand(static_cast<int>(DynamicCheckMapsStatus::kDeopt)));
__ Assert(eq, AbortReason::kUnexpectedDynamicCheckMapsStatus);
}
- __ RestoreRegisters(registers);
+ __ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
- Handle<Code> deopt_eager = masm->isolate()->builtins()->builtin_handle(
+ Handle<Code> deopt_eager = masm->isolate()->builtins()->code_handle(
Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kEager));
__ Jump(deopt_eager, RelocInfo::CODE_TARGET);
__ bind(&bailout);
- __ RestoreRegisters(registers);
+ __ MaybeRestoreRegisters(registers);
__ LeaveFrame(StackFrame::INTERNAL);
- Handle<Code> deopt_bailout = masm->isolate()->builtins()->builtin_handle(
+ Handle<Code> deopt_bailout = masm->isolate()->builtins()->code_handle(
Deoptimizer::GetDeoptimizationEntry(DeoptimizeKind::kBailout));
__ Jump(deopt_bailout, RelocInfo::CODE_TARGET);
}
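Editor's note: the trampoline body is now shared by templating it on the call-interface descriptor, so the feedback-vector variant only supplies a different Descriptor type and builtin target. The sketch below shows that shape only; the Toy*Descriptor structs, their register masks, and GenerateTrampoline are made-up stand-ins for V8's descriptors, not the real interface.

#include <cstdint>
#include <cstdio>

// Made-up descriptors: each variant only has to expose the register set it
// clobbers and where its parameters live, mirroring allocatable_registers()
// and GetRegisterParameter() in the real code.
struct ToyCheckMapsDescriptor {
  static constexpr int kSlot = 3;
  static constexpr int kHandler = 4;
  static constexpr std::uint32_t allocatable_registers() { return 0x18; }
};

struct ToyCheckMapsWithFeedbackVectorDescriptor {
  static constexpr int kSlot = 3;
  static constexpr int kHandler = 4;
  static constexpr int kFeedbackVector = 5;
  static constexpr std::uint32_t allocatable_registers() { return 0x38; }
};

// One generator, parameterized on the descriptor, in the spirit of the
// templated Generate_DynamicCheckMapsTrampoline above; the builtin target
// is passed in rather than hard-coded.
template <class Descriptor>
void GenerateTrampoline(const char* builtin_target) {
  std::uint32_t registers = Descriptor::allocatable_registers();
  std::printf("save regs 0x%02x, load slot into r%d and handler into r%d, call %s\n",
              static_cast<unsigned>(registers), Descriptor::kSlot,
              Descriptor::kHandler, builtin_target);
}

int main() {
  GenerateTrampoline<ToyCheckMapsDescriptor>("DynamicCheckMaps");
  GenerateTrampoline<ToyCheckMapsWithFeedbackVectorDescriptor>(
      "DynamicCheckMapsWithFeedbackVector");
  return 0;
}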