author    Michaël Zasso <targos@protonmail.com>  2021-10-10 11:10:43 +0200
committer Michaël Zasso <targos@protonmail.com>  2021-10-12 08:07:50 +0200
commit    62719c5fd2ab7dee1ac4019c1715061d556ac457 (patch)
tree      356fed3842e577ab58fd51d5cc02f071cf7ee216 /deps/v8/src/builtins/mips64
parent    a784258444b052dfd31cca90db57b21dc38bb1eb (diff)
deps: update V8 to 9.5.172.19
PR-URL: https://github.com/nodejs/node/pull/40178
Reviewed-By: Antoine du Hamel <duhamelantoine1995@gmail.com>
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Diffstat (limited to 'deps/v8/src/builtins/mips64')
-rw-r--r--  deps/v8/src/builtins/mips64/builtins-mips64.cc | 43
1 file changed, 27 insertions(+), 16 deletions(-)
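
The main behavioral change in the hunks below is that baseline code is now detected by comparing against CODET_TYPE instead of the removed BASELINE_DATA_TYPE, with a new debug-only helper, AssertCodeIsBaseline, that loads the Code object's flags word and decodes Code::KindField to confirm the kind is CodeKind::BASELINE. As a rough plain-C++ sketch of that decode-and-compare pattern (the kKindShift/kKindMask layout and enum values below are assumed placeholders, not V8's actual flags encoding):

#include <cassert>
#include <cstdint>

// Hypothetical code-kind values; only BASELINE matters for this check.
enum class CodeKind : uint32_t { BYTECODE_HANDLER = 0, BASELINE = 1, TURBOFAN = 2 };

// Assumed (illustrative) position and width of the kind bitfield in the flags word.
constexpr uint32_t kKindShift = 0;
constexpr uint32_t kKindMask = 0xF;

// Mirrors the shape of what AssertCodeIsBaseline emits as MIPS64 instructions:
// load the flags word, decode the kind field, assert it equals BASELINE.
inline void AssertIsBaseline(uint32_t flags) {
  uint32_t kind = (flags >> kKindShift) & kKindMask;
  assert(kind == static_cast<uint32_t>(CodeKind::BASELINE) && "expected baseline code");
}

int main() {
  // Pack a baseline kind into an otherwise empty flags word and check it.
  uint32_t flags = static_cast<uint32_t>(CodeKind::BASELINE) << kKindShift;
  AssertIsBaseline(flags);
}
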
diff --git a/deps/v8/src/builtins/mips64/builtins-mips64.cc b/deps/v8/src/builtins/mips64/builtins-mips64.cc
index 45e1c32f82..3f8824d97d 100644
--- a/deps/v8/src/builtins/mips64/builtins-mips64.cc
+++ b/deps/v8/src/builtins/mips64/builtins-mips64.cc
@@ -300,6 +300,16 @@ void Builtins::Generate_JSBuiltinsConstructStub(MacroAssembler* masm) {
Generate_JSBuiltinsConstructStubHelper(masm);
}
+static void AssertCodeIsBaseline(MacroAssembler* masm, Register code,
+ Register scratch) {
+ DCHECK(!AreAliased(code, scratch));
+ // Verify that the code kind is baseline code via the CodeKind.
+ __ Ld(scratch, FieldMemOperand(code, Code::kFlagsOffset));
+ __ DecodeField<Code::KindField>(scratch);
+ __ Assert(eq, AbortReason::kExpectedBaselineData, scratch,
+ Operand(static_cast<int>(CodeKind::BASELINE)));
+}
+
// TODO(v8:11429): Add a path for "not_compiled" and unify the two uses under
// the more general dispatch.
static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
@@ -309,11 +319,18 @@ static void GetSharedFunctionInfoBytecodeOrBaseline(MacroAssembler* masm,
Label done;
__ GetObjectType(sfi_data, scratch1, scratch1);
- __ Branch(is_baseline, eq, scratch1, Operand(BASELINE_DATA_TYPE));
+ if (FLAG_debug_code) {
+ Label not_baseline;
+ __ Branch(&not_baseline, ne, scratch1, Operand(CODET_TYPE));
+ AssertCodeIsBaseline(masm, sfi_data, scratch1);
+ __ Branch(is_baseline);
+ __ bind(&not_baseline);
+ } else {
+ __ Branch(is_baseline, eq, scratch1, Operand(CODET_TYPE));
+ }
__ Branch(&done, ne, scratch1, Operand(INTERPRETER_DATA_TYPE));
__ Ld(sfi_data,
FieldMemOperand(sfi_data, InterpreterData::kBytecodeArrayOffset));
-
__ bind(&done);
}
@@ -1402,8 +1419,7 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
&has_optimized_code_or_marker);
// Load the baseline code into the closure.
- __ Ld(a2, FieldMemOperand(kInterpreterBytecodeArrayRegister,
- BaselineData::kBaselineCodeOffset));
+ __ Move(a2, kInterpreterBytecodeArrayRegister);
static_assert(kJavaScriptCallCodeStartRegister == a2, "ABI mismatch");
ReplaceClosureCodeWithOptimizedCode(masm, a2, closure, t0, t1);
__ JumpCodeObject(a2);
@@ -1788,7 +1804,8 @@ void OnStackReplacement(MacroAssembler* masm, bool is_interpreter) {
}
// Load deoptimization data from the code object.
// <deopt_data> = <code>[#deoptimization_data_offset]
- __ Ld(a1, MemOperand(v0, Code::kDeoptimizationDataOffset - kHeapObjectTag));
+ __ Ld(a1, MemOperand(v0, Code::kDeoptimizationDataOrInterpreterDataOffset -
+ kHeapObjectTag));
// Load the OSR entrypoint offset from the deoptimization data.
// <osr_offset> = <deopt_data>[#header_size + #osr_pc_offset]
@@ -2814,12 +2831,6 @@ void Builtins::Generate_CEntry(MacroAssembler* masm, int result_size,
__ Sd(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
__ bind(&zero);
- // Reset the masking register. This is done independent of the underlying
- // feature flag {FLAG_untrusted_code_mitigations} to make the snapshot work
- // with both configurations. It is safe to always do this, because the
- // underlying register is caller-saved and can be arbitrarily clobbered.
- __ ResetSpeculationPoisonRegister();
-
// Clear c_entry_fp, like we do in `LeaveExitFrame`.
{
UseScratchRegisterScope temps(masm);
@@ -3549,7 +3560,7 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm,
if (!is_osr) {
Label start_with_baseline;
__ GetObjectType(code_obj, t2, t2);
- __ Branch(&start_with_baseline, eq, t2, Operand(BASELINE_DATA_TYPE));
+ __ Branch(&start_with_baseline, eq, t2, Operand(CODET_TYPE));
// Start with bytecode as there is no baseline code.
Builtin builtin_id = next_bytecode
@@ -3562,12 +3573,12 @@ void Generate_BaselineOrInterpreterEntry(MacroAssembler* masm,
__ bind(&start_with_baseline);
} else if (FLAG_debug_code) {
__ GetObjectType(code_obj, t2, t2);
- __ Assert(eq, AbortReason::kExpectedBaselineData, t2,
- Operand(BASELINE_DATA_TYPE));
+ __ Assert(eq, AbortReason::kExpectedBaselineData, t2, Operand(CODET_TYPE));
}
- // Load baseline code from baseline data.
- __ Ld(code_obj, FieldMemOperand(code_obj, BaselineData::kBaselineCodeOffset));
+ if (FLAG_debug_code) {
+ AssertCodeIsBaseline(masm, code_obj, t2);
+ }
// Replace BytecodeOffset with the feedback vector.
Register feedback_vector = a2;