Diffstat (limited to 'deps/v8/src/compiler/mips/code-generator-mips.cc')
-rw-r--r--  deps/v8/src/compiler/mips/code-generator-mips.cc | 98
1 file changed, 97 insertions(+), 1 deletion(-)
diff --git a/deps/v8/src/compiler/mips/code-generator-mips.cc b/deps/v8/src/compiler/mips/code-generator-mips.cc
index 00575fe117..e44ffee34b 100644
--- a/deps/v8/src/compiler/mips/code-generator-mips.cc
+++ b/deps/v8/src/compiler/mips/code-generator-mips.cc
@@ -84,6 +84,9 @@ class MipsOperandConverter final : public InstructionOperandConverter {
// TODO(plind): Maybe we should handle ExtRef & HeapObj here?
// maybe not done on arm due to const pool ??
break;
+ case Constant::kDelayedStringConstant:
+ return Operand::EmbeddedStringConstant(
+ constant.ToDelayedStringConstant());
case Constant::kRpoNumber:
UNREACHABLE(); // TODO(titzer): RPO immediates on mips?
break;
@@ -353,6 +356,41 @@ void EmitWordLoadPoisoningIfNeeded(CodeGenerator* codegen,
__ sync(); \
} while (0)
+#define ASSEMBLE_ATOMIC64_LOGIC_BINOP(bin_instr) \
+ do { \
+ if (IsMipsArchVariant(kMips32r6)) { \
+ Label binop; \
+ __ sync(); \
+ __ bind(&binop); \
+ __ llwp(i.TempRegister(0), i.TempRegister(1), i.InputRegister(2)); \
+ __ bin_instr(i.TempRegister(0), i.TempRegister(1), i.TempRegister(0), \
+ i.TempRegister(1), i.InputRegister(0), i.InputRegister(1)); \
+ __ scwp(i.TempRegister(0), i.TempRegister(1), i.InputRegister(2)); \
+ __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
+ __ sync(); \
+ } else { \
+ UNREACHABLE(); \
+ } \
+ } while (0)
+
+#define ASSEMBLE_ATOMIC64_ARITH_BINOP(bin_instr) \
+ do { \
+ if (IsMipsArchVariant(kMips32r6)) { \
+ Label binop; \
+ __ sync(); \
+ __ bind(&binop); \
+ __ llwp(i.TempRegister(0), i.TempRegister(1), i.InputRegister(2)); \
+ __ bin_instr(i.TempRegister(0), i.TempRegister(1), i.TempRegister(0), \
+ i.TempRegister(1), i.InputRegister(0), i.InputRegister(1), \
+ i.TempRegister(2), i.TempRegister(3)); \
+ __ scwp(i.TempRegister(0), i.TempRegister(1), i.InputRegister(2)); \
+ __ BranchShort(&binop, eq, i.TempRegister(1), Operand(zero_reg)); \
+ __ sync(); \
+ } else { \
+ UNREACHABLE(); \
+ } \
+ } while (0)
+
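Note: the two macros above are only reachable on MIPS32r6, where llwp/scwp provide a load-linked/store-conditional pair over two 32-bit halves: llwp reads both halves, bin_instr combines them with the operand registers (the arith variant also takes two scratch temps), and scwp tries to publish the result, branching back to retry whenever the reservation was lost; the surrounding sync() instructions supply the memory barriers. A minimal sketch of the same read-modify-write semantics, assuming GCC/Clang __atomic builtins on a plain 64-bit word (atomic_pair_and is an illustrative name, not V8 code):

#include <stdint.h>

// Illustrative only: models the AndPair case of the macro. The `old & value`
// line is the bin_instr step, and the compare-exchange loop plays the role of
// the scwp retry branch.
static inline uint64_t atomic_pair_and(uint64_t* addr, uint64_t value) {
  uint64_t old = __atomic_load_n(addr, __ATOMIC_RELAXED);
  uint64_t desired;
  do {
    desired = old & value;
  } while (!__atomic_compare_exchange_n(addr, &old, desired, /*weak=*/true,
                                        __ATOMIC_SEQ_CST, __ATOMIC_RELAXED));
  return old;  // previous value of the pair
}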
#define ASSEMBLE_ATOMIC_BINOP_EXT(sign_extend, size, bin_instr) \
do { \
Label binop; \
@@ -1701,6 +1739,61 @@ CodeGenerator::CodeGenResult CodeGenerator::AssembleArchInstruction(
ATOMIC_BINOP_CASE(Or, Or)
ATOMIC_BINOP_CASE(Xor, Xor)
#undef ATOMIC_BINOP_CASE
+ case kMipsWord32AtomicPairLoad: {
+ if (IsMipsArchVariant(kMips32r6)) {
+ Register second_output =
+ instr->OutputCount() == 2 ? i.OutputRegister(1) : i.TempRegister(0);
+ __ llwp(i.OutputRegister(0), second_output, i.InputRegister(0));
+ __ sync();
+ } else {
+ UNREACHABLE();
+ }
+ break;
+ }
+ case kMipsWord32AtomicPairStore: {
+ if (IsMipsArchVariant(kMips32r6)) {
+ Label store;
+ __ sync();
+ __ bind(&store);
+ __ llwp(i.TempRegister(0), i.TempRegister(1), i.InputRegister(0));
+ __ Move(i.TempRegister(0), i.InputRegister(2));
+ __ scwp(i.InputRegister(1), i.TempRegister(0), i.InputRegister(0));
+ __ BranchShort(&store, eq, i.TempRegister(0), Operand(zero_reg));
+ __ sync();
+ } else {
+ UNREACHABLE();
+ }
+ break;
+ }
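Note: the pair load and pair store cases above also lean on the r6 llwp/scwp pair. The load needs only a single llwp (plus the trailing sync) to read both 32-bit halves as one unit; the store cannot simply issue two sw instructions, because an llwp reader running between them would observe one new half and one old half, so it is written as an LL/SC loop that rewrites both halves until the scwp succeeds. A sketch of the observable behavior with GCC/Clang builtins on a 64-bit word (helper names are illustrative, not V8 code):

#include <stdint.h>

// Illustrative only: a single-copy-atomic 64-bit load and store, which is
// what kMipsWord32AtomicPairLoad/Store implement via llwp/scwp.
static inline uint64_t atomic_pair_load(const uint64_t* addr) {
  return __atomic_load_n(addr, __ATOMIC_SEQ_CST);
}

static inline void atomic_pair_store(uint64_t* addr, uint64_t value) {
  __atomic_store_n(addr, value, __ATOMIC_SEQ_CST);
}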
+#define ATOMIC64_BINOP_ARITH_CASE(op, instr) \
+ case kMipsWord32AtomicPair##op: \
+ ASSEMBLE_ATOMIC64_ARITH_BINOP(instr); \
+ break;
+ ATOMIC64_BINOP_ARITH_CASE(Add, AddPair)
+ ATOMIC64_BINOP_ARITH_CASE(Sub, SubPair)
+#undef ATOMIC64_BINOP_ARITH_CASE
+#define ATOMIC64_BINOP_LOGIC_CASE(op, instr) \
+ case kMipsWord32AtomicPair##op: \
+ ASSEMBLE_ATOMIC64_LOGIC_BINOP(instr); \
+ break;
+ ATOMIC64_BINOP_LOGIC_CASE(And, AndPair)
+ ATOMIC64_BINOP_LOGIC_CASE(Or, OrPair)
+ ATOMIC64_BINOP_LOGIC_CASE(Xor, XorPair)
+#undef ATOMIC64_BINOP_LOGIC_CASE
+ case kMipsWord32AtomicPairExchange:
+ UNREACHABLE();
+ break;
+ case kMipsWord32AtomicPairCompareExchange: {
+ FrameScope scope(tasm(), StackFrame::MANUAL);
+ __ PushCallerSaved(kDontSaveFPRegs, v0, v1);
+ __ PrepareCallCFunction(5, 0, kScratchReg);
+ __ addu(a0, i.InputRegister(0), i.InputRegister(1));
+ __ sw(i.InputRegister(5), MemOperand(sp, 16));
+ __ CallCFunction(
+ ExternalReference::atomic_pair_compare_exchange_function(), 5, 0);
+ __ PopCallerSaved(kDontSaveFPRegs, v0, v1);
+ break;
+ }
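Note: unlike the other pair operations, compare-exchange falls back to a C function. The emitted code builds the effective address in a0; the remaining operands are presumably already pinned to the other argument registers by the instruction selector, with the fifth word-sized argument stored to the O32 outgoing-argument slot at sp+16 (the first 16 bytes of the outgoing area shadow a0-a3). The 64-bit result comes back in the v0/v1 register pair, which is why those two registers are excluded from PushCallerSaved/PopCallerSaved. A hypothetical sketch of such a fallback, with an assumed argument order (address, expected lo/hi, new lo/hi); the actual signature behind ExternalReference::atomic_pair_compare_exchange_function() may differ:

#include <stdint.h>

// Hypothetical fallback, illustrative only: one strong 64-bit compare-exchange
// performed in C. Under the MIPS O32 convention the returned previous value
// would travel back split across the v0/v1 register pair.
static uint64_t PairCompareExchange(intptr_t address, uint32_t expected_lo,
                                    uint32_t expected_hi, uint32_t new_lo,
                                    uint32_t new_hi) {
  uint64_t expected = ((uint64_t)expected_hi << 32) | expected_lo;
  uint64_t desired = ((uint64_t)new_hi << 32) | new_lo;
  __atomic_compare_exchange_n((uint64_t*)address, &expected, desired,
                              /*weak=*/false, __ATOMIC_SEQ_CST,
                              __ATOMIC_SEQ_CST);
  return expected;  // previous value (updated by the builtin on failure)
}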
case kMipsS128Zero: {
CpuFeatureScope msa_scope(tasm(), MIPS_SIMD);
__ xor_v(i.OutputSimd128Register(), i.OutputSimd128Register(),
@@ -3371,9 +3464,12 @@ void CodeGenerator::AssembleMove(InstructionOperand* source,
case Constant::kExternalReference:
__ li(dst, src.ToExternalReference());
break;
+ case Constant::kDelayedStringConstant:
+ __ li(dst, src.ToDelayedStringConstant());
+ break;
case Constant::kHeapObject: {
Handle<HeapObject> src_object = src.ToHeapObject();
- Heap::RootListIndex index;
+ RootIndex index;
if (IsMaterializableFromRoot(src_object, &index)) {
__ LoadRoot(dst, index);
} else {