Diffstat (limited to 'deps/v8/src/compiler/backend/loong64/instruction-selector-loong64.cc')
-rw-r--r--  deps/v8/src/compiler/backend/loong64/instruction-selector-loong64.cc  280
1 file changed, 195 insertions(+), 85 deletions(-)
diff --git a/deps/v8/src/compiler/backend/loong64/instruction-selector-loong64.cc b/deps/v8/src/compiler/backend/loong64/instruction-selector-loong64.cc
index 091c7ad9bc..69b10e29a9 100644
--- a/deps/v8/src/compiler/backend/loong64/instruction-selector-loong64.cc
+++ b/deps/v8/src/compiler/backend/loong64/instruction-selector-loong64.cc
@@ -41,6 +41,10 @@ class Loong64OperandGenerator final : public OperandGenerator {
return UseRegister(node);
}
+ MachineRepresentation GetRepresentation(Node* node) {
+ return sequence()->GetRepresentation(selector()->GetVirtualRegister(node));
+ }
+
bool IsIntegerConstant(Node* node) {
return (node->opcode() == IrOpcode::kInt32Constant) ||
(node->opcode() == IrOpcode::kInt64Constant);
@@ -74,6 +78,9 @@ class Loong64OperandGenerator final : public OperandGenerator {
bool CanBeImmediate(int64_t value, InstructionCode opcode) {
switch (ArchOpcodeField::decode(opcode)) {
+ case kLoong64Cmp32:
+ case kLoong64Cmp64:
+ return true;
case kLoong64Sll_w:
case kLoong64Srl_w:
case kLoong64Sra_w:
@@ -360,7 +367,7 @@ void EmitLoad(InstructionSelector* selector, Node* node, InstructionCode opcode,
selector->CanAddressRelativeToRootsRegister(m.ResolvedValue())) {
ptrdiff_t const delta =
g.GetIntegerConstantValue(index) +
- TurboAssemblerBase::RootRegisterOffsetForExternalReference(
+ MacroAssemblerBase::RootRegisterOffsetForExternalReference(
selector->isolate(), m.ResolvedValue());
// Check that the delta is a 32-bit integer due to the limitations of
// immediate operands.
@@ -373,6 +380,13 @@ void EmitLoad(InstructionSelector* selector, Node* node, InstructionCode opcode,
}
}
+ if (base != nullptr && base->opcode() == IrOpcode::kLoadRootRegister) {
+ selector->Emit(opcode | AddressingModeField::encode(kMode_Root),
+ g.DefineAsRegister(output == nullptr ? node : output),
+ g.UseImmediate(index));
+ return;
+ }
+
if (g.CanBeImmediate(index, opcode)) {
selector->Emit(opcode | AddressingModeField::encode(kMode_MRI),
g.DefineAsRegister(output == nullptr ? node : output),
@@ -458,14 +472,32 @@ void InstructionSelector::VisitLoad(Node* node) {
case MachineRepresentation::kWord32:
opcode = kLoong64Ld_w;
break;
+#ifdef V8_COMPRESS_POINTERS
+ case MachineRepresentation::kTaggedSigned:
+ opcode = kLoong64LoadDecompressTaggedSigned;
+ break;
+ case MachineRepresentation::kTaggedPointer:
+ case MachineRepresentation::kTagged:
+ opcode = kLoong64LoadDecompressTagged;
+ break;
+#else
case MachineRepresentation::kTaggedSigned: // Fall through.
case MachineRepresentation::kTaggedPointer: // Fall through.
case MachineRepresentation::kTagged: // Fall through.
+#endif
case MachineRepresentation::kWord64:
opcode = kLoong64Ld_d;
break;
case MachineRepresentation::kCompressedPointer: // Fall through.
+#ifdef V8_COMPRESS_POINTERS
+ opcode = kLoong64Ld_wu;
+ break;
+#endif
case MachineRepresentation::kCompressed: // Fall through.
+#ifdef V8_COMPRESS_POINTERS
+ opcode = kLoong64Ld_wu;
+ break;
+#endif
case MachineRepresentation::kSandboxedPointer: // Fall through.
case MachineRepresentation::kMapWord: // Fall through.
case MachineRepresentation::kNone: // Fall through.
@@ -492,14 +524,15 @@ void InstructionSelector::VisitStore(Node* node) {
WriteBarrierKind write_barrier_kind = store_rep.write_barrier_kind();
MachineRepresentation rep = store_rep.representation();
- if (v8_flags.enable_unconditional_write_barriers && CanBeTaggedPointer(rep)) {
+ if (v8_flags.enable_unconditional_write_barriers &&
+ CanBeTaggedOrCompressedPointer(rep)) {
write_barrier_kind = kFullWriteBarrier;
}
// TODO(loong64): I guess this could be done in a better way.
if (write_barrier_kind != kNoWriteBarrier &&
!v8_flags.disable_write_barriers) {
- DCHECK(CanBeTaggedPointer(rep));
+ DCHECK(CanBeTaggedOrCompressedPointer(rep));
AddressingMode addressing_mode;
InstructionOperand inputs[3];
size_t input_count = 0;
@@ -518,7 +551,7 @@ void InstructionSelector::VisitStore(Node* node) {
WriteBarrierKindToRecordWriteMode(write_barrier_kind);
InstructionCode code = kArchStoreWithWriteBarrier;
code |= AddressingModeField::encode(addressing_mode);
- code |= MiscField::encode(static_cast<int>(record_write_mode));
+ code |= RecordWriteModeField::encode(record_write_mode);
Emit(code, 0, nullptr, input_count, inputs);
} else {
ArchOpcode opcode;
@@ -539,14 +572,20 @@ void InstructionSelector::VisitStore(Node* node) {
case MachineRepresentation::kWord32:
opcode = kLoong64St_w;
break;
+ case MachineRepresentation::kWord64:
+ opcode = kLoong64St_d;
+ break;
case MachineRepresentation::kTaggedSigned: // Fall through.
case MachineRepresentation::kTaggedPointer: // Fall through.
case MachineRepresentation::kTagged: // Fall through.
- case MachineRepresentation::kWord64:
- opcode = kLoong64St_d;
+ opcode = kLoong64StoreCompressTagged;
break;
case MachineRepresentation::kCompressedPointer: // Fall through.
case MachineRepresentation::kCompressed: // Fall through.
+#ifdef V8_COMPRESS_POINTERS
+ opcode = kLoong64StoreCompressTagged;
+ break;
+#endif
case MachineRepresentation::kSandboxedPointer: // Fall through.
case MachineRepresentation::kMapWord: // Fall through.
case MachineRepresentation::kNone: // Fall through.
@@ -560,18 +599,25 @@ void InstructionSelector::VisitStore(Node* node) {
CanAddressRelativeToRootsRegister(m.ResolvedValue())) {
ptrdiff_t const delta =
g.GetIntegerConstantValue(index) +
- TurboAssemblerBase::RootRegisterOffsetForExternalReference(
+ MacroAssemblerBase::RootRegisterOffsetForExternalReference(
isolate(), m.ResolvedValue());
// Check that the delta is a 32-bit integer due to the limitations of
// immediate operands.
if (is_int32(delta)) {
Emit(opcode | AddressingModeField::encode(kMode_Root), g.NoOutput(),
- g.UseImmediate(static_cast<int32_t>(delta)), g.UseImmediate(0),
+ g.UseImmediate(static_cast<int32_t>(delta)),
g.UseRegisterOrImmediateZero(value));
return;
}
}
+ if (base != nullptr && base->opcode() == IrOpcode::kLoadRootRegister) {
+ // This will only work if {index} is a constant.
+ Emit(opcode | AddressingModeField::encode(kMode_Root), g.NoOutput(),
+ g.UseImmediate(index), g.UseRegisterOrImmediateZero(value));
+ return;
+ }
+
if (g.CanBeImmediate(index, opcode)) {
Emit(opcode | AddressingModeField::encode(kMode_MRI), g.NoOutput(),
g.UseRegister(base), g.UseImmediate(index),
@@ -804,25 +850,32 @@ void InstructionSelector::VisitWord32Shr(Node* node) {
void InstructionSelector::VisitWord32Sar(Node* node) {
Int32BinopMatcher m(node);
- if (m.left().IsWord32Shl() && CanCover(node, m.left().node())) {
- Int32BinopMatcher mleft(m.left().node());
- if (m.right().HasResolvedValue() && mleft.right().HasResolvedValue()) {
- Loong64OperandGenerator g(this);
- uint32_t sar = m.right().ResolvedValue();
- uint32_t shl = mleft.right().ResolvedValue();
- if ((sar == shl) && (sar == 16)) {
- Emit(kLoong64Ext_w_h, g.DefineAsRegister(node),
- g.UseRegister(mleft.left().node()));
- return;
- } else if ((sar == shl) && (sar == 24)) {
- Emit(kLoong64Ext_w_b, g.DefineAsRegister(node),
- g.UseRegister(mleft.left().node()));
- return;
- } else if ((sar == shl) && (sar == 32)) {
- Emit(kLoong64Sll_w, g.DefineAsRegister(node),
- g.UseRegister(mleft.left().node()), g.TempImmediate(0));
- return;
+ if (CanCover(node, m.left().node())) {
+ Loong64OperandGenerator g(this);
+ if (m.left().IsWord32Shl()) {
+ Int32BinopMatcher mleft(m.left().node());
+ if (m.right().HasResolvedValue() && mleft.right().HasResolvedValue()) {
+ uint32_t sar = m.right().ResolvedValue();
+ uint32_t shl = mleft.right().ResolvedValue();
+ if ((sar == shl) && (sar == 16)) {
+ Emit(kLoong64Ext_w_h, g.DefineAsRegister(node),
+ g.UseRegister(mleft.left().node()));
+ return;
+ } else if ((sar == shl) && (sar == 24)) {
+ Emit(kLoong64Ext_w_b, g.DefineAsRegister(node),
+ g.UseRegister(mleft.left().node()));
+ return;
+ } else if ((sar == shl) && (sar == 32)) {
+ Emit(kLoong64Sll_w, g.DefineAsRegister(node),
+ g.UseRegister(mleft.left().node()), g.TempImmediate(0));
+ return;
+ }
}
+ } else if (m.left().IsTruncateInt64ToInt32()) {
+ Emit(kLoong64Sra_w, g.DefineAsRegister(node),
+ g.UseRegister(m.left().InputAt(0)),
+ g.UseOperand(node->InputAt(1), kLoong64Sra_w));
+ return;
}
}
VisitRRO(this, kLoong64Sra_w, node);
@@ -895,6 +948,21 @@ void InstructionSelector::VisitWord64Shr(Node* node) {
void InstructionSelector::VisitWord64Sar(Node* node) {
if (TryEmitExtendingLoad(this, node, node)) return;
+
+ Int64BinopMatcher m(node);
+ if (m.left().IsChangeInt32ToInt64() && m.right().HasResolvedValue() &&
+ is_uint5(m.right().ResolvedValue()) && CanCover(node, m.left().node())) {
+ if ((m.left().InputAt(0)->opcode() != IrOpcode::kLoad &&
+ m.left().InputAt(0)->opcode() != IrOpcode::kLoadImmutable) ||
+ !CanCover(m.left().node(), m.left().InputAt(0))) {
+ Loong64OperandGenerator g(this);
+ Emit(kLoong64Sra_w, g.DefineAsRegister(node),
+ g.UseRegister(m.left().node()->InputAt(0)),
+ g.UseImmediate(m.right().node()));
+ return;
+ }
+ }
+
VisitRRO(this, kLoong64Sra_d, node);
}
@@ -1394,25 +1462,42 @@ void InstructionSelector::VisitTryTruncateFloat64ToUint32(Node* node) {
}
void InstructionSelector::VisitBitcastWord32ToWord64(Node* node) {
- UNIMPLEMENTED();
+ DCHECK(SmiValuesAre31Bits());
+ DCHECK(COMPRESS_POINTERS_BOOL);
+ EmitIdentity(node);
}
void InstructionSelector::VisitChangeInt32ToInt64(Node* node) {
- // On LoongArch64, int32 values should all be sign-extended to 64-bit, so
- // no need to sign-extend them here.
- // But when call to a host function in simulator, if the function return an
- // int32 value, the simulator do not sign-extend to int64, because in
- // simulator we do not know the function whether return an int32 or int64.
-#ifdef USE_SIMULATOR
Node* value = node->InputAt(0);
- if (value->opcode() == IrOpcode::kCall) {
+ if ((value->opcode() == IrOpcode::kLoad ||
+ value->opcode() == IrOpcode::kLoadImmutable) &&
+ CanCover(node, value)) {
+ // Generate sign-extending load.
+ LoadRepresentation load_rep = LoadRepresentationOf(value->op());
+ InstructionCode opcode = kArchNop;
+ switch (load_rep.representation()) {
+ case MachineRepresentation::kBit: // Fall through.
+ case MachineRepresentation::kWord8:
+ opcode = load_rep.IsUnsigned() ? kLoong64Ld_bu : kLoong64Ld_b;
+ break;
+ case MachineRepresentation::kWord16:
+ opcode = load_rep.IsUnsigned() ? kLoong64Ld_hu : kLoong64Ld_h;
+ break;
+ case MachineRepresentation::kTaggedSigned:
+ case MachineRepresentation::kTagged:
+ case MachineRepresentation::kWord32:
+ opcode = kLoong64Ld_w;
+ break;
+ default:
+ UNREACHABLE();
+ }
+ EmitLoad(this, value, opcode, node);
+ } else {
Loong64OperandGenerator g(this);
Emit(kLoong64Sll_w, g.DefineAsRegister(node), g.UseRegister(value),
g.TempImmediate(0));
return;
}
-#endif
- EmitIdentity(node);
}
bool InstructionSelector::ZeroExtendsWord32ToWord64NoPhis(Node* node) {
@@ -1803,6 +1888,16 @@ namespace {
static void VisitCompare(InstructionSelector* selector, InstructionCode opcode,
InstructionOperand left, InstructionOperand right,
FlagsContinuation* cont) {
+#ifdef V8_COMPRESS_POINTERS
+ if (opcode == kLoong64Cmp32) {
+ Loong64OperandGenerator g(selector);
+ InstructionOperand temps[] = {g.TempRegister(), g.TempRegister()};
+ InstructionOperand inputs[] = {left, right};
+ selector->EmitWithContinuation(opcode, 0, nullptr, arraysize(inputs),
+ inputs, arraysize(temps), temps, cont);
+ return;
+ }
+#endif
selector->EmitWithContinuation(opcode, left, right, cont);
}
@@ -1857,23 +1952,23 @@ void VisitWordCompare(InstructionSelector* selector, Node* node,
case kEqual:
case kNotEqual:
if (cont->IsSet()) {
- VisitCompare(selector, opcode, g.UseRegister(left),
+ VisitCompare(selector, opcode, g.UseUniqueRegister(left),
g.UseImmediate(right), cont);
} else {
- VisitCompare(selector, opcode, g.UseRegister(left),
- g.UseRegister(right), cont);
+ VisitCompare(selector, opcode, g.UseUniqueRegister(left),
+ g.UseImmediate(right), cont);
}
break;
case kSignedLessThan:
case kSignedGreaterThanOrEqual:
case kUnsignedLessThan:
case kUnsignedGreaterThanOrEqual:
- VisitCompare(selector, opcode, g.UseRegister(left),
+ VisitCompare(selector, opcode, g.UseUniqueRegister(left),
g.UseImmediate(right), cont);
break;
default:
- VisitCompare(selector, opcode, g.UseRegister(left),
- g.UseRegister(right), cont);
+ VisitCompare(selector, opcode, g.UseUniqueRegister(left),
+ g.UseUniqueRegister(right), cont);
}
}
} else if (g.CanBeImmediate(left, opcode)) {
@@ -1886,28 +1981,28 @@ void VisitWordCompare(InstructionSelector* selector, Node* node,
case kEqual:
case kNotEqual:
if (cont->IsSet()) {
- VisitCompare(selector, opcode, g.UseRegister(right),
+ VisitCompare(selector, opcode, g.UseUniqueRegister(right),
g.UseImmediate(left), cont);
} else {
- VisitCompare(selector, opcode, g.UseRegister(right),
- g.UseRegister(left), cont);
+ VisitCompare(selector, opcode, g.UseUniqueRegister(right),
+ g.UseImmediate(left), cont);
}
break;
case kSignedLessThan:
case kSignedGreaterThanOrEqual:
case kUnsignedLessThan:
case kUnsignedGreaterThanOrEqual:
- VisitCompare(selector, opcode, g.UseRegister(right),
+ VisitCompare(selector, opcode, g.UseUniqueRegister(right),
g.UseImmediate(left), cont);
break;
default:
- VisitCompare(selector, opcode, g.UseRegister(right),
- g.UseRegister(left), cont);
+ VisitCompare(selector, opcode, g.UseUniqueRegister(right),
+ g.UseUniqueRegister(left), cont);
}
}
} else {
- VisitCompare(selector, opcode, g.UseRegister(left), g.UseRegister(right),
- cont);
+ VisitCompare(selector, opcode, g.UseUniqueRegister(left),
+ g.UseUniqueRegister(right), cont);
}
}
@@ -1947,23 +2042,16 @@ void VisitWord32Compare(InstructionSelector* selector, Node* node,
// so we need do a full word32 compare in this case.
if (node->InputAt(0)->opcode() == IrOpcode::kCall ||
node->InputAt(1)->opcode() == IrOpcode::kCall) {
- VisitFullWord32Compare(selector, node, kLoong64Cmp, cont);
+ VisitFullWord32Compare(selector, node, kLoong64Cmp64, cont);
return;
}
#endif
- VisitOptimizedWord32Compare(selector, node, kLoong64Cmp, cont);
+ VisitOptimizedWord32Compare(selector, node, kLoong64Cmp32, cont);
}
void VisitWord64Compare(InstructionSelector* selector, Node* node,
FlagsContinuation* cont) {
- VisitWordCompare(selector, node, kLoong64Cmp, cont, false);
-}
-
-void EmitWordCompareZero(InstructionSelector* selector, Node* value,
- FlagsContinuation* cont) {
- Loong64OperandGenerator g(selector);
- selector->EmitWithContinuation(kLoong64Cmp, g.UseRegister(value),
- g.TempImmediate(0), cont);
+ VisitWordCompare(selector, node, kLoong64Cmp64, cont, false);
}
void VisitAtomicLoad(InstructionSelector* selector, Node* node,
@@ -1992,12 +2080,26 @@ void VisitAtomicLoad(InstructionSelector* selector, Node* node,
case MachineRepresentation::kWord64:
code = kLoong64Word64AtomicLoadUint64;
break;
+#ifdef V8_COMPRESS_POINTERS
+ case MachineRepresentation::kTaggedSigned:
+ code = kLoong64AtomicLoadDecompressTaggedSigned;
+ break;
+ case MachineRepresentation::kTaggedPointer:
+ case MachineRepresentation::kTagged:
+ code = kLoong64AtomicLoadDecompressTagged;
+ break;
+#else
case MachineRepresentation::kTaggedSigned: // Fall through.
case MachineRepresentation::kTaggedPointer: // Fall through.
case MachineRepresentation::kTagged:
- DCHECK_EQ(kTaggedSize, 8);
code = kLoong64Word64AtomicLoadUint64;
break;
+#endif
+ case MachineRepresentation::kCompressedPointer: // Fall through.
+ case MachineRepresentation::kCompressed:
+ DCHECK(COMPRESS_POINTERS_BOOL);
+ code = kLoong64Word64AtomicLoadUint32;
+ break;
default:
UNREACHABLE();
}
@@ -2045,7 +2147,7 @@ void VisitAtomicStore(InstructionSelector* selector, Node* node,
RecordWriteMode record_write_mode =
WriteBarrierKindToRecordWriteMode(write_barrier_kind);
code = kArchAtomicStoreWithWriteBarrier;
- code |= MiscField::encode(static_cast<int>(record_write_mode));
+ code |= RecordWriteModeField::encode(record_write_mode);
} else {
switch (rep) {
case MachineRepresentation::kWord8:
@@ -2064,8 +2166,14 @@ void VisitAtomicStore(InstructionSelector* selector, Node* node,
case MachineRepresentation::kTaggedSigned: // Fall through.
case MachineRepresentation::kTaggedPointer: // Fall through.
case MachineRepresentation::kTagged:
- DCHECK_EQ(kTaggedSize, 8);
- code = kLoong64StoreCompressTagged;
+ DCHECK_EQ(AtomicWidthSize(width), kTaggedSize);
+ code = kLoong64AtomicStoreCompressTagged;
+ break;
+ case MachineRepresentation::kCompressedPointer: // Fall through.
+ case MachineRepresentation::kCompressed:
+ DCHECK(COMPRESS_POINTERS_BOOL);
+ DCHECK_EQ(width, AtomicWidth::kWord32);
+ code = kLoong64AtomicStoreCompressTagged;
break;
default:
UNREACHABLE();
@@ -2199,22 +2307,13 @@ void InstructionSelector::VisitStackPointerGreaterThan(
// Shared routine for word comparisons against zero.
void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
FlagsContinuation* cont) {
+ Loong64OperandGenerator g(this);
// Try to combine with comparisons against 0 by simply inverting the branch.
- while (CanCover(user, value)) {
- if (value->opcode() == IrOpcode::kWord32Equal) {
- Int32BinopMatcher m(value);
- if (!m.right().Is(0)) break;
- user = value;
- value = m.left().node();
- } else if (value->opcode() == IrOpcode::kWord64Equal) {
- Int64BinopMatcher m(value);
- if (!m.right().Is(0)) break;
- user = value;
- value = m.left().node();
- } else {
- break;
- }
-
+ while (value->opcode() == IrOpcode::kWord32Equal && CanCover(user, value)) {
+ Int32BinopMatcher m(value);
+ if (!m.right().Is(0)) break;
+ user = value;
+ value = m.left().node();
cont->Negate();
}
@@ -2317,7 +2416,8 @@ void InstructionSelector::VisitWordCompareZero(Node* user, Node* value,
}
// Continuation could not be combined with a compare, emit compare against 0.
- EmitWordCompareZero(this, value, cont);
+ VisitCompare(this, kLoong64Cmp32, g.UseRegister(value), g.TempImmediate(0),
+ cont);
}
void InstructionSelector::VisitSwitch(Node* node, const SwitchInfo& sw) {
@@ -2439,11 +2539,6 @@ void InstructionSelector::VisitInt64SubWithOverflow(Node* node) {
void InstructionSelector::VisitWord64Equal(Node* const node) {
FlagsContinuation cont = FlagsContinuation::ForSet(kEqual, node);
- Int64BinopMatcher m(node);
- if (m.right().Is(0)) {
- return VisitWordCompareZero(m.node(), m.left().node(), &cont);
- }
-
VisitWord64Compare(this, node, &cont);
}
@@ -2967,6 +3062,21 @@ void InstructionSelector::VisitI64x2RelaxedLaneSelect(Node* node) {
VisitS128Select(node);
}
+#define SIMD_UNIMP_OP_LIST(V) \
+ V(F64x2Qfma) \
+ V(F64x2Qfms) \
+ V(F32x4Qfma) \
+ V(F32x4Qfms) \
+ V(I16x8DotI8x16I7x16S) \
+ V(I32x4DotI8x16I7x16AddS)
+
+#define SIMD_VISIT_UNIMP_OP(Name) \
+ void InstructionSelector::Visit##Name(Node* node) { UNIMPLEMENTED(); }
+SIMD_UNIMP_OP_LIST(SIMD_VISIT_UNIMP_OP)
+
+#undef SIMD_VISIT_UNIMP_OP
+#undef SIMD_UNIMP_OP_LIST
+
#if V8_ENABLE_WEBASSEMBLY
namespace {