Diffstat (limited to 'deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h')
-rw-r--r--  deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h  168
1 file changed, 151 insertions, 17 deletions
diff --git a/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h b/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h
index bedee1a939..10d574301e 100644
--- a/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h
+++ b/deps/v8/src/wasm/baseline/ppc/liftoff-assembler-ppc.h
@@ -45,6 +45,47 @@ inline MemOperand GetHalfStackSlot(int offset, RegPairHalf half) {
return MemOperand(fp, -kInstanceOffset - offset + half_offset);
}
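+// Maps a LiftoffCondition onto the PPC condition code used by b(cond, ...).
+// Signed and unsigned variants share the same condition bit; the distinction
+// is made by the compare instruction selected via UseSignedOp() below.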
+inline constexpr Condition ToCondition(LiftoffCondition liftoff_cond) {
+ switch (liftoff_cond) {
+ case kEqual:
+ return eq;
+ case kUnequal:
+ return ne;
+ case kSignedLessThan:
+ case kUnsignedLessThan:
+ return lt;
+ case kSignedLessEqual:
+ case kUnsignedLessEqual:
+ return le;
+ case kSignedGreaterEqual:
+ case kUnsignedGreaterEqual:
+ return ge;
+ case kSignedGreaterThan:
+ case kUnsignedGreaterThan:
+ return gt;
+ }
+}
+
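+// True when the condition calls for an arithmetic (signed) compare such as
+// cmpw/cmp; unsigned conditions use the logical compares cmplw/cmpl instead.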
+inline constexpr bool UseSignedOp(LiftoffCondition liftoff_cond) {
+ switch (liftoff_cond) {
+ case kEqual:
+ case kUnequal:
+ case kSignedLessThan:
+ case kSignedLessEqual:
+ case kSignedGreaterThan:
+ case kSignedGreaterEqual:
+ return true;
+ case kUnsignedLessThan:
+ case kUnsignedLessEqual:
+ case kUnsignedGreaterThan:
+ case kUnsignedGreaterEqual:
+ return false;
+ default:
+ UNREACHABLE();
+ }
+ return false;
+}
+
} // namespace liftoff
int LiftoffAssembler::PrepareStackFrame() {
@@ -87,7 +128,30 @@ bool LiftoffAssembler::NeedsAlignment(ValueKind kind) {
void LiftoffAssembler::LoadConstant(LiftoffRegister reg, WasmValue value,
RelocInfo::Mode rmode) {
- bailout(kUnsupportedArchitecture, "LoadConstant");
+ switch (value.type().kind()) {
+ case kI32:
+ mov(reg.gp(), Operand(value.to_i32(), rmode));
+ break;
+ case kI64:
+ mov(reg.gp(), Operand(value.to_i64(), rmode));
+ break;
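+    // For FP constants, move the raw bit pattern into a scratch GPR and then
+    // transfer it into the target FP register.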
+ case kF32: {
+ UseScratchRegisterScope temps(this);
+ Register scratch = temps.Acquire();
+      mov(scratch, Operand(value.to_f32_boxed().get_bits()));
+ MovIntToFloat(reg.fp(), scratch);
+ break;
+ }
+ case kF64: {
+ UseScratchRegisterScope temps(this);
+ Register scratch = temps.Acquire();
+      mov(scratch, Operand(value.to_f64_boxed().get_bits()));
+ MovInt64ToDouble(reg.fp(), scratch);
+ break;
+ }
+ default:
+ UNREACHABLE();
+ }
}
void LiftoffAssembler::LoadInstanceFromFrame(Register dst) {
@@ -109,6 +173,8 @@ void LiftoffAssembler::SpillInstance(Register instance) {
bailout(kUnsupportedArchitecture, "SpillInstance");
}
+void LiftoffAssembler::ResetOSRTarget() {}
+
void LiftoffAssembler::FillInstanceInto(Register dst) {
bailout(kUnsupportedArchitecture, "FillInstanceInto");
}
@@ -137,8 +203,7 @@ void LiftoffAssembler::StoreTaggedPointer(Register dst_addr,
void LiftoffAssembler::Load(LiftoffRegister dst, Register src_addr,
Register offset_reg, uintptr_t offset_imm,
LoadType type, LiftoffRegList pinned,
- uint32_t* protected_load_pc, bool is_load_mem,
- bool i64_offset) {
+ uint32_t* protected_load_pc, bool is_load_mem) {
bailout(kUnsupportedArchitecture, "Load");
}
@@ -525,56 +590,123 @@ void LiftoffAssembler::emit_i64_signextend_i32(LiftoffRegister dst,
bailout(kUnsupportedArchitecture, "emit_i64_signextend_i32");
}
-void LiftoffAssembler::emit_jump(Label* label) {
- bailout(kUnsupportedArchitecture, "emit_jump");
-}
+void LiftoffAssembler::emit_jump(Label* label) { b(al, label); }
-void LiftoffAssembler::emit_jump(Register target) {
- bailout(kUnsupportedArchitecture, "emit_jump");
-}
+void LiftoffAssembler::emit_jump(Register target) { Jump(target); }
void LiftoffAssembler::emit_cond_jump(LiftoffCondition liftoff_cond,
Label* label, ValueKind kind,
Register lhs, Register rhs) {
- bailout(kUnsupportedArchitecture, "emit_cond_jump");
+ Condition cond = liftoff::ToCondition(liftoff_cond);
+ bool use_signed = liftoff::UseSignedOp(liftoff_cond);
+
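+  // Pick the compare that matches the operand width and signedness; rhs ==
+  // no_reg means "compare lhs against zero", which is only requested for
+  // signed i32 conditions.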
+ if (rhs != no_reg) {
+ switch (kind) {
+ case kI32:
+ if (use_signed) {
+ cmpw(lhs, rhs);
+ } else {
+ cmplw(lhs, rhs);
+ }
+ break;
+ case kRef:
+ case kOptRef:
+ case kRtt:
+ case kRttWithDepth:
+ DCHECK(liftoff_cond == kEqual || liftoff_cond == kUnequal);
+ V8_FALLTHROUGH;
+ case kI64:
+ if (use_signed) {
+ cmp(lhs, rhs);
+ } else {
+ cmpl(lhs, rhs);
+ }
+ break;
+ default:
+ UNREACHABLE();
+ }
+ } else {
+ DCHECK_EQ(kind, kI32);
+ CHECK(use_signed);
+ cmpwi(lhs, Operand::Zero());
+ }
+
+ b(cond, label);
}
void LiftoffAssembler::emit_i32_cond_jumpi(LiftoffCondition liftoff_cond,
Label* label, Register lhs,
int32_t imm) {
- bailout(kUnsupportedArchitecture, "emit_i32_cond_jumpi");
+ Condition cond = liftoff::ToCondition(liftoff_cond);
+ Cmpwi(lhs, Operand(imm), r0);
+ b(cond, label);
}
void LiftoffAssembler::emit_i32_eqz(Register dst, Register src) {
- bailout(kUnsupportedArchitecture, "emit_i32_eqz");
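+  // dst = (src == 0): load 1, then skip the reset to 0 only when src is zero.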
+ Label done;
+ cmpwi(src, Operand(0));
+ mov(dst, Operand(1));
+ beq(&done);
+ mov(dst, Operand::Zero());
+ bind(&done);
}
void LiftoffAssembler::emit_i32_set_cond(LiftoffCondition liftoff_cond,
Register dst, Register lhs,
Register rhs) {
- bailout(kUnsupportedArchitecture, "emit_i32_set_cond");
+ bool use_signed = liftoff::UseSignedOp(liftoff_cond);
+ if (use_signed) {
+ cmpw(lhs, rhs);
+ } else {
+ cmplw(lhs, rhs);
+ }
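+  // Same branch-over pattern as emit_i32_eqz: default to 1, reset to 0 when
+  // the condition does not hold.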
+ Label done;
+ mov(dst, Operand(1));
+ b(liftoff::ToCondition(liftoff_cond), &done);
+ mov(dst, Operand::Zero());
+ bind(&done);
}
void LiftoffAssembler::emit_i64_eqz(Register dst, LiftoffRegister src) {
- bailout(kUnsupportedArchitecture, "emit_i64_eqz");
+ Label done;
+ cmpi(src.gp(), Operand(0));
+ mov(dst, Operand(1));
+ beq(&done);
+ mov(dst, Operand::Zero());
+ bind(&done);
}
void LiftoffAssembler::emit_i64_set_cond(LiftoffCondition liftoff_cond,
Register dst, LiftoffRegister lhs,
LiftoffRegister rhs) {
- bailout(kUnsupportedArchitecture, "emit_i64_set_cond");
+ bool use_signed = liftoff::UseSignedOp(liftoff_cond);
+ if (use_signed) {
+ cmp(lhs.gp(), rhs.gp());
+ } else {
+ cmpl(lhs.gp(), rhs.gp());
+ }
+ Label done;
+ mov(dst, Operand(1));
+ b(liftoff::ToCondition(liftoff_cond), &done);
+ mov(dst, Operand::Zero());
+ bind(&done);
}
void LiftoffAssembler::emit_f32_set_cond(LiftoffCondition liftoff_cond,
Register dst, DoubleRegister lhs,
DoubleRegister rhs) {
- bailout(kUnsupportedArchitecture, "emit_f32_set_cond");
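+  // fcmpu compares the full FP registers, so the same sequence works for both
+  // f32 and f64; emit_f64_set_cond below simply forwards here.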
+ fcmpu(lhs, rhs);
+ Label done;
+ mov(dst, Operand(1));
+ b(liftoff::ToCondition(liftoff_cond), &done);
+ mov(dst, Operand::Zero());
+ bind(&done);
}
void LiftoffAssembler::emit_f64_set_cond(LiftoffCondition liftoff_cond,
Register dst, DoubleRegister lhs,
DoubleRegister rhs) {
- bailout(kUnsupportedArchitecture, "emit_f64_set_cond");
+ emit_f32_set_cond(liftoff_cond, dst, lhs, rhs);
}
bool LiftoffAssembler::emit_select(LiftoffRegister dst, Register condition,
@@ -1802,6 +1934,8 @@ void LiftoffAssembler::DeallocateStackSlot(uint32_t size) {
bailout(kUnsupportedArchitecture, "DeallocateStackSlot");
}
+void LiftoffAssembler::MaybeOSR() {}
+
void LiftoffStackSlots::Construct(int param_slots) {
asm_->bailout(kUnsupportedArchitecture, "LiftoffStackSlots::Construct");
}