Diffstat (limited to 'deps/v8/src/codegen/x64/assembler-x64.cc')
-rw-r--r--  deps/v8/src/codegen/x64/assembler-x64.cc  117
1 file changed, 97 insertions(+), 20 deletions(-)
diff --git a/deps/v8/src/codegen/x64/assembler-x64.cc b/deps/v8/src/codegen/x64/assembler-x64.cc
index c1e2ec9808..5327745a02 100644
--- a/deps/v8/src/codegen/x64/assembler-x64.cc
+++ b/deps/v8/src/codegen/x64/assembler-x64.cc
@@ -246,6 +246,9 @@ bool ConstPool::AddSharedEntry(uint64_t data, int offset) {
bool ConstPool::TryRecordEntry(intptr_t data, RelocInfo::Mode mode) {
if (!FLAG_partial_constant_pool) return false;
+ DCHECK_WITH_MSG(
+ FLAG_text_is_readable,
+ "The partial constant pool requires a readable .text section");
if (!RelocInfo::IsShareableRelocMode(mode)) return false;
// Currently, partial constant pool only handles the following kinds of
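
The DCHECK added above encodes a real constraint: on x64 the partial constant pool shares a 64-bit constant by rewriting later uses into RIP-relative loads that read the first embedded copy back out of the instruction stream, so the .text section must be readable. A minimal sketch of the resulting pattern, with illustrative registers (not part of this patch):

    // First use embeds the constant inline:
    //   movq rax, 0x0123456789ABCDEF
    // A later shared use is patched into a data read from .text:
    //   movq rbx, [rip + disp32]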
@@ -332,6 +335,15 @@ Assembler::Assembler(const AssemblerOptions& options,
void Assembler::GetCode(Isolate* isolate, CodeDesc* desc,
SafepointTableBuilder* safepoint_table_builder,
int handler_table_offset) {
+ // As a crutch to avoid having to add manual Align calls wherever we use a
+ // raw workflow to create Code objects (mostly in tests), add another Align
+ // call here. It does no harm - the end of the Code object is aligned to the
+ // (larger) kCodeAlignment anyway.
+ // TODO(jgruber): Consider moving responsibility for proper alignment to
+ // metadata table builders (safepoint, handler, constant pool, code
+ // comments).
+ DataAlign(Code::kMetadataAlignment);
+
PatchConstPool();
DCHECK(constpool_.IsEmpty());
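
For context, a minimal sketch of what DataAlign does here, assuming the usual power-of-two alignment contract (the actual helper is defined elsewhere in this file and may differ in detail):

    void Assembler::DataAlign(int m) {
      DCHECK(m >= 2 && base::bits::IsPowerOfTwo(m));
      while ((pc_offset() & (m - 1)) != 0) {
        db(0xCC);  // pad with int3 bytes until pc_offset() is m-aligned
      }
    }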
@@ -1207,13 +1219,6 @@ void Assembler::decb(Operand dst) {
emit_operand(1, dst);
}
-void Assembler::enter(Immediate size) {
- EnsureSpace ensure_space(this);
- emit(0xC8);
- emitw(size.value_); // 16 bit operand, always.
- emit(0);
-}
-
void Assembler::hlt() {
EnsureSpace ensure_space(this);
emit(0xF4);
@@ -2757,8 +2762,16 @@ void Assembler::movdqu(XMMRegister dst, Operand src) {
emit_sse_operand(dst, src);
}
-void Assembler::pinsrw(XMMRegister dst, Register src, int8_t imm8) {
- DCHECK(is_uint8(imm8));
+void Assembler::movdqu(XMMRegister dst, XMMRegister src) {
+ EnsureSpace ensure_space(this);
+ emit(0xF3);
+ emit_rex_64(dst, src);
+ emit(0x0F);
+ emit(0x6F);
+ emit_sse_operand(dst, src);
+}
+
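
The new register-to-register movdqu overload has a fixed five-byte encoding; as a worked example, the emit sequence above for movdqu(xmm1, xmm2) should assemble to:

    // F3            ; unaligned-move prefix
    // 48            ; REX.W from emit_rex_64 (no effect on movdqu semantics)
    // 0F 6F         ; opcode
    // CA            ; ModRM: 0xC0 | (dst.code() << 3) | src.code()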
+void Assembler::pinsrw(XMMRegister dst, Register src, uint8_t imm8) {
EnsureSpace ensure_space(this);
emit(0x66);
emit_optional_rex_32(dst, src);
@@ -2768,8 +2781,7 @@ void Assembler::pinsrw(XMMRegister dst, Register src, int8_t imm8) {
emit(imm8);
}
-void Assembler::pinsrw(XMMRegister dst, Operand src, int8_t imm8) {
- DCHECK(is_uint8(imm8));
+void Assembler::pinsrw(XMMRegister dst, Operand src, uint8_t imm8) {
EnsureSpace ensure_space(this);
emit(0x66);
emit_optional_rex_32(dst, src);
@@ -2791,7 +2803,7 @@ void Assembler::pextrq(Register dst, XMMRegister src, int8_t imm8) {
emit(imm8);
}
-void Assembler::pinsrq(XMMRegister dst, Register src, int8_t imm8) {
+void Assembler::pinsrq(XMMRegister dst, Register src, uint8_t imm8) {
DCHECK(IsEnabled(SSE4_1));
EnsureSpace ensure_space(this);
emit(0x66);
@@ -2803,9 +2815,8 @@ void Assembler::pinsrq(XMMRegister dst, Register src, int8_t imm8) {
emit(imm8);
}
-void Assembler::pinsrq(XMMRegister dst, Operand src, int8_t imm8) {
+void Assembler::pinsrq(XMMRegister dst, Operand src, uint8_t imm8) {
DCHECK(IsEnabled(SSE4_1));
- DCHECK(is_uint8(imm8));
EnsureSpace ensure_space(this);
emit(0x66);
emit_rex_64(dst, src);
@@ -2816,22 +2827,20 @@ void Assembler::pinsrq(XMMRegister dst, Operand src, int8_t imm8) {
emit(imm8);
}
-void Assembler::pinsrd(XMMRegister dst, Register src, int8_t imm8) {
+void Assembler::pinsrd(XMMRegister dst, Register src, uint8_t imm8) {
sse4_instr(dst, src, 0x66, 0x0F, 0x3A, 0x22, imm8);
}
-void Assembler::pinsrd(XMMRegister dst, Operand src, int8_t imm8) {
- DCHECK(is_uint8(imm8));
+void Assembler::pinsrd(XMMRegister dst, Operand src, uint8_t imm8) {
sse4_instr(dst, src, 0x66, 0x0F, 0x3A, 0x22);
emit(imm8);
}
-void Assembler::pinsrb(XMMRegister dst, Register src, int8_t imm8) {
+void Assembler::pinsrb(XMMRegister dst, Register src, uint8_t imm8) {
sse4_instr(dst, src, 0x66, 0x0F, 0x3A, 0x20, imm8);
}
-void Assembler::pinsrb(XMMRegister dst, Operand src, int8_t imm8) {
- DCHECK(is_uint8(imm8));
+void Assembler::pinsrb(XMMRegister dst, Operand src, uint8_t imm8) {
sse4_instr(dst, src, 0x66, 0x0F, 0x3A, 0x20);
emit(imm8);
}
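
Switching the immediate parameter from int8_t to uint8_t is what makes the removed DCHECK(is_uint8(imm8)) checks redundant: the lane index is now unsigned by construction. A hedged MacroAssembler-style usage sketch (operands illustrative):

    // Lane ranges remain the instructions' own:
    // pinsrb 0..15, pinsrw 0..7, pinsrd 0..3, pinsrq 0..1.
    __ pinsrb(xmm0, rax, 15);  // insert the low byte of rax into byte lane 15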
@@ -2990,6 +2999,42 @@ void Assembler::movss(Operand src, XMMRegister dst) {
emit_sse_operand(dst, src);
}
+void Assembler::movlps(XMMRegister dst, Operand src) {
+ DCHECK(!IsEnabled(AVX));
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x12);
+ emit_sse_operand(dst, src);
+}
+
+void Assembler::movlps(Operand src, XMMRegister dst) {
+ DCHECK(!IsEnabled(AVX));
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x13);
+ emit_sse_operand(dst, src);
+}
+
+void Assembler::movhps(XMMRegister dst, Operand src) {
+ DCHECK(!IsEnabled(AVX));
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x16);
+ emit_sse_operand(dst, src);
+}
+
+void Assembler::movhps(Operand src, XMMRegister dst) {
+ DCHECK(!IsEnabled(AVX));
+ EnsureSpace ensure_space(this);
+ emit_optional_rex_32(dst, src);
+ emit(0x0F);
+ emit(0x17);
+ emit_sse_operand(dst, src);
+}
+
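
movlps and movhps move the low and high 64 bits (two packed single-precision floats) of an XMM register to or from memory, leaving the other half of the register unchanged. A hedged usage sketch with illustrative addresses:

    __ movlps(xmm0, Operand(rax, 0));  // floats 0-1 into the low qword
    __ movhps(xmm0, Operand(rax, 8));  // floats 2-3 into the high qword
    __ movlps(Operand(rbx, 0), xmm0);  // store the low qword back to memory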
void Assembler::cmpps(XMMRegister dst, XMMRegister src, int8_t cmp) {
EnsureSpace ensure_space(this);
emit_optional_rex_32(dst, src);
@@ -3463,6 +3508,38 @@ void Assembler::vmovdqu(Operand dst, XMMRegister src) {
emit_sse_operand(src, dst);
}
+void Assembler::vmovlps(XMMRegister dst, XMMRegister src1, Operand src2) {
+ DCHECK(IsEnabled(AVX));
+ EnsureSpace ensure_space(this);
+ emit_vex_prefix(dst, src1, src2, kL128, kNone, k0F, kWIG);
+ emit(0x12);
+ emit_sse_operand(dst, src2);
+}
+
+void Assembler::vmovlps(Operand dst, XMMRegister src) {
+ DCHECK(IsEnabled(AVX));
+ EnsureSpace ensure_space(this);
+ emit_vex_prefix(src, xmm0, dst, kL128, kNone, k0F, kWIG);
+ emit(0x13);
+ emit_sse_operand(src, dst);
+}
+
+void Assembler::vmovhps(XMMRegister dst, XMMRegister src1, Operand src2) {
+ DCHECK(IsEnabled(AVX));
+ EnsureSpace ensure_space(this);
+ emit_vex_prefix(dst, src1, src2, kL128, kNone, k0F, kWIG);
+ emit(0x16);
+ emit_sse_operand(dst, src2);
+}
+
+void Assembler::vmovhps(Operand dst, XMMRegister src) {
+ DCHECK(IsEnabled(AVX));
+ EnsureSpace ensure_space(this);
+ emit_vex_prefix(src, xmm0, dst, kL128, kNone, k0F, kWIG);
+ emit(0x17);
+ emit_sse_operand(src, dst);
+}
+
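
The AVX forms are non-destructive on loads: vmovlps(dst, src1, src2) fills dst's low qword from memory and copies the high qword from src1 (vmovhps is the mirror image). For the store forms VEX.vvvv is unused, which is why xmm0 is passed to emit_vex_prefix as a placeholder above. A hedged usage sketch:

    __ vmovlps(xmm1, xmm2, Operand(rax, 0));  // low qword from memory, high from xmm2
    __ vmovhps(Operand(rax, 8), xmm1);        // store xmm1's high qword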
void Assembler::vinstr(byte op, XMMRegister dst, XMMRegister src1,
XMMRegister src2, SIMDPrefix pp, LeadingOpcode m,
VexW w) {