Diffstat (limited to 'deps/v8/src/x64/macro-assembler-x64.cc')
-rw-r--r--  deps/v8/src/x64/macro-assembler-x64.cc  58
1 file changed, 47 insertions, 11 deletions
diff --git a/deps/v8/src/x64/macro-assembler-x64.cc b/deps/v8/src/x64/macro-assembler-x64.cc
index e4a76270a2..2ee506d7c3 100644
--- a/deps/v8/src/x64/macro-assembler-x64.cc
+++ b/deps/v8/src/x64/macro-assembler-x64.cc
@@ -923,7 +923,7 @@ void MacroAssembler::LoadSmiConstant(Register dst, Smi* source) {
void MacroAssembler::Integer32ToSmi(Register dst, Register src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
if (!dst.is(src)) {
movl(dst, src);
}
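The change repeated throughout this patch is the one above: ASSERT_EQ(0, kSmiTag), a debug-only runtime check, becomes STATIC_ASSERT(kSmiTag == 0), which the compiler verifies in every build mode and which emits no code. A minimal sketch of the difference, using an assumed stand-in constant and plain C++ rather than V8's actual macro machinery:

#include <cassert>

const int kSmiTag = 0;  // assumed: the tag bit of a small integer (smi) is 0

// ASSERT_EQ style: evaluated on every call, and only in debug builds.
void RuntimeStyleCheck() {
  assert(kSmiTag == 0);
}

// STATIC_ASSERT style: verified once by the compiler in every build mode,
// emits no code, and fails the build rather than a test run if the
// invariant is ever broken.
static_assert(kSmiTag == 0, "smi tag must be zero for the testb trick to work");

int main() {
  RuntimeStyleCheck();
  return 0;
}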
@@ -961,7 +961,7 @@ void MacroAssembler::Integer64PlusConstantToSmi(Register dst,
void MacroAssembler::SmiToInteger32(Register dst, Register src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
if (!dst.is(src)) {
movq(dst, src);
}
@@ -975,7 +975,7 @@ void MacroAssembler::SmiToInteger32(Register dst, const Operand& src) {
void MacroAssembler::SmiToInteger64(Register dst, Register src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
if (!dst.is(src)) {
movq(dst, src);
}
@@ -1111,21 +1111,21 @@ void MacroAssembler::SmiOrIfSmis(Register dst, Register src1, Register src2,
Condition MacroAssembler::CheckSmi(Register src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
testb(src, Immediate(kSmiTagMask));
return zero;
}
Condition MacroAssembler::CheckSmi(const Operand& src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
testb(src, Immediate(kSmiTagMask));
return zero;
}
Condition MacroAssembler::CheckNonNegativeSmi(Register src) {
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
// Test that both bits of the mask 0x8000000000000001 are zero.
movq(kScratchRegister, src);
rol(kScratchRegister, Immediate(1));
@@ -1138,7 +1138,7 @@ Condition MacroAssembler::CheckBothSmi(Register first, Register second) {
if (first.is(second)) {
return CheckSmi(first);
}
- ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
+ STATIC_ASSERT(kSmiTag == 0 && kHeapObjectTag == 1 && kHeapObjectTagMask == 3);
leal(kScratchRegister, Operand(first, second, times_1, 0));
testb(kScratchRegister, Immediate(0x03));
return zero;
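The Check* helpers above all lean on the tagging invariants that the new STATIC_ASSERTs pin down: a smi has a zero tag bit (and, in the x64 encoding, an all-zero lower half), while a heap object pointer ends in the bits 01. A plain-C++ sketch of the tests they emit, with the constants and the smi encoding treated as assumptions for illustration:

#include <cstdint>
#include <cstdio>

const uint64_t kSmiTagMask = 1;         // assumed: bit 0 is the smi tag
const uint64_t kHeapObjectTagMask = 3;  // assumed: heap pointers end in 01

// CheckSmi: testb(src, kSmiTagMask) sets the zero flag exactly when the
// tag bit is clear.
bool IsSmi(uint64_t value) {
  return (value & kSmiTagMask) == 0;
}

// CheckBothSmi: lea adds first and second; smis end in 00 and heap objects
// in 01, so the sum's low two bits are 00 only when both operands are smis
// (00+01 = 01, 01+01 = 10).
bool AreBothSmi(uint64_t first, uint64_t second) {
  return ((first + second) & kHeapObjectTagMask) == 0;
}

// CheckNonNegativeSmi: rotating left by one moves the sign bit down next to
// the tag bit, so a single test of the low two bits rejects non-smis and
// negative smis at once.
bool IsNonNegativeSmi(uint64_t value) {
  uint64_t rotated = (value << 1) | (value >> 63);
  return (rotated & 3) == 0;
}

int main() {
  uint64_t smi_five = uint64_t{5} << 32;  // assumed x64 encoding: payload in the upper half
  std::printf("%d %d %d\n", IsSmi(smi_five), AreBothSmi(smi_five, smi_five),
              IsNonNegativeSmi(smi_five));
  return 0;
}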
@@ -1294,7 +1294,7 @@ void MacroAssembler::SmiTryAddConstant(Register dst,
Label::Distance near_jump) {
// Does not assume that src is a smi.
ASSERT_EQ(static_cast<int>(1), static_cast<int>(kSmiTagMask));
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
ASSERT(!dst.is(kScratchRegister));
ASSERT(!src.is(kScratchRegister));
@@ -1998,7 +1998,7 @@ void MacroAssembler::SelectNonSmi(Register dst,
Check(not_both_smis, "Both registers were smis in SelectNonSmi.");
}
#endif
- ASSERT_EQ(0, kSmiTag);
+ STATIC_ASSERT(kSmiTag == 0);
ASSERT_EQ(0, Smi::FromInt(0));
movl(kScratchRegister, Immediate(kSmiTagMask));
and_(kScratchRegister, src1);
@@ -2699,7 +2699,7 @@ Condition MacroAssembler::IsObjectStringType(Register heap_object,
Register instance_type) {
movq(map, FieldOperand(heap_object, HeapObject::kMapOffset));
movzxbl(instance_type, FieldOperand(map, Map::kInstanceTypeOffset));
- ASSERT(kNotStringTag != 0);
+ STATIC_ASSERT(kNotStringTag != 0);
testb(instance_type, Immediate(kIsNotStringMask));
return zero;
}
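IsObjectStringType works the same way: the instance type is AND-ed with kIsNotStringMask and the zero flag signals a string, which is only meaningful because kNotStringTag itself is non-zero, exactly the invariant the new STATIC_ASSERT records. A small sketch with assumed bit values:

#include <cstdint>

const uint8_t kIsNotStringMask = 0x80;  // assumed bit layout of the instance type
const uint8_t kNotStringTag = 0x80;     // assumed; the point is that it is non-zero

static_assert(kNotStringTag != 0,
              "a zero tag would make the zero flag after testb ambiguous");

// testb(instance_type, kIsNotStringMask) sets the zero flag exactly for
// string instance types, matching the `return zero` above.
bool IsStringInstanceType(uint8_t instance_type) {
  return (instance_type & kIsNotStringMask) == 0;
}

int main() { return IsStringInstanceType(0x07) ? 0 : 1; }  // 0x07: an assumed string type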
@@ -3623,7 +3623,7 @@ void MacroAssembler::AllocateAsciiString(Register result,
}
-void MacroAssembler::AllocateConsString(Register result,
+void MacroAssembler::AllocateTwoByteConsString(Register result,
Register scratch1,
Register scratch2,
Label* gc_required) {
@@ -3659,6 +3659,42 @@ void MacroAssembler::AllocateAsciiConsString(Register result,
}
+void MacroAssembler::AllocateTwoByteSlicedString(Register result,
+ Register scratch1,
+ Register scratch2,
+ Label* gc_required) {
+ // Allocate a two-byte sliced string object in new space.
+ AllocateInNewSpace(SlicedString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ TAG_OBJECT);
+
+ // Set the map. The other fields are left uninitialized.
+ LoadRoot(kScratchRegister, Heap::kSlicedStringMapRootIndex);
+ movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
+}
+
+
+void MacroAssembler::AllocateAsciiSlicedString(Register result,
+ Register scratch1,
+ Register scratch2,
+ Label* gc_required) {
+ // Allocate an ASCII sliced string object in new space.
+ AllocateInNewSpace(SlicedString::kSize,
+ result,
+ scratch1,
+ scratch2,
+ gc_required,
+ TAG_OBJECT);
+
+ // Set the map. The other fields are left uninitialized.
+ LoadRoot(kScratchRegister, Heap::kSlicedAsciiStringMapRootIndex);
+ movq(FieldOperand(result, HeapObject::kMapOffset), kScratchRegister);
+}
+
+
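The two new helpers, AllocateTwoByteSlicedString and AllocateAsciiSlicedString, follow the same pattern as the cons-string allocators: reserve SlicedString::kSize bytes in new space, tag the result, and store only the map, leaving the remaining fields (parent, offset, length, hash) for the caller. A rough C++ model of that pattern, with the size, offset, and bump-pointer allocator all illustrative assumptions rather than V8's actual implementation:

#include <cstdint>
#include <cstddef>

const std::size_t kSlicedStringSize = 40;  // assumed stand-in for SlicedString::kSize
const std::size_t kMapOffset = 0;          // assumed stand-in for HeapObject::kMapOffset
const uint64_t kHeapObjectTag = 1;         // low bits 01 mark a tagged heap pointer

struct NewSpace {
  uint8_t* top;
  uint8_t* limit;
};

// Returns a tagged pointer, or 0 to signal the gc_required path.
uint64_t AllocateSlicedString(NewSpace* space, uint64_t sliced_string_map) {
  if (space->top + kSlicedStringSize > space->limit) return 0;  // would not fit: gc_required
  uint8_t* result = space->top;
  space->top += kSlicedStringSize;  // bump the allocation pointer (AllocateInNewSpace)
  // Set the map. The other fields are left uninitialized, exactly as in the patch.
  *reinterpret_cast<uint64_t*>(result + kMapOffset) = sliced_string_map;
  return reinterpret_cast<uint64_t>(result) + kHeapObjectTag;  // TAG_OBJECT
}

int main() {
  alignas(8) uint8_t arena[256];
  NewSpace space = { arena, arena + sizeof(arena) };
  return AllocateSlicedString(&space, 0x1234) != 0 ? 0 : 1;
}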
// Copy memory, byte-by-byte, from source to destination. Not optimized for
// long or aligned copies. The contents of scratch and length are destroyed.
// Destination is incremented by length, source, length and scratch are