author    Ben Noordhuis <info@bnoordhuis.nl>  2013-08-10 16:27:43 +0200
committer Ben Noordhuis <info@bnoordhuis.nl>  2013-08-10 16:27:43 +0200
commit    f69be329f0d78f19e71ac9e75d6e4ee816e13c97 (patch)
tree      022e53d1aff74dbe50f3984c154461bf6c19efb0 /deps/v8/src/mips
parent    39aa894035f9e3b58e04ce1a2b598e496e1f6bd6 (diff)
download  node-new-f69be329f0d78f19e71ac9e75d6e4ee816e13c97.tar.gz
v8: upgrade v8 to 3.20.14.1
Diffstat (limited to 'deps/v8/src/mips')
-rw-r--r--  deps/v8/src/mips/assembler-mips.h             |   5
-rw-r--r--  deps/v8/src/mips/builtins-mips.cc             |  20
-rw-r--r--  deps/v8/src/mips/code-stubs-mips.cc           |  58
-rw-r--r--  deps/v8/src/mips/codegen-mips.cc              |   4
-rw-r--r--  deps/v8/src/mips/debug-mips.cc                |   7
-rw-r--r--  deps/v8/src/mips/frames-mips.h                |   5
-rw-r--r--  deps/v8/src/mips/full-codegen-mips.cc         |  58
-rw-r--r--  deps/v8/src/mips/lithium-codegen-mips.cc      | 174
-rw-r--r--  deps/v8/src/mips/lithium-codegen-mips.h       |   6
-rw-r--r--  deps/v8/src/mips/lithium-gap-resolver-mips.cc |   2
-rw-r--r--  deps/v8/src/mips/lithium-mips.cc              |  86
-rw-r--r--  deps/v8/src/mips/lithium-mips.h               |  70
-rw-r--r--  deps/v8/src/mips/macro-assembler-mips.cc      |  63
-rw-r--r--  deps/v8/src/mips/macro-assembler-mips.h       |  12
14 files changed, 206 insertions(+), 364 deletions(-)
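
The dominant change in this directory is mechanical: every string literal previously passed to Assert(), Check(), and Abort() becomes a value of the BailoutReason enum, with the text looked up via GetBailoutReason() only when an abort actually fires. A minimal sketch of that pattern (the enum values shown are ones that appear in the hunks below; the table layout itself is illustrative, since in V8 the enum and the message strings are generated together):

    // Sketch only -- in V8 the enum and the message table are kept in
    // sync by macro generation, not written out by hand like this.
    enum BailoutReason {
      kUnexpectedStringFunction,              // used in builtins-mips.cc
      kUnexpectedInitialMapForArrayFunction,  // used in code-stubs-mips.cc
      kLastErrorMessage
    };

    inline const char* GetBailoutReason(BailoutReason reason) {
      // The string is materialized only when an abort fires; see the new
      // MacroAssembler::Abort in macro-assembler-mips.cc below.
      static const char* const kMessages[] = {
        "Unexpected String function",
        "Unexpected initial map for Array function",
      };
      return kMessages[reason];
    }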
diff --git a/deps/v8/src/mips/assembler-mips.h b/deps/v8/src/mips/assembler-mips.h
index 8d533b36f4..cb0896a8de 100644
--- a/deps/v8/src/mips/assembler-mips.h
+++ b/deps/v8/src/mips/assembler-mips.h
@@ -358,6 +358,11 @@ class Operand BASE_EMBEDDED {
// Return true if this is a register operand.
INLINE(bool is_reg() const);
+ inline int32_t immediate() const {
+ ASSERT(!is_reg());
+ return imm32_;
+ }
+
Register rm() const { return rm_; }
private:
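
The new immediate() accessor is what lets later code in this patch inspect constant operands directly: the BIT_XOR peephole added to DoBitI in lithium-codegen-mips.cc below calls right.immediate() to detect an XOR against ~0 and emit a single nor instead.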
diff --git a/deps/v8/src/mips/builtins-mips.cc b/deps/v8/src/mips/builtins-mips.cc
index 3f5dca0009..d424cbc726 100644
--- a/deps/v8/src/mips/builtins-mips.cc
+++ b/deps/v8/src/mips/builtins-mips.cc
@@ -123,10 +123,10 @@ void Builtins::Generate_InternalArrayCode(MacroAssembler* masm) {
// Initial map for the builtin InternalArray functions should be maps.
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
__ And(t0, a2, Operand(kSmiTagMask));
- __ Assert(ne, "Unexpected initial map for InternalArray function",
+ __ Assert(ne, kUnexpectedInitialMapForInternalArrayFunction,
t0, Operand(zero_reg));
__ GetObjectType(a2, a3, t0);
- __ Assert(eq, "Unexpected initial map for InternalArray function",
+ __ Assert(eq, kUnexpectedInitialMapForInternalArrayFunction,
t0, Operand(MAP_TYPE));
}
@@ -153,10 +153,10 @@ void Builtins::Generate_ArrayCode(MacroAssembler* masm) {
// Initial map for the builtin Array functions should be maps.
__ lw(a2, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
__ And(t0, a2, Operand(kSmiTagMask));
- __ Assert(ne, "Unexpected initial map for Array function (1)",
+ __ Assert(ne, kUnexpectedInitialMapForArrayFunction1,
t0, Operand(zero_reg));
__ GetObjectType(a2, a3, t0);
- __ Assert(eq, "Unexpected initial map for Array function (2)",
+ __ Assert(eq, kUnexpectedInitialMapForArrayFunction2,
t0, Operand(MAP_TYPE));
}
@@ -185,7 +185,7 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
Register function = a1;
if (FLAG_debug_code) {
__ LoadGlobalFunction(Context::STRING_FUNCTION_INDEX, a2);
- __ Assert(eq, "Unexpected String function", function, Operand(a2));
+ __ Assert(eq, kUnexpectedStringFunction, function, Operand(a2));
}
// Load the first arguments in a0 and get rid of the rest.
@@ -231,10 +231,10 @@ void Builtins::Generate_StringConstructCode(MacroAssembler* masm) {
__ LoadGlobalFunctionInitialMap(function, map, t0);
if (FLAG_debug_code) {
__ lbu(t0, FieldMemOperand(map, Map::kInstanceSizeOffset));
- __ Assert(eq, "Unexpected string wrapper instance size",
+ __ Assert(eq, kUnexpectedStringWrapperInstanceSize,
t0, Operand(JSValue::kSize >> kPointerSizeLog2));
__ lbu(t0, FieldMemOperand(map, Map::kUnusedPropertyFieldsOffset));
- __ Assert(eq, "Unexpected unused properties of string wrapper",
+ __ Assert(eq, kUnexpectedUnusedPropertiesOfStringWrapper,
t0, Operand(zero_reg));
}
__ sw(map, FieldMemOperand(v0, HeapObject::kMapOffset));
@@ -489,7 +489,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ addu(a0, t5, t0);
// a0: offset of first field after pre-allocated fields
if (FLAG_debug_code) {
- __ Assert(le, "Unexpected number of pre-allocated property fields.",
+ __ Assert(le, kUnexpectedNumberOfPreAllocatedPropertyFields,
a0, Operand(t6));
}
__ InitializeFieldsWithFiller(t5, a0, t7);
@@ -522,7 +522,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
// Done if no extra properties are to be allocated.
__ Branch(&allocated, eq, a3, Operand(zero_reg));
- __ Assert(greater_equal, "Property allocation count failed.",
+ __ Assert(greater_equal, kPropertyAllocationCountFailed,
a3, Operand(zero_reg));
// Scale the number of elements by pointer size and add the header for
@@ -569,7 +569,7 @@ static void Generate_JSConstructStubHelper(MacroAssembler* masm,
__ LoadRoot(t7, Heap::kUndefinedValueRootIndex);
} else if (FLAG_debug_code) {
__ LoadRoot(t8, Heap::kUndefinedValueRootIndex);
- __ Assert(eq, "Undefined value not loaded.", t7, Operand(t8));
+ __ Assert(eq, kUndefinedValueNotLoaded, t7, Operand(t8));
}
__ jmp(&entry);
__ bind(&loop);
diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc
index 0e1b224ead..8a03a9a31a 100644
--- a/deps/v8/src/mips/code-stubs-mips.cc
+++ b/deps/v8/src/mips/code-stubs-mips.cc
@@ -247,17 +247,6 @@ void InternalArrayNArgumentsConstructorStub::InitializeInterfaceDescriptor(
}
-void UnaryOpStub::InitializeInterfaceDescriptor(
- Isolate* isolate,
- CodeStubInterfaceDescriptor* descriptor) {
- static Register registers[] = { a0 };
- descriptor->register_param_count_ = 1;
- descriptor->register_params_ = registers;
- descriptor->deoptimization_handler_ =
- FUNCTION_ADDR(UnaryOpIC_Miss);
-}
-
-
void StoreGlobalStub::InitializeInterfaceDescriptor(
Isolate* isolate,
CodeStubInterfaceDescriptor* descriptor) {
@@ -520,8 +509,7 @@ void FastNewBlockContextStub::Generate(MacroAssembler* masm) {
Label after_sentinel;
__ JumpIfNotSmi(a3, &after_sentinel);
if (FLAG_debug_code) {
- const char* message = "Expected 0 as a Smi sentinel";
- __ Assert(eq, message, a3, Operand(zero_reg));
+ __ Assert(eq, kExpected0AsASmiSentinel, a3, Operand(zero_reg));
}
__ lw(a3, GlobalObjectOperand());
__ lw(a3, FieldMemOperand(a3, GlobalObject::kNativeContextOffset));
@@ -679,7 +667,7 @@ void FloatingPointHelper::LoadNumber(MacroAssembler* masm,
Label* not_number) {
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
+ kHeapNumberMapRegisterClobbered);
Label is_smi, done;
@@ -729,7 +717,7 @@ void FloatingPointHelper::ConvertNumberToInt32(MacroAssembler* masm,
Label* not_number) {
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
+ kHeapNumberMapRegisterClobbered);
Label done;
Label not_in_int32_range;
@@ -806,7 +794,7 @@ void FloatingPointHelper::LoadNumberAsInt32Double(MacroAssembler* masm,
__ bind(&obj_is_not_smi);
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
+ kHeapNumberMapRegisterClobbered);
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, not_int32);
// Load the number.
@@ -853,7 +841,7 @@ void FloatingPointHelper::LoadNumberAsInt32(MacroAssembler* masm,
__ AssertRootValue(heap_number_map,
Heap::kHeapNumberMapRootIndex,
- "HeapNumberMap register clobbered.");
+ kHeapNumberMapRegisterClobbered);
__ JumpIfNotHeapNumber(object, heap_number_map, scratch1, &maybe_undefined);
@@ -4279,12 +4267,12 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
if (FLAG_debug_code) {
__ And(t0, regexp_data, Operand(kSmiTagMask));
__ Check(nz,
- "Unexpected type for RegExp data, FixedArray expected",
+ kUnexpectedTypeForRegExpDataFixedArrayExpected,
t0,
Operand(zero_reg));
__ GetObjectType(regexp_data, a0, a0);
__ Check(eq,
- "Unexpected type for RegExp data, FixedArray expected",
+ kUnexpectedTypeForRegExpDataFixedArrayExpected,
a0,
Operand(FIXED_ARRAY_TYPE));
}
@@ -4639,7 +4627,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// Sequential strings have already been ruled out.
__ And(at, a0, Operand(kIsIndirectStringMask));
__ Assert(eq,
- "external string expected, but not found",
+ kExternalStringExpectedButNotFound,
at,
Operand(zero_reg));
}
@@ -5020,7 +5008,7 @@ void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharCodeAtGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort("Unexpected fallthrough to CharCodeAt slow case");
+ __ Abort(kUnexpectedFallthroughToCharCodeAtSlowCase);
// Index is not a smi.
__ bind(&index_not_smi_);
@@ -5069,7 +5057,7 @@ void StringCharCodeAtGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ jmp(&exit_);
- __ Abort("Unexpected fallthrough from CharCodeAt slow case");
+ __ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
@@ -5106,7 +5094,7 @@ void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
void StringCharFromCodeGenerator::GenerateSlow(
MacroAssembler* masm,
const RuntimeCallHelper& call_helper) {
- __ Abort("Unexpected fallthrough to CharFromCode slow case");
+ __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
__ bind(&slow_case_);
call_helper.BeforeCall(masm);
@@ -5117,7 +5105,7 @@ void StringCharFromCodeGenerator::GenerateSlow(
call_helper.AfterCall(masm);
__ Branch(&exit_);
- __ Abort("Unexpected fallthrough from CharFromCode slow case");
+ __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
@@ -5172,7 +5160,7 @@ void StringHelper::GenerateCopyCharactersLong(MacroAssembler* masm,
// that it is.
__ And(scratch4, dest, Operand(kPointerAlignmentMask));
__ Check(eq,
- "Destination of copy not aligned.",
+ kDestinationOfCopyNotAligned,
scratch4,
Operand(zero_reg));
}
@@ -5372,7 +5360,7 @@ void StringHelper::GenerateTwoCharacterStringTableProbe(MacroAssembler* masm,
// Must be the hole (deleted entry).
if (FLAG_debug_code) {
__ LoadRoot(scratch, Heap::kTheHoleValueRootIndex);
- __ Assert(eq, "oddball in string table is not undefined or the hole",
+ __ Assert(eq, kOddballInStringTableIsNotUndefinedOrTheHole,
scratch, Operand(candidate));
}
__ jmp(&next_probe[i]);
@@ -6580,7 +6568,7 @@ void DirectCEntryStub::Generate(MacroAssembler* masm) {
// filled with kZapValue by the GC.
// Dereference the address and check for this.
__ lw(t0, MemOperand(t9));
- __ Assert(ne, "Received invalid return address.", t0,
+ __ Assert(ne, kReceivedInvalidReturnAddress, t0,
Operand(reinterpret_cast<uint32_t>(kZapValue)));
}
__ Jump(t9);
@@ -7331,7 +7319,7 @@ static void CreateArrayDispatch(MacroAssembler* masm) {
}
// If we reached this point there is a problem.
- __ Abort("Unexpected ElementsKind in array constructor");
+ __ Abort(kUnexpectedElementsKindInArrayConstructor);
}
@@ -7386,7 +7374,7 @@ static void CreateArrayDispatchOneArgument(MacroAssembler* masm) {
}
// If we reached this point there is a problem.
- __ Abort("Unexpected ElementsKind in array constructor");
+ __ Abort(kUnexpectedElementsKindInArrayConstructor);
}
@@ -7447,10 +7435,10 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ And(at, a3, Operand(kSmiTagMask));
- __ Assert(ne, "Unexpected initial map for Array function",
+ __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
at, Operand(zero_reg));
__ GetObjectType(a3, a3, t0);
- __ Assert(eq, "Unexpected initial map for Array function",
+ __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
t0, Operand(MAP_TYPE));
// We should either have undefined in a2 or a valid cell.
@@ -7459,7 +7447,7 @@ void ArrayConstructorStub::Generate(MacroAssembler* masm) {
__ LoadRoot(at, Heap::kUndefinedValueRootIndex);
__ Branch(&okay_here, eq, a2, Operand(at));
__ lw(a3, FieldMemOperand(a2, 0));
- __ Assert(eq, "Expected property cell in register a2",
+ __ Assert(eq, kExpectedPropertyCellInRegisterA2,
a3, Operand(cell_map));
__ bind(&okay_here);
}
@@ -7559,10 +7547,10 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
__ lw(a3, FieldMemOperand(a1, JSFunction::kPrototypeOrInitialMapOffset));
// Will both indicate a NULL and a Smi.
__ And(at, a3, Operand(kSmiTagMask));
- __ Assert(ne, "Unexpected initial map for Array function",
+ __ Assert(ne, kUnexpectedInitialMapForArrayFunction,
at, Operand(zero_reg));
__ GetObjectType(a3, a3, t0);
- __ Assert(eq, "Unexpected initial map for Array function",
+ __ Assert(eq, kUnexpectedInitialMapForArrayFunction,
t0, Operand(MAP_TYPE));
}
@@ -7579,7 +7567,7 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
Label done;
__ Branch(&done, eq, a3, Operand(FAST_ELEMENTS));
__ Assert(
- eq, "Invalid ElementsKind for InternalArray or InternalPackedArray",
+ eq, kInvalidElementsKindForInternalArrayOrInternalPackedArray,
a3, Operand(FAST_HOLEY_ELEMENTS));
__ bind(&done);
}
diff --git a/deps/v8/src/mips/codegen-mips.cc b/deps/v8/src/mips/codegen-mips.cc
index 3f74154f58..10490e7a7b 100644
--- a/deps/v8/src/mips/codegen-mips.cc
+++ b/deps/v8/src/mips/codegen-mips.cc
@@ -289,7 +289,7 @@ void ElementsTransitionGenerator::GenerateSmiToDouble(
__ SmiTag(t5);
__ Or(t5, t5, Operand(1));
__ LoadRoot(at, Heap::kTheHoleValueRootIndex);
- __ Assert(eq, "object found in smi-only array", at, Operand(t5));
+ __ Assert(eq, kObjectFoundInSmiOnlyArray, at, Operand(t5));
}
__ sw(t0, MemOperand(t3)); // mantissa
__ sw(t1, MemOperand(t3, kIntSize)); // exponent
@@ -489,7 +489,7 @@ void StringCharLoadGenerator::Generate(MacroAssembler* masm,
// Assert that we do not have a cons or slice (indirect strings) here.
// Sequential strings have already been ruled out.
__ And(at, result, Operand(kIsIndirectStringMask));
- __ Assert(eq, "external string expected, but not found",
+ __ Assert(eq, kExternalStringExpectedButNotFound,
at, Operand(zero_reg));
}
// Rule out short external strings.
diff --git a/deps/v8/src/mips/debug-mips.cc b/deps/v8/src/mips/debug-mips.cc
index 30cc4db634..020228fc6b 100644
--- a/deps/v8/src/mips/debug-mips.cc
+++ b/deps/v8/src/mips/debug-mips.cc
@@ -142,8 +142,7 @@ static void Generate_DebugBreakCallHelper(MacroAssembler* masm,
if ((non_object_regs & (1 << r)) != 0) {
if (FLAG_debug_code) {
__ And(at, reg, 0xc0000000);
- __ Assert(
- eq, "Unable to encode value as smi", at, Operand(zero_reg));
+ __ Assert(eq, kUnableToEncodeValueAsSmi, at, Operand(zero_reg));
}
__ sll(reg, reg, kSmiTagSize);
}
@@ -325,12 +324,12 @@ void Debug::GenerateSlotDebugBreak(MacroAssembler* masm) {
void Debug::GeneratePlainReturnLiveEdit(MacroAssembler* masm) {
- masm->Abort("LiveEdit frame dropping is not supported on mips");
+ masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnMips);
}
void Debug::GenerateFrameDropperLiveEdit(MacroAssembler* masm) {
- masm->Abort("LiveEdit frame dropping is not supported on mips");
+ masm->Abort(kLiveEditFrameDroppingIsNotSupportedOnMips);
}
diff --git a/deps/v8/src/mips/frames-mips.h b/deps/v8/src/mips/frames-mips.h
index f6f20cd20c..437bf3a9f1 100644
--- a/deps/v8/src/mips/frames-mips.h
+++ b/deps/v8/src/mips/frames-mips.h
@@ -230,6 +230,11 @@ inline Object* JavaScriptFrame::function_slot_object() const {
}
+inline void StackHandler::SetFp(Address slot, Address fp) {
+ Memory::Address_at(slot) = fp;
+}
+
+
} } // namespace v8::internal
#endif
diff --git a/deps/v8/src/mips/full-codegen-mips.cc b/deps/v8/src/mips/full-codegen-mips.cc
index 9c610c32f9..b60502c9a5 100644
--- a/deps/v8/src/mips/full-codegen-mips.cc
+++ b/deps/v8/src/mips/full-codegen-mips.cc
@@ -786,10 +786,10 @@ void FullCodeGenerator::EmitDebugCheckDeclarationContext(Variable* variable) {
// Check that we're not inside a with or catch context.
__ lw(a1, FieldMemOperand(cp, HeapObject::kMapOffset));
__ LoadRoot(t0, Heap::kWithContextMapRootIndex);
- __ Check(ne, "Declaration in with context.",
+ __ Check(ne, kDeclarationInWithContext,
a1, Operand(t0));
__ LoadRoot(t0, Heap::kCatchContextMapRootIndex);
- __ Check(ne, "Declaration in catch context.",
+ __ Check(ne, kDeclarationInCatchContext,
a1, Operand(t0));
}
}
@@ -2234,7 +2234,7 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
Handle<Map> map(isolate()->native_context()->generator_result_map());
- __ Allocate(map->instance_size(), a0, a2, a3, &gc_required, TAG_OBJECT);
+ __ Allocate(map->instance_size(), v0, a2, a3, &gc_required, TAG_OBJECT);
__ jmp(&allocated);
__ bind(&gc_required);
@@ -2249,19 +2249,18 @@ void FullCodeGenerator::EmitCreateIteratorResult(bool done) {
__ li(a3, Operand(isolate()->factory()->ToBoolean(done)));
__ li(t0, Operand(isolate()->factory()->empty_fixed_array()));
ASSERT_EQ(map->instance_size(), 5 * kPointerSize);
- __ sw(a1, FieldMemOperand(a0, HeapObject::kMapOffset));
- __ sw(t0, FieldMemOperand(a0, JSObject::kPropertiesOffset));
- __ sw(t0, FieldMemOperand(a0, JSObject::kElementsOffset));
+ __ sw(a1, FieldMemOperand(v0, HeapObject::kMapOffset));
+ __ sw(t0, FieldMemOperand(v0, JSObject::kPropertiesOffset));
+ __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
__ sw(a2,
- FieldMemOperand(a0, JSGeneratorObject::kResultValuePropertyOffset));
+ FieldMemOperand(v0, JSGeneratorObject::kResultValuePropertyOffset));
__ sw(a3,
- FieldMemOperand(a0, JSGeneratorObject::kResultDonePropertyOffset));
+ FieldMemOperand(v0, JSGeneratorObject::kResultDonePropertyOffset));
// Only the value field needs a write barrier, as the other values are in the
// root set.
- __ RecordWriteField(a0, JSGeneratorObject::kResultValuePropertyOffset,
+ __ RecordWriteField(v0, JSGeneratorObject::kResultValuePropertyOffset,
a2, a3, kRAHasBeenSaved, kDontSaveFPRegs);
- __ mov(result_register(), a0);
}
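
Building the iterator result object directly in v0 rather than a0 is what allows the final `__ mov(result_register(), a0)` to be dropped: on MIPS, v0 is the return-value register that result_register() resolves to, so the finished object is already where callers expect it.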
@@ -2530,7 +2529,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// Check for an uninitialized let binding.
__ lw(a2, location);
__ LoadRoot(t0, Heap::kTheHoleValueRootIndex);
- __ Check(eq, "Let binding re-initialization.", a2, Operand(t0));
+ __ Check(eq, kLetBindingReInitialization, a2, Operand(t0));
}
// Perform the assignment.
__ sw(v0, location);
@@ -3493,21 +3492,21 @@ void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
Register value,
uint32_t encoding_mask) {
__ And(at, index, Operand(kSmiTagMask));
- __ Check(eq, "Non-smi index", at, Operand(zero_reg));
+ __ Check(eq, kNonSmiIndex, at, Operand(zero_reg));
__ And(at, value, Operand(kSmiTagMask));
- __ Check(eq, "Non-smi value", at, Operand(zero_reg));
+ __ Check(eq, kNonSmiValue, at, Operand(zero_reg));
__ lw(at, FieldMemOperand(string, String::kLengthOffset));
- __ Check(lt, "Index is too large", index, Operand(at));
+ __ Check(lt, kIndexIsTooLarge, index, Operand(at));
- __ Check(ge, "Index is negative", index, Operand(zero_reg));
+ __ Check(ge, kIndexIsNegative, index, Operand(zero_reg));
__ lw(at, FieldMemOperand(string, HeapObject::kMapOffset));
__ lbu(at, FieldMemOperand(at, Map::kInstanceTypeOffset));
__ And(at, at, Operand(kStringRepresentationMask | kStringEncodingMask));
__ Subu(at, at, Operand(encoding_mask));
- __ Check(eq, "Unexpected string type", at, Operand(zero_reg));
+ __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
}
@@ -3882,7 +3881,7 @@ void FullCodeGenerator::EmitGetFromCache(CallRuntime* expr) {
Handle<FixedArray> jsfunction_result_caches(
isolate()->native_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
- __ Abort("Attempt to use undefined cache.");
+ __ Abort(kAttemptToUseUndefinedCache);
__ LoadRoot(v0, Heap::kUndefinedValueRootIndex);
context()->Plug(v0);
return;
@@ -4064,7 +4063,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// element: Current array element.
// elements_end: Array end.
if (generate_debug_code_) {
- __ Assert(gt, "No empty arrays here in EmitFastAsciiArrayJoin",
+ __ Assert(gt, kNoEmptyArraysHereInEmitFastAsciiArrayJoin,
array_length, Operand(zero_reg));
}
__ bind(&loop);
@@ -4383,35 +4382,12 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
break;
}
- case Token::SUB:
- EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
- break;
-
- case Token::BIT_NOT:
- EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
- break;
-
default:
UNREACHABLE();
}
}
-void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
- const char* comment) {
- // TODO(svenpanne): Allowing format strings in Comment would be nice here...
- Comment cmt(masm_, comment);
- UnaryOpStub stub(expr->op());
- // GenericUnaryOpStub expects the argument to be in a0.
- VisitForAccumulatorValue(expr->expression());
- SetSourcePosition(expr->position());
- __ mov(a0, result_register());
- CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
- expr->UnaryOperationFeedbackId());
- context()->Plug(v0);
-}
-
-
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
diff --git a/deps/v8/src/mips/lithium-codegen-mips.cc b/deps/v8/src/mips/lithium-codegen-mips.cc
index 8db5f00fbf..34e601ccaa 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.cc
+++ b/deps/v8/src/mips/lithium-codegen-mips.cc
@@ -91,7 +91,7 @@ void LCodeGen::FinishCode(Handle<Code> code) {
}
-void LChunkBuilder::Abort(const char* reason) {
+void LChunkBuilder::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -324,7 +324,7 @@ bool LCodeGen::GenerateDeoptJumpTable() {
// end of the jump table.
if (!is_int16((masm()->pc_offset() / Assembler::kInstrSize) +
deopt_jump_table_.length() * 12)) {
- Abort("Generated code is too large");
+ Abort(kGeneratedCodeIsTooLarge);
}
if (deopt_jump_table_.length() > 0) {
@@ -411,7 +411,7 @@ Register LCodeGen::EmitLoadRegister(LOperand* op, Register scratch) {
ASSERT(constant->HasSmiValue());
__ li(scratch, Operand(Smi::FromInt(constant->Integer32Value())));
} else if (r.IsDouble()) {
- Abort("EmitLoadRegister: Unsupported double immediate.");
+ Abort(kEmitLoadRegisterUnsupportedDoubleImmediate);
} else {
ASSERT(r.IsTagged());
__ LoadObject(scratch, literal);
@@ -449,9 +449,9 @@ DoubleRegister LCodeGen::EmitLoadDoubleRegister(LOperand* op,
__ cvt_d_w(dbl_scratch, flt_scratch);
return dbl_scratch;
} else if (r.IsDouble()) {
- Abort("unsupported double immediate");
+ Abort(kUnsupportedDoubleImmediate);
} else if (r.IsTagged()) {
- Abort("unsupported tagged immediate");
+ Abort(kUnsupportedTaggedImmediate);
}
} else if (op->IsStackSlot() || op->IsArgument()) {
MemOperand mem_op = ToMemOperand(op);
@@ -520,14 +520,14 @@ Operand LCodeGen::ToOperand(LOperand* op) {
ASSERT(constant->HasInteger32Value());
return Operand(constant->Integer32Value());
} else if (r.IsDouble()) {
- Abort("ToOperand Unsupported double immediate.");
+ Abort(kToOperandUnsupportedDoubleImmediate);
}
ASSERT(r.IsTagged());
return Operand(constant->handle());
} else if (op->IsRegister()) {
return Operand(ToRegister(op));
} else if (op->IsDoubleRegister()) {
- Abort("ToOperand IsDoubleRegister unimplemented");
+ Abort(kToOperandIsDoubleRegisterUnimplemented);
return Operand(0);
}
// Stack slots not implemented, use ToMemOperand instead.
@@ -748,7 +748,7 @@ void LCodeGen::DeoptimizeIf(Condition cc,
Address entry =
Deoptimizer::GetDeoptimizationEntry(isolate(), id, bailout_type);
if (entry == NULL) {
- Abort("bailout was not prepared");
+ Abort(kBailoutWasNotPrepared);
return;
}
@@ -1057,20 +1057,16 @@ void LCodeGen::DoModI(LModI* instr) {
HValue* left = hmod->left();
HValue* right = hmod->right();
if (hmod->HasPowerOf2Divisor()) {
- const Register scratch = scratch0();
const Register left_reg = ToRegister(instr->left());
- ASSERT(!left_reg.is(scratch));
const Register result_reg = ToRegister(instr->result());
// Note: The code below even works when right contains kMinInt.
int32_t divisor = Abs(right->GetInteger32Constant());
- __ mov(scratch, left_reg);
-
Label left_is_not_negative, done;
if (left->CanBeNegative()) {
- __ Branch(USE_DELAY_SLOT, &left_is_not_negative,
- ge, left_reg, Operand(zero_reg));
+ __ Branch(left_reg.is(result_reg) ? PROTECT : USE_DELAY_SLOT,
+ &left_is_not_negative, ge, left_reg, Operand(zero_reg));
__ subu(result_reg, zero_reg, left_reg);
__ And(result_reg, result_reg, divisor - 1);
if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
@@ -1081,15 +1077,13 @@ void LCodeGen::DoModI(LModI* instr) {
}
__ bind(&left_is_not_negative);
- __ And(result_reg, scratch, divisor - 1);
+ __ And(result_reg, left_reg, divisor - 1);
__ bind(&done);
} else if (hmod->fixed_right_arg().has_value) {
- const Register scratch = scratch0();
const Register left_reg = ToRegister(instr->left());
const Register result_reg = ToRegister(instr->result());
-
- Register right_reg = EmitLoadRegister(instr->right(), scratch);
+ const Register right_reg = ToRegister(instr->right());
int32_t divisor = hmod->fixed_right_arg().value;
ASSERT(IsPowerOf2(divisor));
@@ -1099,8 +1093,8 @@ void LCodeGen::DoModI(LModI* instr) {
Label left_is_not_negative, done;
if (left->CanBeNegative()) {
- __ Branch(USE_DELAY_SLOT, &left_is_not_negative,
- ge, left_reg, Operand(zero_reg));
+ __ Branch(left_reg.is(result_reg) ? PROTECT : USE_DELAY_SLOT,
+ &left_is_not_negative, ge, left_reg, Operand(zero_reg));
__ subu(result_reg, zero_reg, left_reg);
__ And(result_reg, result_reg, divisor - 1);
if (hmod->CheckFlag(HValue::kBailoutOnMinusZero)) {
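
Two fixes are folded into these DoModI hunks. First, the scratch copy of the left operand is unnecessary, because the masking identity below never needs the original value once the branch has been resolved. Second, the switch from USE_DELAY_SLOT to PROTECT when left_reg aliases result_reg matters because a MIPS branch-delay-slot instruction executes even when the branch is taken; with the registers aliased, the subu in the delay slot would corrupt the value that the left_is_not_negative path is about to mask. The identity itself, as a runnable sketch (assuming C++ truncating division, where the remainder takes the dividend's sign):

    #include <cassert>
    #include <cstdint>

    // left % d for a power-of-two divisor d, mirroring DoModI's two paths:
    // negate, mask, negate back when the dividend can be negative.
    static int32_t ModPowerOfTwo(int32_t left, int32_t d) {
      const int32_t mask = d - 1;
      if (left < 0) return -((-left) & mask);  // the CanBeNegative() path
      return left & mask;                      // the left_is_not_negative path
    }

    int main() {
      assert(ModPowerOfTwo(7, 4) == 7 % 4);    // 3
      assert(ModPowerOfTwo(-7, 4) == -7 % 4);  // -3: sign follows the dividend
      return 0;
    }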
@@ -1509,7 +1503,11 @@ void LCodeGen::DoBitI(LBitI* instr) {
__ Or(result, left, right);
break;
case Token::BIT_XOR:
- __ Xor(result, left, right);
+ if (right_op->IsConstantOperand() && right.immediate() == int32_t(~0)) {
+ __ Nor(result, zero_reg, left);
+ } else {
+ __ Xor(result, left, right);
+ }
break;
default:
UNREACHABLE();
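
The new BIT_XOR special case exists because MIPS has no single bitwise-NOT instruction; `nor rd, $zero, rs` computes ~rs, so an XOR against an all-ones immediate strength-reduces to one nor. This is the same instruction the deleted DoBitNotI below used, which is why that lithium instruction can go away. A quick check of the identity:

    #include <cassert>
    #include <cstdint>

    // nor(a, b) == ~(a | b), hence nor(0, x) == ~x == x ^ ~0.
    static uint32_t Nor(uint32_t a, uint32_t b) { return ~(a | b); }

    int main() {
      const uint32_t x = 0xDEADBEEFu;
      assert(Nor(0u, x) == (x ^ 0xFFFFFFFFu));
      return 0;
    }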
@@ -1583,8 +1581,12 @@ void LCodeGen::DoShiftI(LShiftI* instr) {
if (shift_count != 0) {
if (instr->hydrogen_value()->representation().IsSmi() &&
instr->can_deopt()) {
- __ sll(result, left, shift_count - 1);
- __ SmiTagCheckOverflow(result, result, scratch);
+ if (shift_count != 1) {
+ __ sll(result, left, shift_count - 1);
+ __ SmiTagCheckOverflow(result, result, scratch);
+ } else {
+ __ SmiTagCheckOverflow(result, left, scratch);
+ }
DeoptimizeIf(lt, instr->environment(), scratch, Operand(zero_reg));
} else {
__ sll(result, left, shift_count);
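
The smi-tagging path in DoShiftI shifts by shift_count - 1 and lets SmiTagCheckOverflow supply the final shift-by-one plus the overflow check; when shift_count is 1 the old code therefore emitted a redundant shift by zero, which the new branch avoids by tagging the untagged input directly. A sketch of the overflow condition that the following DeoptimizeIf(lt, ...) tests (assuming the usual 32-bit smi layout with kSmiTag == 0 and kSmiTagSize == 1):

    #include <cassert>
    #include <cstdint>

    // Smi-tagging doubles the value; it overflows exactly when the value
    // and its doubled form disagree in the sign bit -- the negative
    // scratch value that DeoptimizeIf(lt, ...) looks for.
    static bool SmiTagOverflows(int32_t value) {
      const uint32_t v = static_cast<uint32_t>(value);
      return (((v << 1) ^ v) & 0x80000000u) != 0;
    }

    int main() {
      assert(!SmiTagOverflows(0x3FFFFFFF));  // largest positive smi
      assert(SmiTagOverflows(0x40000000));   // doubling flips the sign bit
      assert(SmiTagOverflows(INT32_MIN));    // too negative to tag
      return 0;
    }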
@@ -1766,7 +1768,7 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
__ Subu(at, at, Operand(encoding == String::ONE_BYTE_ENCODING
? one_byte_seq_type : two_byte_seq_type));
- __ Check(eq, "Unexpected string type", at, Operand(zero_reg));
+ __ Check(eq, kUnexpectedStringType, at, Operand(zero_reg));
}
__ Addu(scratch,
@@ -1783,13 +1785,6 @@ void LCodeGen::DoSeqStringSetChar(LSeqStringSetChar* instr) {
}
-void LCodeGen::DoBitNotI(LBitNotI* instr) {
- Register input = ToRegister(instr->value());
- Register result = ToRegister(instr->result());
- __ Nor(result, zero_reg, Operand(input));
-}
-
-
void LCodeGen::DoThrow(LThrow* instr) {
Register input_reg = EmitLoadRegister(instr->value(), at);
__ push(input_reg);
@@ -2808,19 +2803,6 @@ void LCodeGen::DoStoreGlobalGeneric(LStoreGlobalGeneric* instr) {
}
-void LCodeGen::DoLinkObjectInList(LLinkObjectInList* instr) {
- Register object = ToRegister(instr->object());
- ExternalReference sites_list_address = instr->GetReference(isolate());
-
- __ li(at, Operand(sites_list_address));
- __ lw(at, MemOperand(at));
- __ sw(at, FieldMemOperand(object,
- instr->hydrogen()->store_field().offset()));
- __ li(at, Operand(sites_list_address));
- __ sw(object, MemOperand(at));
-}
-
-
void LCodeGen::DoLoadContextSlot(LLoadContextSlot* instr) {
Register context = ToRegister(instr->context());
Register result = ToRegister(instr->result());
@@ -3085,7 +3067,7 @@ void LCodeGen::DoLoadKeyedExternalArray(LLoadKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -3171,7 +3153,7 @@ void LCodeGen::DoLoadKeyedFixedDoubleArray(LLoadKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -3442,7 +3424,7 @@ void LCodeGen::DoApplyArguments(LApplyArguments* instr) {
void LCodeGen::DoPushArgument(LPushArgument* instr) {
LOperand* argument = instr->value();
if (argument->IsDoubleRegister() || argument->IsDoubleStackSlot()) {
- Abort("DoPushArgument not implemented for double type.");
+ Abort(kDoPushArgumentNotImplementedForDoubleType);
} else {
Register argument_reg = EmitLoadRegister(argument, at);
__ push(argument_reg);
@@ -3661,7 +3643,7 @@ void LCodeGen::DoMathAbs(LMathAbs* instr) {
FPURegister input = ToDoubleRegister(instr->value());
FPURegister result = ToDoubleRegister(instr->result());
__ abs_d(result, input);
- } else if (r.IsInteger32()) {
+ } else if (r.IsSmiOrInteger32()) {
EmitIntegerMathAbs(instr);
} else {
// Representation is tagged.
@@ -4267,7 +4249,7 @@ void LCodeGen::DoStoreKeyedExternalArray(LStoreKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -4345,7 +4327,7 @@ void LCodeGen::DoStoreKeyedFixedDoubleArray(LStoreKeyed* instr) {
if (key_is_constant) {
constant_key = ToInteger32(LConstantOperand::cast(instr->key()));
if (constant_key & 0xF0000000) {
- Abort("array index constant value too big.");
+ Abort(kArrayIndexConstantValueTooBig);
}
} else {
key = ToRegister(instr->key());
@@ -4605,13 +4587,6 @@ void LCodeGen::DoDeferredStringCharFromCode(LStringCharFromCode* instr) {
}
-void LCodeGen::DoStringLength(LStringLength* instr) {
- Register string = ToRegister(instr->string());
- Register result = ToRegister(instr->result());
- __ lw(result, FieldMemOperand(string, String::kLengthOffset));
-}
-
-
void LCodeGen::DoInteger32ToDouble(LInteger32ToDouble* instr) {
LOperand* input = instr->value();
ASSERT(input->IsRegister() || input->IsStackSlot());
@@ -5209,31 +5184,63 @@ void LCodeGen::DoCheckFunction(LCheckFunction* instr) {
}
-void LCodeGen::DoCheckMapCommon(Register map_reg,
- Handle<Map> map,
- LEnvironment* env) {
- Label success;
- __ CompareMapAndBranch(map_reg, map, &success, eq, &success);
- DeoptimizeIf(al, env);
- __ bind(&success);
+void LCodeGen::DoDeferredInstanceMigration(LCheckMaps* instr, Register object) {
+ {
+ PushSafepointRegistersScope scope(this, Safepoint::kWithRegisters);
+ __ push(object);
+ CallRuntimeFromDeferred(Runtime::kMigrateInstance, 1, instr);
+ __ StoreToSafepointRegisterSlot(v0, scratch0());
+ }
+ __ And(at, scratch0(), Operand(kSmiTagMask));
+ DeoptimizeIf(eq, instr->environment(), at, Operand(zero_reg));
}
void LCodeGen::DoCheckMaps(LCheckMaps* instr) {
+ class DeferredCheckMaps: public LDeferredCode {
+ public:
+ DeferredCheckMaps(LCodeGen* codegen, LCheckMaps* instr, Register object)
+ : LDeferredCode(codegen), instr_(instr), object_(object) {
+ SetExit(check_maps());
+ }
+ virtual void Generate() {
+ codegen()->DoDeferredInstanceMigration(instr_, object_);
+ }
+ Label* check_maps() { return &check_maps_; }
+ virtual LInstruction* instr() { return instr_; }
+ private:
+ LCheckMaps* instr_;
+ Label check_maps_;
+ Register object_;
+ };
+
if (instr->hydrogen()->CanOmitMapChecks()) return;
Register map_reg = scratch0();
LOperand* input = instr->value();
ASSERT(input->IsRegister());
Register reg = ToRegister(input);
- Label success;
SmallMapList* map_set = instr->hydrogen()->map_set();
__ lw(map_reg, FieldMemOperand(reg, HeapObject::kMapOffset));
+
+ DeferredCheckMaps* deferred = NULL;
+ if (instr->hydrogen()->has_migration_target()) {
+ deferred = new(zone()) DeferredCheckMaps(this, instr, reg);
+ __ bind(deferred->check_maps());
+ }
+
+ Label success;
for (int i = 0; i < map_set->length() - 1; i++) {
Handle<Map> map = map_set->at(i);
__ CompareMapAndBranch(map_reg, map, &success, eq, &success);
}
Handle<Map> map = map_set->last();
- DoCheckMapCommon(map_reg, map, instr->environment());
+ __ CompareMapAndBranch(map_reg, map, &success, eq, &success);
+ if (instr->hydrogen()->has_migration_target()) {
+ __ Branch(deferred->entry());
+ } else {
+ DeoptimizeIf(al, instr->environment());
+ }
+
__ bind(&success);
}
@@ -5288,25 +5295,6 @@ void LCodeGen::DoClampTToUint8(LClampTToUint8* instr) {
}
-void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) {
- if (instr->hydrogen()->CanOmitPrototypeChecks()) return;
-
- Register prototype_reg = ToRegister(instr->temp());
- Register map_reg = ToRegister(instr->temp2());
-
- ZoneList<Handle<JSObject> >* prototypes = instr->prototypes();
- ZoneList<Handle<Map> >* maps = instr->maps();
-
- ASSERT(prototypes->length() == maps->length());
-
- for (int i = 0; i < prototypes->length(); i++) {
- __ LoadHeapObject(prototype_reg, prototypes->at(i));
- __ lw(map_reg, FieldMemOperand(prototype_reg, HeapObject::kMapOffset));
- DoCheckMapCommon(map_reg, maps->at(i), instr->environment());
- }
-}
-
-
void LCodeGen::DoAllocate(LAllocate* instr) {
class DeferredAllocate: public LDeferredCode {
public:
@@ -5330,10 +5318,12 @@ void LCodeGen::DoAllocate(LAllocate* instr) {
if (instr->hydrogen()->MustAllocateDoubleAligned()) {
flags = static_cast<AllocationFlags>(flags | DOUBLE_ALIGNMENT);
}
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_POINTER_SPACE);
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
flags = static_cast<AllocationFlags>(flags | PRETENURE_OLD_DATA_SPACE);
}
if (instr->size()->IsConstantOperand()) {
@@ -5391,10 +5381,12 @@ void LCodeGen::DoDeferredAllocate(LAllocate* instr) {
__ Push(Smi::FromInt(size));
}
- if (instr->hydrogen()->CanAllocateInOldPointerSpace()) {
- ASSERT(!instr->hydrogen()->CanAllocateInOldDataSpace());
+ if (instr->hydrogen()->IsOldPointerSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsOldDataSpaceAllocation());
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
CallRuntimeFromDeferred(Runtime::kAllocateInOldPointerSpace, 1, instr);
- } else if (instr->hydrogen()->CanAllocateInOldDataSpace()) {
+ } else if (instr->hydrogen()->IsOldDataSpaceAllocation()) {
+ ASSERT(!instr->hydrogen()->IsNewSpaceAllocation());
CallRuntimeFromDeferred(Runtime::kAllocateInOldDataSpace, 1, instr);
} else {
CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr);
diff --git a/deps/v8/src/mips/lithium-codegen-mips.h b/deps/v8/src/mips/lithium-codegen-mips.h
index a485b67db9..670c4cc87a 100644
--- a/deps/v8/src/mips/lithium-codegen-mips.h
+++ b/deps/v8/src/mips/lithium-codegen-mips.h
@@ -114,7 +114,7 @@ class LCodeGen BASE_EMBEDDED {
DoubleRegister EmitLoadDoubleRegister(LOperand* op,
FloatRegister flt_scratch,
DoubleRegister dbl_scratch);
- int ToRepresentation(LConstantOperand* op, const Representation& r) const;
+ int32_t ToRepresentation(LConstantOperand* op, const Representation& r) const;
int32_t ToInteger32(LConstantOperand* op) const;
Smi* ToSmi(LConstantOperand* op) const;
double ToDouble(LConstantOperand* op) const;
@@ -153,7 +153,7 @@ class LCodeGen BASE_EMBEDDED {
void DoDeferredInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr,
Label* map_check);
- void DoCheckMapCommon(Register map_reg, Handle<Map> map, LEnvironment* env);
+ void DoDeferredInstanceMigration(LCheckMaps* instr, Register object);
// Parallel move support.
void DoParallelMove(LParallelMove* move);
@@ -213,7 +213,7 @@ class LCodeGen BASE_EMBEDDED {
int GetStackSlotCount() const { return chunk()->spill_slot_count(); }
- void Abort(const char* reason);
+ void Abort(BailoutReason reason);
void FPRINTF_CHECKING Comment(const char* format, ...);
void AddDeferredCode(LDeferredCode* code) { deferred_.Add(code, zone()); }
diff --git a/deps/v8/src/mips/lithium-gap-resolver-mips.cc b/deps/v8/src/mips/lithium-gap-resolver-mips.cc
index 771b22862e..460e13bf0a 100644
--- a/deps/v8/src/mips/lithium-gap-resolver-mips.cc
+++ b/deps/v8/src/mips/lithium-gap-resolver-mips.cc
@@ -258,7 +258,7 @@ void LGapResolver::EmitMove(int index) {
} else {
__ LoadObject(dst, cgen_->ToHandle(constant_source));
}
- } else if (source->IsDoubleRegister()) {
+ } else if (destination->IsDoubleRegister()) {
DoubleRegister result = cgen_->ToDoubleRegister(destination);
double v = cgen_->ToDouble(constant_source);
__ Move(result, v);
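
This one-word change fixes a real bug rather than renaming anything: the enclosing branch has already established that the source is a constant, so source->IsDoubleRegister() could never be true and the double-register arm was dead code. Moving a constant into a double register has to dispatch on the destination's kind.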
diff --git a/deps/v8/src/mips/lithium-mips.cc b/deps/v8/src/mips/lithium-mips.cc
index 5cfca00010..38ac19f609 100644
--- a/deps/v8/src/mips/lithium-mips.cc
+++ b/deps/v8/src/mips/lithium-mips.cc
@@ -277,24 +277,6 @@ void LCallConstantFunction::PrintDataTo(StringStream* stream) {
}
-ExternalReference LLinkObjectInList::GetReference(Isolate* isolate) {
- switch (hydrogen()->known_list()) {
- case HLinkObjectInList::ALLOCATION_SITE_LIST:
- return ExternalReference::allocation_sites_list_address(isolate);
- }
-
- UNREACHABLE();
- // Return a dummy value
- return ExternalReference::isolate_address(isolate);
-}
-
-
-void LLinkObjectInList::PrintDataTo(StringStream* stream) {
- object()->PrintTo(stream);
- stream->Add(" offset %d", hydrogen()->store_field().offset());
-}
-
-
void LLoadContextSlot::PrintDataTo(StringStream* stream) {
context()->PrintTo(stream);
stream->Add("[%d]", slot_index());
@@ -460,7 +442,7 @@ LPlatformChunk* LChunkBuilder::Build() {
}
-void LCodeGen::Abort(const char* reason) {
+void LCodeGen::Abort(BailoutReason reason) {
info()->set_bailout_reason(reason);
status_ = ABORTED;
}
@@ -668,7 +650,7 @@ LUnallocated* LChunkBuilder::TempRegister() {
new(zone()) LUnallocated(LUnallocated::MUST_HAVE_REGISTER);
int vreg = allocator_->GetVirtualRegister();
if (!allocator_->AllocationOk()) {
- Abort("Out of virtual registers while trying to allocate temp register.");
+ Abort(kOutOfVirtualRegistersWhileTryingToAllocateTempRegister);
vreg = 0;
}
operand->set_virtual_register(vreg);
@@ -1345,15 +1327,6 @@ LInstruction* LChunkBuilder::DoBitwise(HBitwise* instr) {
}
-LInstruction* LChunkBuilder::DoBitNot(HBitNot* instr) {
- ASSERT(instr->value()->representation().IsInteger32());
- ASSERT(instr->representation().IsInteger32());
- if (instr->HasNoUses()) return NULL;
- LOperand* value = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new(zone()) LBitNotI(value));
-}
-
-
LInstruction* LChunkBuilder::DoDiv(HDiv* instr) {
if (instr->representation().IsDouble()) {
return DoArithmeticD(Token::DIV, instr);
@@ -1771,17 +1744,6 @@ LInstruction* LChunkBuilder::DoSeqStringSetChar(HSeqStringSetChar* instr) {
}
-LInstruction* LChunkBuilder::DoNumericConstraint(HNumericConstraint* instr) {
- return NULL;
-}
-
-
-LInstruction* LChunkBuilder::DoInductionVariableAnnotation(
- HInductionVariableAnnotation* instr) {
- return NULL;
-}
-
-
LInstruction* LChunkBuilder::DoBoundsCheck(HBoundsCheck* instr) {
LOperand* value = UseRegisterOrConstantAtStart(instr->index());
LOperand* length = UseRegister(instr->length());
@@ -1955,19 +1917,6 @@ LInstruction* LChunkBuilder::DoCheckInstanceType(HCheckInstanceType* instr) {
}
-LInstruction* LChunkBuilder::DoCheckPrototypeMaps(HCheckPrototypeMaps* instr) {
- LUnallocated* temp1 = NULL;
- LOperand* temp2 = NULL;
- if (!instr->CanOmitPrototypeChecks()) {
- temp1 = TempRegister();
- temp2 = TempRegister();
- }
- LCheckPrototypeMaps* result = new(zone()) LCheckPrototypeMaps(temp1, temp2);
- if (instr->CanOmitPrototypeChecks()) return result;
- return AssignEnvironment(result);
-}
-
-
LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LOperand* value = UseRegisterAtStart(instr->value());
return AssignEnvironment(new(zone()) LCheckFunction(value));
@@ -1976,10 +1925,16 @@ LInstruction* LChunkBuilder::DoCheckFunction(HCheckFunction* instr) {
LInstruction* LChunkBuilder::DoCheckMaps(HCheckMaps* instr) {
LOperand* value = NULL;
- if (!instr->CanOmitMapChecks()) value = UseRegisterAtStart(instr->value());
- LInstruction* result = new(zone()) LCheckMaps(value);
- if (instr->CanOmitMapChecks()) return result;
- return AssignEnvironment(result);
+ if (!instr->CanOmitMapChecks()) {
+ value = UseRegisterAtStart(instr->value());
+ if (instr->has_migration_target()) info()->MarkAsDeferredCalling();
+ }
+ LCheckMaps* result = new(zone()) LCheckMaps(value);
+ if (!instr->CanOmitMapChecks()) {
+ AssignEnvironment(result);
+ if (instr->has_migration_target()) return AssignPointerMap(result);
+ }
+ return result;
}
@@ -2062,13 +2017,6 @@ LInstruction* LChunkBuilder::DoStoreGlobalGeneric(HStoreGlobalGeneric* instr) {
}
-LInstruction* LChunkBuilder::DoLinkObjectInList(HLinkObjectInList* instr) {
- LOperand* object = UseRegister(instr->value());
- LLinkObjectInList* result = new(zone()) LLinkObjectInList(object);
- return result;
-}
-
-
LInstruction* LChunkBuilder::DoLoadContextSlot(HLoadContextSlot* instr) {
LOperand* context = UseRegisterAtStart(instr->value());
LInstruction* result =
@@ -2270,7 +2218,7 @@ LInstruction* LChunkBuilder::DoTrapAllocationMemento(
LInstruction* LChunkBuilder::DoStoreNamedField(HStoreNamedField* instr) {
bool is_in_object = instr->access().IsInobject();
bool needs_write_barrier = instr->NeedsWriteBarrier();
- bool needs_write_barrier_for_map = !instr->transition().is_null() &&
+ bool needs_write_barrier_for_map = instr->has_transition() &&
instr->NeedsWriteBarrierForMap();
LOperand* obj;
@@ -2341,12 +2289,6 @@ LInstruction* LChunkBuilder::DoStringCharFromCode(HStringCharFromCode* instr) {
}
-LInstruction* LChunkBuilder::DoStringLength(HStringLength* instr) {
- LOperand* string = UseRegisterAtStart(instr->value());
- return DefineAsRegister(new(zone()) LStringLength(string));
-}
-
-
LInstruction* LChunkBuilder::DoAllocate(HAllocate* instr) {
info()->MarkAsDeferredCalling();
LOperand* size = instr->size()->IsConstant()
@@ -2396,7 +2338,7 @@ LInstruction* LChunkBuilder::DoParameter(HParameter* instr) {
LInstruction* LChunkBuilder::DoUnknownOSRValue(HUnknownOSRValue* instr) {
int spill_index = chunk()->GetNextSpillIndex(false); // Not double-width.
if (spill_index > LUnallocated::kMaxFixedSlotIndex) {
- Abort("Too many spill slots needed for OSR");
+ Abort(kTooManySpillSlotsNeededForOSR);
spill_index = 0;
}
return DefineAsSpilled(new(zone()) LUnknownOSRValue, spill_index);
diff --git a/deps/v8/src/mips/lithium-mips.h b/deps/v8/src/mips/lithium-mips.h
index 2618c46992..a21c32342e 100644
--- a/deps/v8/src/mips/lithium-mips.h
+++ b/deps/v8/src/mips/lithium-mips.h
@@ -50,7 +50,6 @@ class LCodeGen;
V(ArithmeticD) \
V(ArithmeticT) \
V(BitI) \
- V(BitNotI) \
V(BoundsCheck) \
V(Branch) \
V(CallConstantFunction) \
@@ -68,7 +67,6 @@ class LCodeGen;
V(CheckMaps) \
V(CheckMapValue) \
V(CheckNonSmi) \
- V(CheckPrototypeMaps) \
V(CheckSmi) \
V(ClampDToUint8) \
V(ClampIToUint8) \
@@ -119,7 +117,6 @@ class LCodeGen;
V(IsUndetectableAndBranch) \
V(Label) \
V(LazyBailout) \
- V(LinkObjectInList) \
V(LoadContextSlot) \
V(LoadExternalArrayPointer) \
V(LoadFieldByIndex) \
@@ -175,7 +172,6 @@ class LCodeGen;
V(StringCharCodeAt) \
V(StringCharFromCode) \
V(StringCompareAndBranch) \
- V(StringLength) \
V(SubI) \
V(TaggedToI) \
V(ThisFunction) \
@@ -1358,18 +1354,6 @@ class LThrow: public LTemplateInstruction<0, 1, 0> {
};
-class LBitNotI: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LBitNotI(LOperand* value) {
- inputs_[0] = value;
- }
-
- LOperand* value() { return inputs_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(BitNotI, "bit-not-i")
-};
-
-
class LAddI: public LTemplateInstruction<1, 2, 0> {
public:
LAddI(LOperand* left, LOperand* right) {
@@ -1654,23 +1638,6 @@ class LStoreGlobalGeneric: public LTemplateInstruction<0, 2, 0> {
};
-class LLinkObjectInList: public LTemplateInstruction<0, 1, 0> {
- public:
- explicit LLinkObjectInList(LOperand* object) {
- inputs_[0] = object;
- }
-
- LOperand* object() { return inputs_[0]; }
-
- ExternalReference GetReference(Isolate* isolate);
-
- DECLARE_CONCRETE_INSTRUCTION(LinkObjectInList, "link-object-in-list")
- DECLARE_HYDROGEN_ACCESSOR(LinkObjectInList)
-
- virtual void PrintDataTo(StringStream* stream);
-};
-
-
class LLoadContextSlot: public LTemplateInstruction<1, 1, 0> {
public:
explicit LLoadContextSlot(LOperand* context) {
@@ -2147,7 +2114,7 @@ class LStoreNamedField: public LTemplateInstruction<0, 2, 1> {
virtual void PrintDataTo(StringStream* stream);
- Handle<Map> transition() const { return hydrogen()->transition(); }
+ Handle<Map> transition() const { return hydrogen()->transition_map(); }
Representation representation() const {
return hydrogen()->field_representation();
}
@@ -2304,19 +2271,6 @@ class LStringCharFromCode: public LTemplateInstruction<1, 1, 0> {
};
-class LStringLength: public LTemplateInstruction<1, 1, 0> {
- public:
- explicit LStringLength(LOperand* string) {
- inputs_[0] = string;
- }
-
- LOperand* string() { return inputs_[0]; }
-
- DECLARE_CONCRETE_INSTRUCTION(StringLength, "string-length")
- DECLARE_HYDROGEN_ACCESSOR(StringLength)
-};
-
-
class LCheckFunction: public LTemplateInstruction<0, 1, 0> {
public:
explicit LCheckFunction(LOperand* value) {
@@ -2356,26 +2310,6 @@ class LCheckMaps: public LTemplateInstruction<0, 1, 0> {
};
-class LCheckPrototypeMaps: public LTemplateInstruction<0, 0, 2> {
- public:
- LCheckPrototypeMaps(LOperand* temp, LOperand* temp2) {
- temps_[0] = temp;
- temps_[1] = temp2;
- }
-
- LOperand* temp() { return temps_[0]; }
- LOperand* temp2() { return temps_[1]; }
-
- DECLARE_CONCRETE_INSTRUCTION(CheckPrototypeMaps, "check-prototype-maps")
- DECLARE_HYDROGEN_ACCESSOR(CheckPrototypeMaps)
-
- ZoneList<Handle<JSObject> >* prototypes() const {
- return hydrogen()->prototypes();
- }
- ZoneList<Handle<Map> >* maps() const { return hydrogen()->maps(); }
-};
-
-
class LCheckSmi: public LTemplateInstruction<1, 1, 0> {
public:
explicit LCheckSmi(LOperand* value) {
@@ -2674,7 +2608,7 @@ class LChunkBuilder BASE_EMBEDDED {
bool is_done() const { return status_ == DONE; }
bool is_aborted() const { return status_ == ABORTED; }
- void Abort(const char* reason);
+ void Abort(BailoutReason reason);
// Methods for getting operands for Use / Define / Temp.
LUnallocated* ToUnallocated(Register reg);
diff --git a/deps/v8/src/mips/macro-assembler-mips.cc b/deps/v8/src/mips/macro-assembler-mips.cc
index ea08a552be..a7ec713b35 100644
--- a/deps/v8/src/mips/macro-assembler-mips.cc
+++ b/deps/v8/src/mips/macro-assembler-mips.cc
@@ -256,7 +256,7 @@ void MacroAssembler::RecordWrite(Register object,
if (emit_debug_code()) {
lw(at, MemOperand(address));
Assert(
- eq, "Wrong address or value passed to RecordWrite", at, Operand(value));
+ eq, kWrongAddressOrValuePassedToRecordWrite, at, Operand(value));
}
Label done;
@@ -358,7 +358,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
lw(scratch, MemOperand(fp, StandardFrameConstants::kContextOffset));
// In debug mode, make sure the lexical context is set.
#ifdef DEBUG
- Check(ne, "we should not have an empty lexical context",
+ Check(ne, kWeShouldNotHaveAnEmptyLexicalContext,
scratch, Operand(zero_reg));
#endif
@@ -374,7 +374,7 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
// Read the first word and compare to the native_context_map.
lw(holder_reg, FieldMemOperand(scratch, HeapObject::kMapOffset));
LoadRoot(at, Heap::kNativeContextMapRootIndex);
- Check(eq, "JSGlobalObject::native_context should be a native context.",
+ Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
holder_reg, Operand(at));
pop(holder_reg); // Restore holder.
}
@@ -388,12 +388,12 @@ void MacroAssembler::CheckAccessGlobalProxy(Register holder_reg,
push(holder_reg); // Temporarily save holder on the stack.
mov(holder_reg, at); // Move at to its holding place.
LoadRoot(at, Heap::kNullValueRootIndex);
- Check(ne, "JSGlobalProxy::context() should not be null.",
+ Check(ne, kJSGlobalProxyContextShouldNotBeNull,
holder_reg, Operand(at));
lw(holder_reg, FieldMemOperand(holder_reg, HeapObject::kMapOffset));
LoadRoot(at, Heap::kNativeContextMapRootIndex);
- Check(eq, "JSGlobalObject::native_context should be a native context.",
+ Check(eq, kJSGlobalObjectNativeContextShouldBeANativeContext,
holder_reg, Operand(at));
// Restore at is not needed. at is reloaded below.
pop(holder_reg); // Restore holder.
@@ -2938,7 +2938,7 @@ void MacroAssembler::Allocate(int object_size,
// immediately below so this use of t9 does not cause difference with
// respect to register content between debug and release mode.
lw(t9, MemOperand(topaddr));
- Check(eq, "Unexpected allocation top", result, Operand(t9));
+ Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
}
// Load allocation limit into t9. Result already contains allocation top.
lw(t9, MemOperand(topaddr, limit - top));
@@ -3008,7 +3008,7 @@ void MacroAssembler::Allocate(Register object_size,
// immediately below so this use of t9 does not cause difference with
// respect to register content between debug and release mode.
lw(t9, MemOperand(topaddr));
- Check(eq, "Unexpected allocation top", result, Operand(t9));
+ Check(eq, kUnexpectedAllocationTop, result, Operand(t9));
}
// Load allocation limit into t9. Result already contains allocation top.
lw(t9, MemOperand(topaddr, limit - top));
@@ -3028,7 +3028,7 @@ void MacroAssembler::Allocate(Register object_size,
// Update allocation top. result temporarily holds the new top.
if (emit_debug_code()) {
And(t9, scratch2, Operand(kObjectAlignmentMask));
- Check(eq, "Unaligned allocation in new space", t9, Operand(zero_reg));
+ Check(eq, kUnalignedAllocationInNewSpace, t9, Operand(zero_reg));
}
sw(scratch2, MemOperand(topaddr));
@@ -3050,7 +3050,7 @@ void MacroAssembler::UndoAllocationInNewSpace(Register object,
// Check that the object un-allocated is below the current top.
li(scratch, Operand(new_space_allocation_top));
lw(scratch, MemOperand(scratch));
- Check(less, "Undo allocation of non allocated memory",
+ Check(less, kUndoAllocationOfNonAllocatedMemory,
object, Operand(scratch));
#endif
// Write the address of the object to un-allocate as the current top.
@@ -3303,7 +3303,7 @@ void MacroAssembler::CopyBytes(Register src,
bind(&word_loop);
if (emit_debug_code()) {
And(scratch, src, kPointerSize - 1);
- Assert(eq, "Expecting alignment for CopyBytes",
+ Assert(eq, kExpectingAlignmentForCopyBytes,
scratch, Operand(zero_reg));
}
Branch(&byte_loop, lt, length, Operand(kPointerSize));
@@ -4029,7 +4029,7 @@ void MacroAssembler::CallApiFunctionAndReturn(ExternalReference function,
sw(s0, MemOperand(s3, kNextOffset));
if (emit_debug_code()) {
lw(a1, MemOperand(s3, kLevelOffset));
- Check(eq, "Unexpected level after return from api call", a1, Operand(s2));
+ Check(eq, kUnexpectedLevelAfterReturnFromApiCall, a1, Operand(s2));
}
Subu(s2, s2, Operand(1));
sw(s2, MemOperand(s3, kLevelOffset));
@@ -4383,10 +4383,10 @@ void MacroAssembler::DecrementCounter(StatsCounter* counter, int value,
// -----------------------------------------------------------------------------
// Debugging.
-void MacroAssembler::Assert(Condition cc, const char* msg,
+void MacroAssembler::Assert(Condition cc, BailoutReason reason,
Register rs, Operand rt) {
if (emit_debug_code())
- Check(cc, msg, rs, rt);
+ Check(cc, reason, rs, rt);
}
@@ -4394,7 +4394,7 @@ void MacroAssembler::AssertRegisterIsRoot(Register reg,
Heap::RootListIndex index) {
if (emit_debug_code()) {
LoadRoot(at, index);
- Check(eq, "Register did not match expected root", reg, Operand(at));
+ Check(eq, kRegisterDidNotMatchExpectedRoot, reg, Operand(at));
}
}
@@ -4411,24 +4411,24 @@ void MacroAssembler::AssertFastElements(Register elements) {
Branch(&ok, eq, elements, Operand(at));
LoadRoot(at, Heap::kFixedCOWArrayMapRootIndex);
Branch(&ok, eq, elements, Operand(at));
- Abort("JSObject with fast elements map has slow elements");
+ Abort(kJSObjectWithFastElementsMapHasSlowElements);
bind(&ok);
pop(elements);
}
}
-void MacroAssembler::Check(Condition cc, const char* msg,
+void MacroAssembler::Check(Condition cc, BailoutReason reason,
Register rs, Operand rt) {
Label L;
Branch(&L, cc, rs, rt);
- Abort(msg);
+ Abort(reason);
// Will not return here.
bind(&L);
}
-void MacroAssembler::Abort(const char* msg) {
+void MacroAssembler::Abort(BailoutReason reason) {
Label abort_start;
bind(&abort_start);
// We want to pass the msg string like a smi to avoid GC
@@ -4436,6 +4436,7 @@ void MacroAssembler::Abort(const char* msg) {
// properly. Instead, we pass an aligned pointer that is
// a proper v8 smi, but also pass the alignment difference
// from the real pointer as a smi.
+ const char* msg = GetBailoutReason(reason);
intptr_t p1 = reinterpret_cast<intptr_t>(msg);
intptr_t p0 = (p1 & ~kSmiTagMask) + kSmiTag;
ASSERT(reinterpret_cast<Object*>(p0)->IsSmi());
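
Abort() keeps its trick of smuggling the message pointer through the runtime disguised as smis; the only change is that the pointer now comes from GetBailoutReason(reason) rather than arriving as an argument. A sketch of the encoding under the assumptions the surrounding code states (kSmiTag == 0, kSmiTagMask == 1):

    #include <cassert>
    #include <cstdint>

    int main() {
      static const char msg[] = "example bailout message";  // hypothetical text
      intptr_t p1 = reinterpret_cast<intptr_t>(msg);
      intptr_t p0 = (p1 & ~intptr_t(1)) + 0;  // (p1 & ~kSmiTagMask) + kSmiTag
      assert((p0 & 1) == 0);                  // p0 now looks like a valid smi
      assert(p0 + (p1 - p0) == p1);           // the delta (0 or 1) restores p1
      return 0;
    }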
@@ -4579,7 +4580,7 @@ void MacroAssembler::LoadGlobalFunctionInitialMap(Register function,
CheckMap(map, scratch, Heap::kMetaMapRootIndex, &fail, DO_SMI_CHECK);
Branch(&ok);
bind(&fail);
- Abort("Global functions must have initial map");
+ Abort(kGlobalFunctionsMustHaveInitialMap);
bind(&ok);
}
}
@@ -4862,7 +4863,7 @@ void MacroAssembler::AssertNotSmi(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
andi(at, object, kSmiTagMask);
- Check(ne, "Operand is a smi", at, Operand(zero_reg));
+ Check(ne, kOperandIsASmi, at, Operand(zero_reg));
}
}
@@ -4871,7 +4872,7 @@ void MacroAssembler::AssertSmi(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
andi(at, object, kSmiTagMask);
- Check(eq, "Operand is a smi", at, Operand(zero_reg));
+ Check(eq, kOperandIsASmi, at, Operand(zero_reg));
}
}
@@ -4880,11 +4881,11 @@ void MacroAssembler::AssertString(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
And(t0, object, Operand(kSmiTagMask));
- Check(ne, "Operand is a smi and not a string", t0, Operand(zero_reg));
+ Check(ne, kOperandIsASmiAndNotAString, t0, Operand(zero_reg));
push(object);
lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
- Check(lo, "Operand is not a string", object, Operand(FIRST_NONSTRING_TYPE));
+ Check(lo, kOperandIsNotAString, object, Operand(FIRST_NONSTRING_TYPE));
pop(object);
}
}
@@ -4894,11 +4895,11 @@ void MacroAssembler::AssertName(Register object) {
if (emit_debug_code()) {
STATIC_ASSERT(kSmiTag == 0);
And(t0, object, Operand(kSmiTagMask));
- Check(ne, "Operand is a smi and not a name", t0, Operand(zero_reg));
+ Check(ne, kOperandIsASmiAndNotAName, t0, Operand(zero_reg));
push(object);
lw(object, FieldMemOperand(object, HeapObject::kMapOffset));
lbu(object, FieldMemOperand(object, Map::kInstanceTypeOffset));
- Check(le, "Operand is not a name", object, Operand(LAST_NAME_TYPE));
+ Check(le, kOperandIsNotAName, object, Operand(LAST_NAME_TYPE));
pop(object);
}
}
@@ -4906,11 +4907,11 @@ void MacroAssembler::AssertName(Register object) {
void MacroAssembler::AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- const char* message) {
+ BailoutReason reason) {
if (emit_debug_code()) {
ASSERT(!src.is(at));
LoadRoot(at, root_value_index);
- Check(eq, message, src, Operand(at));
+ Check(eq, reason, src, Operand(at));
}
}
@@ -5127,7 +5128,7 @@ void MacroAssembler::PatchRelocatedValue(Register li_location,
// At this point scratch is a lui(at, ...) instruction.
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
- Check(eq, "The instruction to patch should be a lui.",
+ Check(eq, kTheInstructionToPatchShouldBeALui,
scratch, Operand(LUI));
lw(scratch, MemOperand(li_location));
}
@@ -5139,7 +5140,7 @@ void MacroAssembler::PatchRelocatedValue(Register li_location,
// scratch is now ori(at, ...).
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
- Check(eq, "The instruction to patch should be an ori.",
+ Check(eq, kTheInstructionToPatchShouldBeAnOri,
scratch, Operand(ORI));
lw(scratch, MemOperand(li_location, kInstrSize));
}
@@ -5156,7 +5157,7 @@ void MacroAssembler::GetRelocatedValue(Register li_location,
lw(value, MemOperand(li_location));
if (emit_debug_code()) {
And(value, value, kOpcodeMask);
- Check(eq, "The instruction should be a lui.",
+ Check(eq, kTheInstructionShouldBeALui,
value, Operand(LUI));
lw(value, MemOperand(li_location));
}
@@ -5167,7 +5168,7 @@ void MacroAssembler::GetRelocatedValue(Register li_location,
lw(scratch, MemOperand(li_location, kInstrSize));
if (emit_debug_code()) {
And(scratch, scratch, kOpcodeMask);
- Check(eq, "The instruction should be an ori.",
+ Check(eq, kTheInstructionShouldBeAnOri,
scratch, Operand(ORI));
lw(scratch, MemOperand(li_location, kInstrSize));
}
diff --git a/deps/v8/src/mips/macro-assembler-mips.h b/deps/v8/src/mips/macro-assembler-mips.h
index bc3e7c48b4..ac37db2aaa 100644
--- a/deps/v8/src/mips/macro-assembler-mips.h
+++ b/deps/v8/src/mips/macro-assembler-mips.h
@@ -627,11 +627,11 @@ class MacroAssembler: public Assembler {
void MultiPushFPU(RegList regs);
void MultiPushReversedFPU(RegList regs);
- // Lower case push() for compatibility with arch-independent code.
void push(Register src) {
Addu(sp, sp, Operand(-kPointerSize));
sw(src, MemOperand(sp, 0));
}
+ void Push(Register src) { push(src); }
// Push a handle.
void Push(Handle<Object> handle);
@@ -676,11 +676,11 @@ class MacroAssembler: public Assembler {
void MultiPopFPU(RegList regs);
void MultiPopReversedFPU(RegList regs);
- // Lower case pop() for compatibility with arch-independent code.
void pop(Register dst) {
lw(dst, MemOperand(sp, 0));
Addu(sp, sp, Operand(kPointerSize));
}
+ void Pop(Register dst) { pop(dst); }
// Pop two registers. Pops rightmost register first (from lower address).
void Pop(Register src1, Register src2) {
@@ -1286,15 +1286,15 @@ class MacroAssembler: public Assembler {
// Calls Abort(msg) if the condition cc is not satisfied.
// Use --debug_code to enable.
- void Assert(Condition cc, const char* msg, Register rs, Operand rt);
+ void Assert(Condition cc, BailoutReason reason, Register rs, Operand rt);
void AssertRegisterIsRoot(Register reg, Heap::RootListIndex index);
void AssertFastElements(Register elements);
// Like Assert(), but always enabled.
- void Check(Condition cc, const char* msg, Register rs, Operand rt);
+ void Check(Condition cc, BailoutReason reason, Register rs, Operand rt);
// Print a message to stdout and abort execution.
- void Abort(const char* msg);
+ void Abort(BailoutReason msg);
// Verify restrictions about code generated in stubs.
void set_generating_stub(bool value) { generating_stub_ = value; }
@@ -1378,7 +1378,7 @@ class MacroAssembler: public Assembler {
// enabled via --debug-code.
void AssertRootValue(Register src,
Heap::RootListIndex root_value_index,
- const char* message);
+ BailoutReason reason);
// ---------------------------------------------------------------------------
// HeapNumber utilities.