Diffstat (limited to 'chromium/v8/src/arm/full-codegen-arm.cc')
-rw-r--r--  chromium/v8/src/arm/full-codegen-arm.cc | 520
1 file changed, 279 insertions, 241 deletions
diff --git a/chromium/v8/src/arm/full-codegen-arm.cc b/chromium/v8/src/arm/full-codegen-arm.cc
index b6fb70b5df0..55088033f04 100644
--- a/chromium/v8/src/arm/full-codegen-arm.cc
+++ b/chromium/v8/src/arm/full-codegen-arm.cc
@@ -148,13 +148,10 @@ void FullCodeGenerator::Generate() {
// receiver object). r5 is zero for method calls and non-zero for
// function calls.
if (!info->is_classic_mode() || info->is_native()) {
- Label ok;
__ cmp(r5, Operand::Zero());
- __ b(eq, &ok);
int receiver_offset = info->scope()->num_parameters() * kPointerSize;
__ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
- __ str(r2, MemOperand(sp, receiver_offset));
- __ bind(&ok);
+ __ str(r2, MemOperand(sp, receiver_offset), ne);
}
// Open a frame scope to indicate that there is a frame on the stack. The
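The hunk above replaces a branch-and-label pair with ARM conditional execution: cmp sets the flags once, and the str only takes effect under the ne condition (r5 != 0). A rough standalone C++ model of the patched behaviour (hypothetical names, not V8 code):

    #include <cstdint>

    // Overwrite the receiver slot with undefined only for function calls
    // (r5 != 0); the emitted code does this without taking a branch.
    void FixUpReceiver(uintptr_t r5, uintptr_t* sp, int receiver_offset_bytes,
                       uintptr_t undefined_value) {
      if (r5 != 0) {  // cmp r5, #0 ... str r2, [sp, #offset] (ne)
        sp[receiver_offset_bytes / sizeof(uintptr_t)] = undefined_value;
      }
    }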
@@ -163,16 +160,7 @@ void FullCodeGenerator::Generate() {
FrameScope frame_scope(masm_, StackFrame::MANUAL);
info->set_prologue_offset(masm_->pc_offset());
- {
- PredictableCodeSizeScope predictible_code_size_scope(
- masm_, kNoCodeAgeSequenceLength * Assembler::kInstrSize);
- // The following three instructions must remain together and unmodified
- // for code aging to work properly.
- __ stm(db_w, sp, r1.bit() | cp.bit() | fp.bit() | lr.bit());
- __ nop(ip.code());
- // Adjust FP to point to saved FP.
- __ add(fp, sp, Operand(2 * kPointerSize));
- }
+ __ Prologue(BUILD_FUNCTION_FRAME);
info->AddNoFrameRange(0, masm_->pc_offset());
{ Comment cmnt(masm_, "[ Allocate locals");
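The inline prologue (an stm of r1, cp, fp and lr, a nop reserved for code aging, and an fp adjustment) is folded into a single __ Prologue(BUILD_FUNCTION_FRAME) call. The frame it builds can be modelled as below; the slot names are descriptive, not V8 identifiers:

    #include <cstddef>
    #include <cstdint>

    // Layout after "stm db_w, sp, {r1, cp, fp, lr}" then "add fp, sp, #8":
    // ascending addresses from the new sp; fp points at the saved caller fp.
    struct BuiltFrame {
      uintptr_t function;   // r1, lowest address (the new sp)
      uintptr_t context;    // cp
      uintptr_t caller_fp;  // fp == sp + 2 * kPointerSize lands here
      uintptr_t return_lr;  // lr, highest address
    };

    static_assert(offsetof(BuiltFrame, caller_fp) == 2 * sizeof(uintptr_t),
                  "fp must point at the saved caller fp");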
@@ -180,9 +168,20 @@ void FullCodeGenerator::Generate() {
// Generators allocate locals, if any, in context slots.
ASSERT(!info->function()->is_generator() || locals_count == 0);
if (locals_count > 0) {
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- for (int i = 0; i < locals_count; i++) {
- __ push(ip);
+ // Emit a loop to initialize stack cells for locals when optimizing for
+ // size. Otherwise, unroll the loop for maximum performance.
+ __ LoadRoot(r9, Heap::kUndefinedValueRootIndex);
+ if (FLAG_optimize_for_size && locals_count > 4) {
+ Label loop;
+ __ mov(r2, Operand(locals_count));
+ __ bind(&loop);
+ __ sub(r2, r2, Operand(1), SetCC);
+ __ push(r9);
+ __ b(&loop, ne);
+ } else {
+ for (int i = 0; i < locals_count; i++) {
+ __ push(r9);
+ }
}
}
}
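Locals initialization now chooses between a counted push loop (smaller code) and a fully unrolled push sequence (no loop overhead), keyed on FLAG_optimize_for_size with a cut-over at four locals. A sketch of the selection, with the emitted instructions modelled as strings:

    #include <string>
    #include <vector>

    // Instructions a generator would emit to push locals_count copies of the
    // undefined value held in r9 (cut-over of 4 taken from the hunk above).
    std::vector<std::string> EmitLocalsInit(int locals_count,
                                            bool optimize_for_size) {
      std::vector<std::string> code;
      if (optimize_for_size && locals_count > 4) {
        code.push_back("mov r2, #" + std::to_string(locals_count));
        code.push_back("loop: subs r2, r2, #1");  // sub ... SetCC
        code.push_back("push {r9}");
        code.push_back("bne loop");
      } else {
        for (int i = 0; i < locals_count; i++) code.push_back("push {r9}");
      }
      return code;
    }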
@@ -625,12 +624,11 @@ void FullCodeGenerator::StackValueContext::Plug(
Label done;
__ bind(materialize_true);
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
- __ push(ip);
__ jmp(&done);
__ bind(materialize_false);
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
- __ push(ip);
__ bind(&done);
+ __ push(ip);
}
@@ -1167,7 +1165,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
Handle<Object>(Smi::FromInt(TypeFeedbackCells::kForInFastCaseMarker),
isolate()));
RecordTypeFeedbackCell(stmt->ForInFeedbackId(), cell);
- __ LoadHeapObject(r1, cell);
+ __ Move(r1, cell);
__ mov(r2, Operand(Smi::FromInt(TypeFeedbackCells::kForInSlowCaseMarker)));
__ str(r2, FieldMemOperand(r1, Cell::kValueOffset));
@@ -1609,9 +1607,8 @@ void FullCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
__ jmp(&allocated);
__ bind(&runtime_allocate);
- __ push(r5);
__ mov(r0, Operand(Smi::FromInt(size)));
- __ push(r0);
+ __ Push(r5, r0);
__ CallRuntime(Runtime::kAllocateInNewSpace, 1);
__ pop(r5);
@@ -1637,6 +1634,8 @@ void FullCodeGenerator::EmitAccessor(Expression* expression) {
void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
+
+ expr->BuildConstantProperties(isolate());
Handle<FixedArray> constant_properties = expr->constant_properties();
__ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
@@ -1651,13 +1650,11 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ mov(r0, Operand(Smi::FromInt(flags)));
int properties_count = constant_properties->length() / 2;
if ((FLAG_track_double_fields && expr->may_store_doubles()) ||
- expr->depth() > 1) {
- __ Push(r3, r2, r1, r0);
- __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
- } else if (Serializer::enabled() || flags != ObjectLiteral::kFastElements ||
+ expr->depth() > 1 || Serializer::enabled() ||
+ flags != ObjectLiteral::kFastElements ||
properties_count > FastCloneShallowObjectStub::kMaximumClonedProperties) {
__ Push(r3, r2, r1, r0);
- __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 4);
+ __ CallRuntime(Runtime::kCreateObjectLiteral, 4);
} else {
FastCloneShallowObjectStub stub(properties_count);
__ CallStub(&stub);
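With Runtime::kCreateObjectLiteralShallow folded into Runtime::kCreateObjectLiteral, the stub-versus-runtime choice collapses to one predicate. Roughly, as a sketch with the inputs flattened into plain parameters:

    // True when FastCloneShallowObjectStub may be used; otherwise the literal
    // is built by Runtime::kCreateObjectLiteral. Mirrors the merged condition.
    bool CanUseFastCloneStub(bool may_store_tracked_doubles, int depth,
                             bool serializer_enabled, bool fast_elements_only,
                             int properties_count, int max_cloned_properties) {
      return !may_store_tracked_doubles && depth <= 1 && !serializer_enabled &&
             fast_elements_only && properties_count <= max_cloned_properties;
    }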
@@ -1772,6 +1769,11 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Comment cmnt(masm_, "[ ArrayLiteral");
+ expr->BuildConstantElements(isolate());
+ int flags = expr->depth() == 1
+ ? ArrayLiteral::kShallowElements
+ : ArrayLiteral::kNoFlags;
+
ZoneList<Expression*>* subexprs = expr->values();
int length = subexprs->length();
Handle<FixedArray> constant_elements = expr->constant_elements();
@@ -1782,6 +1784,14 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
+ AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
+ ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
+ if (has_fast_elements && !FLAG_allocation_site_pretenuring) {
+ // If transitioning is the only customer of allocation sites, tracking
+ // can be turned off: fast elements have nowhere else to transition to.
+ allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
+ }
+
__ ldr(r3, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
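The allocation-site decision is hoisted ahead of the stub selection. Sites are tracked when FLAG_track_allocation_sites is on, except that fast-elements arrays stop tracking once pretenuring, the other consumer of site info, is disabled. As a standalone sketch:

    enum AllocationSiteMode { DONT_TRACK_ALLOCATION_SITE, TRACK_ALLOCATION_SITE };

    // Mirrors the hoisted logic in the hunk above.
    AllocationSiteMode ChooseMode(bool track_allocation_sites,
                                  bool has_fast_elements,
                                  bool allocation_site_pretenuring) {
      AllocationSiteMode mode = track_allocation_sites
          ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
      if (has_fast_elements && !allocation_site_pretenuring) {
        mode = DONT_TRACK_ALLOCATION_SITE;
      }
      return mode;
    }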
@@ -1790,29 +1800,24 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
- DONT_TRACK_ALLOCATION_SITE,
+ allocation_site_mode,
length);
__ CallStub(&stub);
__ IncrementCounter(
isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
- } else if (expr->depth() > 1) {
- __ Push(r3, r2, r1);
- __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
- } else if (Serializer::enabled() ||
- length > FastCloneShallowArrayStub::kMaximumClonedLength) {
- __ Push(r3, r2, r1);
- __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
+ } else if (expr->depth() > 1 || Serializer::enabled() ||
+ length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+ __ mov(r0, Operand(Smi::FromInt(flags)));
+ __ Push(r3, r2, r1, r0);
+ __ CallRuntime(Runtime::kCreateArrayLiteral, 4);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
FLAG_smi_only_arrays);
FastCloneShallowArrayStub::Mode mode =
FastCloneShallowArrayStub::CLONE_ANY_ELEMENTS;
- AllocationSiteMode allocation_site_mode = FLAG_track_allocation_sites
- ? TRACK_ALLOCATION_SITE : DONT_TRACK_ALLOCATION_SITE;
if (has_fast_elements) {
mode = FastCloneShallowArrayStub::CLONE_ELEMENTS;
- allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
@@ -2050,8 +2055,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
handler_table()->set(expr->index(), Smi::FromInt(l_catch.pos()));
__ LoadRoot(r2, Heap::kthrow_stringRootIndex); // "throw"
__ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
- __ push(r3); // iter
- __ push(r0); // exception
+ __ Push(r3, r0); // iter, exception
__ jmp(&l_call);
// try { received = %yield result }
@@ -2087,8 +2091,7 @@ void FullCodeGenerator::VisitYield(Yield* expr) {
__ bind(&l_next);
__ LoadRoot(r2, Heap::knext_stringRootIndex); // "next"
__ ldr(r3, MemOperand(sp, 1 * kPointerSize)); // iter
- __ push(r3); // iter
- __ push(r0); // received
+ __ Push(r3, r0); // iter, received
// result = receiver[f](arg);
__ bind(&l_call);
@@ -2164,11 +2167,13 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
__ bl(&resume_frame);
__ jmp(&done);
__ bind(&resume_frame);
- __ push(lr); // Return address.
- __ push(fp); // Caller's frame pointer.
- __ mov(fp, sp);
- __ push(cp); // Callee's context.
- __ push(r4); // Callee's JS Function.
+ // lr = return address.
+ // fp = caller's frame pointer.
+ // cp = callee's context.
+ // r4 = callee's JS function.
+ __ Push(lr, fp, cp, r4);
+ // Adjust FP to point to saved FP.
+ __ add(fp, sp, Operand(2 * kPointerSize));
// Load the operand stack size.
__ ldr(r3, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
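EmitGeneratorResume now rebuilds the standard frame with one four-register Push plus the same fp adjustment the prologue uses. Push(lr, fp, cp, r4) leaves its first argument at the highest address, so fp = sp + 2 * kPointerSize again points at the saved fp. A minimal model of that ordering:

    #include <cstdint>
    #include <vector>

    // Models Push(lr, fp, cp, r4): index 0 is the lowest address (the new
    // sp), so element 2 is the saved fp that the add instruction targets.
    std::vector<uint32_t> PushFour(uint32_t lr, uint32_t fp, uint32_t cp,
                                   uint32_t r4) {
      return {r4, cp, fp, lr};
    }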
@@ -2200,8 +2205,8 @@ void FullCodeGenerator::EmitGeneratorResume(Expression *generator,
__ push(r2);
__ b(&push_operand_holes);
__ bind(&call_resume);
- __ push(r1);
- __ push(result_register());
+ ASSERT(!result_register().is(r1));
+ __ Push(r1, result_register());
__ Push(Smi::FromInt(resume_mode));
__ CallRuntime(Runtime::kResumeJSGeneratorObject, 3);
// Not reached: the runtime call returns elsewhere.
@@ -2293,7 +2298,7 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
patch_site.EmitJumpIfSmi(scratch1, &smi_case);
__ bind(&stub_call);
- BinaryOpStub stub(op, mode);
+ BinaryOpICStub stub(op, mode);
CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
expr->BinaryOperationFeedbackId());
patch_site.EmitPatchInfo();
@@ -2302,7 +2307,6 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
__ bind(&smi_case);
// Smi case. This code works the same way as the smi-smi case in the type
// recording binary operation stub, see
- // BinaryOpStub::GenerateSmiSmiOperation for comments.
switch (op) {
case Token::SAR:
__ GetLeastBitsFromSmi(scratch1, right, 5);
@@ -2371,7 +2375,7 @@ void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
Token::Value op,
OverwriteMode mode) {
__ pop(r1);
- BinaryOpStub stub(op, mode);
+ BinaryOpICStub stub(op, mode);
JumpPatchSite patch_site(masm_); // unbound, signals no inlined smi code.
CallIC(stub.GetCode(isolate()), RelocInfo::CODE_TARGET,
expr->BinaryOperationFeedbackId());
@@ -2423,8 +2427,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr) {
VisitForStackValue(prop->obj());
VisitForAccumulatorValue(prop->key());
__ mov(r1, r0);
- __ pop(r2);
- __ pop(r0); // Restore value.
+ __ Pop(r0, r2); // r0 = restored value.
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
@@ -2558,8 +2561,7 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
// Record source code position before IC call.
SetSourcePosition(expr->position());
- __ pop(r1); // Key.
- __ pop(r2);
+ __ Pop(r2, r1); // r1 = key.
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
@@ -2688,27 +2690,25 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
void FullCodeGenerator::EmitResolvePossiblyDirectEval(int arg_count) {
- // Push copy of the first argument or undefined if it doesn't exist.
+ // r4: copy of the first argument or undefined if it doesn't exist.
if (arg_count > 0) {
- __ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
+ __ ldr(r4, MemOperand(sp, arg_count * kPointerSize));
} else {
- __ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
+ __ LoadRoot(r4, Heap::kUndefinedValueRootIndex);
}
- __ push(r1);
- // Push the receiver of the enclosing function.
+ // r3: the receiver of the enclosing function.
int receiver_offset = 2 + info_->scope()->num_parameters();
- __ ldr(r1, MemOperand(fp, receiver_offset * kPointerSize));
- __ push(r1);
- // Push the language mode.
- __ mov(r1, Operand(Smi::FromInt(language_mode())));
- __ push(r1);
+ __ ldr(r3, MemOperand(fp, receiver_offset * kPointerSize));
+
+ // r2: the language mode.
+ __ mov(r2, Operand(Smi::FromInt(language_mode())));
- // Push the start position of the scope the calls resides in.
+ // r1: the start position of the scope the call resides in.
__ mov(r1, Operand(Smi::FromInt(scope()->start_position())));
- __ push(r1);
// Do the runtime call.
+ __ Push(r4, r3, r2, r1);
__ CallRuntime(Runtime::kResolvePossiblyDirectEval, 5);
}
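EmitResolvePossiblyDirectEval now stages its operands in r4-r1 and pushes them with a single Push, which one store-multiple instruction can satisfy; together with the function the caller already pushed, Runtime::kResolvePossiblyDirectEval still receives its five arguments. The stack image is unchanged:

    #include <cstdint>
    #include <vector>

    // Arguments as the runtime sees them, deepest slot first. Batching the
    // last four into one Push(r4, r3, r2, r1) produces the same layout as the
    // four single pushes it replaces.
    std::vector<uint32_t> EvalRuntimeArgs(uint32_t function, uint32_t r4,
                                          uint32_t r3, uint32_t r2, uint32_t r1) {
      return {function, r4, r3, r2, r1};
    }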
@@ -2782,9 +2782,9 @@ void FullCodeGenerator::VisitCall(Call* expr) {
__ bind(&slow);
// Call the runtime to find the function to call (returned in r0)
// and the object holding it (returned in r1).
- __ push(context_register());
+ ASSERT(!context_register().is(r2));
__ mov(r2, Operand(proxy->name()));
- __ push(r2);
+ __ Push(context_register(), r2);
__ CallRuntime(Runtime::kLoadContextSlot, 2);
__ Push(r0, r1); // Function, receiver.
@@ -3111,6 +3111,32 @@ void FullCodeGenerator::EmitIsFunction(CallRuntime* expr) {
}
+void FullCodeGenerator::EmitIsMinusZero(CallRuntime* expr) {
+ ZoneList<Expression*>* args = expr->arguments();
+ ASSERT(args->length() == 1);
+
+ VisitForAccumulatorValue(args->at(0));
+
+ Label materialize_true, materialize_false;
+ Label* if_true = NULL;
+ Label* if_false = NULL;
+ Label* fall_through = NULL;
+ context()->PrepareTest(&materialize_true, &materialize_false,
+ &if_true, &if_false, &fall_through);
+
+ __ CheckMap(r0, r1, Heap::kHeapNumberMapRootIndex, if_false, DO_SMI_CHECK);
+ __ ldr(r2, FieldMemOperand(r0, HeapNumber::kExponentOffset));
+ __ ldr(r1, FieldMemOperand(r0, HeapNumber::kMantissaOffset));
+ __ cmp(r2, Operand(0x80000000));
+ __ cmp(r1, Operand(0x00000000), eq);
+
+ PrepareForBailoutBeforeSplit(expr, true, if_true, if_false);
+ Split(eq, if_true, if_false, fall_through);
+
+ context()->Plug(if_true, if_false);
+}
+
+
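The new EmitIsMinusZero tests the IEEE-754 bit pattern of -0.0 directly: exponent word 0x80000000, mantissa word 0. Note the second cmp is predicated on eq, so the final eq flag survives only when both words match. A standalone equivalent:

    #include <cstdint>
    #include <cstring>

    // -0.0 is the unique double with the sign bit set and every other bit
    // clear, i.e. the 64-bit pattern 0x8000000000000000.
    bool IsMinusZero(double x) {
      uint64_t bits;
      std::memcpy(&bits, &x, sizeof bits);
      return bits == 0x8000000000000000ull;
    }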
void FullCodeGenerator::EmitIsArray(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 1);
@@ -3330,50 +3356,6 @@ void FullCodeGenerator::EmitLog(CallRuntime* expr) {
}
-void FullCodeGenerator::EmitRandomHeapNumber(CallRuntime* expr) {
- ASSERT(expr->arguments()->length() == 0);
- Label slow_allocate_heapnumber;
- Label heapnumber_allocated;
-
- __ LoadRoot(r6, Heap::kHeapNumberMapRootIndex);
- __ AllocateHeapNumber(r4, r1, r2, r6, &slow_allocate_heapnumber);
- __ jmp(&heapnumber_allocated);
-
- __ bind(&slow_allocate_heapnumber);
- // Allocate a heap number.
- __ CallRuntime(Runtime::kNumberAlloc, 0);
- __ mov(r4, Operand(r0));
-
- __ bind(&heapnumber_allocated);
-
- // Convert 32 random bits in r0 to 0.(32 random bits) in a double
- // by computing:
- // ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
- __ PrepareCallCFunction(1, r0);
- __ ldr(r0,
- ContextOperand(context_register(), Context::GLOBAL_OBJECT_INDEX));
- __ ldr(r0, FieldMemOperand(r0, GlobalObject::kNativeContextOffset));
- __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
-
- // 0x41300000 is the top half of 1.0 x 2^20 as a double.
- // Create this constant using mov/orr to avoid PC relative load.
- __ mov(r1, Operand(0x41000000));
- __ orr(r1, r1, Operand(0x300000));
- // Move 0x41300000xxxxxxxx (x = random bits) to VFP.
- __ vmov(d7, r0, r1);
- // Move 0x4130000000000000 to VFP.
- __ mov(r0, Operand::Zero());
- __ vmov(d8, r0, r1);
- // Subtract and store the result in the heap number.
- __ vsub(d7, d7, d8);
- __ sub(r0, r4, Operand(kHeapObjectTag));
- __ vstr(d7, r0, HeapNumber::kValueOffset);
- __ mov(r0, r4);
-
- context()->Plug(r0);
-}
-
-
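The removed EmitRandomHeapNumber relied on a classic bit trick, worth restating in plain C++: splice 32 random bits into the low mantissa of 1.0 x 2^20 (high word 0x41300000) and subtract 2^20, leaving random_bits / 2^32, uniformly in [0, 1):

    #include <cstdint>
    #include <cstring>

    double RandomDoubleFromBits(uint32_t random_bits) {
      // 0x41300000_xxxxxxxx == (1 + random_bits * 2^-52) * 2^20
      uint64_t bits = (uint64_t{0x41300000} << 32) | random_bits;
      double d;
      std::memcpy(&d, &bits, sizeof d);
      return d - 1048576.0;  // subtract 1.0 * 2^20
    }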
void FullCodeGenerator::EmitSubString(CallRuntime* expr) {
// Load the arguments on the stack and call the stub.
SubStringStub stub;
@@ -3466,31 +3448,6 @@ void FullCodeGenerator::EmitDateField(CallRuntime* expr) {
}
-void FullCodeGenerator::EmitSeqStringSetCharCheck(Register string,
- Register index,
- Register value,
- uint32_t encoding_mask) {
- __ SmiTst(index);
- __ Check(eq, kNonSmiIndex);
- __ SmiTst(value);
- __ Check(eq, kNonSmiValue);
-
- __ ldr(ip, FieldMemOperand(string, String::kLengthOffset));
- __ cmp(index, ip);
- __ Check(lt, kIndexIsTooLarge);
-
- __ cmp(index, Operand(Smi::FromInt(0)));
- __ Check(ge, kIndexIsNegative);
-
- __ ldr(ip, FieldMemOperand(string, HeapObject::kMapOffset));
- __ ldrb(ip, FieldMemOperand(ip, Map::kInstanceTypeOffset));
-
- __ and_(ip, ip, Operand(kStringRepresentationMask | kStringEncodingMask));
- __ cmp(ip, Operand(encoding_mask));
- __ Check(eq, kUnexpectedStringType);
-}
-
-
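EmitSeqStringSetCharCheck moves from the full code generator into the macro assembler. Per the removed body above, it verifies that index and value are smis, that 0 <= index < string length, and that the string's instance type carries the expected representation and encoding bits. A plain C++ restatement (the mask values are assumptions, not V8's actual constants):

    #include <cassert>
    #include <cstdint>

    constexpr uint32_t kStringRepresentationMask = 0x03;  // assumed value
    constexpr uint32_t kStringEncodingMask = 0x04;        // assumed value

    struct StringHeader { int32_t length; uint32_t instance_type; };

    void CheckSeqStringSetChar(const StringHeader& s, int32_t index,
                               uint32_t encoding_mask) {
      assert(index >= 0);        // Check(ge, kIndexIsNegative)
      assert(index < s.length);  // Check(lt, kIndexIsTooLarge)
      uint32_t type = s.instance_type &
                      (kStringRepresentationMask | kStringEncodingMask);
      assert(type == encoding_mask);  // Check(eq, kUnexpectedStringType)
    }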
void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(3, args->length());
@@ -3501,13 +3458,18 @@ void FullCodeGenerator::EmitOneByteSeqStringSetChar(CallRuntime* expr) {
VisitForStackValue(args->at(1)); // index
VisitForStackValue(args->at(2)); // value
- __ pop(value);
- __ pop(index);
VisitForAccumulatorValue(args->at(0)); // string
+ __ Pop(index, value);
if (FLAG_debug_code) {
+ __ SmiTst(value);
+ __ ThrowIf(ne, kNonSmiValue);
+ __ SmiTst(index);
+ __ ThrowIf(ne, kNonSmiIndex);
+ __ SmiUntag(index, index);
static const uint32_t one_byte_seq_type = kSeqStringTag | kOneByteStringTag;
- EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
+ __ EmitSeqStringSetCharCheck(string, index, value, one_byte_seq_type);
+ __ SmiTag(index, index);
}
__ SmiUntag(value, value);
@@ -3529,13 +3491,18 @@ void FullCodeGenerator::EmitTwoByteSeqStringSetChar(CallRuntime* expr) {
VisitForStackValue(args->at(1)); // index
VisitForStackValue(args->at(2)); // value
- __ pop(value);
- __ pop(index);
VisitForAccumulatorValue(args->at(0)); // string
+ __ Pop(index, value);
if (FLAG_debug_code) {
+ __ SmiTst(value);
+ __ ThrowIf(ne, kNonSmiValue);
+ __ SmiTst(index);
+ __ ThrowIf(ne, kNonSmiIndex);
+ __ SmiUntag(index, index);
static const uint32_t two_byte_seq_type = kSeqStringTag | kTwoByteStringTag;
- EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
+ __ EmitSeqStringSetCharCheck(string, index, value, two_byte_seq_type);
+ __ SmiTag(index, index);
}
__ SmiUntag(value, value);
@@ -3592,8 +3559,8 @@ void FullCodeGenerator::EmitSetValueOf(CallRuntime* expr) {
void FullCodeGenerator::EmitNumberToString(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(args->length(), 1);
- // Load the argument on the stack and call the stub.
- VisitForStackValue(args->at(0));
+ // Load the argument into r0 and call the stub.
+ VisitForAccumulatorValue(args->at(0));
NumberToStringStub stub;
__ CallStub(&stub);
@@ -3714,11 +3681,21 @@ void FullCodeGenerator::EmitStringCharAt(CallRuntime* expr) {
void FullCodeGenerator::EmitStringAdd(CallRuntime* expr) {
ZoneList<Expression*>* args = expr->arguments();
ASSERT_EQ(2, args->length());
- VisitForStackValue(args->at(0));
- VisitForStackValue(args->at(1));
- StringAddStub stub(STRING_ADD_CHECK_BOTH);
- __ CallStub(&stub);
+ if (FLAG_new_string_add) {
+ VisitForStackValue(args->at(0));
+ VisitForAccumulatorValue(args->at(1));
+
+ __ pop(r1);
+ NewStringAddStub stub(STRING_ADD_CHECK_BOTH, NOT_TENURED);
+ __ CallStub(&stub);
+ } else {
+ VisitForStackValue(args->at(0));
+ VisitForStackValue(args->at(1));
+
+ StringAddStub stub(STRING_ADD_CHECK_BOTH);
+ __ CallStub(&stub);
+ }
context()->Plug(r0);
}
@@ -3735,42 +3712,6 @@ void FullCodeGenerator::EmitStringCompare(CallRuntime* expr) {
}
-void FullCodeGenerator::EmitMathSin(CallRuntime* expr) {
- // Load the argument on the stack and call the stub.
- TranscendentalCacheStub stub(TranscendentalCache::SIN,
- TranscendentalCacheStub::TAGGED);
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 1);
- VisitForStackValue(args->at(0));
- __ CallStub(&stub);
- context()->Plug(r0);
-}
-
-
-void FullCodeGenerator::EmitMathCos(CallRuntime* expr) {
- // Load the argument on the stack and call the stub.
- TranscendentalCacheStub stub(TranscendentalCache::COS,
- TranscendentalCacheStub::TAGGED);
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 1);
- VisitForStackValue(args->at(0));
- __ CallStub(&stub);
- context()->Plug(r0);
-}
-
-
-void FullCodeGenerator::EmitMathTan(CallRuntime* expr) {
- // Load the argument on the stack and call the stub.
- TranscendentalCacheStub stub(TranscendentalCache::TAN,
- TranscendentalCacheStub::TAGGED);
- ZoneList<Expression*>* args = expr->arguments();
- ASSERT(args->length() == 1);
- VisitForStackValue(args->at(0));
- __ CallStub(&stub);
- context()->Plug(r0);
-}
-
-
void FullCodeGenerator::EmitMathLog(CallRuntime* expr) {
// Load the argument on the stack and call the stub.
TranscendentalCacheStub stub(TranscendentalCache::LOG,
@@ -3964,9 +3905,8 @@ void FullCodeGenerator::EmitGetCachedArrayIndex(CallRuntime* expr) {
void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
- Label bailout, done, one_char_separator, long_separator,
- non_trivial_array, not_size_one_array, loop,
- empty_separator_loop, one_char_separator_loop,
+ Label bailout, done, one_char_separator, long_separator, non_trivial_array,
+ not_size_one_array, loop, empty_separator_loop, one_char_separator_loop,
one_char_separator_loop_entry, long_separator_loop;
ZoneList<Expression*>* args = expr->arguments();
ASSERT(args->length() == 2);
@@ -3984,19 +3924,18 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
Register string = r4;
Register element = r5;
Register elements_end = r6;
- Register scratch1 = r7;
- Register scratch2 = r9;
+ Register scratch = r9;
// Separator operand is on the stack.
__ pop(separator);
// Check that the array is a JSArray.
__ JumpIfSmi(array, &bailout);
- __ CompareObjectType(array, scratch1, scratch2, JS_ARRAY_TYPE);
+ __ CompareObjectType(array, scratch, array_length, JS_ARRAY_TYPE);
__ b(ne, &bailout);
// Check that the array has fast elements.
- __ CheckFastElements(scratch1, scratch2, &bailout);
+ __ CheckFastElements(scratch, array_length, &bailout);
// If the array has length zero, return the empty string.
__ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
@@ -4033,11 +3972,11 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ bind(&loop);
__ ldr(string, MemOperand(element, kPointerSize, PostIndex));
__ JumpIfSmi(string, &bailout);
- __ ldr(scratch1, FieldMemOperand(string, HeapObject::kMapOffset));
- __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
- __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
- __ ldr(scratch1, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
- __ add(string_length, string_length, Operand(scratch1), SetCC);
+ __ ldr(scratch, FieldMemOperand(string, HeapObject::kMapOffset));
+ __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
+ __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
+ __ ldr(scratch, FieldMemOperand(string, SeqOneByteString::kLengthOffset));
+ __ add(string_length, string_length, Operand(scratch), SetCC);
__ b(vs, &bailout);
__ cmp(element, elements_end);
__ b(lt, &loop);
@@ -4058,23 +3997,23 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// Check that the separator is a flat ASCII string.
__ JumpIfSmi(separator, &bailout);
- __ ldr(scratch1, FieldMemOperand(separator, HeapObject::kMapOffset));
- __ ldrb(scratch1, FieldMemOperand(scratch1, Map::kInstanceTypeOffset));
- __ JumpIfInstanceTypeIsNotSequentialAscii(scratch1, scratch2, &bailout);
+ __ ldr(scratch, FieldMemOperand(separator, HeapObject::kMapOffset));
+ __ ldrb(scratch, FieldMemOperand(scratch, Map::kInstanceTypeOffset));
+ __ JumpIfInstanceTypeIsNotSequentialAscii(scratch, scratch, &bailout);
// Add (separator length times array_length) - separator length to the
// string_length to get the length of the result string. array_length is
// not a smi but the other values are, so the result is a smi.
- __ ldr(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
- __ sub(string_length, string_length, Operand(scratch1));
- __ smull(scratch2, ip, array_length, scratch1);
+ __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
+ __ sub(string_length, string_length, Operand(scratch));
+ __ smull(scratch, ip, array_length, scratch);
// Check for smi overflow. No overflow if higher 33 bits of 64-bit result are
// zero.
__ cmp(ip, Operand::Zero());
__ b(ne, &bailout);
- __ tst(scratch2, Operand(0x80000000));
+ __ tst(scratch, Operand(0x80000000));
__ b(ne, &bailout);
- __ add(string_length, string_length, Operand(scratch2), SetCC);
+ __ add(string_length, string_length, Operand(scratch), SetCC);
__ b(vs, &bailout);
__ SmiUntag(string_length);
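The overflow check around smull keeps its shape with one scratch register: the 64-bit product of array_length and the separator length fits a non-negative smi only if its top 33 bits, the high word plus the sign bit of the low word, are all zero. Standalone:

    #include <cstdint>

    // Mirrors "smull lo, hi, array_length, sep_length" followed by
    // "cmp hi, #0" and "tst lo, #0x80000000".
    bool ProductFitsPositiveSmi(int32_t array_length, int32_t sep_length) {
      int64_t product = static_cast<int64_t>(array_length) * sep_length;
      int32_t hi = static_cast<int32_t>(product >> 32);
      uint32_t lo = static_cast<uint32_t>(product);
      return hi == 0 && (lo & 0x80000000u) == 0;
    }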
@@ -4091,9 +4030,9 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
// array_length: Length of the array.
__ AllocateAsciiString(result,
string_length,
- scratch1,
- scratch2,
- elements_end,
+ scratch,
+ string, // used as scratch
+ elements_end, // used as scratch
&bailout);
// Prepare for looping. Set up elements_end to end of the array. Set
// result_pos to the position of the result where to write the first
@@ -4106,8 +4045,8 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
// Check the length of the separator.
- __ ldr(scratch1, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
- __ cmp(scratch1, Operand(Smi::FromInt(1)));
+ __ ldr(scratch, FieldMemOperand(separator, SeqOneByteString::kLengthOffset));
+ __ cmp(scratch, Operand(Smi::FromInt(1)));
__ b(eq, &one_char_separator);
__ b(gt, &long_separator);
@@ -4125,7 +4064,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ add(string,
string,
Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
- __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ CopyBytes(string, result_pos, string_length, scratch);
__ cmp(element, elements_end);
__ b(lt, &empty_separator_loop); // End while (element < elements_end).
ASSERT(result.is(r0));
@@ -4157,7 +4096,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ add(string,
string,
Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
- __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ CopyBytes(string, result_pos, string_length, scratch);
__ cmp(element, elements_end);
__ b(lt, &one_char_separator_loop); // End while (element < elements_end).
ASSERT(result.is(r0));
@@ -4178,7 +4117,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ add(string,
separator,
Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
- __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ CopyBytes(string, result_pos, string_length, scratch);
__ bind(&long_separator);
__ ldr(string, MemOperand(element, kPointerSize, PostIndex));
@@ -4187,7 +4126,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(CallRuntime* expr) {
__ add(string,
string,
Operand(SeqOneByteString::kHeaderSize - kHeapObjectTag));
- __ CopyBytes(string, result_pos, string_length, scratch1);
+ __ CopyBytes(string, result_pos, string_length, scratch);
__ cmp(element, elements_end);
__ b(lt, &long_separator_loop); // End while (element < elements_end).
ASSERT(result.is(r0));
@@ -4276,9 +4215,9 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
} else {
// Non-global variable. Call the runtime to try to delete from the
// context where the variable was introduced.
- __ push(context_register());
+ ASSERT(!context_register().is(r2));
__ mov(r2, Operand(var->name()));
- __ push(r2);
+ __ Push(context_register(), r2);
__ CallRuntime(Runtime::kDeleteContextSlot, 2);
context()->Plug(r0);
}
@@ -4409,14 +4348,44 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
PrepareForBailoutForId(prop->LoadId(), TOS_REG);
}
- // Call ToNumber only if operand is not a smi.
- Label no_conversion;
+ // Inline smi case if we are in a loop.
+ Label stub_call, done;
+ JumpPatchSite patch_site(masm_);
+
+ int count_value = expr->op() == Token::INC ? 1 : -1;
if (ShouldInlineSmiCase(expr->op())) {
- __ JumpIfSmi(r0, &no_conversion);
+ Label slow;
+ patch_site.EmitJumpIfNotSmi(r0, &slow);
+
+ // Save result for postfix expressions.
+ if (expr->is_postfix()) {
+ if (!context()->IsEffect()) {
+ // Save the result on the stack. If we have a named or keyed property
+ // we store the result under the receiver that is currently on top
+ // of the stack.
+ switch (assign_type) {
+ case VARIABLE:
+ __ push(r0);
+ break;
+ case NAMED_PROPERTY:
+ __ str(r0, MemOperand(sp, kPointerSize));
+ break;
+ case KEYED_PROPERTY:
+ __ str(r0, MemOperand(sp, 2 * kPointerSize));
+ break;
+ }
+ }
+ }
+
+ __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
+ __ b(vc, &done);
+ // Call stub. Undo operation first.
+ __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
+ __ jmp(&stub_call);
+ __ bind(&slow);
}
ToNumberStub convert_stub;
__ CallStub(&convert_stub);
- __ bind(&no_conversion);
// Save result for postfix expressions.
if (expr->is_postfix()) {
@@ -4439,29 +4408,14 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
}
- // Inline smi case if we are in a loop.
- Label stub_call, done;
- JumpPatchSite patch_site(masm_);
-
- int count_value = expr->op() == Token::INC ? 1 : -1;
- if (ShouldInlineSmiCase(expr->op())) {
- __ add(r0, r0, Operand(Smi::FromInt(count_value)), SetCC);
- __ b(vs, &stub_call);
- // We could eliminate this smi check if we split the code at
- // the first smi check before calling ToNumber.
- patch_site.EmitJumpIfSmi(r0, &done);
-
- __ bind(&stub_call);
- // Call stub. Undo operation first.
- __ sub(r0, r0, Operand(Smi::FromInt(count_value)));
- }
+ __ bind(&stub_call);
__ mov(r1, r0);
__ mov(r0, Operand(Smi::FromInt(count_value)));
// Record position before stub call.
SetSourcePosition(expr->position());
- BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
+ BinaryOpICStub stub(Token::ADD, NO_OVERWRITE);
CallIC(stub.GetCode(isolate()),
RelocInfo::CODE_TARGET,
expr->CountBinOpFeedbackId());
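The count-operation rewrite moves the inline smi fast path ahead of the ToNumber conversion: add the smi-tagged delta with flags set, branch on vc (no signed overflow) straight to done, otherwise undo the add and fall through to the BinaryOpICStub. A sketch of the arithmetic, assuming V8's 32-bit smi tagging (value << 1) and the GCC/Clang overflow builtin:

    #include <cstdint>

    // Returns true after updating the tagged value on the fast path; returns
    // false with the operand restored (the emitted "sub" undo) when the stub
    // must run instead.
    bool TrySmiCount(int32_t& smi_tagged, int count_value /* +1 or -1 */) {
      int32_t delta = count_value * 2;  // Smi::FromInt(count_value)
      int32_t result;
      if (__builtin_add_overflow(smi_tagged, delta, &result)) {
        return false;  // vs: &stub_call
      }
      smi_tagged = result;  // vc: &done
      return true;
    }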
@@ -4508,8 +4462,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
break;
}
case KEYED_PROPERTY: {
- __ pop(r1); // Key.
- __ pop(r2); // Receiver.
+ __ Pop(r2, r1); // r1 = key. r2 = receiver.
Handle<Code> ic = is_classic_mode()
? isolate()->builtins()->KeyedStoreIC_Initialize()
: isolate()->builtins()->KeyedStoreIC_Initialize_Strict();
@@ -4894,6 +4847,91 @@ FullCodeGenerator::NestedStatement* FullCodeGenerator::TryFinally::Exit(
#undef __
+
+static const int32_t kBranchBeforeInterrupt = 0x5a000004;
+
+
+void BackEdgeTable::PatchAt(Code* unoptimized_code,
+ Address pc,
+ BackEdgeState target_state,
+ Code* replacement_code) {
+ static const int kInstrSize = Assembler::kInstrSize;
+ Address branch_address = pc - 3 * kInstrSize;
+ CodePatcher patcher(branch_address, 1);
+
+ switch (target_state) {
+ case INTERRUPT:
+ // <decrement profiling counter>
+ // 5a 00 00 04 bpl ok
+ // e5 9f c? ?? ldr ip, [pc, <interrupt stub address>]
+ // e1 2f ff 3c blx ip
+ // ok-label
+ patcher.masm()->b(4 * kInstrSize, pl); // Jump offset is 4 instructions.
+ ASSERT_EQ(kBranchBeforeInterrupt, Memory::int32_at(branch_address));
+ break;
+ case ON_STACK_REPLACEMENT:
+ case OSR_AFTER_STACK_CHECK:
+ // <decrement profiling counter>
+ // e1 a0 00 00 mov r0, r0 (NOP)
+ // e5 9f c? ?? ldr ip, [pc, <on-stack replacement address>]
+ // e1 2f ff 3c blx ip
+ // ok-label
+ patcher.masm()->nop();
+ break;
+ }
+
+ Address pc_immediate_load_address = pc - 2 * kInstrSize;
+ // Replace the call address.
+ uint32_t interrupt_address_offset =
+ Memory::uint16_at(pc_immediate_load_address) & 0xfff;
+ Address interrupt_address_pointer = pc + interrupt_address_offset;
+ Memory::uint32_at(interrupt_address_pointer) =
+ reinterpret_cast<uint32_t>(replacement_code->entry());
+
+ unoptimized_code->GetHeap()->incremental_marking()->RecordCodeTargetPatch(
+ unoptimized_code, pc_immediate_load_address, replacement_code);
+}
+
+
+BackEdgeTable::BackEdgeState BackEdgeTable::GetBackEdgeState(
+ Isolate* isolate,
+ Code* unoptimized_code,
+ Address pc) {
+ static const int kInstrSize = Assembler::kInstrSize;
+ ASSERT(Memory::int32_at(pc - kInstrSize) == kBlxIp);
+
+ Address branch_address = pc - 3 * kInstrSize;
+ Address pc_immediate_load_address = pc - 2 * kInstrSize;
+ uint32_t interrupt_address_offset =
+ Memory::uint16_at(pc_immediate_load_address) & 0xfff;
+ Address interrupt_address_pointer = pc + interrupt_address_offset;
+
+ if (Memory::int32_at(branch_address) == kBranchBeforeInterrupt) {
+ ASSERT(Memory::uint32_at(interrupt_address_pointer) ==
+ reinterpret_cast<uint32_t>(
+ isolate->builtins()->InterruptCheck()->entry()));
+ ASSERT(Assembler::IsLdrPcImmediateOffset(
+ Assembler::instr_at(pc_immediate_load_address)));
+ return INTERRUPT;
+ }
+
+ ASSERT(Assembler::IsNop(Assembler::instr_at(branch_address)));
+ ASSERT(Assembler::IsLdrPcImmediateOffset(
+ Assembler::instr_at(pc_immediate_load_address)));
+
+ if (Memory::uint32_at(interrupt_address_pointer) ==
+ reinterpret_cast<uint32_t>(
+ isolate->builtins()->OnStackReplacement()->entry())) {
+ return ON_STACK_REPLACEMENT;
+ }
+
+ ASSERT(Memory::uint32_at(interrupt_address_pointer) ==
+ reinterpret_cast<uint32_t>(
+ isolate->builtins()->OsrAfterStackCheck()->entry()));
+ return OSR_AFTER_STACK_CHECK;
+}
+
+
} } // namespace v8::internal
#endif // V8_TARGET_ARCH_ARM
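Taken together, PatchAt and GetBackEdgeState implement a three-state protocol over two mutable points per back edge: the guarding branch at pc - 3 * kInstrSize and the constant-pool slot the ldr/blx pair calls through. A compact model, with the instruction encodings reduced to an enum:

    #include <cstdint>

    enum BackEdgeState { INTERRUPT, ON_STACK_REPLACEMENT, OSR_AFTER_STACK_CHECK };
    enum BranchKind { BPL_SKIPS_CALL, NOP_FALLS_THROUGH };

    struct BackEdgeSite {
      BranchKind branch;     // instruction at pc - 3 * kInstrSize
      uint32_t call_target;  // constant-pool slot used by the ldr/blx pair
    };

    // INTERRUPT keeps the bpl so the stub call is skipped while the profiling
    // counter stays positive; both OSR states nop the branch so the call runs
    // unconditionally. The call target is rewritten either way.
    void Patch(BackEdgeSite& site, BackEdgeState target, uint32_t replacement) {
      site.branch = (target == INTERRUPT) ? BPL_SKIPS_CALL : NOP_FALLS_THROUGH;
      site.call_target = replacement;
    }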