Diffstat (limited to 'deps/v8/src/arm/full-codegen-arm.cc')
-rw-r--r--    deps/v8/src/arm/full-codegen-arm.cc    1069
1 files changed, 578 insertions, 491 deletions
diff --git a/deps/v8/src/arm/full-codegen-arm.cc b/deps/v8/src/arm/full-codegen-arm.cc
index 7a47644781..9b771dae28 100644
--- a/deps/v8/src/arm/full-codegen-arm.cc
+++ b/deps/v8/src/arm/full-codegen-arm.cc
@@ -30,7 +30,7 @@
#if defined(V8_TARGET_ARCH_ARM)
#include "code-stubs.h"
-#include "codegen-inl.h"
+#include "codegen.h"
#include "compiler.h"
#include "debug.h"
#include "full-codegen.h"
@@ -46,6 +46,12 @@ namespace internal {
#define __ ACCESS_MASM(masm_)
+static unsigned GetPropertyId(Property* property) {
+ if (property->is_synthetic()) return AstNode::kNoNumber;
+ return property->id();
+}
+
+
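
GetPropertyId supplies the AST id that the EmitCallIC calls below now take as a third argument; synthetic properties have no useful id, so the sentinel AstNode::kNoNumber is passed instead. A typical call site, assembled from hunks later in this patch (exact surrounding code varies; the id presumably lets the recorded IC call be mapped back to its AST node):

  Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
  EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));  // kNoNumber when prop is synthetic
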
// A patch site is a location in the code which it is possible to patch. This
// class has a number of methods to emit the code which is patchable and the
// method EmitPatchInfo to record a marker back to the patchable code. This
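
Concretely, the usage pattern visible at the call sites in this patch is: create a patch site, emit the patchable smi check through it, then hand it to EmitCallIC so that EmitPatchInfo can record the marker next to the IC call. A condensed sketch (the JumpPatchSite declaration itself is outside this excerpt, so treat that line as illustrative):

  JumpPatchSite patch_site(masm_);                   // illustrative; declaration not shown in this diff
  patch_site.EmitJumpIfSmi(scratch1, &smi_case);     // patchable inline smi check
  ...
  Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
  EmitCallIC(ic, &patch_site, clause->CompareId());  // records patch info beside the IC call
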
@@ -133,6 +139,20 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
}
#endif
+ // Strict mode functions and builtins need to replace the receiver
+ // with undefined when called as functions (without an explicit
+ // receiver object). r5 is zero for method calls and non-zero for
+ // function calls.
+ if (info->is_strict_mode() || info->is_native()) {
+ Label ok;
+ __ cmp(r5, Operand(0));
+ __ b(eq, &ok);
+ int receiver_offset = scope()->num_parameters() * kPointerSize;
+ __ LoadRoot(r2, Heap::kUndefinedValueRootIndex);
+ __ str(r2, MemOperand(sp, receiver_offset));
+ __ bind(&ok);
+ }
+
int locals_count = scope()->num_stack_slots();
__ Push(lr, fp, cp, r1);
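
To make receiver_offset concrete (an illustrative case only: two declared parameters, kPointerSize == 4 on ARM), note that this block runs before the frame is pushed, so sp still points at the last argument and the incoming stack looks roughly like:

  sp + 8 : receiver       // receiver_offset = 2 * kPointerSize
  sp + 4 : parameter 0
  sp + 0 : parameter 1

The str above therefore overwrites exactly the receiver slot with undefined whenever r5 signals a receiverless (function-style) call.
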
@@ -162,7 +182,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
FastNewContextStub stub(heap_slots);
__ CallStub(&stub);
} else {
- __ CallRuntime(Runtime::kNewContext, 1);
+ __ CallRuntime(Runtime::kNewFunctionContext, 1);
}
function_in_register = false;
// Context is returned in both r0 and cp. It replaces the context
@@ -210,13 +230,18 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
// function, receiver address, parameter count.
// The stub will rewrite receiver and parameter count if the previous
// stack frame was an arguments adapter frame.
- ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+ ArgumentsAccessStub::Type type;
+ if (is_strict_mode()) {
+ type = ArgumentsAccessStub::NEW_STRICT;
+ } else if (function()->has_duplicate_parameters()) {
+ type = ArgumentsAccessStub::NEW_NON_STRICT_SLOW;
+ } else {
+ type = ArgumentsAccessStub::NEW_NON_STRICT_FAST;
+ }
+ ArgumentsAccessStub stub(type);
__ CallStub(&stub);
- // Duplicate the value; move-to-slot operation might clobber registers.
- __ mov(r3, r0);
+
Move(arguments->AsSlot(), r0, r1, r2);
- Slot* dot_arguments_slot = scope()->arguments_shadow()->AsSlot();
- Move(dot_arguments_slot, r3, r1, r2);
}
if (FLAG_trace) {
@@ -240,7 +265,7 @@ void FullCodeGenerator::Generate(CompilationInfo* info) {
}
{ Comment cmnt(masm_, "[ Stack check");
- PrepareForBailout(info->function(), NO_REGISTERS);
+ PrepareForBailoutForId(AstNode::kFunctionEntryId, NO_REGISTERS);
Label ok;
__ LoadRoot(ip, Heap::kStackLimitRootIndex);
__ cmp(sp, Operand(ip));
@@ -358,7 +383,7 @@ void FullCodeGenerator::TestContext::Plug(Slot* slot) const {
// For simplicity we always test the accumulator register.
codegen()->Move(result_register(), slot);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
- codegen()->DoTest(true_label_, false_label_, fall_through_);
+ codegen()->DoTest(this);
}
@@ -392,7 +417,7 @@ void FullCodeGenerator::TestContext::Plug(Heap::RootListIndex index) const {
if (true_label_ != fall_through_) __ b(true_label_);
} else {
__ LoadRoot(result_register(), index);
- codegen()->DoTest(true_label_, false_label_, fall_through_);
+ codegen()->DoTest(this);
}
}
@@ -426,8 +451,7 @@ void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
if (true_label_ != fall_through_) __ b(true_label_);
} else if (lit->IsString()) {
if (String::cast(*lit)->length() == 0) {
- if (false_label_ != fall_through_) __ b(false_label_);
- __ b(false_label_);
+ if (false_label_ != fall_through_) __ b(false_label_);
} else {
if (true_label_ != fall_through_) __ b(true_label_);
}
@@ -440,7 +464,7 @@ void FullCodeGenerator::TestContext::Plug(Handle<Object> lit) const {
} else {
// For simplicity we always test the accumulator register.
__ mov(result_register(), Operand(lit));
- codegen()->DoTest(true_label_, false_label_, fall_through_);
+ codegen()->DoTest(this);
}
}
@@ -476,7 +500,7 @@ void FullCodeGenerator::TestContext::DropAndPlug(int count,
__ Drop(count);
__ Move(result_register(), reg);
codegen()->PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
- codegen()->DoTest(true_label_, false_label_, fall_through_);
+ codegen()->DoTest(this);
}
@@ -554,27 +578,11 @@ void FullCodeGenerator::TestContext::Plug(bool flag) const {
}
-void FullCodeGenerator::DoTest(Label* if_true,
+void FullCodeGenerator::DoTest(Expression* condition,
+ Label* if_true,
Label* if_false,
Label* fall_through) {
if (CpuFeatures::IsSupported(VFP3)) {
- CpuFeatures::Scope scope(VFP3);
- // Emit the inlined tests assumed by the stub.
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ cmp(result_register(), ip);
- __ b(eq, if_false);
- __ LoadRoot(ip, Heap::kTrueValueRootIndex);
- __ cmp(result_register(), ip);
- __ b(eq, if_true);
- __ LoadRoot(ip, Heap::kFalseValueRootIndex);
- __ cmp(result_register(), ip);
- __ b(eq, if_false);
- STATIC_ASSERT(kSmiTag == 0);
- __ tst(result_register(), result_register());
- __ b(eq, if_false);
- __ JumpIfSmi(result_register(), if_true);
-
- // Call the ToBoolean stub for all other cases.
ToBooleanStub stub(result_register());
__ CallStub(&stub);
__ tst(result_register(), result_register());
@@ -586,8 +594,6 @@ void FullCodeGenerator::DoTest(Label* if_true,
__ LoadRoot(ip, Heap::kFalseValueRootIndex);
__ cmp(r0, ip);
}
-
- // The stub returns nonzero for true.
Split(ne, if_true, if_false, fall_through);
}
@@ -707,10 +713,12 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
// context.
ASSERT_EQ(0, scope()->ContextChainLength(variable->scope()));
if (FLAG_debug_code) {
- // Check that we're not inside a 'with'.
- __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
- __ cmp(r1, cp);
- __ Check(eq, "Unexpected declaration in current context.");
+ // Check that we're not inside a with or catch context.
+ __ ldr(r1, FieldMemOperand(cp, HeapObject::kMapOffset));
+ __ CompareRoot(r1, Heap::kWithContextMapRootIndex);
+ __ Check(ne, "Declaration in with context.");
+ __ CompareRoot(r1, Heap::kCatchContextMapRootIndex);
+ __ Check(ne, "Declaration in catch context.");
}
if (mode == Variable::CONST) {
__ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
@@ -755,31 +763,30 @@ void FullCodeGenerator::EmitDeclaration(Variable* variable,
}
} else if (prop != NULL) {
- if (function != NULL || mode == Variable::CONST) {
- // We are declaring a function or constant that rewrites to a
- // property. Use (keyed) IC to set the initial value. We
- // cannot visit the rewrite because it's shared and we risk
- // recording duplicate AST IDs for bailouts from optimized code.
+ // A const declaration aliasing a parameter is an illegal redeclaration.
+ ASSERT(mode != Variable::CONST);
+ if (function != NULL) {
+ // We are declaring a function that rewrites to a property.
+ // Use (keyed) IC to set the initial value. We cannot visit the
+ // rewrite because it's shared and we risk recording duplicate AST
+ // IDs for bailouts from optimized code.
ASSERT(prop->obj()->AsVariableProxy() != NULL);
{ AccumulatorValueContext for_object(this);
EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
}
- if (function != NULL) {
- __ push(r0);
- VisitForAccumulatorValue(function);
- __ pop(r2);
- } else {
- __ mov(r2, r0);
- __ LoadRoot(r0, Heap::kTheHoleValueRootIndex);
- }
+
+ __ push(r0);
+ VisitForAccumulatorValue(function);
+ __ pop(r2);
+
ASSERT(prop->key()->AsLiteral() != NULL &&
prop->key()->AsLiteral()->handle()->IsSmi());
__ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
- Handle<Code> ic(Builtins::builtin(is_strict()
- ? Builtins::KeyedStoreIC_Initialize_Strict
- : Builtins::KeyedStoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
// Value in r0 is ignored (declarations are statements).
}
}
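
For readers not fluent in the ARM IC calling convention, the register shuffling above sets up the fixed operands the keyed store IC expects (inferred from this hunk; the IC itself lives outside this file):

  r0 : value to store (the closure just materialized)
  r1 : key (a smi literal)
  r2 : receiver object
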
@@ -819,7 +826,7 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
// Compile all the tests with branches to their bodies.
for (int i = 0; i < clauses->length(); i++) {
CaseClause* clause = clauses->at(i);
- clause->body_target()->entry_label()->Unuse();
+ clause->body_target()->Unuse();
// The default is not a test, but remember it as final fall through.
if (clause->is_default()) {
@@ -846,18 +853,19 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
__ cmp(r1, r0);
__ b(ne, &next_test);
__ Drop(1); // Switch value is no longer needed.
- __ b(clause->body_target()->entry_label());
+ __ b(clause->body_target());
__ bind(&slow_case);
}
// Record position before stub call for type feedback.
SetSourcePosition(clause->position());
Handle<Code> ic = CompareIC::GetUninitialized(Token::EQ_STRICT);
- EmitCallIC(ic, &patch_site);
+ EmitCallIC(ic, &patch_site, clause->CompareId());
+
__ cmp(r0, Operand(0));
__ b(ne, &next_test);
__ Drop(1); // Switch value is no longer needed.
- __ b(clause->body_target()->entry_label());
+ __ b(clause->body_target());
}
// Discard the test value and jump to the default if present, otherwise to
@@ -867,14 +875,15 @@ void FullCodeGenerator::VisitSwitchStatement(SwitchStatement* stmt) {
if (default_clause == NULL) {
__ b(nested_statement.break_target());
} else {
- __ b(default_clause->body_target()->entry_label());
+ __ b(default_clause->body_target());
}
// Compile all the case bodies.
for (int i = 0; i < clauses->length(); i++) {
Comment cmnt(masm_, "[ Case body");
CaseClause* clause = clauses->at(i);
- __ bind(clause->body_target()->entry_label());
+ __ bind(clause->body_target());
+ PrepareForBailoutForId(clause->EntryId(), NO_REGISTERS);
VisitStatements(clause->statements());
}
@@ -906,11 +915,11 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// Convert the object to a JS object.
Label convert, done_convert;
__ JumpIfSmi(r0, &convert);
- __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
- __ b(hs, &done_convert);
+ __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
+ __ b(ge, &done_convert);
__ bind(&convert);
__ push(r0);
- __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_JS);
+ __ InvokeBuiltin(Builtins::TO_OBJECT, CALL_FUNCTION);
__ bind(&done_convert);
__ push(r0);
@@ -938,9 +947,8 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// check for an enum cache. Leave the map in r2 for the subsequent
// prototype load.
__ ldr(r2, FieldMemOperand(r1, HeapObject::kMapOffset));
- __ ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOffset));
- __ cmp(r3, empty_descriptor_array_value);
- __ b(eq, &call_runtime);
+ __ ldr(r3, FieldMemOperand(r2, Map::kInstanceDescriptorsOrBitField3Offset));
+ __ JumpIfSmi(r3, &call_runtime);
// Check that there is an enum cache in the non-empty instance
// descriptors (r3). This is the case if the next enumeration
@@ -985,7 +993,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// We got a map in register r0. Get the enumeration cache from it.
__ bind(&use_cache);
- __ ldr(r1, FieldMemOperand(r0, Map::kInstanceDescriptorsOffset));
+ __ LoadInstanceDescriptors(r0, r1);
__ ldr(r1, FieldMemOperand(r1, DescriptorArray::kEnumerationIndexOffset));
__ ldr(r2, FieldMemOperand(r1, DescriptorArray::kEnumCacheBridgeCacheOffset));
@@ -1034,7 +1042,7 @@ void FullCodeGenerator::VisitForInStatement(ForInStatement* stmt) {
// just skip it.
__ push(r1); // Enumerable.
__ push(r3); // Current entry.
- __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_JS);
+ __ InvokeBuiltin(Builtins::FILTER_KEY, CALL_FUNCTION);
__ mov(r3, Operand(r0), SetCC);
__ b(eq, loop_statement.continue_target());
@@ -1080,10 +1088,10 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
// doesn't just get a copy of the existing unoptimized code.
if (!FLAG_always_opt &&
!FLAG_prepare_always_opt &&
+ !pretenure &&
scope()->is_function_scope() &&
- info->num_literals() == 0 &&
- !pretenure) {
- FastNewClosureStub stub;
+ info->num_literals() == 0) {
+ FastNewClosureStub stub(info->strict_mode() ? kStrictMode : kNonStrictMode);
__ mov(r0, Operand(info));
__ push(r0);
__ CallStub(&stub);
@@ -1104,6 +1112,65 @@ void FullCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
}
+void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
+ Slot* slot,
+ TypeofState typeof_state,
+ Label* slow) {
+ Register current = cp;
+ Register next = r1;
+ Register temp = r2;
+
+ Scope* s = scope();
+ while (s != NULL) {
+ if (s->num_heap_slots() > 0) {
+ if (s->calls_eval()) {
+ // Check that extension is NULL.
+ __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
+ __ tst(temp, temp);
+ __ b(ne, slow);
+ }
+ // Load next context in chain.
+ __ ldr(next, ContextOperand(current, Context::PREVIOUS_INDEX));
+ // Walk the rest of the chain without clobbering cp.
+ current = next;
+ }
+ // If no outer scope calls eval, we do not need to check more
+ // context extensions.
+ if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
+ s = s->outer_scope();
+ }
+
+ if (s->is_eval_scope()) {
+ Label loop, fast;
+ if (!current.is(next)) {
+ __ Move(next, current);
+ }
+ __ bind(&loop);
+ // Terminate at global context.
+ __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
+ __ cmp(temp, ip);
+ __ b(eq, &fast);
+ // Check that extension is NULL.
+ __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
+ __ tst(temp, temp);
+ __ b(ne, slow);
+ // Load next context in chain.
+ __ ldr(next, ContextOperand(next, Context::PREVIOUS_INDEX));
+ __ b(&loop);
+ __ bind(&fast);
+ }
+
+ __ ldr(r0, GlobalObjectOperand());
+ __ mov(r2, Operand(slot->var()->name()));
+ RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
+ ? RelocInfo::CODE_TARGET
+ : RelocInfo::CODE_TARGET_CONTEXT;
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ EmitCallIC(ic, mode, AstNode::kNoNumber);
+}
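
As a mental model of what the fast path above verifies, here is a small self-contained C++ sketch. The types and names are toy stand-ins, not V8's real Context API, and it omits two details of the emitted code: only contexts whose scopes call eval are checked, and the walk terminates explicitly at the global context.

  #include <cstddef>

  struct ToyContext {
    ToyContext* previous;   // models the Context::PREVIOUS_INDEX link
    void* extension;        // models Context::EXTENSION_INDEX; non-null means eval added bindings
  };

  // A global load may take the contextual IC fast path only if no context on
  // the way out carries an extension object; otherwise fall back to the slow path.
  bool CanUseFastGlobalLoad(const ToyContext* c) {
    for (; c != NULL; c = c->previous) {
      if (c->extension != NULL) return false;   // corresponds to "__ b(ne, slow)" above
    }
    return true;
  }
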
+
+
MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
Slot* slot,
Label* slow) {
@@ -1120,8 +1187,7 @@ MemOperand FullCodeGenerator::ContextSlotOperandCheckExtensions(
__ tst(temp, temp);
__ b(ne, slow);
}
- __ ldr(next, ContextOperand(context, Context::CLOSURE_INDEX));
- __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
+ __ ldr(next, ContextOperand(context, Context::PREVIOUS_INDEX));
// Walk the rest of the chain without clobbering cp.
context = next;
}
@@ -1180,8 +1246,9 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
slow));
__ mov(r0, Operand(key_literal->handle()));
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic =
+ isolate()->builtins()->KeyedLoadIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(property));
__ jmp(done);
}
}
@@ -1190,85 +1257,23 @@ void FullCodeGenerator::EmitDynamicLoadFromSlotFastCase(
}
-void FullCodeGenerator::EmitLoadGlobalSlotCheckExtensions(
- Slot* slot,
- TypeofState typeof_state,
- Label* slow) {
- Register current = cp;
- Register next = r1;
- Register temp = r2;
-
- Scope* s = scope();
- while (s != NULL) {
- if (s->num_heap_slots() > 0) {
- if (s->calls_eval()) {
- // Check that extension is NULL.
- __ ldr(temp, ContextOperand(current, Context::EXTENSION_INDEX));
- __ tst(temp, temp);
- __ b(ne, slow);
- }
- // Load next context in chain.
- __ ldr(next, ContextOperand(current, Context::CLOSURE_INDEX));
- __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
- // Walk the rest of the chain without clobbering cp.
- current = next;
- }
- // If no outer scope calls eval, we do not need to check more
- // context extensions.
- if (!s->outer_scope_calls_eval() || s->is_eval_scope()) break;
- s = s->outer_scope();
- }
-
- if (s->is_eval_scope()) {
- Label loop, fast;
- if (!current.is(next)) {
- __ Move(next, current);
- }
- __ bind(&loop);
- // Terminate at global context.
- __ ldr(temp, FieldMemOperand(next, HeapObject::kMapOffset));
- __ LoadRoot(ip, Heap::kGlobalContextMapRootIndex);
- __ cmp(temp, ip);
- __ b(eq, &fast);
- // Check that extension is NULL.
- __ ldr(temp, ContextOperand(next, Context::EXTENSION_INDEX));
- __ tst(temp, temp);
- __ b(ne, slow);
- // Load next context in chain.
- __ ldr(next, ContextOperand(next, Context::CLOSURE_INDEX));
- __ ldr(next, FieldMemOperand(next, JSFunction::kContextOffset));
- __ b(&loop);
- __ bind(&fast);
- }
-
- __ ldr(r0, GlobalObjectOperand());
- __ mov(r2, Operand(slot->var()->name()));
- RelocInfo::Mode mode = (typeof_state == INSIDE_TYPEOF)
- ? RelocInfo::CODE_TARGET
- : RelocInfo::CODE_TARGET_CONTEXT;
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
- EmitCallIC(ic, mode);
-}
-
-
void FullCodeGenerator::EmitVariableLoad(Variable* var) {
- // Four cases: non-this global variables, lookup slots, all other
- // types of slots, and parameters that rewrite to explicit property
- // accesses on the arguments object.
+ // Three cases: non-this global variables, lookup slots, and all other
+ // types of slots.
Slot* slot = var->AsSlot();
- Property* property = var->AsProperty();
+ ASSERT((var->is_global() && !var->is_this()) == (slot == NULL));
- if (var->is_global() && !var->is_this()) {
+ if (slot == NULL) {
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in r2 and the global
// object (receiver) in r0.
__ ldr(r0, GlobalObjectOperand());
__ mov(r2, Operand(var->name()));
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
context()->Plug(r0);
- } else if (slot != NULL && slot->type() == Slot::LOOKUP) {
+ } else if (slot->type() == Slot::LOOKUP) {
Label done, slow;
// Generate code for loading from variables potentially shadowed
@@ -1284,7 +1289,7 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
context()->Plug(r0);
- } else if (slot != NULL) {
+ } else {
Comment cmnt(masm_, (slot->type() == Slot::CONTEXT)
? "Context slot"
: "Stack slot");
@@ -1300,32 +1305,6 @@ void FullCodeGenerator::EmitVariableLoad(Variable* var) {
} else {
context()->Plug(slot);
}
- } else {
- Comment cmnt(masm_, "Rewritten parameter");
- ASSERT_NOT_NULL(property);
- // Rewritten parameter accesses are of the form "slot[literal]".
-
- // Assert that the object is in a slot.
- Variable* object_var = property->obj()->AsVariableProxy()->AsVariable();
- ASSERT_NOT_NULL(object_var);
- Slot* object_slot = object_var->AsSlot();
- ASSERT_NOT_NULL(object_slot);
-
- // Load the object.
- Move(r1, object_slot);
-
- // Assert that the key is a smi.
- Literal* key_literal = property->key()->AsLiteral();
- ASSERT_NOT_NULL(key_literal);
- ASSERT(key_literal->handle()->IsSmi());
-
- // Load the key.
- __ mov(r0, Operand(key_literal->handle()));
-
- // Call keyed load IC. It has arguments key and receiver in r0 and r1.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
- context()->Plug(r0);
}
}
@@ -1387,7 +1366,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
__ ldr(r3, FieldMemOperand(r3, JSFunction::kLiteralsOffset));
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r1, Operand(expr->constant_properties()));
- __ mov(r0, Operand(Smi::FromInt(expr->fast_elements() ? 1 : 0)));
+ int flags = expr->fast_elements()
+ ? ObjectLiteral::kFastElements
+ : ObjectLiteral::kNoFlags;
+ flags |= expr->has_function()
+ ? ObjectLiteral::kHasFunction
+ : ObjectLiteral::kNoFlags;
+ __ mov(r0, Operand(Smi::FromInt(flags)));
__ Push(r3, r2, r1, r0);
if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateObjectLiteral, 4);
@@ -1426,8 +1411,10 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
VisitForAccumulatorValue(value);
__ mov(r2, Operand(key->handle()));
__ ldr(r1, MemOperand(sp));
- Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, key->id());
PrepareForBailoutForId(key->id(), NO_REGISTERS);
} else {
VisitForEffect(value);
@@ -1465,6 +1452,13 @@ void FullCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
}
}
+ if (expr->has_function()) {
+ ASSERT(result_saved);
+ __ ldr(r0, MemOperand(sp));
+ __ push(r0);
+ __ CallRuntime(Runtime::kToFastProperties, 1);
+ }
+
if (result_saved) {
context()->PlugTOS();
} else {
@@ -1484,11 +1478,13 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
__ mov(r2, Operand(Smi::FromInt(expr->literal_index())));
__ mov(r1, Operand(expr->constant_elements()));
__ Push(r3, r2, r1);
- if (expr->constant_elements()->map() == Heap::fixed_cow_array_map()) {
+ if (expr->constant_elements()->map() ==
+ isolate()->heap()->fixed_cow_array_map()) {
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS, length);
__ CallStub(&stub);
- __ IncrementCounter(&Counters::cow_arrays_created_stub, 1, r1, r2);
+ __ IncrementCounter(
+ isolate()->counters()->cow_arrays_created_stub(), 1, r1, r2);
} else if (expr->depth() > 1) {
__ CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
@@ -1549,7 +1545,7 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
}
// Left-hand side can only be a property, a global or a (parameter or local)
- // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
+ // slot.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* property = expr->target()->AsProperty();
@@ -1575,52 +1571,37 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
break;
case KEYED_PROPERTY:
if (expr->is_compound()) {
- if (property->is_arguments_access()) {
- VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
- __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
- __ push(r0);
- __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
- } else {
- VisitForStackValue(property->obj());
- VisitForAccumulatorValue(property->key());
- }
+ VisitForStackValue(property->obj());
+ VisitForAccumulatorValue(property->key());
__ ldr(r1, MemOperand(sp, 0));
__ push(r0);
} else {
- if (property->is_arguments_access()) {
- VariableProxy* obj_proxy = property->obj()->AsVariableProxy();
- __ ldr(r1, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
- __ mov(r0, Operand(property->key()->AsLiteral()->handle()));
- __ Push(r1, r0);
- } else {
- VisitForStackValue(property->obj());
- VisitForStackValue(property->key());
- }
+ VisitForStackValue(property->obj());
+ VisitForStackValue(property->key());
}
break;
}
+ // For compound assignments we need another deoptimization point after the
+ // variable/property load.
if (expr->is_compound()) {
{ AccumulatorValueContext context(this);
switch (assign_type) {
case VARIABLE:
EmitVariableLoad(expr->target()->AsVariableProxy()->var());
+ PrepareForBailout(expr->target(), TOS_REG);
break;
case NAMED_PROPERTY:
EmitNamedPropertyLoad(property);
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
break;
case KEYED_PROPERTY:
EmitKeyedPropertyLoad(property);
+ PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
break;
}
}
- // For property compound assignments we need another deoptimization
- // point after the property load.
- if (property != NULL) {
- PrepareForBailoutForId(expr->CompoundLoadId(), TOS_REG);
- }
-
Token::Value op = expr->binary_op();
__ push(r0); // Left operand goes on the stack.
VisitForAccumulatorValue(expr->value());
@@ -1631,13 +1612,13 @@ void FullCodeGenerator::VisitAssignment(Assignment* expr) {
SetSourcePosition(expr->position() + 1);
AccumulatorValueContext context(this);
if (ShouldInlineSmiCase(op)) {
- EmitInlineSmiBinaryOp(expr,
+ EmitInlineSmiBinaryOp(expr->binary_operation(),
op,
mode,
expr->target(),
expr->value());
} else {
- EmitBinaryOp(op, mode);
+ EmitBinaryOp(expr->binary_operation(), op, mode);
}
// Deoptimization point in case the binary operation may have side effects.
@@ -1672,20 +1653,20 @@ void FullCodeGenerator::EmitNamedPropertyLoad(Property* prop) {
Literal* key = prop->key()->AsLiteral();
__ mov(r2, Operand(key->handle()));
// Call load IC. It has arguments receiver and property name r0 and r2.
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
}
void FullCodeGenerator::EmitKeyedPropertyLoad(Property* prop) {
SetSourcePosition(prop->position());
// Call keyed load IC. It has arguments key and receiver in r0 and r1.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
}
-void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
+void FullCodeGenerator::EmitInlineSmiBinaryOp(BinaryOperation* expr,
Token::Value op,
OverwriteMode mode,
Expression* left_expr,
@@ -1707,14 +1688,14 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
patch_site.EmitJumpIfSmi(scratch1, &smi_case);
__ bind(&stub_call);
- TypeRecordingBinaryOpStub stub(op, mode);
- EmitCallIC(stub.GetCode(), &patch_site);
+ BinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), &patch_site, expr->id());
__ jmp(&done);
__ bind(&smi_case);
// Smi case. This code works the same way as the smi-smi case in the type
// recording binary operation stub, see
- // TypeRecordingBinaryOpStub::GenerateSmiSmiOperation for comments.
+ // BinaryOpStub::GenerateSmiSmiOperation for comments.
switch (op) {
case Token::SAR:
__ b(&stub_call);
@@ -1784,11 +1765,12 @@ void FullCodeGenerator::EmitInlineSmiBinaryOp(Expression* expr,
}
-void FullCodeGenerator::EmitBinaryOp(Token::Value op,
+void FullCodeGenerator::EmitBinaryOp(BinaryOperation* expr,
+ Token::Value op,
OverwriteMode mode) {
__ pop(r1);
- TypeRecordingBinaryOpStub stub(op, mode);
- EmitCallIC(stub.GetCode(), NULL);
+ BinaryOpStub stub(op, mode);
+ EmitCallIC(stub.GetCode(), NULL, expr->id());
context()->Plug(r0);
}
@@ -1802,7 +1784,7 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
}
// Left-hand side can only be a property, a global or a (parameter or local)
- // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
+ // slot.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* prop = expr->AsProperty();
@@ -1825,33 +1807,23 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
__ mov(r1, r0);
__ pop(r0); // Restore value.
__ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
- Handle<Code> ic(Builtins::builtin(
- is_strict() ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
break;
}
case KEYED_PROPERTY: {
__ push(r0); // Preserve value.
- if (prop->is_synthetic()) {
- ASSERT(prop->obj()->AsVariableProxy() != NULL);
- ASSERT(prop->key()->AsLiteral() != NULL);
- { AccumulatorValueContext for_object(this);
- EmitVariableLoad(prop->obj()->AsVariableProxy()->var());
- }
- __ mov(r2, r0);
- __ mov(r1, Operand(prop->key()->AsLiteral()->handle()));
- } else {
- VisitForStackValue(prop->obj());
- VisitForAccumulatorValue(prop->key());
- __ mov(r1, r0);
- __ pop(r2);
- }
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
+ __ mov(r1, r0);
+ __ pop(r2);
__ pop(r0); // Restore value.
- Handle<Code> ic(Builtins::builtin(
- is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
- : Builtins::KeyedStoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
break;
}
}
@@ -1862,8 +1834,6 @@ void FullCodeGenerator::EmitAssignment(Expression* expr, int bailout_ast_id) {
void FullCodeGenerator::EmitVariableAssignment(Variable* var,
Token::Value op) {
- // Left-hand sides that rewrite to explicit property accesses do not reach
- // here.
ASSERT(var != NULL);
ASSERT(var->is_global() || var->AsSlot() != NULL);
@@ -1874,10 +1844,10 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
// r2, and the global object in r1.
__ mov(r2, Operand(var->name()));
__ ldr(r1, GlobalObjectOperand());
- Handle<Code> ic(Builtins::builtin(
- is_strict() ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT);
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET_CONTEXT, AstNode::kNoNumber);
} else if (op == Token::INIT_CONST) {
// Like var declarations, const declarations are hoisted to function
@@ -1899,18 +1869,7 @@ void FullCodeGenerator::EmitVariableAssignment(Variable* var,
__ b(ne, &skip);
__ str(result_register(), MemOperand(fp, SlotOffset(slot)));
break;
- case Slot::CONTEXT: {
- __ ldr(r1, ContextOperand(cp, Context::FCONTEXT_INDEX));
- __ ldr(r2, ContextOperand(r1, slot->index()));
- __ LoadRoot(ip, Heap::kTheHoleValueRootIndex);
- __ cmp(r2, ip);
- __ b(ne, &skip);
- __ str(r0, ContextOperand(r1, slot->index()));
- int offset = Context::SlotOffset(slot->index());
- __ mov(r3, r0); // Preserve the stored value in r0.
- __ RecordWrite(r1, Operand(offset), r3, r2);
- break;
- }
+ case Slot::CONTEXT:
case Slot::LOOKUP:
__ push(r0);
__ mov(r0, Operand(slot->var()->name()));
@@ -1983,10 +1942,10 @@ void FullCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) {
__ pop(r1);
}
- Handle<Code> ic(Builtins::builtin(
- is_strict() ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2029,10 +1988,10 @@ void FullCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) {
__ pop(r2);
}
- Handle<Code> ic(Builtins::builtin(
- is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
- : Builtins::KeyedStoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
// If the assignment ends an initialization block, revert to fast case.
if (expr->ends_initialization_block()) {
@@ -2082,8 +2041,9 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
SetSourcePosition(expr->position());
// Call the IC initialization code.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, in_loop);
- EmitCallIC(ic, mode);
+ Handle<Code> ic =
+ isolate()->stub_cache()->ComputeCallInitialize(arg_count, in_loop, mode);
+ EmitCallIC(ic, mode, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2092,8 +2052,7 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
- Expression* key,
- RelocInfo::Mode mode) {
+ Expression* key) {
// Load the key.
VisitForAccumulatorValue(key);
@@ -2115,9 +2074,10 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
SetSourcePosition(expr->position());
// Call the IC initialization code.
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- Handle<Code> ic = StubCache::ComputeKeyedCallInitialize(arg_count, in_loop);
+ Handle<Code> ic =
+ isolate()->stub_cache()->ComputeKeyedCallInitialize(arg_count, in_loop);
__ ldr(r2, MemOperand(sp, (arg_count + 1) * kPointerSize)); // Key.
- EmitCallIC(ic, mode);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
RecordJSReturnSite(expr);
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
@@ -2125,7 +2085,7 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
}
-void FullCodeGenerator::EmitCallWithStub(Call* expr) {
+void FullCodeGenerator::EmitCallWithStub(Call* expr, CallFunctionFlags flags) {
// Code common for calls using the call stub.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
@@ -2137,7 +2097,7 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
+ CallFunctionStub stub(arg_count, in_loop, flags);
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
@@ -2233,7 +2193,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Record source position for debugger.
SetSourcePosition(expr->position());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
- CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
+ CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_IMPLICIT);
__ CallStub(&stub);
RecordJSReturnSite(expr);
// Restore context register.
@@ -2276,14 +2236,17 @@ void FullCodeGenerator::VisitCall(Call* expr) {
__ bind(&done);
// Push function.
__ push(r0);
- // Push global receiver.
- __ ldr(r1, GlobalObjectOperand());
- __ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
+ // The receiver is implicitly the global receiver. Indicate this
+ // by passing the hole to the call function stub.
+ __ LoadRoot(r1, Heap::kTheHoleValueRootIndex);
__ push(r1);
__ bind(&call);
}
- EmitCallWithStub(expr);
+ // The receiver is either the global receiver or an object found
+ // by LoadContextSlot. That object could be the hole if the
+ // receiver is implicitly the global object.
+ EmitCallWithStub(expr, RECEIVER_MIGHT_BE_IMPLICIT);
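
The hole pushed above acts as a sentinel: a CallFunctionStub constructed with RECEIVER_MIGHT_BE_IMPLICIT checks for it and substitutes the global receiver at call time. That reading is inferred from the flag name and the surrounding comments; the stub itself is outside this file. In effect:

  // Inside the call stub (sketch, not actual V8 code):
  // if (receiver == the_hole) receiver = global_object->global_receiver();
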
} else if (fun->AsProperty() != NULL) {
// Call to an object property.
Property* prop = fun->AsProperty();
@@ -2297,7 +2260,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
} else {
// Call to a keyed property.
// For a synthetic property use keyed load IC followed by function call,
- // for a regular property use keyed CallIC.
+ // for a regular property use keyed EmitCallIC.
if (prop->is_synthetic()) {
// Do not visit the object and key subexpressions (they are shared
// by all occurrences of the same rewritten parameter).
@@ -2314,30 +2277,20 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Record source code position for IC call.
SetSourcePosition(prop->position());
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = isolate()->builtins()->KeyedLoadIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, GetPropertyId(prop));
__ ldr(r1, GlobalObjectOperand());
__ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
__ Push(r0, r1); // Function, receiver.
- EmitCallWithStub(expr);
+ EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
} else {
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(prop->obj());
}
- EmitKeyedCallWithIC(expr, prop->key(), RelocInfo::CODE_TARGET);
+ EmitKeyedCallWithIC(expr, prop->key());
}
}
} else {
- // Call to some other expression. If the expression is an anonymous
- // function literal not called in a loop, mark it as one that should
- // also use the fast code generator.
- FunctionLiteral* lit = fun->AsFunctionLiteral();
- if (lit != NULL &&
- lit->name()->Equals(Heap::empty_string()) &&
- loop_depth() == 0) {
- lit->set_try_full_codegen(true);
- }
-
{ PreservePositionScope scope(masm()->positions_recorder());
VisitForStackValue(fun);
}
@@ -2346,7 +2299,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
__ ldr(r1, FieldMemOperand(r1, GlobalObject::kGlobalReceiverOffset));
__ push(r1);
// Emit function call.
- EmitCallWithStub(expr);
+ EmitCallWithStub(expr, NO_CALL_FUNCTION_FLAGS);
}
#ifdef DEBUG
@@ -2382,7 +2335,8 @@ void FullCodeGenerator::VisitCallNew(CallNew* expr) {
__ mov(r0, Operand(arg_count));
__ ldr(r1, MemOperand(sp, arg_count * kPointerSize));
- Handle<Code> construct_builtin(Builtins::builtin(Builtins::JSConstructCall));
+ Handle<Code> construct_builtin =
+ isolate()->builtins()->JSConstructCall();
__ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL);
context()->Plug(r0);
}
@@ -2450,9 +2404,9 @@ void FullCodeGenerator::EmitIsObject(ZoneList<Expression*>* args) {
__ tst(r1, Operand(1 << Map::kIsUndetectable));
__ b(ne, if_false);
__ ldrb(r1, FieldMemOperand(r2, Map::kInstanceTypeOffset));
- __ cmp(r1, Operand(FIRST_JS_OBJECT_TYPE));
+ __ cmp(r1, Operand(FIRST_NONCALLABLE_SPEC_OBJECT_TYPE));
__ b(lt, if_false);
- __ cmp(r1, Operand(LAST_JS_OBJECT_TYPE));
+ __ cmp(r1, Operand(LAST_NONCALLABLE_SPEC_OBJECT_TYPE));
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(le, if_true, if_false, fall_through);
@@ -2473,7 +2427,7 @@ void FullCodeGenerator::EmitIsSpecObject(ZoneList<Expression*>* args) {
&if_true, &if_false, &fall_through);
__ JumpIfSmi(r0, if_false);
- __ CompareObjectType(r0, r1, r1, FIRST_JS_OBJECT_TYPE);
+ __ CompareObjectType(r0, r1, r1, FIRST_SPEC_OBJECT_TYPE);
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
Split(ge, if_true, if_false, fall_through);
@@ -2518,11 +2472,74 @@ void FullCodeGenerator::EmitIsStringWrapperSafeForDefaultValueOf(
context()->PrepareTest(&materialize_true, &materialize_false,
&if_true, &if_false, &fall_through);
- // Just indicate false, as %_IsStringWrapperSafeForDefaultValueOf() is only
- // used in a few functions in runtime.js which should not normally be hit by
- // this compiler.
+ if (FLAG_debug_code) __ AbortIfSmi(r0);
+
+ __ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ ldrb(ip, FieldMemOperand(r1, Map::kBitField2Offset));
+ __ tst(ip, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
+ __ b(ne, if_true);
+
+ // Check for fast case object. Generate false result for slow case object.
+ __ ldr(r2, FieldMemOperand(r0, JSObject::kPropertiesOffset));
+ __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kHashTableMapRootIndex);
+ __ cmp(r2, ip);
+ __ b(eq, if_false);
+
+ // Look for valueOf symbol in the descriptor array, and indicate false if
+ // found. The type is not checked, so if it is a transition it is a false
+ // negative.
+ __ LoadInstanceDescriptors(r1, r4);
+ __ ldr(r3, FieldMemOperand(r4, FixedArray::kLengthOffset));
+ // r4: descriptor array
+ // r3: length of descriptor array
+ // Calculate the end of the descriptor array.
+ STATIC_ASSERT(kSmiTag == 0);
+ STATIC_ASSERT(kSmiTagSize == 1);
+ STATIC_ASSERT(kPointerSize == 4);
+ __ add(r2, r4, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(r2, r2, Operand(r3, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+ // Calculate location of the first key name.
+ __ add(r4,
+ r4,
+ Operand(FixedArray::kHeaderSize - kHeapObjectTag +
+ DescriptorArray::kFirstIndex * kPointerSize));
+ // Loop through all the keys in the descriptor array. If one of these is the
+ // symbol valueOf the result is false.
+ Label entry, loop;
+ // The use of ip to store the valueOf symbol assumes that it is not otherwise
+ // used in the loop below.
+ __ mov(ip, Operand(FACTORY->value_of_symbol()));
+ __ jmp(&entry);
+ __ bind(&loop);
+ __ ldr(r3, MemOperand(r4, 0));
+ __ cmp(r3, ip);
+ __ b(eq, if_false);
+ __ add(r4, r4, Operand(kPointerSize));
+ __ bind(&entry);
+ __ cmp(r4, Operand(r2));
+ __ b(ne, &loop);
+
+ // If a valueOf property is not found on the object check that its
+ // prototype is the un-modified String prototype. If not result is false.
+ __ ldr(r2, FieldMemOperand(r1, Map::kPrototypeOffset));
+ __ JumpIfSmi(r2, if_false);
+ __ ldr(r2, FieldMemOperand(r2, HeapObject::kMapOffset));
+ __ ldr(r3, ContextOperand(cp, Context::GLOBAL_INDEX));
+ __ ldr(r3, FieldMemOperand(r3, GlobalObject::kGlobalContextOffset));
+ __ ldr(r3, ContextOperand(r3, Context::STRING_FUNCTION_PROTOTYPE_MAP_INDEX));
+ __ cmp(r2, r3);
+ __ b(ne, if_false);
+
+ // Set the bit in the map to indicate that it has been checked safe for
+ // default valueOf and set true result.
+ __ ldrb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
+ __ orr(r2, r2, Operand(1 << Map::kStringWrapperSafeForDefaultValueOf));
+ __ strb(r2, FieldMemOperand(r1, Map::kBitField2Offset));
+ __ jmp(if_true);
+
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
- __ jmp(if_false);
context()->Plug(if_true, if_false);
}
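
The emitted sequence above is easier to follow as ordinary code. A self-contained toy sketch of the same decision (field and function names are illustrative, not V8's heap layout):

  #include <cstring>

  struct ToyMap {
    bool safe_bit;                 // models Map::kStringWrapperSafeForDefaultValueOf
    const char** descriptor_keys;  // models the instance descriptor key names
    int num_descriptors;
    const void* prototype;
  };

  bool SafeForDefaultValueOf(ToyMap* map,
                             bool has_dictionary_properties,
                             const void* unmodified_string_prototype) {
    if (map->safe_bit) return true;                          // cached positive answer in the map
    if (has_dictionary_properties) return false;             // slow-case object: give up
    for (int i = 0; i < map->num_descriptors; i++) {
      if (strcmp(map->descriptor_keys[i], "valueOf") == 0) {
        return false;                                        // an own "valueOf" shadows the default
      }
    }
    if (map->prototype != unmodified_string_prototype) {
      return false;                                          // String.prototype was replaced or patched
    }
    map->safe_bit = true;                                    // remember the positive result
    return true;
  }
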
@@ -2692,16 +2709,18 @@ void FullCodeGenerator::EmitClassOf(ZoneList<Expression*>* args) {
// Check that the object is a JS object but take special care of JS
// functions to make sure they have 'Function' as their class.
- __ CompareObjectType(r0, r0, r1, FIRST_JS_OBJECT_TYPE); // Map is now in r0.
+ __ CompareObjectType(r0, r0, r1, FIRST_SPEC_OBJECT_TYPE);
+ // Map is now in r0.
__ b(lt, &null);
- // As long as JS_FUNCTION_TYPE is the last instance type and it is
- // right after LAST_JS_OBJECT_TYPE, we can avoid checking for
- // LAST_JS_OBJECT_TYPE.
- ASSERT(LAST_TYPE == JS_FUNCTION_TYPE);
- ASSERT(JS_FUNCTION_TYPE == LAST_JS_OBJECT_TYPE + 1);
- __ cmp(r1, Operand(JS_FUNCTION_TYPE));
- __ b(eq, &function);
+ // As long as LAST_CALLABLE_SPEC_OBJECT_TYPE is the last instance type, and
+ // FIRST_CALLABLE_SPEC_OBJECT_TYPE comes right after
+ // LAST_NONCALLABLE_SPEC_OBJECT_TYPE, we can avoid checking for the latter.
+ STATIC_ASSERT(LAST_TYPE == LAST_CALLABLE_SPEC_OBJECT_TYPE);
+ STATIC_ASSERT(FIRST_CALLABLE_SPEC_OBJECT_TYPE ==
+ LAST_NONCALLABLE_SPEC_OBJECT_TYPE + 1);
+ __ cmp(r1, Operand(FIRST_CALLABLE_SPEC_OBJECT_TYPE));
+ __ b(ge, &function);
// Check if the constructor in the map is a function.
__ ldr(r0, FieldMemOperand(r0, Map::kConstructorOffset));
@@ -2778,8 +2797,9 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
// by computing:
// ( 1.(20 0s)(32 random bits) x 2^20 ) - (1.0 x 2^20)).
if (CpuFeatures::IsSupported(VFP3)) {
- __ PrepareCallCFunction(0, r1);
- __ CallCFunction(ExternalReference::random_uint32_function(), 0);
+ __ PrepareCallCFunction(1, r0);
+ __ mov(r0, Operand(ExternalReference::isolate_address()));
+ __ CallCFunction(ExternalReference::random_uint32_function(isolate()), 1);
CpuFeatures::Scope scope(VFP3);
// 0x41300000 is the top half of 1.0 x 2^20 as a double.
@@ -2797,10 +2817,11 @@ void FullCodeGenerator::EmitRandomHeapNumber(ZoneList<Expression*>* args) {
__ vstr(d7, r0, HeapNumber::kValueOffset);
__ mov(r0, r4);
} else {
+ __ PrepareCallCFunction(2, r0);
__ mov(r0, Operand(r4));
- __ PrepareCallCFunction(1, r1);
+ __ mov(r1, Operand(ExternalReference::isolate_address()));
__ CallCFunction(
- ExternalReference::fill_heap_number_with_random_function(), 1);
+ ExternalReference::fill_heap_number_with_random_function(isolate()), 2);
}
context()->Plug(r0);
@@ -2855,7 +2876,8 @@ void FullCodeGenerator::EmitMathPow(ZoneList<Expression*>* args) {
ASSERT(args->length() == 2);
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
- __ CallRuntime(Runtime::kMath_pow, 2);
+ MathPowStub stub;
+ __ CallStub(&stub);
context()->Plug(r0);
}
@@ -3038,7 +3060,8 @@ void FullCodeGenerator::EmitStringCompare(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the stub.
- TranscendentalCacheStub stub(TranscendentalCache::SIN);
+ TranscendentalCacheStub stub(TranscendentalCache::SIN,
+ TranscendentalCacheStub::TAGGED);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
@@ -3048,7 +3071,8 @@ void FullCodeGenerator::EmitMathSin(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the stub.
- TranscendentalCacheStub stub(TranscendentalCache::COS);
+ TranscendentalCacheStub stub(TranscendentalCache::COS,
+ TranscendentalCacheStub::TAGGED);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
@@ -3058,7 +3082,8 @@ void FullCodeGenerator::EmitMathCos(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitMathLog(ZoneList<Expression*>* args) {
// Load the argument on the stack and call the stub.
- TranscendentalCacheStub stub(TranscendentalCache::LOG);
+ TranscendentalCacheStub stub(TranscendentalCache::LOG,
+ TranscendentalCacheStub::TAGGED);
ASSERT(args->length() == 1);
VisitForStackValue(args->at(0));
__ CallStub(&stub);
@@ -3078,17 +3103,17 @@ void FullCodeGenerator::EmitMathSqrt(ZoneList<Expression*>* args) {
void FullCodeGenerator::EmitCallFunction(ZoneList<Expression*>* args) {
ASSERT(args->length() >= 2);
- int arg_count = args->length() - 2; // For receiver and function.
- VisitForStackValue(args->at(0)); // Receiver.
- for (int i = 0; i < arg_count; i++) {
- VisitForStackValue(args->at(i + 1));
+ int arg_count = args->length() - 2; // 2 ~ receiver and function.
+ for (int i = 0; i < arg_count + 1; i++) {
+ VisitForStackValue(args->at(i));
}
- VisitForAccumulatorValue(args->at(arg_count + 1)); // Function.
+ VisitForAccumulatorValue(args->last()); // Function.
- // InvokeFunction requires function in r1. Move it in there.
- if (!result_register().is(r1)) __ mov(r1, result_register());
+ // InvokeFunction requires the function in r1. Move it in there.
+ __ mov(r1, result_register());
ParameterCount count(arg_count);
- __ InvokeFunction(r1, count, CALL_FUNCTION);
+ __ InvokeFunction(r1, count, CALL_FUNCTION,
+ NullCallWrapper(), CALL_AS_METHOD);
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
context()->Plug(r0);
}
@@ -3110,7 +3135,79 @@ void FullCodeGenerator::EmitSwapElements(ZoneList<Expression*>* args) {
VisitForStackValue(args->at(0));
VisitForStackValue(args->at(1));
VisitForStackValue(args->at(2));
+ Label done;
+ Label slow_case;
+ Register object = r0;
+ Register index1 = r1;
+ Register index2 = r2;
+ Register elements = r3;
+ Register scratch1 = r4;
+ Register scratch2 = r5;
+
+ __ ldr(object, MemOperand(sp, 2 * kPointerSize));
+ // Fetch the map and check if array is in fast case.
+ // Check that object doesn't require security checks and
+ // has no indexed interceptor.
+ __ CompareObjectType(object, scratch1, scratch2, JS_ARRAY_TYPE);
+ __ b(ne, &slow_case);
+ // Map is now in scratch1.
+
+ __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitFieldOffset));
+ __ tst(scratch2, Operand(KeyedLoadIC::kSlowCaseBitFieldMask));
+ __ b(ne, &slow_case);
+
+ // Check the object's elements are in fast case and writable.
+ __ ldr(elements, FieldMemOperand(object, JSObject::kElementsOffset));
+ __ ldr(scratch1, FieldMemOperand(elements, HeapObject::kMapOffset));
+ __ LoadRoot(ip, Heap::kFixedArrayMapRootIndex);
+ __ cmp(scratch1, ip);
+ __ b(ne, &slow_case);
+
+ // Check that both indices are smis.
+ __ ldr(index1, MemOperand(sp, 1 * kPointerSize));
+ __ ldr(index2, MemOperand(sp, 0));
+ __ JumpIfNotBothSmi(index1, index2, &slow_case);
+
+ // Check that both indices are valid.
+ __ ldr(scratch1, FieldMemOperand(object, JSArray::kLengthOffset));
+ __ cmp(scratch1, index1);
+ __ cmp(scratch1, index2, hi);
+ __ b(ls, &slow_case);
+
+ // Bring the address of the elements into index1 and index2.
+ __ add(scratch1, elements, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
+ __ add(index1,
+ scratch1,
+ Operand(index1, LSL, kPointerSizeLog2 - kSmiTagSize));
+ __ add(index2,
+ scratch1,
+ Operand(index2, LSL, kPointerSizeLog2 - kSmiTagSize));
+
+ // Swap elements.
+ __ ldr(scratch1, MemOperand(index1, 0));
+ __ ldr(scratch2, MemOperand(index2, 0));
+ __ str(scratch1, MemOperand(index2, 0));
+ __ str(scratch2, MemOperand(index1, 0));
+
+ Label new_space;
+ __ InNewSpace(elements, scratch1, eq, &new_space);
+ // Possible optimization: do a check that both values are Smis
+ // (or them and test against Smi mask.)
+
+ __ mov(scratch1, elements);
+ __ RecordWriteHelper(elements, index1, scratch2);
+ __ RecordWriteHelper(scratch1, index2, scratch2); // scratch1 holds elements.
+
+ __ bind(&new_space);
+ // We are done. Drop elements from the stack, and return undefined.
+ __ Drop(3);
+ __ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
+ __ jmp(&done);
+
+ __ bind(&slow_case);
__ CallRuntime(Runtime::kSwapElements, 3);
+
+ __ bind(&done);
context()->Plug(r0);
}
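
Summarized as ordinary code, the inlined fast path above applies only when the object is a JSArray without interceptors or access checks, its backing store is a writable FixedArray, and both indices are in-range smis; otherwise it falls through to Runtime::kSwapElements. The swap itself reduces to (a toy sketch, with tagged values modeled as void*):

  // Toy version of the element swap performed by the ldr/str pairs above.
  void SwapElements(void** elements, int i, int j) {
    void* tmp = elements[i];
    elements[i] = elements[j];
    elements[j] = tmp;
    // The generated code then updates the write barrier for both slots
    // unless the backing store is in new space.
  }
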
@@ -3122,7 +3219,7 @@ void FullCodeGenerator::EmitGetFromCache(ZoneList<Expression*>* args) {
int cache_id = Smi::cast(*(args->at(0)->AsLiteral()->handle()))->value();
Handle<FixedArray> jsfunction_result_caches(
- Top::global_context()->jsfunction_result_caches());
+ isolate()->global_context()->jsfunction_result_caches());
if (jsfunction_result_caches->length() <= cache_id) {
__ Abort("Attempt to use undefined cache.");
__ LoadRoot(r0, Heap::kUndefinedValueRootIndex);
@@ -3183,8 +3280,7 @@ void FullCodeGenerator::EmitIsRegExpEquivalent(ZoneList<Expression*>* args) {
__ b(eq, &ok);
// Fail if either is a non-HeapObject.
__ and_(tmp, left, Operand(right));
- __ tst(tmp, Operand(kSmiTagMask));
- __ b(eq, &fail);
+ __ JumpIfSmi(tmp, &fail);
__ ldr(tmp, FieldMemOperand(left, HeapObject::kMapOffset));
__ ldrb(tmp2, FieldMemOperand(tmp, Map::kInstanceTypeOffset));
__ cmp(tmp2, Operand(JS_REGEXP_TYPE));
@@ -3274,9 +3370,7 @@ void FullCodeGenerator::EmitFastAsciiArrayJoin(ZoneList<Expression*>* args) {
__ b(ne, &bailout);
// Check that the array has fast elements.
- __ ldrb(scratch2, FieldMemOperand(scratch1, Map::kBitField2Offset));
- __ tst(scratch2, Operand(1 << Map::kHasFastElements));
- __ b(eq, &bailout);
+ __ CheckFastElements(scratch1, scratch2, &bailout);
// If the array has length zero, return the empty string.
__ ldr(array_length, FieldMemOperand(array, JSArray::kLengthOffset));
@@ -3501,8 +3595,12 @@ void FullCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
if (expr->is_jsruntime()) {
// Call the JS runtime function.
__ mov(r2, Operand(expr->name()));
- Handle<Code> ic = StubCache::ComputeCallInitialize(arg_count, NOT_IN_LOOP);
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ RelocInfo::Mode mode = RelocInfo::CODE_TARGET;
+ Handle<Code> ic =
+ isolate()->stub_cache()->ComputeCallInitialize(arg_count,
+ NOT_IN_LOOP,
+ mode);
+ EmitCallIC(ic, mode, expr->id());
// Restore context register.
__ ldr(cp, MemOperand(fp, StandardFrameConstants::kContextOffset));
} else {
@@ -3530,7 +3628,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
VisitForStackValue(prop->key());
__ mov(r1, Operand(Smi::FromInt(strict_mode_flag())));
__ push(r1);
- __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(r0);
}
} else if (var != NULL) {
@@ -3542,7 +3640,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
__ mov(r1, Operand(var->name()));
__ mov(r0, Operand(Smi::FromInt(kNonStrictMode)));
__ Push(r2, r1, r0);
- __ InvokeBuiltin(Builtins::DELETE, CALL_JS);
+ __ InvokeBuiltin(Builtins::DELETE, CALL_FUNCTION);
context()->Plug(r0);
} else if (var->AsSlot() != NULL &&
var->AsSlot()->type() != Slot::LOOKUP) {
@@ -3610,8 +3708,7 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
Comment cmt(masm_, "[ UnaryOperation (ADD)");
VisitForAccumulatorValue(expr->expression());
Label no_conversion;
- __ tst(result_register(), Operand(kSmiTagMask));
- __ b(eq, &no_conversion);
+ __ JumpIfSmi(result_register(), &no_conversion);
ToNumberStub convert_stub;
__ CallStub(&convert_stub);
__ bind(&no_conversion);
@@ -3619,48 +3716,13 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
break;
}
- case Token::SUB: {
- Comment cmt(masm_, "[ UnaryOperation (SUB)");
- bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
- UnaryOverwriteMode overwrite =
- can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
- GenericUnaryOpStub stub(Token::SUB, overwrite, NO_UNARY_FLAGS);
- // GenericUnaryOpStub expects the argument to be in the
- // accumulator register r0.
- VisitForAccumulatorValue(expr->expression());
- __ CallStub(&stub);
- context()->Plug(r0);
+ case Token::SUB:
+ EmitUnaryOperation(expr, "[ UnaryOperation (SUB)");
break;
- }
- case Token::BIT_NOT: {
- Comment cmt(masm_, "[ UnaryOperation (BIT_NOT)");
- // The generic unary operation stub expects the argument to be
- // in the accumulator register r0.
- VisitForAccumulatorValue(expr->expression());
- Label done;
- bool inline_smi_code = ShouldInlineSmiCase(expr->op());
- if (inline_smi_code) {
- Label call_stub;
- __ JumpIfNotSmi(r0, &call_stub);
- __ mvn(r0, Operand(r0));
- // Bit-clear inverted smi-tag.
- __ bic(r0, r0, Operand(kSmiTagMask));
- __ b(&done);
- __ bind(&call_stub);
- }
- bool overwrite = expr->expression()->ResultOverwriteAllowed();
- UnaryOpFlags flags = inline_smi_code
- ? NO_UNARY_SMI_CODE_IN_STUB
- : NO_UNARY_FLAGS;
- UnaryOverwriteMode mode =
- overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
- GenericUnaryOpStub stub(Token::BIT_NOT, mode, flags);
- __ CallStub(&stub);
- __ bind(&done);
- context()->Plug(r0);
+ case Token::BIT_NOT:
+ EmitUnaryOperation(expr, "[ UnaryOperation (BIT_NOT)");
break;
- }
default:
UNREACHABLE();
@@ -3668,6 +3730,23 @@ void FullCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) {
}
+void FullCodeGenerator::EmitUnaryOperation(UnaryOperation* expr,
+ const char* comment) {
+ // TODO(svenpanne): Allowing format strings in Comment would be nice here...
+ Comment cmt(masm_, comment);
+ bool can_overwrite = expr->expression()->ResultOverwriteAllowed();
+ UnaryOverwriteMode overwrite =
+ can_overwrite ? UNARY_OVERWRITE : UNARY_NO_OVERWRITE;
+ UnaryOpStub stub(expr->op(), overwrite);
+ // UnaryOpStub expects the argument to be in the
+ // accumulator register r0.
+ VisitForAccumulatorValue(expr->expression());
+ SetSourcePosition(expr->position());
+ EmitCallIC(stub.GetCode(), RelocInfo::CODE_TARGET, expr->id());
+ context()->Plug(r0);
+}
+
+
void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
Comment cmnt(masm_, "[ CountOperation");
SetSourcePosition(expr->position());
@@ -3680,7 +3759,7 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
}
// Expression can only be a property, a global or a (parameter or local)
- // slot. Variables with rewrite to .arguments are treated as KEYED_PROPERTY.
+ // slot.
enum LhsKind { VARIABLE, NAMED_PROPERTY, KEYED_PROPERTY };
LhsKind assign_type = VARIABLE;
Property* prop = expr->expression()->AsProperty();
@@ -3708,15 +3787,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
__ push(r0);
EmitNamedPropertyLoad(prop);
} else {
- if (prop->is_arguments_access()) {
- VariableProxy* obj_proxy = prop->obj()->AsVariableProxy();
- __ ldr(r0, EmitSlotSearch(obj_proxy->var()->AsSlot(), r0));
- __ push(r0);
- __ mov(r0, Operand(prop->key()->AsLiteral()->handle()));
- } else {
- VisitForStackValue(prop->obj());
- VisitForAccumulatorValue(prop->key());
- }
+ VisitForStackValue(prop->obj());
+ VisitForAccumulatorValue(prop->key());
__ ldr(r1, MemOperand(sp, 0));
__ push(r0);
EmitKeyedPropertyLoad(prop);
@@ -3725,7 +3797,11 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// We need a second deoptimization point after loading the value
  // in case evaluating the property load may have a side effect.
- PrepareForBailout(expr->increment(), TOS_REG);
+ if (assign_type == VARIABLE) {
+ PrepareForBailout(expr->expression(), TOS_REG);
+ } else {
+ PrepareForBailoutForId(expr->CountId(), TOS_REG);
+ }
// Call ToNumber only if operand is not a smi.
Label no_conversion;
@@ -3776,8 +3852,8 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
// Record position before stub call.
SetSourcePosition(expr->position());
- TypeRecordingBinaryOpStub stub(Token::ADD, NO_OVERWRITE);
- EmitCallIC(stub.GetCode(), &patch_site);
+ BinaryOpStub stub(Token::ADD, NO_OVERWRITE);
+ EmitCallIC(stub.GetCode(), &patch_site, expr->CountId());
__ bind(&done);
// Store the value returned in r0.
@@ -3805,10 +3881,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case NAMED_PROPERTY: {
__ mov(r2, Operand(prop->key()->AsLiteral()->handle()));
__ pop(r1);
- Handle<Code> ic(Builtins::builtin(
- is_strict() ? Builtins::StoreIC_Initialize_Strict
- : Builtins::StoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->StoreIC_Initialize_Strict()
+ : isolate()->builtins()->StoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -3822,10 +3898,10 @@ void FullCodeGenerator::VisitCountOperation(CountOperation* expr) {
case KEYED_PROPERTY: {
__ pop(r1); // Key.
__ pop(r2); // Receiver.
- Handle<Code> ic(Builtins::builtin(
- is_strict() ? Builtins::KeyedStoreIC_Initialize_Strict
- : Builtins::KeyedStoreIC_Initialize));
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ Handle<Code> ic = is_strict_mode()
+ ? isolate()->builtins()->KeyedStoreIC_Initialize_Strict()
+ : isolate()->builtins()->KeyedStoreIC_Initialize();
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, expr->id());
PrepareForBailoutForId(expr->AssignmentId(), TOS_REG);
if (expr->is_postfix()) {
if (!context()->IsEffect()) {
@@ -3848,10 +3924,10 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
Comment cmnt(masm_, "Global variable");
__ ldr(r0, GlobalObjectOperand());
__ mov(r2, Operand(proxy->name()));
- Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ Handle<Code> ic = isolate()->builtins()->LoadIC_Initialize();
// Use a regular load, not a contextual load, to avoid a reference
// error.
- EmitCallIC(ic, RelocInfo::CODE_TARGET);
+ EmitCallIC(ic, RelocInfo::CODE_TARGET, AstNode::kNoNumber);
PrepareForBailout(expr, TOS_REG);
context()->Plug(r0);
} else if (proxy != NULL &&
@@ -3874,104 +3950,83 @@ void FullCodeGenerator::VisitForTypeofValue(Expression* expr) {
context()->Plug(r0);
} else {
// This expression cannot throw a reference error at the top level.
- context()->HandleExpression(expr);
+ VisitInCurrentContext(expr);
}
}
-bool FullCodeGenerator::TryLiteralCompare(Token::Value op,
- Expression* left,
- Expression* right,
- Label* if_true,
- Label* if_false,
- Label* fall_through) {
- if (op != Token::EQ && op != Token::EQ_STRICT) return false;
-
- // Check for the pattern: typeof <expression> == <string literal>.
- Literal* right_literal = right->AsLiteral();
- if (right_literal == NULL) return false;
- Handle<Object> right_literal_value = right_literal->handle();
- if (!right_literal_value->IsString()) return false;
- UnaryOperation* left_unary = left->AsUnaryOperation();
- if (left_unary == NULL || left_unary->op() != Token::TYPEOF) return false;
- Handle<String> check = Handle<String>::cast(right_literal_value);
-
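+// Emits the comparison for the pattern "typeof <expr> == <string literal>",
+// where 'check' holds the literal; control continues at if_true/if_false
+// (or fall_through).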
+void FullCodeGenerator::EmitLiteralCompareTypeof(Expression* expr,
+ Handle<String> check,
+ Label* if_true,
+ Label* if_false,
+ Label* fall_through) {
{ AccumulatorValueContext context(this);
- VisitForTypeofValue(left_unary->expression());
+ VisitForTypeofValue(expr);
}
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
- if (check->Equals(Heap::number_symbol())) {
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, if_true);
+ if (check->Equals(isolate()->heap()->number_symbol())) {
+ __ JumpIfSmi(r0, if_true);
__ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
__ LoadRoot(ip, Heap::kHeapNumberMapRootIndex);
__ cmp(r0, ip);
Split(eq, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::string_symbol())) {
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, if_false);
+ } else if (check->Equals(isolate()->heap()->string_symbol())) {
+ __ JumpIfSmi(r0, if_false);
// Check for undetectable objects => false.
- __ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
+ __ CompareObjectType(r0, r0, r1, FIRST_NONSTRING_TYPE);
+ __ b(ge, if_false);
__ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
- __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
- __ cmp(r1, Operand(1 << Map::kIsUndetectable));
- __ b(eq, if_false);
- __ ldrb(r1, FieldMemOperand(r0, Map::kInstanceTypeOffset));
- __ cmp(r1, Operand(FIRST_NONSTRING_TYPE));
- Split(lt, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::boolean_symbol())) {
- __ LoadRoot(ip, Heap::kTrueValueRootIndex);
- __ cmp(r0, ip);
+ __ tst(r1, Operand(1 << Map::kIsUndetectable));
+ Split(eq, if_true, if_false, fall_through);
+ } else if (check->Equals(isolate()->heap()->boolean_symbol())) {
+ __ CompareRoot(r0, Heap::kTrueValueRootIndex);
__ b(eq, if_true);
- __ LoadRoot(ip, Heap::kFalseValueRootIndex);
- __ cmp(r0, ip);
+ __ CompareRoot(r0, Heap::kFalseValueRootIndex);
Split(eq, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::undefined_symbol())) {
- __ LoadRoot(ip, Heap::kUndefinedValueRootIndex);
- __ cmp(r0, ip);
+ } else if (check->Equals(isolate()->heap()->undefined_symbol())) {
+ __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
__ b(eq, if_true);
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, if_false);
+ __ JumpIfSmi(r0, if_false);
// Check for undetectable objects => true.
__ ldr(r0, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
- __ and_(r1, r1, Operand(1 << Map::kIsUndetectable));
- __ cmp(r1, Operand(1 << Map::kIsUndetectable));
- Split(eq, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::function_symbol())) {
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, if_false);
- __ CompareObjectType(r0, r1, r0, JS_FUNCTION_TYPE);
- __ b(eq, if_true);
- // Regular expressions => 'function' (they are callable).
- __ CompareInstanceType(r1, r0, JS_REGEXP_TYPE);
- Split(eq, if_true, if_false, fall_through);
- } else if (check->Equals(Heap::object_symbol())) {
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, if_false);
- __ LoadRoot(ip, Heap::kNullValueRootIndex);
- __ cmp(r0, ip);
+ __ tst(r1, Operand(1 << Map::kIsUndetectable));
+ Split(ne, if_true, if_false, fall_through);
+
+ } else if (check->Equals(isolate()->heap()->function_symbol())) {
+ __ JumpIfSmi(r0, if_false);
+ __ CompareObjectType(r0, r1, r0, FIRST_CALLABLE_SPEC_OBJECT_TYPE);
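+    // Instance types at or above FIRST_CALLABLE_SPEC_OBJECT_TYPE report
+    // 'function'.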
+ Split(ge, if_true, if_false, fall_through);
+
+ } else if (check->Equals(isolate()->heap()->object_symbol())) {
+ __ JumpIfSmi(r0, if_false);
+ __ CompareRoot(r0, Heap::kNullValueRootIndex);
__ b(eq, if_true);
- // Regular expressions => 'function', not 'object'.
- __ CompareObjectType(r0, r1, r0, JS_REGEXP_TYPE);
- __ b(eq, if_false);
- // Check for undetectable objects => false.
- __ ldrb(r0, FieldMemOperand(r1, Map::kBitFieldOffset));
- __ and_(r0, r0, Operand(1 << Map::kIsUndetectable));
- __ cmp(r0, Operand(1 << Map::kIsUndetectable));
- __ b(eq, if_false);
// Check for JS objects => true.
- __ ldrb(r0, FieldMemOperand(r1, Map::kInstanceTypeOffset));
- __ cmp(r0, Operand(FIRST_JS_OBJECT_TYPE));
+ __ CompareObjectType(r0, r0, r1, FIRST_NONCALLABLE_SPEC_OBJECT_TYPE);
__ b(lt, if_false);
- __ cmp(r0, Operand(LAST_JS_OBJECT_TYPE));
- Split(le, if_true, if_false, fall_through);
+ __ CompareInstanceType(r0, r1, LAST_NONCALLABLE_SPEC_OBJECT_TYPE);
+ __ b(gt, if_false);
+ // Check for undetectable objects => false.
+ __ ldrb(r1, FieldMemOperand(r0, Map::kBitFieldOffset));
+ __ tst(r1, Operand(1 << Map::kIsUndetectable));
+ Split(eq, if_true, if_false, fall_through);
} else {
if (if_false != fall_through) __ jmp(if_false);
}
+}
- return true;
+
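+// Emits a comparison of the value of 'expr' against the undefined value,
+// branching to if_true/if_false accordingly.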
+void FullCodeGenerator::EmitLiteralCompareUndefined(Expression* expr,
+ Label* if_true,
+ Label* if_false,
+ Label* fall_through) {
+ VisitForAccumulatorValue(expr);
+ PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
+
+ __ CompareRoot(r0, Heap::kUndefinedValueRootIndex);
+ Split(eq, if_true, if_false, fall_through);
}
@@ -3991,19 +4046,17 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// First we try a fast inlined version of the compare when one of
// the operands is a literal.
- Token::Value op = expr->op();
- Expression* left = expr->left();
- Expression* right = expr->right();
- if (TryLiteralCompare(op, left, right, if_true, if_false, fall_through)) {
+ if (TryLiteralCompare(expr, if_true, if_false, fall_through)) {
context()->Plug(if_true, if_false);
return;
}
+ Token::Value op = expr->op();
VisitForStackValue(expr->left());
switch (op) {
case Token::IN:
VisitForStackValue(expr->right());
- __ InvokeBuiltin(Builtins::IN, CALL_JS);
+ __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION);
PrepareForBailoutBeforeSplit(TOS_REG, false, NULL, NULL);
__ LoadRoot(ip, Heap::kTrueValueRootIndex);
__ cmp(r0, ip);
@@ -4073,7 +4126,7 @@ void FullCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
// Record position and call the compare IC.
SetSourcePosition(expr->position());
Handle<Code> ic = CompareIC::GetUninitialized(op);
- EmitCallIC(ic, &patch_site);
+ EmitCallIC(ic, &patch_site, expr->id());
PrepareForBailoutBeforeSplit(TOS_REG, true, if_true, if_false);
__ cmp(r0, Operand(0));
Split(cond, if_true, if_false, fall_through);
@@ -4106,8 +4159,7 @@ void FullCodeGenerator::VisitCompareToNull(CompareToNull* expr) {
__ LoadRoot(r1, Heap::kUndefinedValueRootIndex);
__ cmp(r0, r1);
__ b(eq, if_true);
- __ tst(r0, Operand(kSmiTagMask));
- __ b(eq, if_false);
+ __ JumpIfSmi(r0, if_false);
// It can be an undetectable object.
__ ldr(r1, FieldMemOperand(r0, HeapObject::kMapOffset));
__ ldrb(r1, FieldMemOperand(r1, Map::kBitFieldOffset));
@@ -4135,47 +4187,62 @@ Register FullCodeGenerator::context_register() {
}
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic, RelocInfo::Mode mode) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
+ RelocInfo::Mode mode,
+ unsigned ast_id) {
ASSERT(mode == RelocInfo::CODE_TARGET ||
mode == RelocInfo::CODE_TARGET_CONTEXT);
+ Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
- __ IncrementCounter(&Counters::named_load_full, 1, r1, r2);
+ __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
break;
case Code::KEYED_LOAD_IC:
- __ IncrementCounter(&Counters::keyed_load_full, 1, r1, r2);
+ __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
break;
case Code::STORE_IC:
- __ IncrementCounter(&Counters::named_store_full, 1, r1, r2);
+ __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
break;
case Code::KEYED_STORE_IC:
- __ IncrementCounter(&Counters::keyed_store_full, 1, r1, r2);
+ __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
default:
break;
}
-
- __ Call(ic, mode);
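+  // When a usable AST id accompanies a plain code-target call, record it with
+  // the call via CODE_TARGET_WITH_ID relocation.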
+ if (ast_id == kNoASTId || mode == RelocInfo::CODE_TARGET_CONTEXT) {
+ __ Call(ic, mode);
+ } else {
+ ASSERT(mode == RelocInfo::CODE_TARGET);
+ mode = RelocInfo::CODE_TARGET_WITH_ID;
+ __ CallWithAstId(ic, mode, ast_id);
+ }
}
-void FullCodeGenerator::EmitCallIC(Handle<Code> ic, JumpPatchSite* patch_site) {
+void FullCodeGenerator::EmitCallIC(Handle<Code> ic,
+ JumpPatchSite* patch_site,
+ unsigned ast_id) {
+ Counters* counters = isolate()->counters();
switch (ic->kind()) {
case Code::LOAD_IC:
- __ IncrementCounter(&Counters::named_load_full, 1, r1, r2);
+ __ IncrementCounter(counters->named_load_full(), 1, r1, r2);
break;
case Code::KEYED_LOAD_IC:
- __ IncrementCounter(&Counters::keyed_load_full, 1, r1, r2);
+ __ IncrementCounter(counters->keyed_load_full(), 1, r1, r2);
break;
case Code::STORE_IC:
- __ IncrementCounter(&Counters::named_store_full, 1, r1, r2);
+ __ IncrementCounter(counters->named_store_full(), 1, r1, r2);
break;
case Code::KEYED_STORE_IC:
- __ IncrementCounter(&Counters::keyed_store_full, 1, r1, r2);
+ __ IncrementCounter(counters->keyed_store_full(), 1, r1, r2);
default:
break;
}
- __ Call(ic, RelocInfo::CODE_TARGET);
+ if (ast_id == kNoASTId) {
+ __ Call(ic, RelocInfo::CODE_TARGET);
+ } else {
+ __ CallWithAstId(ic, RelocInfo::CODE_TARGET_WITH_ID, ast_id);
+ }
if (patch_site != NULL && patch_site->is_bound()) {
patch_site->EmitPatchInfo();
} else {
@@ -4195,6 +4262,26 @@ void FullCodeGenerator::LoadContextField(Register dst, int context_index) {
}
+void FullCodeGenerator::PushFunctionArgumentForContextAllocation() {
+ if (scope()->is_global_scope()) {
+ // Contexts nested in the global context have a canonical empty function
+ // as their closure, not the anonymous closure containing the global
+ // code. Pass a smi sentinel and let the runtime look up the empty
+ // function.
+ __ mov(ip, Operand(Smi::FromInt(0)));
+ } else if (scope()->is_eval_scope()) {
+ // Contexts created by a call to eval have the same closure as the
+ // context calling eval, not the anonymous closure containing the eval
+ // code. Fetch it from the context.
+ __ ldr(ip, ContextOperand(cp, Context::CLOSURE_INDEX));
+ } else {
+ ASSERT(scope()->is_function_scope());
+ __ ldr(ip, MemOperand(fp, JavaScriptFrameConstants::kFunctionOffset));
+ }
+ __ push(ip);
+}
+
+
// ----------------------------------------------------------------------------
// Non-local control flow support.