Diffstat (limited to 'deps/v8/src/ia32')
-rw-r--r--  deps/v8/src/ia32/assembler-ia32.cc       |  11
-rw-r--r--  deps/v8/src/ia32/assembler-ia32.h        |   1
-rw-r--r--  deps/v8/src/ia32/builtins-ia32.cc        |  47
-rw-r--r--  deps/v8/src/ia32/codegen-ia32.cc         | 458
-rw-r--r--  deps/v8/src/ia32/codegen-ia32.h          |  25
-rw-r--r--  deps/v8/src/ia32/disasm-ia32.cc          |   8
-rw-r--r--  deps/v8/src/ia32/fast-codegen-ia32.cc    | 381
-rw-r--r--  deps/v8/src/ia32/ic-ia32.cc              | 165
-rw-r--r--  deps/v8/src/ia32/macro-assembler-ia32.cc |  48
-rw-r--r--  deps/v8/src/ia32/macro-assembler-ia32.h  |  22
-rw-r--r--  deps/v8/src/ia32/stub-cache-ia32.cc      | 143
-rw-r--r--  deps/v8/src/ia32/virtual-frame-ia32.cc   |  11
-rw-r--r--  deps/v8/src/ia32/virtual-frame-ia32.h    |   6
13 files changed, 958 insertions(+), 368 deletions(-)
diff --git a/deps/v8/src/ia32/assembler-ia32.cc b/deps/v8/src/ia32/assembler-ia32.cc
index d6f555082a..a0236d0264 100644
--- a/deps/v8/src/ia32/assembler-ia32.cc
+++ b/deps/v8/src/ia32/assembler-ia32.cc
@@ -2004,6 +2004,17 @@ void Assembler::divsd(XMMRegister dst, XMMRegister src) {
}
+void Assembler::xorpd(XMMRegister dst, XMMRegister src) {
+ ASSERT(CpuFeatures::IsEnabled(SSE2));
+ EnsureSpace ensure_space(this);
+ last_pc_ = pc_;
+ EMIT(0x66);
+ EMIT(0x0F);
+ EMIT(0x57);
+ emit_sse_operand(dst, src);
+}
+
+
void Assembler::comisd(XMMRegister dst, XMMRegister src) {
ASSERT(CpuFeatures::IsEnabled(SSE2));
EnsureSpace ensure_space(this);
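
The new xorpd emits the SSE2 packed-double XOR (66 0F 57 /r); xorpd of a register with itself is the usual way to materialize 0.0 in an XMM register, which the comparison code in codegen-ia32.cc below relies on. A minimal standalone sketch of the same three-byte encoding, assuming a toy Emitter class and plain 0-7 register codes rather than V8's actual Assembler API:

#include <cstdint>
#include <vector>

struct Emitter {
  std::vector<uint8_t> buf;
  void emit(uint8_t b) { buf.push_back(b); }
  // dst and src are XMM register codes 0-7 (illustrative).
  void xorpd(int dst, int src) {
    emit(0x66);                     // operand-size prefix selects packed double
    emit(0x0F);                     // two-byte opcode escape
    emit(0x57);                     // XORPD opcode
    emit(0xC0 | (dst << 3) | src);  // ModRM: mod=11 (reg-reg), reg=dst, rm=src
  }
};

int main() {
  Emitter e;
  e.xorpd(0, 0);  // xorpd xmm0, xmm0: zeroes xmm0, i.e. loads 0.0
  return 0;
}
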
diff --git a/deps/v8/src/ia32/assembler-ia32.h b/deps/v8/src/ia32/assembler-ia32.h
index 662ebc9022..21fa0d5a00 100644
--- a/deps/v8/src/ia32/assembler-ia32.h
+++ b/deps/v8/src/ia32/assembler-ia32.h
@@ -745,6 +745,7 @@ class Assembler : public Malloced {
void subsd(XMMRegister dst, XMMRegister src);
void mulsd(XMMRegister dst, XMMRegister src);
void divsd(XMMRegister dst, XMMRegister src);
+ void xorpd(XMMRegister dst, XMMRegister src);
void comisd(XMMRegister dst, XMMRegister src);
diff --git a/deps/v8/src/ia32/builtins-ia32.cc b/deps/v8/src/ia32/builtins-ia32.cc
index a164cfa85c..f4dd2f931d 100644
--- a/deps/v8/src/ia32/builtins-ia32.cc
+++ b/deps/v8/src/ia32/builtins-ia32.cc
@@ -472,35 +472,38 @@ void Builtins::Generate_FunctionCall(MacroAssembler* masm) {
__ bind(&done);
}
- // 4. Shift stuff one slot down the stack.
+ // 4. Check that the function really is a function.
+ { Label done;
+ __ test(edi, Operand(edi));
+ __ j(not_zero, &done, taken);
+ __ xor_(ebx, Operand(ebx));
+ // CALL_NON_FUNCTION will expect to find the non-function callee on the
+ // expression stack of the caller. Transfer it from receiver to the
+ // caller's expression stack (and make the first argument the receiver
+ // for CALL_NON_FUNCTION) by decrementing the argument count.
+ __ dec(eax);
+ __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
+ __ jmp(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
+ RelocInfo::CODE_TARGET);
+ __ bind(&done);
+ }
+
+ // 5. Shift arguments and return address one slot down on the stack
+ // (overwriting the receiver).
{ Label loop;
- __ lea(ecx, Operand(eax, +1)); // +1 ~ copy receiver too
+ __ mov(ecx, eax);
__ bind(&loop);
__ mov(ebx, Operand(esp, ecx, times_4, 0));
__ mov(Operand(esp, ecx, times_4, kPointerSize), ebx);
__ dec(ecx);
- __ j(not_zero, &loop);
+ __ j(not_sign, &loop);
+ __ pop(ebx); // Discard copy of return address.
+ __ dec(eax); // One fewer argument (first argument is new receiver).
}
- // 5. Remove TOS (copy of last arguments), but keep return address.
- __ pop(ebx);
- __ pop(ecx);
- __ push(ebx);
- __ dec(eax);
-
- // 6. Check that function really was a function and get the code to
- // call from the function and check that the number of expected
- // arguments matches what we're providing.
- { Label invoke;
- __ test(edi, Operand(edi));
- __ j(not_zero, &invoke, taken);
- __ xor_(ebx, Operand(ebx));
- __ GetBuiltinEntry(edx, Builtins::CALL_NON_FUNCTION);
- __ jmp(Handle<Code>(builtin(ArgumentsAdaptorTrampoline)),
- RelocInfo::CODE_TARGET);
-
- __ bind(&invoke);
- __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
+ // 6. Get the code to call from the function and check that the number of
+ // expected arguments matches what we're providing.
+ { __ mov(edx, FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset));
__ mov(ebx,
FieldOperand(edx, SharedFunctionInfo::kFormalParameterCountOffset));
__ mov(edx, FieldOperand(edx, SharedFunctionInfo::kCodeOffset));
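
For reference, the rewritten step 5 counts ecx down from the argument count through zero, so the copy now covers the return-address slot as well and the loop must exit on sign (ecx reaching -1) rather than on zero. A sketch of the same shift in plain C++, modeling the stack as an array indexed from esp:

#include <cassert>
#include <vector>

// Slots stack[0..argc] hold the return address and arguments;
// stack[argc + 1] holds the receiver being overwritten. Counting from
// argc down to -1 copies every slot, which is why the assembly
// branches on not_sign (a not_zero exit would skip index 0).
void shift_down(std::vector<int>& stack, int argc) {
  for (int ecx = argc; ecx >= 0; --ecx) {
    stack[ecx + 1] = stack[ecx];  // move each slot one down
  }
}

int main() {
  // [return address, arg1, arg2, receiver]
  std::vector<int> stack = {100, 1, 2, 42};
  shift_down(stack, 2);
  assert(stack[1] == 100 && stack[2] == 1 && stack[3] == 2);
  return 0;
}
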
diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc
index 7c8ff31f60..72979c6794 100644
--- a/deps/v8/src/ia32/codegen-ia32.cc
+++ b/deps/v8/src/ia32/codegen-ia32.cc
@@ -174,12 +174,19 @@ void CodeGenerator::GenCode(FunctionLiteral* fun) {
function_return_is_shadowed_ = false;
// Allocate the local context if needed.
- if (scope_->num_heap_slots() > 0) {
+ int heap_slots = scope_->num_heap_slots();
+ if (heap_slots > 0) {
Comment cmnt(masm_, "[ allocate local context");
// Allocate local context.
// Get outer context and create a new context based on it.
frame_->PushFunction();
- Result context = frame_->CallRuntime(Runtime::kNewContext, 1);
+ Result context;
+ if (heap_slots <= FastNewContextStub::kMaximumSlots) {
+ FastNewContextStub stub(heap_slots);
+ context = frame_->CallStub(&stub, 1);
+ } else {
+ context = frame_->CallRuntime(Runtime::kNewContext, 1);
+ }
// Update context local.
frame_->SaveContextRegister();
@@ -763,19 +770,27 @@ class FloatingPointHelper : public AllStatic {
const char* GenericBinaryOpStub::GetName() {
- switch (op_) {
- case Token::ADD: return "GenericBinaryOpStub_ADD";
- case Token::SUB: return "GenericBinaryOpStub_SUB";
- case Token::MUL: return "GenericBinaryOpStub_MUL";
- case Token::DIV: return "GenericBinaryOpStub_DIV";
- case Token::BIT_OR: return "GenericBinaryOpStub_BIT_OR";
- case Token::BIT_AND: return "GenericBinaryOpStub_BIT_AND";
- case Token::BIT_XOR: return "GenericBinaryOpStub_BIT_XOR";
- case Token::SAR: return "GenericBinaryOpStub_SAR";
- case Token::SHL: return "GenericBinaryOpStub_SHL";
- case Token::SHR: return "GenericBinaryOpStub_SHR";
- default: return "GenericBinaryOpStub";
- }
+ if (name_ != NULL) return name_;
+ const int len = 100;
+ name_ = Bootstrapper::AllocateAutoDeletedArray(len);
+ if (name_ == NULL) return "OOM";
+ const char* op_name = Token::Name(op_);
+ const char* overwrite_name;
+ switch (mode_) {
+ case NO_OVERWRITE: overwrite_name = "Alloc"; break;
+ case OVERWRITE_RIGHT: overwrite_name = "OverwriteRight"; break;
+ case OVERWRITE_LEFT: overwrite_name = "OverwriteLeft"; break;
+ default: overwrite_name = "UnknownOverwrite"; break;
+ }
+
+ OS::SNPrintF(Vector<char>(name_, len),
+ "GenericBinaryOpStub_%s_%s%s_%s%s",
+ op_name,
+ overwrite_name,
+ (flags_ & NO_SMI_CODE_IN_STUB) ? "_NoSmiInStub" : "",
+ args_in_registers_ ? "RegArgs" : "StackArgs",
+ args_reversed_ ? "_R" : "");
+ return name_;
}
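
The rewritten GetName assembles a per-configuration name (operation, overwrite mode, smi-code flag, argument passing) instead of the old fixed switch. A standalone sketch of what the format string produces, with the flag values hardcoded for illustration:

#include <cstdio>

int main() {
  char name[100];                          // same budget as the patch
  const char* op_name = "ADD";             // Token::Name(op_)
  const char* overwrite_name = "Alloc";    // NO_OVERWRITE
  bool no_smi_code = true, args_in_registers = true, args_reversed = false;
  std::snprintf(name, sizeof(name),
                "GenericBinaryOpStub_%s_%s%s_%s%s",
                op_name,
                overwrite_name,
                no_smi_code ? "_NoSmiInStub" : "",
                args_in_registers ? "RegArgs" : "StackArgs",
                args_reversed ? "_R" : "");
  // Prints: GenericBinaryOpStub_ADD_Alloc_NoSmiInStub_RegArgs
  std::printf("%s\n", name);
  return 0;
}
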
@@ -803,14 +818,88 @@ class DeferredInlineBinaryOperation: public DeferredCode {
void DeferredInlineBinaryOperation::Generate() {
+ Label done;
+ if (CpuFeatures::IsSupported(SSE2) && ((op_ == Token::ADD) ||
+ (op_ == Token::SUB) ||
+ (op_ == Token::MUL) ||
+ (op_ == Token::DIV))) {
+ CpuFeatures::Scope use_sse2(SSE2);
+ Label call_runtime, after_alloc_failure;
+ Label left_smi, right_smi, load_right, do_op;
+ __ test(left_, Immediate(kSmiTagMask));
+ __ j(zero, &left_smi);
+ __ cmp(FieldOperand(left_, HeapObject::kMapOffset),
+ Factory::heap_number_map());
+ __ j(not_equal, &call_runtime);
+ __ movdbl(xmm0, FieldOperand(left_, HeapNumber::kValueOffset));
+ if (mode_ == OVERWRITE_LEFT) {
+ __ mov(dst_, left_);
+ }
+ __ jmp(&load_right);
+
+ __ bind(&left_smi);
+ __ sar(left_, 1);
+ __ cvtsi2sd(xmm0, Operand(left_));
+ __ shl(left_, 1);
+ if (mode_ == OVERWRITE_LEFT) {
+ Label alloc_failure;
+ __ push(left_);
+ __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
+ __ pop(left_);
+ }
+
+ __ bind(&load_right);
+ __ test(right_, Immediate(kSmiTagMask));
+ __ j(zero, &right_smi);
+ __ cmp(FieldOperand(right_, HeapObject::kMapOffset),
+ Factory::heap_number_map());
+ __ j(not_equal, &call_runtime);
+ __ movdbl(xmm1, FieldOperand(right_, HeapNumber::kValueOffset));
+ if (mode_ == OVERWRITE_RIGHT) {
+ __ mov(dst_, right_);
+ } else if (mode_ == NO_OVERWRITE) {
+ Label alloc_failure;
+ __ push(left_);
+ __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
+ __ pop(left_);
+ }
+ __ jmp(&do_op);
+
+ __ bind(&right_smi);
+ __ sar(right_, 1);
+ __ cvtsi2sd(xmm1, Operand(right_));
+ __ shl(right_, 1);
+ if (mode_ == OVERWRITE_RIGHT || mode_ == NO_OVERWRITE) {
+ Label alloc_failure;
+ __ push(left_);
+ __ AllocateHeapNumber(dst_, left_, no_reg, &after_alloc_failure);
+ __ pop(left_);
+ }
+
+ __ bind(&do_op);
+ switch (op_) {
+ case Token::ADD: __ addsd(xmm0, xmm1); break;
+ case Token::SUB: __ subsd(xmm0, xmm1); break;
+ case Token::MUL: __ mulsd(xmm0, xmm1); break;
+ case Token::DIV: __ divsd(xmm0, xmm1); break;
+ default: UNREACHABLE();
+ }
+ __ movdbl(FieldOperand(dst_, HeapNumber::kValueOffset), xmm0);
+ __ jmp(&done);
+
+ __ bind(&after_alloc_failure);
+ __ pop(left_);
+ __ bind(&call_runtime);
+ }
GenericBinaryOpStub stub(op_, mode_, NO_SMI_CODE_IN_STUB);
stub.GenerateCall(masm_, left_, right_);
if (!dst_.is(eax)) __ mov(dst_, eax);
+ __ bind(&done);
}
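
The deferred code above untags a smi with an arithmetic right shift, converts it with cvtsi2sd, then shifts left again so the register still holds the tagged value afterwards. A small model of that round trip, assuming the ia32 smi representation (value stored as value << 1, tag bit 0); this is an illustration, not V8 code:

#include <cassert>
#include <cstdint>

int32_t smi_tag(int32_t value) { return value * 2; }  // value << 1, tag 0

double smi_to_double_and_restore(int32_t& reg) {
  reg >>= 1;                            // sar reg, 1: untag
  double d = static_cast<double>(reg);  // cvtsi2sd xmm, reg
  reg *= 2;                             // shl reg, 1: retag, register unchanged
  return d;
}

int main() {
  int32_t reg = smi_tag(-7);
  assert(smi_to_double_and_restore(reg) == -7.0);
  assert(reg == smi_tag(-7));  // register value preserved across the conversion
  return 0;
}
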
void CodeGenerator::GenericBinaryOperation(Token::Value op,
- SmiAnalysis* type,
+ StaticType* type,
OverwriteMode overwrite_mode) {
Comment cmnt(masm_, "[ BinaryOperation");
Comment cmnt_token(masm_, Token::String(op));
@@ -1491,7 +1580,7 @@ void DeferredInlineSmiSub::Generate() {
void CodeGenerator::ConstantSmiBinaryOperation(Token::Value op,
Result* operand,
Handle<Object> value,
- SmiAnalysis* type,
+ StaticType* type,
bool reversed,
OverwriteMode overwrite_mode) {
// NOTE: This is an attempt to inline (a bit) more of the code for
@@ -1776,7 +1865,8 @@ void CodeGenerator::ConstantSmiBinaryOperation(Token::Value op,
}
-void CodeGenerator::Comparison(Condition cc,
+void CodeGenerator::Comparison(AstNode* node,
+ Condition cc,
bool strict,
ControlDestination* dest) {
// Strict only makes sense for equality comparisons.
@@ -1823,7 +1913,8 @@ void CodeGenerator::Comparison(Condition cc,
default:
UNREACHABLE();
}
- } else { // Only one side is a constant Smi.
+ } else {
+ // Only one side is a constant Smi.
// If left side is a constant Smi, reverse the operands.
// Since one side is a constant Smi, conversion order does not matter.
if (left_side_constant_smi) {
@@ -1837,6 +1928,8 @@ void CodeGenerator::Comparison(Condition cc,
// Implement comparison against a constant Smi, inlining the case
// where both sides are Smis.
left_side.ToRegister();
+ Register left_reg = left_side.reg();
+ Handle<Object> right_val = right_side.handle();
// Here we split control flow to the stub call and inlined cases
// before finally splitting it to the control destination. We use
@@ -1844,11 +1937,50 @@ void CodeGenerator::Comparison(Condition cc,
// the first split. We manually handle the off-frame references
// by reconstituting them on the non-fall-through path.
JumpTarget is_smi;
- Register left_reg = left_side.reg();
- Handle<Object> right_val = right_side.handle();
__ test(left_side.reg(), Immediate(kSmiTagMask));
is_smi.Branch(zero, taken);
+ bool is_for_loop_compare = (node->AsCompareOperation() != NULL)
+ && node->AsCompareOperation()->is_for_loop_condition();
+ if (!is_for_loop_compare
+ && CpuFeatures::IsSupported(SSE2)
+ && right_val->IsSmi()) {
+ // Right side is a constant smi and left side has been checked
+ // not to be a smi.
+ CpuFeatures::Scope use_sse2(SSE2);
+ JumpTarget not_number;
+ __ cmp(FieldOperand(left_reg, HeapObject::kMapOffset),
+ Immediate(Factory::heap_number_map()));
+ not_number.Branch(not_equal, &left_side);
+ __ movdbl(xmm1,
+ FieldOperand(left_reg, HeapNumber::kValueOffset));
+ int value = Smi::cast(*right_val)->value();
+ if (value == 0) {
+ __ xorpd(xmm0, xmm0);
+ } else {
+ Result temp = allocator()->Allocate();
+ __ mov(temp.reg(), Immediate(value));
+ __ cvtsi2sd(xmm0, Operand(temp.reg()));
+ temp.Unuse();
+ }
+ __ comisd(xmm1, xmm0);
+ // Jump to builtin for NaN.
+ not_number.Branch(parity_even, &left_side);
+ left_side.Unuse();
+ Condition double_cc = cc;
+ switch (cc) {
+ case less: double_cc = below; break;
+ case equal: double_cc = equal; break;
+ case less_equal: double_cc = below_equal; break;
+ case greater: double_cc = above; break;
+ case greater_equal: double_cc = above_equal; break;
+ default: UNREACHABLE();
+ }
+ dest->true_target()->Branch(double_cc);
+ dest->false_target()->Jump();
+ not_number.Bind(&left_side);
+ }
+
// Setup and call the compare stub.
CompareStub stub(cc, strict);
Result result = frame_->CallStub(&stub, &left_side, &right_side);
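
comisd sets CF and ZF the way an unsigned integer compare would (and PF for NaN, caught by parity_even above), so the signed conditions used on the smi path have to be remapped to their unsigned counterparts. The same mapping as a standalone sketch, with enum names mirroring the patch and everything else illustrative:

enum Condition { less, equal, less_equal, greater, greater_equal,
                 below, below_equal, above, above_equal };

// Translate a signed integer condition into the unsigned condition
// that tests the flags comisd actually produces.
Condition DoubleCondition(Condition cc) {
  switch (cc) {
    case less:          return below;        // CF set
    case equal:         return equal;        // ZF set
    case less_equal:    return below_equal;  // CF or ZF set
    case greater:       return above;        // CF and ZF both clear
    case greater_equal: return above_equal;  // CF clear
    default:            return cc;           // unreachable for compare ops
  }
}

int main() { return DoubleCondition(less) == below ? 0 : 1; }
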
@@ -1872,6 +2004,7 @@ void CodeGenerator::Comparison(Condition cc,
right_side.Unuse();
dest->Split(cc);
}
+
} else if (cc == equal &&
(left_side_constant_null || right_side_constant_null)) {
// To make null checks efficient, we check if either the left side or
@@ -1908,7 +2041,8 @@ void CodeGenerator::Comparison(Condition cc,
operand.Unuse();
dest->Split(not_zero);
}
- } else { // Neither side is a constant Smi or null.
+ } else {
+ // Neither side is a constant Smi or null.
// If either side is a non-smi constant, skip the smi check.
bool known_non_smi =
(left_side.is_constant() && !left_side.handle()->IsSmi()) ||
@@ -2575,7 +2709,7 @@ void CodeGenerator::VisitSwitchStatement(SwitchStatement* node) {
// Compare and branch to the body if true or the next test if
// false. Prefer the next test as a fall through.
ControlDestination dest(clause->body_target(), &next_test, false);
- Comparison(equal, true, &dest);
+ Comparison(node, equal, true, &dest);
// If the comparison fell through to the true target, jump to the
// actual body.
@@ -3585,18 +3719,28 @@ void CodeGenerator::VisitDebuggerStatement(DebuggerStatement* node) {
void CodeGenerator::InstantiateBoilerplate(Handle<JSFunction> boilerplate) {
- // Call the runtime to instantiate the function boilerplate object.
- // The inevitable call will sync frame elements to memory anyway, so
- // we do it eagerly to allow us to push the arguments directly into
- // place.
ASSERT(boilerplate->IsBoilerplate());
- frame_->SyncRange(0, frame_->element_count() - 1);
- // Create a new closure.
- frame_->EmitPush(esi);
- frame_->EmitPush(Immediate(boilerplate));
- Result result = frame_->CallRuntime(Runtime::kNewClosure, 2);
- frame_->Push(&result);
+ // Use the fast case closure allocation code that allocates in new
+ // space for nested functions that don't need literals cloning.
+ if (scope()->is_function_scope() && boilerplate->NumberOfLiterals() == 0) {
+ FastNewClosureStub stub;
+ frame_->Push(boilerplate);
+ Result answer = frame_->CallStub(&stub, 1);
+ frame_->Push(&answer);
+ } else {
+ // Call the runtime to instantiate the function boilerplate
+ // object. The inevitable call will sync frame elements to memory
+ // anyway, so we do it eagerly to allow us to push the arguments
+ // directly into place.
+ frame_->SyncRange(0, frame_->element_count() - 1);
+
+ // Create a new closure.
+ frame_->EmitPush(esi);
+ frame_->EmitPush(Immediate(boilerplate));
+ Result result = frame_->CallRuntime(Runtime::kNewClosure, 2);
+ frame_->Push(&result);
+ }
}
@@ -4295,18 +4439,23 @@ void CodeGenerator::VisitArrayLiteral(ArrayLiteral* node) {
// Push the resulting array literal boilerplate on the stack.
frame_->Push(&boilerplate);
+
// Clone the boilerplate object.
- Runtime::FunctionId clone_function_id = Runtime::kCloneLiteralBoilerplate;
- if (node->depth() == 1) {
- clone_function_id = Runtime::kCloneShallowLiteralBoilerplate;
+ int length = node->values()->length();
+ Result clone;
+ if (node->depth() == 1 &&
+ length <= FastCloneShallowArrayStub::kMaximumLength) {
+ FastCloneShallowArrayStub stub(length);
+ clone = frame_->CallStub(&stub, 1);
+ } else {
+ clone = frame_->CallRuntime(Runtime::kCloneLiteralBoilerplate, 1);
}
- Result clone = frame_->CallRuntime(clone_function_id, 1);
// Push the newly cloned literal object as the result.
frame_->Push(&clone);
// Generate code to set the elements in the array that are not
// literals.
- for (int i = 0; i < node->values()->length(); i++) {
+ for (int i = 0; i < length; i++) {
Expression* value = node->values()->at(i);
// If value is a literal the property value is already set in the
@@ -4535,9 +4684,6 @@ void CodeGenerator::VisitCall(Call* node) {
// JavaScript example: 'foo(1, 2, 3)' // foo is global
// ----------------------------------
- // Push the name of the function and the receiver onto the stack.
- frame_->Push(var->name());
-
// Pass the global object as the receiver and let the IC stub
// patch the stack to use the global proxy as 'this' in the
// invoked function.
@@ -4549,14 +4695,16 @@ void CodeGenerator::VisitCall(Call* node) {
Load(args->at(i));
}
+ // Push the name of the function onto the frame.
+ frame_->Push(var->name());
+
// Call the IC initialization code.
CodeForSourcePosition(node->position());
Result result = frame_->CallCallIC(RelocInfo::CODE_TARGET_CONTEXT,
arg_count,
loop_nesting());
frame_->RestoreContextRegister();
- // Replace the function on the stack with the result.
- frame_->SetElementAt(0, &result);
+ frame_->Push(&result);
} else if (var != NULL && var->slot() != NULL &&
var->slot()->type() == Slot::LOOKUP) {
@@ -4609,8 +4757,7 @@ void CodeGenerator::VisitCall(Call* node) {
node->position());
} else {
- // Push the name of the function and the receiver onto the stack.
- frame_->Push(name);
+ // Push the receiver onto the frame.
Load(property->obj());
// Load the arguments.
@@ -4619,14 +4766,16 @@ void CodeGenerator::VisitCall(Call* node) {
Load(args->at(i));
}
+ // Push the name of the function onto the frame.
+ frame_->Push(name);
+
// Call the IC initialization code.
CodeForSourcePosition(node->position());
Result result =
frame_->CallCallIC(RelocInfo::CODE_TARGET, arg_count,
loop_nesting());
frame_->RestoreContextRegister();
- // Replace the function on the stack with the result.
- frame_->SetElementAt(0, &result);
+ frame_->Push(&result);
}
} else {
@@ -5284,8 +5433,6 @@ void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
Runtime::Function* function = node->function();
if (function == NULL) {
- // Prepare stack for calling JS runtime function.
- frame_->Push(node->name());
// Push the builtins object found in the current global object.
Result temp = allocator()->Allocate();
ASSERT(temp.is_valid());
@@ -5302,11 +5449,12 @@ void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
if (function == NULL) {
// Call the JS runtime function.
+ frame_->Push(node->name());
Result answer = frame_->CallCallIC(RelocInfo::CODE_TARGET,
arg_count,
loop_nesting_);
frame_->RestoreContextRegister();
- frame_->SetElementAt(0, &answer);
+ frame_->Push(&answer);
} else {
// Call the C runtime function.
Result answer = frame_->CallRuntime(function, arg_count);
@@ -5974,7 +6122,7 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
}
Load(left);
Load(right);
- Comparison(cc, strict, destination());
+ Comparison(node, cc, strict, destination());
}
@@ -6428,7 +6576,7 @@ void Reference::SetValue(InitState init_state) {
// a loop and the key is likely to be a smi.
Property* property = expression()->AsProperty();
ASSERT(property != NULL);
- SmiAnalysis* key_smi_analysis = property->key()->type();
+ StaticType* key_smi_analysis = property->key()->type();
if (cgen_->loop_nesting() > 0 && key_smi_analysis->IsLikelySmi()) {
Comment cmnt(masm, "[ Inlined store to keyed Property");
@@ -6529,6 +6677,133 @@ void Reference::SetValue(InitState init_state) {
}
+void FastNewClosureStub::Generate(MacroAssembler* masm) {
+ // Clone the boilerplate in new space. Set the context to the
+ // current context in esi.
+ Label gc;
+ __ AllocateInNewSpace(JSFunction::kSize, eax, ebx, ecx, &gc, TAG_OBJECT);
+
+ // Get the boilerplate function from the stack.
+ __ mov(edx, Operand(esp, 1 * kPointerSize));
+
+ // Compute the function map in the current global context and set that
+ // as the map of the allocated object.
+ __ mov(ecx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ mov(ecx, FieldOperand(ecx, GlobalObject::kGlobalContextOffset));
+ __ mov(ecx, Operand(ecx, Context::SlotOffset(Context::FUNCTION_MAP_INDEX)));
+ __ mov(FieldOperand(eax, JSObject::kMapOffset), ecx);
+
+ // Clone the rest of the boilerplate fields. We don't have to update
+ // the write barrier because the allocated object is in new space.
+ for (int offset = kPointerSize;
+ offset < JSFunction::kSize;
+ offset += kPointerSize) {
+ if (offset == JSFunction::kContextOffset) {
+ __ mov(FieldOperand(eax, offset), esi);
+ } else {
+ __ mov(ebx, FieldOperand(edx, offset));
+ __ mov(FieldOperand(eax, offset), ebx);
+ }
+ }
+
+ // Return and remove the on-stack parameter.
+ __ ret(1 * kPointerSize);
+
+ // Create a new closure through the slower runtime call.
+ __ bind(&gc);
+ __ pop(ecx); // Temporarily remove return address.
+ __ pop(edx);
+ __ push(esi);
+ __ push(edx);
+ __ push(ecx); // Restore return address.
+ __ TailCallRuntime(ExternalReference(Runtime::kNewClosure), 2, 1);
+}
+
+
+void FastNewContextStub::Generate(MacroAssembler* masm) {
+ // Try to allocate the context in new space.
+ Label gc;
+ int length = slots_ + Context::MIN_CONTEXT_SLOTS;
+ __ AllocateInNewSpace((length * kPointerSize) + FixedArray::kHeaderSize,
+ eax, ebx, ecx, &gc, TAG_OBJECT);
+
+ // Get the function from the stack.
+ __ mov(ecx, Operand(esp, 1 * kPointerSize));
+
+ // Setup the object header.
+ __ mov(FieldOperand(eax, HeapObject::kMapOffset), Factory::context_map());
+ __ mov(FieldOperand(eax, Array::kLengthOffset), Immediate(length));
+
+ // Setup the fixed slots.
+ __ xor_(ebx, Operand(ebx)); // Set to NULL.
+ __ mov(Operand(eax, Context::SlotOffset(Context::CLOSURE_INDEX)), ecx);
+ __ mov(Operand(eax, Context::SlotOffset(Context::FCONTEXT_INDEX)), eax);
+ __ mov(Operand(eax, Context::SlotOffset(Context::PREVIOUS_INDEX)), ebx);
+ __ mov(Operand(eax, Context::SlotOffset(Context::EXTENSION_INDEX)), ebx);
+
+ // Copy the global object from the surrounding context.
+ __ mov(ebx, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ mov(Operand(eax, Context::SlotOffset(Context::GLOBAL_INDEX)), ebx);
+
+ // Initialize the rest of the slots to undefined.
+ __ mov(ebx, Factory::undefined_value());
+ for (int i = Context::MIN_CONTEXT_SLOTS; i < length; i++) {
+ __ mov(Operand(eax, Context::SlotOffset(i)), ebx);
+ }
+
+ // Return and remove the on-stack parameter.
+ __ mov(esi, Operand(eax));
+ __ ret(1 * kPointerSize);
+
+ // Need to collect. Call into runtime system.
+ __ bind(&gc);
+ __ TailCallRuntime(ExternalReference(Runtime::kNewContext), 1, 1);
+}
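
The stub lays the context out as the five fixed slots it initializes above followed by slots_ user slots filled with undefined. A toy model of that layout; the slot order follows the stores above, and MIN_CONTEXT_SLOTS = 5 is an assumption about this V8 vintage, not read from the patch:

#include <cstdio>
#include <string>
#include <vector>

int main() {
  const int kMinContextSlots = 5;  // CLOSURE, FCONTEXT, PREVIOUS, EXTENSION, GLOBAL
  int slots = 2;                   // the stub's slots_ parameter
  std::vector<std::string> context(kMinContextSlots + slots);
  context[0] = "closure (function taken from the stack)";
  context[1] = "fcontext (the context itself)";
  context[2] = "previous (NULL)";
  context[3] = "extension (NULL)";
  context[4] = "global (copied from the surrounding context)";
  for (size_t i = kMinContextSlots; i < context.size(); ++i)
    context[i] = "undefined";
  for (size_t i = 0; i < context.size(); ++i)
    std::printf("slot %zu: %s\n", i, context[i].c_str());
  return 0;
}
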
+
+
+void FastCloneShallowArrayStub::Generate(MacroAssembler* masm) {
+ int elements_size = (length_ > 0) ? FixedArray::SizeFor(length_) : 0;
+ int size = JSArray::kSize + elements_size;
+
+ // Allocate both the JS array and the elements array in one big
+  // allocation. This avoids multiple limit checks.
+ Label gc;
+ __ AllocateInNewSpace(size, eax, ebx, ecx, &gc, TAG_OBJECT);
+
+ // Get the boilerplate from the stack.
+ __ mov(ecx, Operand(esp, 1 * kPointerSize));
+
+ // Copy the JS array part.
+ for (int i = 0; i < JSArray::kSize; i += kPointerSize) {
+ if ((i != JSArray::kElementsOffset) || (length_ == 0)) {
+ __ mov(ebx, FieldOperand(ecx, i));
+ __ mov(FieldOperand(eax, i), ebx);
+ }
+ }
+
+ if (length_ > 0) {
+ // Get hold of the elements array of the boilerplate and setup the
+ // elements pointer in the resulting object.
+ __ mov(ecx, FieldOperand(ecx, JSArray::kElementsOffset));
+ __ lea(edx, Operand(eax, JSArray::kSize));
+ __ mov(FieldOperand(eax, JSArray::kElementsOffset), edx);
+
+ // Copy the elements array.
+ for (int i = 0; i < elements_size; i += kPointerSize) {
+ __ mov(ebx, FieldOperand(ecx, i));
+ __ mov(FieldOperand(edx, i), ebx);
+ }
+ }
+
+ // Return and remove the on-stack parameter.
+ __ ret(1 * kPointerSize);
+
+ __ bind(&gc);
+ ExternalReference runtime(Runtime::kCloneShallowLiteralBoilerplate);
+ __ TailCallRuntime(runtime, 1, 1);
+}
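
Because the array and its elements live in one allocation, the size is simply JSArray::kSize plus the elements FixedArray when length_ > 0. A back-of-the-envelope version with assumed ia32 constants (4-byte pointers, four-word JSArray, two-word FixedArray header); the real values come from the build, not this sketch:

#include <cstdio>

int main() {
  const int kPointerSize = 4;
  const int kJSArraySize = 4 * kPointerSize;       // map, properties, elements, length
  const int kFixedArrayHeader = 2 * kPointerSize;  // map, length
  int lengths[] = {0, 4};
  for (int length : lengths) {
    int elements_size =
        (length > 0) ? kFixedArrayHeader + length * kPointerSize : 0;
    // length 0 also skips copying the elements pointer, as in the stub.
    std::printf("length %d: allocate %d bytes\n",
                length, kJSArraySize + elements_size);
  }
  return 0;
}
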
+
+
// NOTE: The stub does not handle the inlined cases (Smis, Booleans, undefined).
void ToBooleanStub::Generate(MacroAssembler* masm) {
Label false_result, true_result, not_string;
@@ -7441,18 +7716,90 @@ void ArgumentsAccessStub::GenerateNewObject(MacroAssembler* masm) {
static const int kDisplacement = 2 * kPointerSize;
// Check if the calling frame is an arguments adaptor frame.
- Label runtime;
+ Label adaptor_frame, try_allocate, runtime;
__ mov(edx, Operand(ebp, StandardFrameConstants::kCallerFPOffset));
__ mov(ecx, Operand(edx, StandardFrameConstants::kContextOffset));
__ cmp(Operand(ecx), Immediate(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- __ j(not_equal, &runtime);
+ __ j(equal, &adaptor_frame);
+
+ // Get the length from the frame.
+ __ mov(ecx, Operand(esp, 1 * kPointerSize));
+ __ jmp(&try_allocate);
// Patch the arguments.length and the parameters pointer.
+ __ bind(&adaptor_frame);
__ mov(ecx, Operand(edx, ArgumentsAdaptorFrameConstants::kLengthOffset));
__ mov(Operand(esp, 1 * kPointerSize), ecx);
__ lea(edx, Operand(edx, ecx, times_2, kDisplacement));
__ mov(Operand(esp, 2 * kPointerSize), edx);
+ // Try the new space allocation. Start out with computing the size of
+ // the arguments object and the elements array.
+ Label add_arguments_object;
+ __ bind(&try_allocate);
+ __ test(ecx, Operand(ecx));
+ __ j(zero, &add_arguments_object);
+ __ lea(ecx, Operand(ecx, times_2, FixedArray::kHeaderSize));
+ __ bind(&add_arguments_object);
+ __ add(Operand(ecx), Immediate(Heap::kArgumentsObjectSize));
+
+ // Do the allocation of both objects in one go.
+ __ AllocateInNewSpace(ecx, eax, edx, ebx, &runtime, TAG_OBJECT);
+
+ // Get the arguments boilerplate from the current (global) context.
+ int offset = Context::SlotOffset(Context::ARGUMENTS_BOILERPLATE_INDEX);
+ __ mov(edi, Operand(esi, Context::SlotOffset(Context::GLOBAL_INDEX)));
+ __ mov(edi, FieldOperand(edi, GlobalObject::kGlobalContextOffset));
+ __ mov(edi, Operand(edi, offset));
+
+ // Copy the JS object part.
+ for (int i = 0; i < JSObject::kHeaderSize; i += kPointerSize) {
+ __ mov(ebx, FieldOperand(edi, i));
+ __ mov(FieldOperand(eax, i), ebx);
+ }
+
+ // Setup the callee in-object property.
+ ASSERT(Heap::arguments_callee_index == 0);
+ __ mov(ebx, Operand(esp, 3 * kPointerSize));
+ __ mov(FieldOperand(eax, JSObject::kHeaderSize), ebx);
+
+ // Get the length (smi tagged) and set that as an in-object property too.
+ ASSERT(Heap::arguments_length_index == 1);
+ __ mov(ecx, Operand(esp, 1 * kPointerSize));
+ __ mov(FieldOperand(eax, JSObject::kHeaderSize + kPointerSize), ecx);
+
+ // If there are no actual arguments, we're done.
+ Label done;
+ __ test(ecx, Operand(ecx));
+ __ j(zero, &done);
+
+ // Get the parameters pointer from the stack and untag the length.
+ __ mov(edx, Operand(esp, 2 * kPointerSize));
+ __ sar(ecx, kSmiTagSize);
+
+ // Setup the elements pointer in the allocated arguments object and
+ // initialize the header in the elements fixed array.
+ __ lea(edi, Operand(eax, Heap::kArgumentsObjectSize));
+ __ mov(FieldOperand(eax, JSObject::kElementsOffset), edi);
+ __ mov(FieldOperand(edi, FixedArray::kMapOffset),
+ Immediate(Factory::fixed_array_map()));
+ __ mov(FieldOperand(edi, FixedArray::kLengthOffset), ecx);
+
+ // Copy the fixed array slots.
+ Label loop;
+ __ bind(&loop);
+ __ mov(ebx, Operand(edx, -1 * kPointerSize)); // Skip receiver.
+ __ mov(FieldOperand(edi, FixedArray::kHeaderSize), ebx);
+ __ add(Operand(edi), Immediate(kPointerSize));
+ __ sub(Operand(edx), Immediate(kPointerSize));
+ __ dec(ecx);
+ __ test(ecx, Operand(ecx));
+ __ j(not_zero, &loop);
+
+ // Return and remove the on-stack parameters.
+ __ bind(&done);
+ __ ret(3 * kPointerSize);
+
// Do the runtime call to allocate the arguments object.
__ bind(&runtime);
__ TailCallRuntime(ExternalReference(Runtime::kNewArgumentsFast), 3, 1);
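
The size computation above exploits the fact that ecx still holds the smi-tagged length (length << 1), so the lea with times_2 scaling yields length * 4 bytes directly. An illustrative rerun of the arithmetic; the constants are assumed ia32 values, with the in-object callee and length slots matching the stores above:

#include <cstdio>

int main() {
  const int kPointerSize = 4;
  const int kJSObjectHeaderSize = 3 * kPointerSize;  // map, properties, elements
  const int kArgumentsObjectSize =                   // header + callee + length
      kJSObjectHeaderSize + 2 * kPointerSize;        // (assumed layout)
  const int kFixedArrayHeaderSize = 2 * kPointerSize;
  int argcs[] = {0, 3};
  for (int argc : argcs) {
    int smi_length = argc << 1;       // what the stub loads from the stack
    int size = kArgumentsObjectSize;
    if (argc != 0)                    // lea ecx, [ecx*2 + kHeaderSize]
      size += smi_length * 2 + kFixedArrayHeaderSize;
    std::printf("argc %d -> %d bytes\n", argc, size);
  }
  return 0;
}
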
@@ -8306,6 +8653,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
__ mov(edi, FieldOperand(edx, HeapObject::kMapOffset));
__ movzx_b(edi, FieldOperand(edi, Map::kInstanceTypeOffset));
__ and_(ecx, Operand(edi));
+ ASSERT(kStringEncodingMask == kAsciiStringTag);
__ test(ecx, Immediate(kAsciiStringTag));
__ j(zero, &non_ascii);
// Allocate an ascii cons string.
@@ -8348,7 +8696,7 @@ void StringAddStub::Generate(MacroAssembler* masm) {
Label non_ascii_string_add_flat_result;
__ mov(ecx, FieldOperand(eax, HeapObject::kMapOffset));
__ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
- ASSERT(kAsciiStringTag != 0);
+ ASSERT(kStringEncodingMask == kAsciiStringTag);
__ test(ecx, Immediate(kAsciiStringTag));
__ j(zero, &non_ascii_string_add_flat_result);
__ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
diff --git a/deps/v8/src/ia32/codegen-ia32.h b/deps/v8/src/ia32/codegen-ia32.h
index 11a5163db8..3d17c96ad0 100644
--- a/deps/v8/src/ia32/codegen-ia32.h
+++ b/deps/v8/src/ia32/codegen-ia32.h
@@ -434,7 +434,7 @@ class CodeGenerator: public AstVisitor {
void GenericBinaryOperation(
Token::Value op,
- SmiAnalysis* type,
+ StaticType* type,
OverwriteMode overwrite_mode);
// If possible, combine two constant smi values using op to produce
@@ -447,7 +447,7 @@ class CodeGenerator: public AstVisitor {
void ConstantSmiBinaryOperation(Token::Value op,
Result* operand,
Handle<Object> constant_operand,
- SmiAnalysis* type,
+ StaticType* type,
bool reversed,
OverwriteMode overwrite_mode);
@@ -459,7 +459,8 @@ class CodeGenerator: public AstVisitor {
Result* right,
OverwriteMode overwrite_mode);
- void Comparison(Condition cc,
+ void Comparison(AstNode* node,
+ Condition cc,
bool strict,
ControlDestination* destination);
@@ -665,7 +666,8 @@ class GenericBinaryOpStub: public CodeStub {
mode_(mode),
flags_(flags),
args_in_registers_(false),
- args_reversed_(false) {
+ args_reversed_(false),
+ name_(NULL) {
use_sse3_ = CpuFeatures::IsSupported(SSE3);
ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
}
@@ -684,6 +686,7 @@ class GenericBinaryOpStub: public CodeStub {
bool args_in_registers_; // Arguments passed in registers not on the stack.
bool args_reversed_; // Left and right argument are swapped.
bool use_sse3_;
+ char* name_;
const char* GetName();
@@ -725,8 +728,8 @@ class GenericBinaryOpStub: public CodeStub {
bool ArgsInRegistersSupported() {
return ((op_ == Token::ADD) || (op_ == Token::SUB)
- || (op_ == Token::MUL) || (op_ == Token::DIV))
- && flags_ != NO_SMI_CODE_IN_STUB;
+ || (op_ == Token::MUL) || (op_ == Token::DIV))
+ && flags_ != NO_SMI_CODE_IN_STUB;
}
bool IsOperationCommutative() {
return (op_ == Token::ADD) || (op_ == Token::MUL);
@@ -760,11 +763,11 @@ class StringAddStub: public CodeStub {
void Generate(MacroAssembler* masm);
void GenerateCopyCharacters(MacroAssembler* masm,
- Register desc,
- Register src,
- Register count,
- Register scratch,
- bool ascii);
+ Register desc,
+ Register src,
+ Register count,
+ Register scratch,
+ bool ascii);
// Should the stub check whether arguments are strings?
bool string_check_;
diff --git a/deps/v8/src/ia32/disasm-ia32.cc b/deps/v8/src/ia32/disasm-ia32.cc
index df5a28a54b..375cbdf7eb 100644
--- a/deps/v8/src/ia32/disasm-ia32.cc
+++ b/deps/v8/src/ia32/disasm-ia32.cc
@@ -1049,6 +1049,14 @@ int DisassemblerIA32::InstructionDecode(v8::internal::Vector<char> out_buffer,
NameOfXMMRegister(regop),
NameOfXMMRegister(rm));
data++;
+ } else if (*data == 0x57) {
+ data++;
+ int mod, regop, rm;
+ get_modrm(*data, &mod, &regop, &rm);
+ AppendToBuffer("xorpd %s,%s",
+ NameOfXMMRegister(regop),
+ NameOfXMMRegister(rm));
+ data++;
} else {
UnimplementedInstruction();
}
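
The decoder side mirrors the encoder: after the 66 0F prefix bytes, opcode 0x57 is followed by a ModRM byte naming the two XMM registers. A tiny standalone decode of the register-register form (register names and buffer illustrative):

#include <cstdint>
#include <cstdio>

int main() {
  const uint8_t code[] = {0x66, 0x0F, 0x57, 0xC8};  // xorpd xmm1, xmm0
  uint8_t modrm = code[3];
  int mod = modrm >> 6;            // same field split as get_modrm
  int regop = (modrm >> 3) & 7;
  int rm = modrm & 7;
  if (code[0] == 0x66 && code[1] == 0x0F && code[2] == 0x57 && mod == 3) {
    std::printf("xorpd xmm%d,xmm%d\n", regop, rm);  // prints xorpd xmm1,xmm0
  }
  return 0;
}
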
diff --git a/deps/v8/src/ia32/fast-codegen-ia32.cc b/deps/v8/src/ia32/fast-codegen-ia32.cc
index c5d544127a..46524d7dce 100644
--- a/deps/v8/src/ia32/fast-codegen-ia32.cc
+++ b/deps/v8/src/ia32/fast-codegen-ia32.cc
@@ -412,46 +412,24 @@ void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
Variable* var = decl->proxy()->var();
ASSERT(var != NULL); // Must have been resolved.
Slot* slot = var->slot();
- ASSERT(slot != NULL); // No global declarations here.
-
- // We have 3 cases for slots: LOOKUP, LOCAL, CONTEXT.
- switch (slot->type()) {
- case Slot::LOOKUP: {
- __ push(esi);
- __ push(Immediate(var->name()));
- // Declaration nodes are always introduced in one of two modes.
- ASSERT(decl->mode() == Variable::VAR || decl->mode() == Variable::CONST);
- PropertyAttributes attr =
- (decl->mode() == Variable::VAR) ? NONE : READ_ONLY;
- __ push(Immediate(Smi::FromInt(attr)));
- // Push initial value, if any.
- // Note: For variables we must not push an initial value (such as
- // 'undefined') because we may have a (legal) redeclaration and we
- // must not destroy the current value.
- if (decl->mode() == Variable::CONST) {
- __ push(Immediate(Factory::the_hole_value()));
- } else if (decl->fun() != NULL) {
- Visit(decl->fun());
- } else {
- __ push(Immediate(Smi::FromInt(0))); // No initial value!
- }
- __ CallRuntime(Runtime::kDeclareContextSlot, 4);
- break;
- }
- case Slot::LOCAL:
- if (decl->mode() == Variable::CONST) {
- __ mov(Operand(ebp, SlotOffset(var->slot())),
- Immediate(Factory::the_hole_value()));
- } else if (decl->fun() != NULL) {
- Visit(decl->fun());
- __ pop(Operand(ebp, SlotOffset(var->slot())));
- }
- break;
- case Slot::CONTEXT:
- // The variable in the decl always resides in the current context.
- ASSERT(function_->scope()->ContextChainLength(slot->var()->scope()) == 0);
- if (decl->mode() == Variable::CONST) {
- __ mov(eax, Immediate(Factory::the_hole_value()));
+ Property* prop = var->AsProperty();
+
+ if (slot != NULL) {
+ switch (slot->type()) {
+ case Slot::PARAMETER: // Fall through.
+ case Slot::LOCAL:
+ if (decl->mode() == Variable::CONST) {
+ __ mov(Operand(ebp, SlotOffset(var->slot())),
+ Immediate(Factory::the_hole_value()));
+ } else if (decl->fun() != NULL) {
+ Visit(decl->fun());
+ __ pop(Operand(ebp, SlotOffset(var->slot())));
+ }
+ break;
+
+ case Slot::CONTEXT:
+ // The variable in the decl always resides in the current context.
+ ASSERT_EQ(0, function_->scope()->ContextChainLength(var->scope()));
if (FLAG_debug_code) {
// Check if we have the correct context pointer.
__ mov(ebx,
@@ -459,26 +437,70 @@ void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
__ cmp(ebx, Operand(esi));
__ Check(equal, "Unexpected declaration in current context.");
}
- __ mov(CodeGenerator::ContextOperand(esi, slot->index()), eax);
- // No write barrier since the_hole_value is in old space.
- ASSERT(!Heap::InNewSpace(*Factory::the_hole_value()));
- } else if (decl->fun() != NULL) {
+ if (decl->mode() == Variable::CONST) {
+ __ mov(eax, Immediate(Factory::the_hole_value()));
+ __ mov(CodeGenerator::ContextOperand(esi, slot->index()), eax);
+ // No write barrier since the hole value is in old space.
+ } else if (decl->fun() != NULL) {
+ Visit(decl->fun());
+ __ pop(eax);
+ __ mov(CodeGenerator::ContextOperand(esi, slot->index()), eax);
+ int offset = Context::SlotOffset(slot->index());
+ __ RecordWrite(esi, offset, eax, ecx);
+ }
+ break;
+
+ case Slot::LOOKUP: {
+ __ push(esi);
+ __ push(Immediate(var->name()));
+ // Declaration nodes are always introduced in one of two modes.
+ ASSERT(decl->mode() == Variable::VAR ||
+ decl->mode() == Variable::CONST);
+ PropertyAttributes attr =
+ (decl->mode() == Variable::VAR) ? NONE : READ_ONLY;
+ __ push(Immediate(Smi::FromInt(attr)));
+ // Push initial value, if any.
+ // Note: For variables we must not push an initial value (such as
+ // 'undefined') because we may have a (legal) redeclaration and we
+ // must not destroy the current value.
+ if (decl->mode() == Variable::CONST) {
+ __ push(Immediate(Factory::the_hole_value()));
+ } else if (decl->fun() != NULL) {
+ Visit(decl->fun());
+ } else {
+ __ push(Immediate(Smi::FromInt(0))); // No initial value!
+ }
+ __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+ break;
+ }
+ }
+
+ } else if (prop != NULL) {
+ if (decl->fun() != NULL || decl->mode() == Variable::CONST) {
+ // We are declaring a function or constant that rewrites to a
+ // property. Use (keyed) IC to set the initial value.
+ ASSERT_EQ(Expression::kValue, prop->obj()->context());
+ Visit(prop->obj());
+ ASSERT_EQ(Expression::kValue, prop->key()->context());
+ Visit(prop->key());
+
+ if (decl->fun() != NULL) {
+ ASSERT_EQ(Expression::kValue, decl->fun()->context());
Visit(decl->fun());
__ pop(eax);
- if (FLAG_debug_code) {
- // Check if we have the correct context pointer.
- __ mov(ebx,
- CodeGenerator::ContextOperand(esi, Context::FCONTEXT_INDEX));
- __ cmp(ebx, Operand(esi));
- __ Check(equal, "Unexpected declaration in current context.");
- }
- __ mov(CodeGenerator::ContextOperand(esi, slot->index()), eax);
- int offset = Context::SlotOffset(slot->index());
- __ RecordWrite(esi, offset, eax, ecx);
+ } else {
+ __ Set(eax, Immediate(Factory::the_hole_value()));
}
- break;
- default:
- UNREACHABLE();
+
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
+ __ call(ic, RelocInfo::CODE_TARGET);
+ // Absence of a test eax instruction following the call
+ // indicates that none of the load was inlined.
+
+ // Value in eax is ignored (declarations are statements). Receiver
+ // and key on stack are discarded.
+ __ add(Operand(esp), Immediate(2 * kPointerSize));
+ }
}
}
@@ -493,20 +515,6 @@ void FastCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
}
-void FastCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
- Comment cmnt(masm_, "[ ReturnStatement");
- Expression* expr = stmt->expression();
- if (expr->AsLiteral() != NULL) {
- __ mov(eax, expr->AsLiteral()->handle());
- } else {
- ASSERT_EQ(Expression::kValue, expr->context());
- Visit(expr);
- __ pop(eax);
- }
- EmitReturnSequence(stmt->statement_pos());
-}
-
-
void FastCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
Comment cmnt(masm_, "[ FunctionLiteral");
@@ -527,14 +535,20 @@ void FastCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) {
void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
Comment cmnt(masm_, "[ VariableProxy");
- Expression* rewrite = expr->var()->rewrite();
+ EmitVariableLoad(expr->var(), expr->context());
+}
+
+
+void FastCodeGenerator::EmitVariableLoad(Variable* var,
+ Expression::Context context) {
+ Expression* rewrite = var->rewrite();
if (rewrite == NULL) {
- ASSERT(expr->var()->is_global());
+ ASSERT(var->is_global());
Comment cmnt(masm_, "Global variable");
// Use inline caching. Variable name is passed in ecx and the global
// object on the stack.
__ push(CodeGenerator::GlobalObject());
- __ mov(ecx, expr->name());
+ __ mov(ecx, var->name());
Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
__ call(ic, RelocInfo::CODE_TARGET_CONTEXT);
// By emitting a nop we make sure that we do not have a test eax
@@ -542,8 +556,7 @@ void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
// Remember that the assembler may choose to do peephole optimization
// (eg, push/pop elimination).
__ nop();
-
- DropAndMove(expr->context(), eax);
+ DropAndMove(context, eax);
} else if (rewrite->AsSlot() != NULL) {
Slot* slot = rewrite->AsSlot();
if (FLAG_debug_code) {
@@ -564,7 +577,7 @@ void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
UNREACHABLE();
}
}
- Move(expr->context(), slot, eax);
+ Move(context, slot, eax);
} else {
Comment cmnt(masm_, "Variable rewritten to Property");
// A variable has been rewritten into an explicit access to
@@ -598,9 +611,8 @@ void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) {
// Notice: We must not have a "test eax, ..." instruction after
// the call. It is treated specially by the LoadIC code.
__ nop();
-
- // Drop key and object left on the stack by IC, and push the result.
- DropAndMove(expr->context(), eax, 2);
+ // Drop key and object left on the stack by IC.
+ DropAndMove(context, eax, 2);
}
}
@@ -634,35 +646,14 @@ void FastCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) {
void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
Comment cmnt(masm_, "[ ObjectLiteral");
- Label exists;
- // Registers will be used as follows:
- // edi = JS function.
- // ebx = literals array.
- // eax = boilerplate
-
__ mov(edi, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ mov(ebx, FieldOperand(edi, JSFunction::kLiteralsOffset));
- int literal_offset =
- FixedArray::kHeaderSize + expr->literal_index() * kPointerSize;
- __ mov(eax, FieldOperand(ebx, literal_offset));
- __ cmp(eax, Factory::undefined_value());
- __ j(not_equal, &exists);
- // Create boilerplate if it does not exist.
- // Literal array (0).
- __ push(ebx);
- // Literal index (1).
+ __ push(FieldOperand(edi, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
- // Constant properties (2).
__ push(Immediate(expr->constant_properties()));
- __ CallRuntime(Runtime::kCreateObjectLiteralBoilerplate, 3);
- __ bind(&exists);
- // eax contains boilerplate.
- // Clone boilerplate.
- __ push(eax);
- if (expr->depth() == 1) {
- __ CallRuntime(Runtime::kCloneShallowLiteralBoilerplate, 1);
+ if (expr->depth() > 1) {
+ __ CallRuntime(Runtime::kCreateObjectLiteral, 3);
} else {
- __ CallRuntime(Runtime::kCloneLiteralBoilerplate, 1);
+ __ CallRuntime(Runtime::kCreateObjectLiteralShallow, 3);
}
// If result_saved == true: The result is saved on top of the
@@ -758,31 +749,14 @@ void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) {
void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Comment cmnt(masm_, "[ ArrayLiteral");
- Label make_clone;
-
- // Fetch the function's literals array.
__ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ mov(ebx, FieldOperand(ebx, JSFunction::kLiteralsOffset));
- // Check if the literal's boilerplate has been instantiated.
- int offset =
- FixedArray::kHeaderSize + (expr->literal_index() * kPointerSize);
- __ mov(eax, FieldOperand(ebx, offset));
- __ cmp(eax, Factory::undefined_value());
- __ j(not_equal, &make_clone);
-
- // Instantiate the boilerplate.
- __ push(ebx);
+ __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
__ push(Immediate(Smi::FromInt(expr->literal_index())));
__ push(Immediate(expr->literals()));
- __ CallRuntime(Runtime::kCreateArrayLiteralBoilerplate, 3);
-
- __ bind(&make_clone);
- // Clone the boilerplate.
- __ push(eax);
if (expr->depth() > 1) {
- __ CallRuntime(Runtime::kCloneLiteralBoilerplate, 1);
+ __ CallRuntime(Runtime::kCreateArrayLiteral, 3);
} else {
- __ CallRuntime(Runtime::kCloneShallowLiteralBoilerplate, 1);
+ __ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
}
bool result_saved = false; // Is the result saved to the stack?
@@ -852,10 +826,37 @@ void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
}
+void FastCodeGenerator::EmitNamedPropertyLoad(Property* prop,
+ Expression::Context context) {
+ Literal* key = prop->key()->AsLiteral();
+ __ mov(ecx, Immediate(key->handle()));
+ Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
+ __ call(ic, RelocInfo::CODE_TARGET);
+ Move(context, eax);
+}
+
+
+void FastCodeGenerator::EmitKeyedPropertyLoad(Expression::Context context) {
+ Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
+ __ call(ic, RelocInfo::CODE_TARGET);
+ Move(context, eax);
+}
+
+
+void FastCodeGenerator::EmitCompoundAssignmentOp(Token::Value op,
+ Expression::Context context) {
+ GenericBinaryOpStub stub(op,
+ NO_OVERWRITE,
+ NO_GENERIC_BINARY_FLAGS);
+ __ CallStub(&stub);
+ Move(context, eax);
+}
+
+
void FastCodeGenerator::EmitVariableAssignment(Assignment* expr) {
Variable* var = expr->target()->AsVariableProxy()->AsVariable();
ASSERT(var != NULL);
-
+ ASSERT(var->is_global() || var->slot() != NULL);
if (var->is_global()) {
// Assignment to a global variable. Use inline caching for the
// assignment. Right-hand-side value is passed in eax, variable name in
@@ -960,35 +961,6 @@ void FastCodeGenerator::EmitVariableAssignment(Assignment* expr) {
UNREACHABLE();
break;
}
- } else {
- Property* property = var->rewrite()->AsProperty();
- ASSERT_NOT_NULL(property);
-
- // Load object and key onto the stack.
- Slot* object_slot = property->obj()->AsSlot();
- ASSERT_NOT_NULL(object_slot);
- Move(Expression::kValue, object_slot, eax);
-
- Literal* key_literal = property->key()->AsLiteral();
- ASSERT_NOT_NULL(key_literal);
- Move(Expression::kValue, key_literal);
-
- // Value to store was pushed before object and key on the stack.
- __ mov(eax, Operand(esp, 2 * kPointerSize));
-
- // Arguments to ic is value in eax, object and key on stack.
- Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize));
- __ call(ic, RelocInfo::CODE_TARGET);
-
- if (expr->context() == Expression::kEffect) {
- __ add(Operand(esp), Immediate(3 * kPointerSize));
- } else if (expr->context() == Expression::kValue) {
- // Value is still on the stack in esp[2 * kPointerSize]
- __ add(Operand(esp), Immediate(2 * kPointerSize));
- } else {
- __ mov(eax, Operand(esp, 2 * kPointerSize));
- DropAndMove(expr->context(), eax, 3);
- }
}
}
@@ -1094,7 +1066,9 @@ void FastCodeGenerator::VisitProperty(Property* expr) {
}
-void FastCodeGenerator::EmitCallWithIC(Call* expr, RelocInfo::Mode reloc_info) {
+void FastCodeGenerator::EmitCallWithIC(Call* expr,
+ Handle<Object> name,
+ RelocInfo::Mode mode) {
// Code common for calls using the IC.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
@@ -1102,16 +1076,15 @@ void FastCodeGenerator::EmitCallWithIC(Call* expr, RelocInfo::Mode reloc_info) {
Visit(args->at(i));
ASSERT_EQ(Expression::kValue, args->at(i)->context());
}
- // Record source position for debugger.
+ __ Set(ecx, Immediate(name));
+ // Record source position of the IC call.
SetSourcePosition(expr->position());
- // Call the IC initialization code.
- Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count,
- NOT_IN_LOOP);
- __ call(ic, reloc_info);
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
+ Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count, in_loop);
+ __ call(ic, mode);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- // Discard the function left on TOS.
- DropAndMove(expr->context(), eax);
+ Move(expr->context(), eax);
}
@@ -1128,7 +1101,6 @@ void FastCodeGenerator::EmitCallWithStub(Call* expr) {
__ CallStub(&stub);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- // Discard the function left on TOS.
DropAndMove(expr->context(), eax);
}
@@ -1142,11 +1114,9 @@ void FastCodeGenerator::VisitCall(Call* expr) {
// Call to the identifier 'eval'.
UNREACHABLE();
} else if (var != NULL && !var->is_this() && var->is_global()) {
- // Call to a global variable.
- __ push(Immediate(var->name()));
- // Push global object as receiver for the call IC lookup.
+ // Push global object as receiver for the call IC.
__ push(CodeGenerator::GlobalObject());
- EmitCallWithIC(expr, RelocInfo::CODE_TARGET_CONTEXT);
+ EmitCallWithIC(expr, var->name(), RelocInfo::CODE_TARGET_CONTEXT);
} else if (var != NULL && var->slot() != NULL &&
var->slot()->type() == Slot::LOOKUP) {
// Call to a lookup slot.
@@ -1157,9 +1127,8 @@ void FastCodeGenerator::VisitCall(Call* expr) {
Literal* key = prop->key()->AsLiteral();
if (key != NULL && key->handle()->IsSymbol()) {
// Call to a named property, use call IC.
- __ push(Immediate(key->handle()));
Visit(prop->obj());
- EmitCallWithIC(expr, RelocInfo::CODE_TARGET);
+ EmitCallWithIC(expr, key->handle(), RelocInfo::CODE_TARGET);
} else {
// Call to a keyed property, use keyed load IC followed by function
// call.
@@ -1251,7 +1220,6 @@ void FastCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
if (expr->is_jsruntime()) {
// Prepare for calling JS runtime function.
- __ push(Immediate(expr->name()));
__ mov(eax, CodeGenerator::GlobalObject());
__ push(FieldOperand(eax, GlobalObject::kBuiltinsOffset));
}
@@ -1264,19 +1232,18 @@ void FastCodeGenerator::VisitCallRuntime(CallRuntime* expr) {
}
if (expr->is_jsruntime()) {
- // Call the JS runtime function.
- Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count,
- NOT_IN_LOOP);
+ // Call the JS runtime function via a call IC.
+ __ Set(ecx, Immediate(expr->name()));
+ InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
+ Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count, in_loop);
__ call(ic, RelocInfo::CODE_TARGET);
// Restore context register.
__ mov(esi, Operand(ebp, StandardFrameConstants::kContextOffset));
- // Discard the function left on TOS.
- DropAndMove(expr->context(), eax);
} else {
// Call the C runtime function.
__ CallRuntime(expr->function(), arg_count);
- Move(expr->context(), eax);
}
+ Move(expr->context(), eax);
}
@@ -1685,7 +1652,65 @@ void FastCodeGenerator::VisitCompareOperation(CompareOperation* expr) {
}
-#undef __
+void FastCodeGenerator::VisitThisFunction(ThisFunction* expr) {
+ __ mov(eax, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ Move(expr->context(), eax);
+}
+
+
+Register FastCodeGenerator::result_register() { return eax; }
+Register FastCodeGenerator::context_register() { return esi; }
+
+
+void FastCodeGenerator::StoreToFrameField(int frame_offset, Register value) {
+ ASSERT_EQ(POINTER_SIZE_ALIGN(frame_offset), frame_offset);
+ __ mov(Operand(ebp, frame_offset), value);
+}
+
+
+void FastCodeGenerator::LoadContextField(Register dst, int context_index) {
+ __ mov(dst, CodeGenerator::ContextOperand(esi, context_index));
+}
+
+
+// ----------------------------------------------------------------------------
+// Non-local control flow support.
+
+void FastCodeGenerator::EnterFinallyBlock() {
+ // Cook return address on top of stack (smi encoded Code* delta)
+ ASSERT(!result_register().is(edx));
+ __ mov(edx, Operand(esp, 0));
+ __ sub(Operand(edx), Immediate(masm_->CodeObject()));
+ ASSERT_EQ(1, kSmiTagSize + kSmiShiftSize);
+ ASSERT_EQ(0, kSmiTag);
+ __ add(edx, Operand(edx)); // Convert to smi.
+ __ mov(Operand(esp, 0), edx);
+ // Store result register while executing finally block.
+ __ push(result_register());
+}
+
+
+void FastCodeGenerator::ExitFinallyBlock() {
+ ASSERT(!result_register().is(edx));
+ // Restore result register from stack.
+ __ pop(result_register());
+ // Uncook return address.
+ __ mov(edx, Operand(esp, 0));
+ __ sar(edx, 1); // Convert smi to int.
+ __ add(Operand(edx), Immediate(masm_->CodeObject()));
+ __ mov(Operand(esp, 0), edx);
+ // And return.
+ __ ret(0);
+}
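
Cooking rewrites the raw return address as a smi-encoded offset from the code object's start, so the stack slot survives a GC that moves the code; uncooking reverses it against the (possibly relocated) code object. A standalone model of the round trip, with plain integers standing in for addresses:

#include <cassert>
#include <cstdint>

int32_t cook(int32_t ret_addr, int32_t code_start) {
  return (ret_addr - code_start) << 1;  // sub CodeObject, then add edx, edx (smi tag)
}

int32_t uncook(int32_t cooked, int32_t code_start) {
  return (cooked >> 1) + code_start;    // sar edx, 1, then add CodeObject
}

int main() {
  int32_t code_start = 0x1000, ret_addr = 0x1234;
  int32_t cooked = cook(ret_addr, code_start);
  assert(uncook(cooked, code_start) == ret_addr);
  int32_t moved = 0x8000;  // code object relocated by GC
  assert(uncook(cooked, moved) == moved + (ret_addr - code_start));
  return 0;
}
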
+
+
+void FastCodeGenerator::ThrowException() {
+ __ push(result_register());
+ __ CallRuntime(Runtime::kThrow, 1);
+}
+
+#undef __
+
} } // namespace v8::internal
diff --git a/deps/v8/src/ia32/ic-ia32.cc b/deps/v8/src/ia32/ic-ia32.cc
index 6988fe09f6..58fe2dc994 100644
--- a/deps/v8/src/ia32/ic-ia32.cc
+++ b/deps/v8/src/ia32/ic-ia32.cc
@@ -48,9 +48,13 @@ namespace internal {
// must always call a backup property load that is complete.
// This function is safe to call if the receiver has fast properties,
// or if name is not a symbol, and will jump to the miss_label in that case.
-static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
- Register r0, Register r1, Register r2,
- Register name) {
+static void GenerateDictionaryLoad(MacroAssembler* masm,
+ Label* miss_label,
+ Register r0,
+ Register r1,
+ Register r2,
+ Register name,
+ DictionaryCheck check_dictionary) {
// Register use:
//
// r0 - used to hold the property dictionary.
@@ -86,11 +90,15 @@ static void GenerateDictionaryLoad(MacroAssembler* masm, Label* miss_label,
__ cmp(r0, JS_BUILTINS_OBJECT_TYPE);
__ j(equal, miss_label, not_taken);
- // Check that the properties array is a dictionary.
+ // Load properties array.
__ mov(r0, FieldOperand(r1, JSObject::kPropertiesOffset));
- __ cmp(FieldOperand(r0, HeapObject::kMapOffset),
- Immediate(Factory::hash_table_map()));
- __ j(not_equal, miss_label);
+
+ // Check that the properties array is a dictionary.
+ if (check_dictionary == CHECK_DICTIONARY) {
+ __ cmp(FieldOperand(r0, HeapObject::kMapOffset),
+ Immediate(Factory::hash_table_map()));
+ __ j(not_equal, miss_label);
+ }
// Compute the capacity mask.
const int kCapacityOffset =
@@ -223,7 +231,8 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
// -- esp[4] : name
// -- esp[8] : receiver
// -----------------------------------
- Label slow, check_string, index_int, index_string, check_pixel_array;
+ Label slow, check_string, index_int, index_string;
+ Label check_pixel_array, probe_dictionary;
// Load name and receiver.
__ mov(eax, Operand(esp, kPointerSize));
@@ -302,17 +311,72 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) {
__ test(ebx, Immediate(String::kIsArrayIndexMask));
__ j(not_zero, &index_string, not_taken);
- // If the string is a symbol, do a quick inline probe of the receiver's
- // dictionary, if it exists.
+ // Is the string a symbol?
__ movzx_b(ebx, FieldOperand(edx, Map::kInstanceTypeOffset));
__ test(ebx, Immediate(kIsSymbolMask));
__ j(zero, &slow, not_taken);
- // Probe the dictionary leaving result in ecx.
- GenerateDictionaryLoad(masm, &slow, ebx, ecx, edx, eax);
+
+ // If the receiver is a fast-case object, check the keyed lookup
+ // cache. Otherwise probe the dictionary leaving result in ecx.
+ __ mov(ebx, FieldOperand(ecx, JSObject::kPropertiesOffset));
+ __ cmp(FieldOperand(ebx, HeapObject::kMapOffset),
+ Immediate(Factory::hash_table_map()));
+ __ j(equal, &probe_dictionary);
+
+ // Load the map of the receiver, compute the keyed lookup cache hash
+ // based on 32 bits of the map pointer and the string hash.
+ __ mov(ebx, FieldOperand(ecx, HeapObject::kMapOffset));
+ __ mov(edx, ebx);
+ __ shr(edx, KeyedLookupCache::kMapHashShift);
+ __ mov(eax, FieldOperand(eax, String::kHashFieldOffset));
+ __ shr(eax, String::kHashShift);
+ __ xor_(edx, Operand(eax));
+ __ and_(edx, KeyedLookupCache::kCapacityMask);
+
+ // Load the key (consisting of map and symbol) from the cache and
+ // check for match.
+ ExternalReference cache_keys
+ = ExternalReference::keyed_lookup_cache_keys();
+ __ mov(edi, edx);
+ __ shl(edi, kPointerSizeLog2 + 1);
+ __ cmp(ebx, Operand::StaticArray(edi, times_1, cache_keys));
+ __ j(not_equal, &slow);
+ __ add(Operand(edi), Immediate(kPointerSize));
+ __ mov(edi, Operand::StaticArray(edi, times_1, cache_keys));
+ __ cmp(edi, Operand(esp, kPointerSize));
+ __ j(not_equal, &slow);
+
+ // Get field offset and check that it is an in-object property.
+ ExternalReference cache_field_offsets
+ = ExternalReference::keyed_lookup_cache_field_offsets();
+ __ mov(eax,
+ Operand::StaticArray(edx, times_pointer_size, cache_field_offsets));
+ __ movzx_b(edx, FieldOperand(ebx, Map::kInObjectPropertiesOffset));
+ __ cmp(eax, Operand(edx));
+ __ j(above_equal, &slow);
+
+ // Load in-object property.
+ __ sub(eax, Operand(edx));
+ __ movzx_b(edx, FieldOperand(ebx, Map::kInstanceSizeOffset));
+ __ add(eax, Operand(edx));
+ __ mov(eax, FieldOperand(ecx, eax, times_pointer_size, 0));
+ __ ret(0);
+
+ // Do a quick inline probe of the receiver's dictionary, if it
+ // exists.
+ __ bind(&probe_dictionary);
+ GenerateDictionaryLoad(masm,
+ &slow,
+ ebx,
+ ecx,
+ edx,
+ eax,
+ DICTIONARY_CHECK_DONE);
GenerateCheckNonObjectOrLoaded(masm, &slow, ecx, edx);
__ mov(eax, Operand(ecx));
__ IncrementCounter(&Counters::keyed_load_generic_symbol, 1);
__ ret(0);
+
// If the hash field contains an array index pick it out. The assert checks
// that the constants for the maximum number of digits for an array index
// cached in the hash field and the number of bits reserved for it does not
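
The probe above hashes map-pointer bits against the symbol's hash, masks by the capacity, and then compares a two-word (map, symbol) key entry, which is why the index is scaled by kPointerSizeLog2 + 1. A rough model of the probe; the shift amounts and capacity here are assumptions, not KeyedLookupCache's real constants:

#include <cstdint>
#include <cstdio>

const int kCapacity = 64;               // assumed cache size
const int kCapacityMask = kCapacity - 1;

int Hash(uintptr_t map, uint32_t string_hash) {
  // Mix map-pointer bits with the string hash, as the assembly does
  // with kMapHashShift and kHashShift (shift amounts assumed here).
  int hash = static_cast<int>((map >> 2) ^ string_hash);
  return hash & kCapacityMask;
}

int main() {
  uintptr_t map = 0xCAFE00;       // fake map pointer
  uint32_t string_hash = 0x1234;  // fake symbol hash
  int index = Hash(map, string_hash);
  // Entry i stores (map, symbol) at keys[2*i] and keys[2*i + 1],
  // matching the shl edi, kPointerSizeLog2 + 1 above.
  std::printf("probe entry %d (words %d and %d)\n",
              index, 2 * index, 2 * index + 1);
  return 0;
}
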
@@ -824,13 +888,16 @@ Object* CallIC_Miss(Arguments args);
void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
// ----------- S t a t e -------------
+ // -- ecx : name
+ // -- esp[0] : return address
+ // -- esp[(argc - n) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
Label number, non_number, non_string, boolean, probe, miss;
// Get the receiver of the function from the stack; 1 ~ return address.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
- // Get the name of the function from the stack; 2 ~ return address, receiver
- __ mov(ecx, Operand(esp, (argc + 2) * kPointerSize));
// Probe the stub cache.
Code::Flags flags =
@@ -876,7 +943,7 @@ void CallIC::GenerateMegamorphic(MacroAssembler* masm, int argc) {
// Cache miss: Jump to runtime.
__ bind(&miss);
- Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
+ GenerateMiss(masm, argc);
}
@@ -884,27 +951,34 @@ static void GenerateNormalHelper(MacroAssembler* masm,
int argc,
bool is_global_object,
Label* miss) {
- // Search dictionary - put result in register edx.
- GenerateDictionaryLoad(masm, miss, eax, edx, ebx, ecx);
+ // ----------- S t a t e -------------
+ // -- ecx : name
+ // -- edx : receiver
+ // -- esp[0] : return address
+ // -- esp[(argc - n) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- esp[(argc + 1) * 4] : receiver
+ // -----------------------------------
- // Move the result to register edi and check that it isn't a smi.
- __ mov(edi, Operand(edx));
- __ test(edx, Immediate(kSmiTagMask));
+ // Search dictionary - put result in register edi.
+ __ mov(edi, edx);
+ GenerateDictionaryLoad(masm, miss, eax, edi, ebx, ecx, CHECK_DICTIONARY);
+
+ // Check that the result is not a smi.
+ __ test(edi, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
- // Check that the value is a JavaScript function.
- __ CmpObjectType(edx, JS_FUNCTION_TYPE, edx);
+ // Check that the value is a JavaScript function, fetching its map into eax.
+ __ CmpObjectType(edi, JS_FUNCTION_TYPE, eax);
__ j(not_equal, miss, not_taken);
- // Check that the function has been loaded.
- __ mov(edx, FieldOperand(edi, JSFunction::kMapOffset));
- __ mov(edx, FieldOperand(edx, Map::kBitField2Offset));
- __ test(edx, Immediate(1 << Map::kNeedsLoading));
+ // Check that the function has been loaded. eax holds function's map.
+ __ mov(eax, FieldOperand(eax, Map::kBitField2Offset));
+ __ test(eax, Immediate(1 << Map::kNeedsLoading));
__ j(not_zero, miss, not_taken);
- // Patch the receiver with the global proxy if necessary.
+ // Patch the receiver on stack with the global proxy if necessary.
if (is_global_object) {
- __ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
__ mov(edx, FieldOperand(edx, GlobalObject::kGlobalReceiverOffset));
__ mov(Operand(esp, (argc + 1) * kPointerSize), edx);
}
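Rewritten this way, the helper keeps the receiver in edx throughout, so the
global-proxy patch no longer has to reload it from the stack. The check
sequence reads at the C level roughly as below; the Model types are simplified
stand-ins, not V8 classes:

    enum ModelInstanceType { MODEL_JS_FUNCTION, MODEL_OTHER };
    struct ModelHeapObject {
      ModelInstanceType instance_type;
      bool needs_loading;  // models the Map::kNeedsLoading bit in bit_field2
    };

    // Returns the callee if every check passes, or nullptr for the miss path.
    ModelHeapObject* CheckCalleeModel(ModelHeapObject* result, bool is_smi) {
      if (is_smi) return nullptr;                              // smi check
      if (result->instance_type != MODEL_JS_FUNCTION) return nullptr;
      if (result->needs_loading) return nullptr;               // lazy builtin
      return result;
    }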
@@ -917,14 +991,17 @@ static void GenerateNormalHelper(MacroAssembler* masm,
void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
// ----------- S t a t e -------------
+ // -- ecx : name
+ // -- esp[0] : return address
+ // -- esp[(argc - n) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
Label miss, global_object, non_global_object;
// Get the receiver of the function from the stack; 1 ~ return address.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
- // Get the name of the function from the stack; 2 ~ return address, receiver.
- __ mov(ecx, Operand(esp, (argc + 2) * kPointerSize));
// Check that the receiver isn't a smi.
__ test(edx, Immediate(kSmiTagMask));
@@ -973,33 +1050,33 @@ void CallIC::GenerateNormal(MacroAssembler* masm, int argc) {
// Cache miss: Jump to runtime.
__ bind(&miss);
- Generate(masm, argc, ExternalReference(IC_Utility(kCallIC_Miss)));
+ GenerateMiss(masm, argc);
}
-void CallIC::Generate(MacroAssembler* masm,
- int argc,
- const ExternalReference& f) {
+void CallIC::GenerateMiss(MacroAssembler* masm, int argc) {
// ----------- S t a t e -------------
+ // -- ecx : name
+ // -- esp[0] : return address
+ // -- esp[(argc - n) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
// Get the receiver of the function from the stack; 1 ~ return address.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
- // Get the name of the function to call from the stack.
- // 2 ~ receiver, return address.
- __ mov(ebx, Operand(esp, (argc + 2) * kPointerSize));
// Enter an internal frame.
__ EnterInternalFrame();
// Push the receiver and the name of the function.
__ push(edx);
- __ push(ebx);
+ __ push(ecx);
// Call the entry.
CEntryStub stub(1);
__ mov(eax, Immediate(2));
- __ mov(ebx, Immediate(f));
+ __ mov(ebx, Immediate(ExternalReference(IC_Utility(kCallIC_Miss))));
__ CallStub(&stub);
// Move result to edi and exit the internal frame.
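The pushes above fix the miss handler's argument contract: receiver first,
then the name taken from ecx, with eax = 2 telling CEntryStub how many stack
words to pass through. A sketch of the C++ side of that contract
(ModelCallICMiss is a hypothetical stand-in; the real entry receives a packed
Arguments object):

    struct ModelTaggedValue {};  // stand-in for a V8 tagged pointer

    // Receives exactly the two values pushed above and returns the
    // callee; the stub then moves the result into edi and invokes it.
    ModelTaggedValue* ModelCallICMiss(ModelTaggedValue* receiver,
                                      ModelTaggedValue* name) {
      // The real miss handler performs a full lookup and may patch the
      // call site; only the argument count and order matter here.
      (void)receiver;
      (void)name;
      return nullptr;
    }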
@@ -1011,11 +1088,11 @@ void CallIC::Generate(MacroAssembler* masm,
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize)); // receiver
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &invoke, not_taken);
- __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
- __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
- __ cmp(ecx, JS_GLOBAL_OBJECT_TYPE);
+ __ mov(ebx, FieldOperand(edx, HeapObject::kMapOffset));
+ __ movzx_b(ebx, FieldOperand(ebx, Map::kInstanceTypeOffset));
+ __ cmp(ebx, JS_GLOBAL_OBJECT_TYPE);
__ j(equal, &global);
- __ cmp(ecx, JS_BUILTINS_OBJECT_TYPE);
+ __ cmp(ebx, JS_BUILTINS_OBJECT_TYPE);
__ j(not_equal, &invoke);
// Patch the receiver on the stack.
@@ -1088,7 +1165,7 @@ void LoadIC::GenerateNormal(MacroAssembler* masm) {
// Search the dictionary placing the result in eax.
__ bind(&probe);
- GenerateDictionaryLoad(masm, &miss, edx, eax, ebx, ecx);
+ GenerateDictionaryLoad(masm, &miss, edx, eax, ebx, ecx, CHECK_DICTIONARY);
GenerateCheckNonObjectOrLoaded(masm, &miss, eax, edx);
__ ret(0);
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.cc b/deps/v8/src/ia32/macro-assembler-ia32.cc
index b91caa8ccc..ac2895efaf 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.cc
+++ b/deps/v8/src/ia32/macro-assembler-ia32.cc
@@ -504,6 +504,13 @@ void MacroAssembler::PushTryHandler(CodeLocation try_location,
}
+void MacroAssembler::PopTryHandler() {
+ ASSERT_EQ(0, StackHandlerConstants::kNextOffset);
+ pop(Operand::StaticVariable(ExternalReference(Top::k_handler_address)));
+ add(Operand(esp), Immediate(StackHandlerConstants::kSize - kPointerSize));
+}
+
+
Register MacroAssembler::CheckMaps(JSObject* object, Register object_reg,
JSObject* holder, Register holder_reg,
Register scratch,
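PopTryHandler is the inverse of PushTryHandler: handlers form a singly linked
list threaded through the stack, and a thread-local slot points at the
innermost one. A sketch of the unlink with stand-in types (the esp adjustment
has no analogue here; it simply abandons the rest of the handler's stack
words):

    struct ModelStackHandler {
      ModelStackHandler* next;  // lives at offset 0, hence kNextOffset == 0
      // ... state, fp and pc words follow, kSize bytes in total ...
    };

    // Stand-in for the slot behind Top::k_handler_address.
    static ModelStackHandler* model_top_handler = nullptr;

    void PopTryHandlerModel() {
      // The pop above writes the saved 'next' field back into the
      // thread-local slot, relinking the chain past this handler.
      // (Assumes at least one handler is linked, as the generated code does.)
      model_top_handler = model_top_handler->next;
    }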
@@ -834,10 +841,9 @@ void MacroAssembler::AllocateTwoByteString(Register result,
// Calculate the number of bytes needed for the characters in the string while
// observing object alignment.
ASSERT((SeqTwoByteString::kHeaderSize & kObjectAlignmentMask) == 0);
- mov(scratch1, length);
ASSERT(kShortSize == 2);
- shl(scratch1, 1);
- add(Operand(scratch1), Immediate(kObjectAlignmentMask));
+ // scratch1 = length * 2 + kObjectAlignmentMask.
+ lea(scratch1, Operand(length, length, times_1, kObjectAlignmentMask));
and_(Operand(scratch1), Immediate(~kObjectAlignmentMask));
// Allocate two byte string in new space.
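The lea folds the old mov/shl/add into a single address computation:
length + length*1 + kObjectAlignmentMask, i.e. length * 2 plus the mask. The
and then clears the low bits, so together they round the byte count up to the
object alignment. The same align-up idiom in C, with an assumed 8-byte
alignment (V8's actual object alignment may differ):

    #include <cstddef>

    const size_t kObjectAlignmentModel = 8;  // assumed; V8's value may differ
    const size_t kObjectAlignmentMaskModel = kObjectAlignmentModel - 1;

    // Bytes needed for 'length' two-byte characters, rounded up to the
    // object alignment: add the mask, then clear the low bits.
    size_t TwoByteDataSizeModel(size_t length) {
      size_t size = length * 2 + kObjectAlignmentMaskModel;  // the lea
      return size & ~kObjectAlignmentMaskModel;              // the and_
    }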
@@ -1016,17 +1022,37 @@ void MacroAssembler::TryGetFunctionPrototype(Register function,
void MacroAssembler::CallStub(CodeStub* stub) {
- ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
call(stub->GetCode(), RelocInfo::CODE_TARGET);
}
+Object* MacroAssembler::TryCallStub(CodeStub* stub) {
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
+ Object* result = stub->TryGetCode();
+ if (!result->IsFailure()) {
+ call(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
+ }
+ return result;
+}
+
+
void MacroAssembler::TailCallStub(CodeStub* stub) {
- ASSERT(allow_stub_calls()); // calls are not allowed in some stubs
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
jmp(stub->GetCode(), RelocInfo::CODE_TARGET);
}
+Object* MacroAssembler::TryTailCallStub(CodeStub* stub) {
+ ASSERT(allow_stub_calls()); // Calls are not allowed in some stubs.
+ Object* result = stub->TryGetCode();
+ if (!result->IsFailure()) {
+ jmp(Handle<Code>(Code::cast(result)), RelocInfo::CODE_TARGET);
+ }
+ return result;
+}
+
+
void MacroAssembler::StubReturn(int argc) {
ASSERT(argc >= 1 && generating_stub());
ret((argc - 1) * kPointerSize);
@@ -1331,6 +1357,18 @@ void MacroAssembler::Ret() {
}
+void MacroAssembler::Drop(int stack_elements) {
+ if (stack_elements > 0) {
+ add(Operand(esp), Immediate(stack_elements * kPointerSize));
+ }
+}
+
+
+void MacroAssembler::Move(Register dst, Handle<Object> value) {
+ mov(dst, value);
+}
+
+
void MacroAssembler::SetCounter(StatsCounter* counter, int value) {
if (FLAG_native_code_counters && counter->Enabled()) {
mov(Operand::StaticVariable(ExternalReference(counter)), Immediate(value));
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h
index a41d42e82b..160dbcbf16 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.h
+++ b/deps/v8/src/ia32/macro-assembler-ia32.h
@@ -149,6 +149,8 @@ class MacroAssembler: public Assembler {
// address must be pushed before calling this helper.
void PushTryHandler(CodeLocation try_location, HandlerType type);
+ // Unlink the stack handler on top of the stack from the try handler chain.
+ void PopTryHandler();
// ---------------------------------------------------------------------------
// Inline caching support
@@ -285,12 +287,22 @@ class MacroAssembler: public Assembler {
// ---------------------------------------------------------------------------
// Runtime calls
- // Call a code stub.
+ // Call a code stub. Generate the code if necessary.
void CallStub(CodeStub* stub);
- // Tail call a code stub (jump).
+ // Call a code stub and return the code object called. Try to generate
+ // the code if necessary. Does not perform a GC; on allocation failure it
+ // returns a retry-after-GC failure object instead.
+ Object* TryCallStub(CodeStub* stub);
+
+ // Tail call a code stub (jump). Generate the code if necessary.
void TailCallStub(CodeStub* stub);
+ // Tail call a code stub (jump) and return the code object called. Try to
+ // generate the code if necessary. Does not perform a GC; on allocation
+ // failure it returns a retry-after-GC failure object instead.
+ Object* TryTailCallStub(CodeStub* stub);
+
// Return from a code stub after popping its arguments.
void StubReturn(int argc);
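The Try* variants exist so IC and stub compilers can run at points where a GC
is not allowed: rather than allocating code (and possibly collecting), they
hand back a failure object for the caller to surface. A sketch of the
propagation idiom with simplified stand-ins (not V8's real Object/Failure
hierarchy):

    struct ModelObject {
      virtual bool IsFailure() const { return false; }
      virtual ~ModelObject() {}
    };
    struct ModelFailure : ModelObject {
      bool IsFailure() const override { return true; }
    };

    // Stand-in for stub->TryGetCode(): yields a failure when code space
    // is exhausted instead of triggering a collection.
    ModelObject* TryGetCodeModel(bool out_of_space) {
      static ModelObject code;
      static ModelFailure retry_after_gc;
      return out_of_space ? &retry_after_gc : &code;
    }

    // Caller pattern (compare GenerateLoadCallback below): thread the
    // failure out through a bool plus out-parameter so the outermost
    // compile attempt can return it and be retried after a GC.
    bool EmitCallModel(bool out_of_space, ModelFailure** failure) {
      ModelObject* result = TryGetCodeModel(out_of_space);
      if (result->IsFailure()) {
        *failure = static_cast<ModelFailure*>(result);
        return false;
      }
      // ... emit the call to the returned code object ...
      return true;
    }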
@@ -323,6 +335,12 @@ class MacroAssembler: public Assembler {
void Ret();
+ void Drop(int stack_elements);
+
+ void Call(Label* target) { call(target); }
+
+ void Move(Register target, Handle<Object> value);
+
struct Unresolved {
int pc;
uint32_t flags; // see Bootstrapper::FixupFlags decoders/encoders.
diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc
index 425c51dcae..0e836154d3 100644
--- a/deps/v8/src/ia32/stub-cache-ia32.cc
+++ b/deps/v8/src/ia32/stub-cache-ia32.cc
@@ -152,11 +152,10 @@ void StubCache::GenerateProbe(MacroAssembler* masm,
}
-template <typename Pushable>
static void PushInterceptorArguments(MacroAssembler* masm,
Register receiver,
Register holder,
- Pushable name,
+ Register name,
JSObject* holder_obj) {
__ push(receiver);
__ push(holder);
@@ -285,11 +284,10 @@ void StubCompiler::GenerateFastPropertyLoad(MacroAssembler* masm,
}
-template <class Pushable>
static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
Register receiver,
Register holder,
- Pushable name,
+ Register name,
JSObject* holder_obj) {
PushInterceptorArguments(masm, receiver, holder, name, holder_obj);
@@ -495,8 +493,8 @@ class LoadInterceptorCompiler BASE_EMBEDDED {
class CallInterceptorCompiler BASE_EMBEDDED {
public:
- explicit CallInterceptorCompiler(const ParameterCount& arguments)
- : arguments_(arguments), argc_(arguments.immediate()) {}
+ CallInterceptorCompiler(const ParameterCount& arguments, Register name)
+ : arguments_(arguments), argc_(arguments.immediate()), name_(name) {}
void CompileCacheable(MacroAssembler* masm,
StubCompiler* stub_compiler,
@@ -527,17 +525,17 @@ class CallInterceptorCompiler BASE_EMBEDDED {
}
__ EnterInternalFrame();
- __ push(holder); // save the holder
+ __ push(holder); // Save the holder.
+ __ push(name_); // Save the name.
- CompileCallLoadPropertyWithInterceptor(
- masm,
- receiver,
- holder,
- // Under EnterInternalFrame this refers to name.
- Operand(ebp, (argc_ + 3) * kPointerSize),
- holder_obj);
+ CompileCallLoadPropertyWithInterceptor(masm,
+ receiver,
+ holder,
+ name_,
+ holder_obj);
- __ pop(receiver); // restore holder
+ __ pop(name_); // Restore the name.
+ __ pop(receiver); // Restore the holder.
__ LeaveInternalFrame();
__ cmp(eax, Factory::no_interceptor_result_sentinel());
@@ -577,11 +575,13 @@ class CallInterceptorCompiler BASE_EMBEDDED {
JSObject* holder_obj,
Label* miss_label) {
__ EnterInternalFrame();
+ // Save the name_ register across the call.
+ __ push(name_);
PushInterceptorArguments(masm,
receiver,
holder,
- Operand(ebp, (argc_ + 3) * kPointerSize),
+ name_,
holder_obj);
ExternalReference ref = ExternalReference(
@@ -592,12 +592,15 @@ class CallInterceptorCompiler BASE_EMBEDDED {
CEntryStub stub(1);
__ CallStub(&stub);
+ // Restore the name_ register.
+ __ pop(name_);
__ LeaveInternalFrame();
}
private:
const ParameterCount& arguments_;
int argc_;
+ Register name_;
};
@@ -754,7 +757,7 @@ void StubCompiler::GenerateLoadField(JSObject* object,
}
-void StubCompiler::GenerateLoadCallback(JSObject* object,
+bool StubCompiler::GenerateLoadCallback(JSObject* object,
JSObject* holder,
Register receiver,
Register name_reg,
@@ -762,7 +765,8 @@ void StubCompiler::GenerateLoadCallback(JSObject* object,
Register scratch2,
AccessorInfo* callback,
String* name,
- Label* miss) {
+ Label* miss,
+ Failure** failure) {
// Check that the receiver isn't a smi.
__ test(receiver, Immediate(kSmiTagMask));
__ j(zero, miss, not_taken);
@@ -798,7 +802,14 @@ void StubCompiler::GenerateLoadCallback(JSObject* object,
Address getter_address = v8::ToCData<Address>(callback->getter());
ApiFunction fun(getter_address);
ApiGetterEntryStub stub(callback_handle, &fun);
- __ CallStub(&stub);
+ // Calling the stub may try to allocate (if the code is not already
+ // generated). Do not allow the call to perform a garbage
+ // collection but instead return the allocation failure object.
+ Object* result = masm()->TryCallStub(&stub);
+ if (result->IsFailure()) {
+ *failure = Failure::cast(result);
+ return false;
+ }
// We need to avoid using eax since that now holds the result.
Register tmp = other.is(eax) ? reg : other;
@@ -806,6 +817,7 @@ void StubCompiler::GenerateLoadCallback(JSObject* object,
__ LeaveInternalFrame();
__ ret(0);
+ return true;
}
@@ -885,6 +897,11 @@ Object* CallStubCompiler::CompileCallField(Object* object,
int index,
String* name) {
// ----------- S t a t e -------------
+ // -- ecx : name
+ // -- esp[0] : return address
+ // -- esp[(argc - n) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
Label miss;
@@ -899,7 +916,7 @@ Object* CallStubCompiler::CompileCallField(Object* object,
// Do the right check and compute the holder register.
Register reg =
CheckPrototypes(JSObject::cast(object), edx, holder,
- ebx, ecx, name, &miss);
+ ebx, eax, name, &miss);
GenerateFastPropertyLoad(masm(), edi, reg, holder, index);
@@ -935,6 +952,11 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
String* name,
CheckType check) {
// ----------- S t a t e -------------
+ // -- ecx : name
+ // -- esp[0] : return address
+ // -- esp[(argc - n) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
Label miss;
@@ -956,7 +978,7 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
case RECEIVER_MAP_CHECK:
// Check that the maps haven't changed.
CheckPrototypes(JSObject::cast(object), edx, holder,
- ebx, ecx, name, &miss);
+ ebx, eax, name, &miss);
// Patch the receiver on the stack with the global proxy if
// necessary.
@@ -968,15 +990,15 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
case STRING_CHECK:
// Check that the object is a two-byte string or a symbol.
- __ mov(ecx, FieldOperand(edx, HeapObject::kMapOffset));
- __ movzx_b(ecx, FieldOperand(ecx, Map::kInstanceTypeOffset));
- __ cmp(ecx, FIRST_NONSTRING_TYPE);
+ __ mov(eax, FieldOperand(edx, HeapObject::kMapOffset));
+ __ movzx_b(eax, FieldOperand(eax, Map::kInstanceTypeOffset));
+ __ cmp(eax, FIRST_NONSTRING_TYPE);
__ j(above_equal, &miss, not_taken);
// Check that the maps starting from the prototype haven't changed.
GenerateLoadGlobalFunctionPrototype(masm(),
Context::STRING_FUNCTION_INDEX,
- ecx);
- CheckPrototypes(JSObject::cast(object->GetPrototype()), ecx, holder,
+ eax);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
ebx, edx, name, &miss);
break;
@@ -985,14 +1007,14 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
// Check that the object is a smi or a heap number.
__ test(edx, Immediate(kSmiTagMask));
__ j(zero, &fast, taken);
- __ CmpObjectType(edx, HEAP_NUMBER_TYPE, ecx);
+ __ CmpObjectType(edx, HEAP_NUMBER_TYPE, eax);
__ j(not_equal, &miss, not_taken);
__ bind(&fast);
// Check that the maps starting from the prototype haven't changed.
GenerateLoadGlobalFunctionPrototype(masm(),
Context::NUMBER_FUNCTION_INDEX,
- ecx);
- CheckPrototypes(JSObject::cast(object->GetPrototype()), ecx, holder,
+ eax);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
ebx, edx, name, &miss);
break;
}
@@ -1008,15 +1030,15 @@ Object* CallStubCompiler::CompileCallConstant(Object* object,
// Check that the maps starting from the prototype haven't changed.
GenerateLoadGlobalFunctionPrototype(masm(),
Context::BOOLEAN_FUNCTION_INDEX,
- ecx);
- CheckPrototypes(JSObject::cast(object->GetPrototype()), ecx, holder,
+ eax);
+ CheckPrototypes(JSObject::cast(object->GetPrototype()), eax, holder,
ebx, edx, name, &miss);
break;
}
case JSARRAY_HAS_FAST_ELEMENTS_CHECK:
CheckPrototypes(JSObject::cast(object), edx, holder,
- ebx, ecx, name, &miss);
+ ebx, eax, name, &miss);
// Make sure object->HasFastElements().
// Get the elements array of the object.
__ mov(ebx, FieldOperand(edx, JSObject::kElementsOffset));
@@ -1059,6 +1081,11 @@ Object* CallStubCompiler::CompileCallInterceptor(Object* object,
JSObject* holder,
String* name) {
// ----------- S t a t e -------------
+ // -- ecx : name
+ // -- esp[0] : return address
+ // -- esp[(argc - n) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
Label miss;
@@ -1071,7 +1098,7 @@ Object* CallStubCompiler::CompileCallInterceptor(Object* object,
// Get the receiver from the stack.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
- CallInterceptorCompiler compiler(arguments());
+ CallInterceptorCompiler compiler(arguments(), ecx);
CompileLoadInterceptor(&compiler,
this,
masm(),
@@ -1081,7 +1108,7 @@ Object* CallStubCompiler::CompileCallInterceptor(Object* object,
&lookup,
edx,
ebx,
- ecx,
+ edi,
&miss);
// Restore receiver.
@@ -1120,6 +1147,11 @@ Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
JSFunction* function,
String* name) {
// ----------- S t a t e -------------
+ // -- ecx : name
+ // -- esp[0] : return address
+ // -- esp[(argc - n) * 4] : arg[n] (zero-based)
+ // -- ...
+ // -- esp[(argc + 1) * 4] : receiver
// -----------------------------------
Label miss;
@@ -1138,15 +1170,32 @@ Object* CallStubCompiler::CompileCallGlobal(JSObject* object,
}
// Check that the maps haven't changed.
- CheckPrototypes(object, edx, holder, ebx, ecx, name, &miss);
+ CheckPrototypes(object, edx, holder, ebx, eax, name, &miss);
// Get the value from the cell.
__ mov(edi, Immediate(Handle<JSGlobalPropertyCell>(cell)));
__ mov(edi, FieldOperand(edi, JSGlobalPropertyCell::kValueOffset));
// Check that the cell contains the same function.
- __ cmp(Operand(edi), Immediate(Handle<JSFunction>(function)));
- __ j(not_equal, &miss, not_taken);
+ if (Heap::InNewSpace(function)) {
+ // We can't embed a pointer to a function in new space, so we have
+ // to verify that the shared function info is unchanged. This has
+ // the nice side effect that multiple closures based on the same
+ // function can all use this call IC. Before we load through the
+ // function, we have to verify that it is still a function.
+ __ test(edi, Immediate(kSmiTagMask));
+ __ j(zero, &miss, not_taken);
+ __ CmpObjectType(edi, JS_FUNCTION_TYPE, ebx);
+ __ j(not_equal, &miss, not_taken);
+
+ // Check the shared function info. Make sure it hasn't changed.
+ __ cmp(FieldOperand(edi, JSFunction::kSharedFunctionInfoOffset),
+ Immediate(Handle<SharedFunctionInfo>(function->shared())));
+ __ j(not_equal, &miss, not_taken);
+ } else {
+ __ cmp(Operand(edi), Immediate(Handle<JSFunction>(function)));
+ __ j(not_equal, &miss, not_taken);
+ }
// Patch the receiver on the stack with the global proxy.
if (object->IsGlobalObject()) {
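The split above exists because code objects embed heap pointers as
immediates, and a pointer into new space would be invalidated by the next
scavenge. Comparing the shared function info, which can safely be embedded,
keeps the IC valid and, as the comment notes, lets sibling closures share it.
The equivalent decision in C, with stand-in types:

    struct ModelSharedFunctionInfo {};
    struct ModelJSFunction {
      ModelSharedFunctionInfo* shared;
    };

    // Does 'value' (the current cell contents) still match the function
    // this stub was compiled against?
    bool CellMatchesModel(ModelJSFunction* value,
                          ModelJSFunction* compiled_for,
                          bool compiled_for_in_new_space) {
      if (compiled_for_in_new_space) {
        // Can't bake the closure pointer into code: compare the shared
        // function info, which equivalent closures also carry.
        return value != nullptr && value->shared == compiled_for->shared;
      }
      return value == compiled_for;  // embedded pointer compare is safe
    }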
@@ -1420,10 +1469,10 @@ Object* LoadStubCompiler::CompileLoadField(JSObject* object,
}
-Object* LoadStubCompiler::CompileLoadCallback(JSObject* object,
+Object* LoadStubCompiler::CompileLoadCallback(String* name,
+ JSObject* object,
JSObject* holder,
- AccessorInfo* callback,
- String* name) {
+ AccessorInfo* callback) {
// ----------- S t a t e -------------
// -- ecx : name
// -- esp[0] : return address
@@ -1432,8 +1481,11 @@ Object* LoadStubCompiler::CompileLoadCallback(JSObject* object,
Label miss;
__ mov(eax, Operand(esp, kPointerSize));
- GenerateLoadCallback(object, holder, eax, ecx, ebx, edx,
- callback, name, &miss);
+ Failure* failure = Failure::InternalError();
+ bool success = GenerateLoadCallback(object, holder, eax, ecx, ebx, edx,
+ callback, name, &miss, &failure);
+ if (!success) return failure;
+
__ bind(&miss);
GenerateLoadMiss(masm(), Code::LOAD_IC);
@@ -1597,8 +1649,11 @@ Object* KeyedLoadStubCompiler::CompileLoadCallback(String* name,
__ cmp(Operand(eax), Immediate(Handle<String>(name)));
__ j(not_equal, &miss, not_taken);
- GenerateLoadCallback(receiver, holder, ecx, eax, ebx, edx,
- callback, name, &miss);
+ Failure* failure = Failure::InternalError();
+ bool success = GenerateLoadCallback(receiver, holder, ecx, eax, ebx, edx,
+ callback, name, &miss, &failure);
+ if (!success) return failure;
+
__ bind(&miss);
__ DecrementCounter(&Counters::keyed_load_callback, 1);
GenerateLoadMiss(masm(), Code::KEYED_LOAD_IC);
diff --git a/deps/v8/src/ia32/virtual-frame-ia32.cc b/deps/v8/src/ia32/virtual-frame-ia32.cc
index e770cddb15..ba6488607d 100644
--- a/deps/v8/src/ia32/virtual-frame-ia32.cc
+++ b/deps/v8/src/ia32/virtual-frame-ia32.cc
@@ -925,14 +925,17 @@ Result VirtualFrame::CallKeyedStoreIC() {
Result VirtualFrame::CallCallIC(RelocInfo::Mode mode,
int arg_count,
int loop_nesting) {
- // Arguments, receiver, and function name are on top of the frame.
- // The IC expects them on the stack. It does not drop the function
- // name slot (but it does drop the rest).
+ // Function name, arguments, and receiver are on top of the frame.
+ // The IC expects the name in ecx and the rest on the stack and
+ // drops them all.
InLoopFlag in_loop = loop_nesting > 0 ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic = cgen()->ComputeCallInitialize(arg_count, in_loop);
// Spill args, receiver, and function. The call will drop args and
// receiver.
- PrepareForCall(arg_count + 2, arg_count + 1);
+ Result name = Pop();
+ PrepareForCall(arg_count + 1, arg_count + 1); // Arguments + receiver.
+ name.ToRegister(ecx);
+ name.Unuse();
return RawCallCodeObject(ic, mode);
}
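On the virtual-frame side, the convention change means the name is popped into
ecx rather than spilled; the IC then consumes the arguments and the receiver.
A toy model of the bookkeeping (ToyFrame is hypothetical, not a V8 type):

    #include <vector>

    struct ToyFrame {
      std::vector<int> slots;  // receiver pushed first, name on top
      int ecx = 0;             // models the name register

      void CallCallIC(int arg_count) {
        ecx = slots.back();    // Result name = Pop(); name.ToRegister(ecx);
        slots.pop_back();
        // PrepareForCall(arg_count + 1, arg_count + 1): the IC drops
        // the arguments and the receiver, so the frame sheds them all.
        for (int i = 0; i < arg_count + 1; ++i) slots.pop_back();
      }
    };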
diff --git a/deps/v8/src/ia32/virtual-frame-ia32.h b/deps/v8/src/ia32/virtual-frame-ia32.h
index 314ea73b28..6c6b4816d7 100644
--- a/deps/v8/src/ia32/virtual-frame-ia32.h
+++ b/deps/v8/src/ia32/virtual-frame-ia32.h
@@ -341,9 +341,9 @@ class VirtualFrame: public ZoneObject {
// of the frame. Key and receiver are not dropped.
Result CallKeyedStoreIC();
- // Call call IC. Arguments, reciever, and function name are found
- // on top of the frame. Function name slot is not dropped. The
- // argument count does not include the receiver.
+ // Call call IC. Function name, arguments, and receiver are found on top
+ // of the frame and dropped by the call. The argument count does not
+ // include the receiver.
Result CallCallIC(RelocInfo::Mode mode, int arg_count, int loop_nesting);
// Allocate and call JS function as constructor. Arguments,