path: root/deps/v8/src/ia32/codegen-ia32.cc
author     Ryan Dahl <ry@tinyclouds.org>  2010-10-01 14:18:59 -0700
committer  Ryan Dahl <ry@tinyclouds.org>  2010-10-01 14:19:11 -0700
commit     c9627e0a0d191583e266836b8ce279e6dc527a40 (patch)
tree       f830b00031cc1d5eed1988d76e0a4647ca0be3a8 /deps/v8/src/ia32/codegen-ia32.cc
parent     5829716649a543f2c7e43859e5c0e32491b61198 (diff)
download   node-new-c9627e0a0d191583e266836b8ce279e6dc527a40.tar.gz
Upgrade V8 to 2.4.7
Diffstat (limited to 'deps/v8/src/ia32/codegen-ia32.cc')
-rw-r--r--  deps/v8/src/ia32/codegen-ia32.cc | 162
1 file changed, 104 insertions, 58 deletions
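The upgrade touches this file along two lines. Most hunks are a mechanical rename of the old `Variable::slot()` accessor to `AsSlot()` (which also drops the `scope()->arguments()->var()` indirection); the hunks in `DeferredReferenceGetNamedValue` and `EmitNamedLoad` additionally teach the ia32 code generator to inline contextual (global) property loads. A minimal sketch of the accessor shape the rename implies, assuming `rewrite_` is the variable's rewrite expression (illustrative, not the verbatim 2.4.7 header):

```cpp
// Sketch: a variable rewrites to an Expression during allocation;
// AsSlot() is the checked downcast call sites used to spell as slot().
class Variable {
 public:
  Slot* AsSlot() const {
    // NULL when the variable has no rewrite, or rewrites to something
    // other than a Slot (e.g. an arguments Property).
    return rewrite_ == NULL ? NULL : rewrite_->AsSlot();
  }
 private:
  Expression* rewrite_;
};
```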
diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc
index 86f3877c7e..9c8573cea0 100644
--- a/deps/v8/src/ia32/codegen-ia32.cc
+++ b/deps/v8/src/ia32/codegen-ia32.cc
@@ -249,7 +249,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
// the function.
for (int i = 0; i < scope()->num_parameters(); i++) {
Variable* par = scope()->parameter(i);
- Slot* slot = par->slot();
+ Slot* slot = par->AsSlot();
if (slot != NULL && slot->type() == Slot::CONTEXT) {
// The use of SlotOperand below is safe in unspilled code
// because the slot is guaranteed to be a context slot.
@@ -285,7 +285,7 @@ void CodeGenerator::Generate(CompilationInfo* info) {
// Initialize ThisFunction reference if present.
if (scope()->is_function_scope() && scope()->function() != NULL) {
frame_->Push(Factory::the_hole_value());
- StoreToSlot(scope()->function()->slot(), NOT_CONST_INIT);
+ StoreToSlot(scope()->function()->AsSlot(), NOT_CONST_INIT);
}
@@ -717,10 +717,10 @@ void CodeGenerator::LoadTypeofExpression(Expression* expr) {
Property property(&global, &key, RelocInfo::kNoPosition);
Reference ref(this, &property);
ref.GetValue();
- } else if (variable != NULL && variable->slot() != NULL) {
+ } else if (variable != NULL && variable->AsSlot() != NULL) {
// For a variable that rewrites to a slot, we signal it is the immediate
// subexpression of a typeof.
- LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
+ LoadFromSlotCheckForArguments(variable->AsSlot(), INSIDE_TYPEOF);
} else {
// Anything else can be handled normally.
Load(expr);
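The `INSIDE_TYPEOF` flag matters because `typeof x` must evaluate to "undefined" for an undeclared `x` rather than throw a ReferenceError, so slot loads emitted here have to avoid the throwing lookup path. A sketch of the semantic distinction, with hypothetical helpers (`LookupVariable`, `ThrowReferenceError`, `Undefined` are illustrative, not V8 API):

```cpp
enum TypeofState { INSIDE_TYPEOF, NOT_INSIDE_TYPEOF };

// Sketch: how the flag changes the behavior of an unresolved load.
Object* LoadDynamicBinding(const char* name, TypeofState state) {
  Object* value = LookupVariable(name);              // hypothetical lookup
  if (value == NULL) {                               // binding does not exist
    if (state == INSIDE_TYPEOF) return Undefined();  // typeof -> "undefined"
    return ThrowReferenceError(name);                // a plain read throws
  }
  return value;
}
```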
@@ -759,17 +759,17 @@ Result CodeGenerator::StoreArgumentsObject(bool initial) {
frame_->Push(&result);
}
- Variable* arguments = scope()->arguments()->var();
- Variable* shadow = scope()->arguments_shadow()->var();
- ASSERT(arguments != NULL && arguments->slot() != NULL);
- ASSERT(shadow != NULL && shadow->slot() != NULL);
+ Variable* arguments = scope()->arguments();
+ Variable* shadow = scope()->arguments_shadow();
+ ASSERT(arguments != NULL && arguments->AsSlot() != NULL);
+ ASSERT(shadow != NULL && shadow->AsSlot() != NULL);
JumpTarget done;
bool skip_arguments = false;
if (mode == LAZY_ARGUMENTS_ALLOCATION && !initial) {
// We have to skip storing into the arguments slot if it has
// already been written to. This can happen if a function
// has a local variable named 'arguments'.
- LoadFromSlot(arguments->slot(), NOT_INSIDE_TYPEOF);
+ LoadFromSlot(arguments->AsSlot(), NOT_INSIDE_TYPEOF);
Result probe = frame_->Pop();
if (probe.is_constant()) {
// We have to skip updating the arguments object if it has
@@ -782,10 +782,10 @@ Result CodeGenerator::StoreArgumentsObject(bool initial) {
}
}
if (!skip_arguments) {
- StoreToSlot(arguments->slot(), NOT_CONST_INIT);
+ StoreToSlot(arguments->AsSlot(), NOT_CONST_INIT);
if (mode == LAZY_ARGUMENTS_ALLOCATION) done.Bind();
}
- StoreToSlot(shadow->slot(), NOT_CONST_INIT);
+ StoreToSlot(shadow->AsSlot(), NOT_CONST_INIT);
return frame_->Pop();
}
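The probe exists because, under lazy arguments allocation, a user-level local named 'arguments' may already occupy the slot by the time the object is materialized. The intent, with hypothetical helpers standing in for the frame/Result machinery above (a sketch, not the generated code):

```cpp
// Sketch: only store the freshly built arguments object if the slot
// still holds the sentinel written at function entry; a local named
// 'arguments' would have replaced the sentinel already.
void StoreArgumentsObjectLazily(Slot* arguments_slot, Object* args_object) {
  Object* current = LoadSlotValue(arguments_slot);  // hypothetical helper
  if (!IsArgumentsSentinel(current)) return;        // user value: keep it
  StoreSlotValue(arguments_slot, args_object);      // hypothetical helper
}
```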
@@ -842,7 +842,7 @@ void CodeGenerator::LoadReference(Reference* ref) {
LoadGlobal();
ref->set_type(Reference::NAMED);
} else {
- ASSERT(var->slot() != NULL);
+ ASSERT(var->AsSlot() != NULL);
ref->set_type(Reference::SLOT);
}
} else {
@@ -3274,7 +3274,7 @@ void CodeGenerator::CallApplyLazy(Expression* applicand,
// Load the receiver and the existing arguments object onto the
// expression stack. Avoid allocating the arguments object here.
Load(receiver);
- LoadFromSlot(scope()->arguments()->var()->slot(), NOT_INSIDE_TYPEOF);
+ LoadFromSlot(scope()->arguments()->AsSlot(), NOT_INSIDE_TYPEOF);
// Emit the source position information after having loaded the
// receiver and the arguments.
@@ -3536,7 +3536,7 @@ void CodeGenerator::VisitDeclaration(Declaration* node) {
Comment cmnt(masm_, "[ Declaration");
Variable* var = node->proxy()->var();
ASSERT(var != NULL); // must have been resolved
- Slot* slot = var->slot();
+ Slot* slot = var->AsSlot();
// If it was not possible to allocate the variable at compile time,
// we need to "declare" it at runtime to make sure it actually
@@ -4252,7 +4252,7 @@ void CodeGenerator::VisitForStatement(ForStatement* node) {
// the bottom check of the loop condition.
if (node->is_fast_smi_loop()) {
// Set number type of the loop variable to smi.
- SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
+ SetTypeForStackSlot(node->loop_variable()->AsSlot(), TypeInfo::Smi());
}
Visit(node->body());
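A fast smi loop is one whose counter provably stays a small integer, which lets `SetTypeForStackSlot` pin the slot's `TypeInfo` to smi at both the top and the bottom of the body and compile the update as a raw integer operation. Roughly the qualifying conditions, with hypothetical predicates (a sketch of the analysis, not V8's actual implementation):

```cpp
// Sketch: when a for-loop counts as a fast smi loop.
bool IsFastSmiLoop(ForStatement* node) {
  return node->loop_variable() != NULL &&   // a simple counter variable
         InitializerIsSmiConstant(node) &&  // e.g. 'var i = 0'
         ConditionComparesToSmi(node) &&    // e.g. 'i < 100'
         OnlyUpdateAssignsCounter(node);    // body never reassigns 'i'
}
```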
@@ -4278,7 +4278,7 @@ void CodeGenerator::VisitForStatement(ForStatement* node) {
// expression if we are in a fast smi loop condition.
if (node->is_fast_smi_loop() && has_valid_frame()) {
// Set number type of the loop variable to smi.
- SetTypeForStackSlot(node->loop_variable()->slot(), TypeInfo::Smi());
+ SetTypeForStackSlot(node->loop_variable()->AsSlot(), TypeInfo::Smi());
}
// Based on the condition analysis, compile the backward jump as
@@ -4577,8 +4577,8 @@ void CodeGenerator::VisitTryCatchStatement(TryCatchStatement* node) {
// Store the caught exception in the catch variable.
Variable* catch_var = node->catch_var()->var();
- ASSERT(catch_var != NULL && catch_var->slot() != NULL);
- StoreToSlot(catch_var->slot(), NOT_CONST_INIT);
+ ASSERT(catch_var != NULL && catch_var->AsSlot() != NULL);
+ StoreToSlot(catch_var->AsSlot(), NOT_CONST_INIT);
// Remove the exception from the stack.
frame_->Drop();
@@ -5173,7 +5173,7 @@ void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
done->Jump(result);
} else if (slot->var()->mode() == Variable::DYNAMIC_LOCAL) {
- Slot* potential_slot = slot->var()->local_if_not_shadowed()->slot();
+ Slot* potential_slot = slot->var()->local_if_not_shadowed()->AsSlot();
Expression* rewrite = slot->var()->local_if_not_shadowed()->rewrite();
if (potential_slot != NULL) {
// Generate fast case for locals that rewrite to slots.
@@ -5206,7 +5206,7 @@ void CodeGenerator::EmitDynamicLoadFromSlotFastCase(Slot* slot,
Result arguments = allocator()->Allocate();
ASSERT(arguments.is_valid());
__ mov(arguments.reg(),
- ContextSlotOperandCheckExtensions(obj_proxy->var()->slot(),
+ ContextSlotOperandCheckExtensions(obj_proxy->var()->AsSlot(),
arguments,
slow));
frame_->Push(&arguments);
@@ -5714,7 +5714,7 @@ void CodeGenerator::EmitSlotAssignment(Assignment* node) {
Comment cmnt(masm(), "[ Variable Assignment");
Variable* var = node->target()->AsVariableProxy()->AsVariable();
ASSERT(var != NULL);
- Slot* slot = var->slot();
+ Slot* slot = var->AsSlot();
ASSERT(slot != NULL);
// Evaluate the right-hand side.
@@ -6063,14 +6063,14 @@ void CodeGenerator::VisitCall(Call* node) {
// in generated code. If we succeed, there is no need to perform a
// context lookup in the runtime system.
JumpTarget done;
- if (var->slot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
- ASSERT(var->slot()->type() == Slot::LOOKUP);
+ if (var->AsSlot() != NULL && var->mode() == Variable::DYNAMIC_GLOBAL) {
+ ASSERT(var->AsSlot()->type() == Slot::LOOKUP);
JumpTarget slow;
// Prepare the stack for the call to
// ResolvePossiblyDirectEvalNoLookup by pushing the loaded
// function, the first argument to the eval call and the
// receiver.
- Result fun = LoadFromGlobalSlotCheckExtensions(var->slot(),
+ Result fun = LoadFromGlobalSlotCheckExtensions(var->AsSlot(),
NOT_INSIDE_TYPEOF,
&slow);
frame_->Push(&fun);
@@ -6153,8 +6153,8 @@ void CodeGenerator::VisitCall(Call* node) {
frame_->RestoreContextRegister();
frame_->Push(&result);
- } else if (var != NULL && var->slot() != NULL &&
- var->slot()->type() == Slot::LOOKUP) {
+ } else if (var != NULL && var->AsSlot() != NULL &&
+ var->AsSlot()->type() == Slot::LOOKUP) {
// ----------------------------------
// JavaScript examples:
//
@@ -6173,7 +6173,7 @@ void CodeGenerator::VisitCall(Call* node) {
// Generate fast case for loading functions from slots that
// correspond to local/global variables or arguments unless they
// are shadowed by eval-introduced bindings.
- EmitDynamicLoadFromSlotFastCase(var->slot(),
+ EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
NOT_INSIDE_TYPEOF,
&function,
&slow,
@@ -8053,7 +8053,7 @@ void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
Variable* variable = node->expression()->AsVariableProxy()->AsVariable();
if (variable != NULL) {
- Slot* slot = variable->slot();
+ Slot* slot = variable->AsSlot();
if (variable->is_global()) {
LoadGlobal();
frame_->Push(variable->name());
@@ -9144,9 +9144,15 @@ class DeferredReferenceGetNamedValue: public DeferredCode {
public:
DeferredReferenceGetNamedValue(Register dst,
Register receiver,
- Handle<String> name)
- : dst_(dst), receiver_(receiver), name_(name) {
- set_comment("[ DeferredReferenceGetNamedValue");
+ Handle<String> name,
+ bool is_contextual)
+ : dst_(dst),
+ receiver_(receiver),
+ name_(name),
+ is_contextual_(is_contextual) {
+ set_comment(is_contextual
+ ? "[ DeferredReferenceGetNamedValue (contextual)"
+ : "[ DeferredReferenceGetNamedValue");
}
virtual void Generate();
@@ -9158,6 +9164,7 @@ class DeferredReferenceGetNamedValue: public DeferredCode {
Register dst_;
Register receiver_;
Handle<String> name_;
+ bool is_contextual_;
};
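`DeferredReferenceGetNamedValue` follows the code generator's deferred-code pattern: the fast path is emitted inline, branches out of line to `Generate()` on a miss, and the slow path rejoins at a bound exit label. The constructor change threads `is_contextual` through so `Generate()` can pick the relocation mode and miss counter. Schematically, at a call site (a sketch of the shape; the real plumbing lives in `DeferredCode`):

```cpp
// Sketch: driving a deferred object from the inlined fast path.
DeferredReferenceGetNamedValue* deferred =
    new DeferredReferenceGetNamedValue(dst, receiver, name,
                                       /* is_contextual */ true);
// ... emit inlined checks; on failure jump to the deferred body:
deferred->Branch(not_equal);   // taken -> out-of-line Generate()
// ... fast-path code falls through; both paths meet here:
deferred->BindExit();
```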
@@ -9167,9 +9174,15 @@ void DeferredReferenceGetNamedValue::Generate() {
}
__ Set(ecx, Immediate(name_));
Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize));
- __ call(ic, RelocInfo::CODE_TARGET);
- // The call must be followed by a test eax instruction to indicate
- // that the inobject property case was inlined.
+ RelocInfo::Mode mode = is_contextual_
+ ? RelocInfo::CODE_TARGET_CONTEXT
+ : RelocInfo::CODE_TARGET;
+ __ call(ic, mode);
+ // The call must be followed by one of:
+ // - a test eax instruction to indicate that the inobject property
+ // case was inlined.
+ // - a mov ecx instruction to indicate that the contextual property
+ // load was inlined.
//
// Store the delta to the map check instruction here in the test
// instruction. Use masm_-> instead of the __ macro since the
@@ -9177,8 +9190,13 @@ void DeferredReferenceGetNamedValue::Generate() {
int delta_to_patch_site = masm_->SizeOfCodeGeneratedSince(patch_site());
// Here we use masm_-> instead of the __ macro because this is the
// instruction that gets patched and coverage code gets in the way.
- masm_->test(eax, Immediate(-delta_to_patch_site));
- __ IncrementCounter(&Counters::named_load_inline_miss, 1);
+ if (is_contextual_) {
+ masm_->mov(ecx, -delta_to_patch_site);
+ __ IncrementCounter(&Counters::named_load_global_inline_miss, 1);
+ } else {
+ masm_->test(eax, Immediate(-delta_to_patch_site));
+ __ IncrementCounter(&Counters::named_load_inline_miss, 1);
+ }
if (!dst_.is(eax)) __ mov(dst_, eax);
}
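The marker instruction after the IC call (`test eax` for in-object loads, `mov ecx` for contextual loads) encodes `-delta_to_patch_site` in its 32-bit immediate, so the runtime can walk backwards from the call's return address to the patchable map check. A sketch of the recovery arithmetic on the patching side (illustrative; the real routines live in the ia32 IC code):

```cpp
// Sketch: both 'test eax, imm32' and 'mov ecx, imm32' are a one-byte
// opcode followed by a 32-bit immediate, which here holds the negative
// distance back to the inlined map-check sequence.
Address InlinedLoadPatchSite(Address marker_instruction_address) {
  int delta = *reinterpret_cast<int*>(marker_instruction_address + 1);
  return marker_instruction_address + delta;  // delta is negative
}
```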
@@ -9349,12 +9367,17 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
#ifdef DEBUG
int original_height = frame()->height();
#endif
+
+ bool contextual_load_in_builtin =
+ is_contextual &&
+ (Bootstrapper::IsActive() ||
+ (!info_->closure().is_null() && info_->closure()->IsBuiltin()));
+
Result result;
- // Do not inline the inobject property case for loads from the global
- // object. Also do not inline for unoptimized code. This saves time in
- // the code generator. Unoptimized code is toplevel code or code that is
- // not in a loop.
- if (is_contextual || scope()->is_global_scope() || loop_nesting() == 0) {
+ // Do not inline in global code or when not in a loop.
+ if (scope()->is_global_scope() ||
+ loop_nesting() == 0 ||
+ contextual_load_in_builtin) {
Comment cmnt(masm(), "[ Load from named Property");
frame()->Push(name);
@@ -9367,19 +9390,26 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
// instruction here.
__ nop();
} else {
- // Inline the inobject property case.
- Comment cmnt(masm(), "[ Inlined named property load");
+ // Inline the property load.
+ Comment cmnt(masm(), is_contextual
+ ? "[ Inlined contextual property load"
+ : "[ Inlined named property load");
Result receiver = frame()->Pop();
receiver.ToRegister();
result = allocator()->Allocate();
ASSERT(result.is_valid());
DeferredReferenceGetNamedValue* deferred =
- new DeferredReferenceGetNamedValue(result.reg(), receiver.reg(), name);
+ new DeferredReferenceGetNamedValue(result.reg(),
+ receiver.reg(),
+ name,
+ is_contextual);
- // Check that the receiver is a heap object.
- __ test(receiver.reg(), Immediate(kSmiTagMask));
- deferred->Branch(zero);
+ if (!is_contextual) {
+ // Check that the receiver is a heap object.
+ __ test(receiver.reg(), Immediate(kSmiTagMask));
+ deferred->Branch(zero);
+ }
__ bind(deferred->patch_site());
// This is the map check instruction that will be patched (so we can't
@@ -9391,17 +9421,33 @@ Result CodeGenerator::EmitNamedLoad(Handle<String> name, bool is_contextual) {
// which allows the assert below to succeed and patching to work.
deferred->Branch(not_equal);
- // The delta from the patch label to the load offset must be statically
- // known.
+ // The delta from the patch label to the actual load must be
+ // statically known.
ASSERT(masm()->SizeOfCodeGeneratedSince(deferred->patch_site()) ==
LoadIC::kOffsetToLoadInstruction);
- // The initial (invalid) offset has to be large enough to force a 32-bit
- // instruction encoding to allow patching with an arbitrary offset. Use
- // kMaxInt (minus kHeapObjectTag).
- int offset = kMaxInt;
- masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
- __ IncrementCounter(&Counters::named_load_inline, 1);
+ if (is_contextual) {
+ // Load the (initially invalid) cell and get its value.
+ masm()->mov(result.reg(), Factory::null_value());
+ if (FLAG_debug_code) {
+ __ cmp(FieldOperand(result.reg(), HeapObject::kMapOffset),
+ Factory::global_property_cell_map());
+ __ Assert(equal, "Uninitialized inlined contextual load");
+ }
+ __ mov(result.reg(),
+ FieldOperand(result.reg(), JSGlobalPropertyCell::kValueOffset));
+ __ cmp(result.reg(), Factory::the_hole_value());
+ deferred->Branch(equal);
+ __ IncrementCounter(&Counters::named_load_global_inline, 1);
+ } else {
+ // The initial (invalid) offset has to be large enough to force a 32-bit
+ // instruction encoding to allow patching with an arbitrary offset. Use
+ // kMaxInt (minus kHeapObjectTag).
+ int offset = kMaxInt;
+ masm()->mov(result.reg(), FieldOperand(receiver.reg(), offset));
+ __ IncrementCounter(&Counters::named_load_inline, 1);
+ }
+
deferred->BindExit();
}
ASSERT(frame()->height() == original_height - 1);
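For the contextual case the patchable datum is no longer a field offset but the cell pointer itself: the `mov` of `Factory::null_value()` is later patched to point at the global property's `JSGlobalPropertyCell`, and the inlined sequence loads the cell's value, bailing out to the deferred miss path when it finds the hole (a deleted or not-yet-initialized global). What the patched sequence computes, as hedged pseudocode (`TakeMissPath` is illustrative):

```cpp
// Sketch of the inlined contextual load after patching.
Object* InlinedContextualLoad(JSGlobalPropertyCell* cell) {
  Object* value = cell->value();  // JSGlobalPropertyCell::kValueOffset
  if (value->IsTheHole()) {
    return TakeMissPath();        // hypothetical: the deferred miss stub
  }
  return value;
}
```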
@@ -9741,7 +9787,7 @@ void Reference::GetValue() {
switch (type_) {
case SLOT: {
Comment cmnt(masm, "[ Load from Slot");
- Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
+ Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
ASSERT(slot != NULL);
cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
if (!persist_after_get_) set_unloaded();
@@ -9786,7 +9832,7 @@ void Reference::TakeValue() {
return;
}
- Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
+ Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
ASSERT(slot != NULL);
if (slot->type() == Slot::LOOKUP ||
slot->type() == Slot::CONTEXT ||
@@ -9819,7 +9865,7 @@ void Reference::SetValue(InitState init_state) {
switch (type_) {
case SLOT: {
Comment cmnt(masm, "[ Store to Slot");
- Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
+ Slot* slot = expression_->AsVariableProxy()->AsVariable()->AsSlot();
ASSERT(slot != NULL);
cgen_->StoreToSlot(slot, init_state);
set_unloaded();