author     Ben Noordhuis <info@bnoordhuis.nl>  2013-04-17 16:10:37 +0200
committer  Ben Noordhuis <info@bnoordhuis.nl>  2013-04-17 16:10:37 +0200
commit     9f682265d6631a29457abeb53827d01fa77493c8
tree       92a1eec49b1f280931598a72dcf0cca3d795f210 /deps/v8/src/ia32/full-codegen-ia32.cc
parent     951e0b69fa3c8b1a5d708e29de9d6f7d1db79827
download   node-new-9f682265d6631a29457abeb53827d01fa77493c8.tar.gz
deps: upgrade v8 to 3.18.0
Diffstat (limited to 'deps/v8/src/ia32/full-codegen-ia32.cc')
-rw-r--r--  deps/v8/src/ia32/full-codegen-ia32.cc  34
1 file changed, 21 insertions(+), 13 deletions(-)
diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc
index efbdf13544..19989b1c62 100644
--- a/deps/v8/src/ia32/full-codegen-ia32.cc
+++ b/deps/v8/src/ia32/full-codegen-ia32.cc
@@ -164,6 +164,8 @@ void FullCodeGenerator::Generate() {
{ Comment cmnt(masm_, "[ Allocate locals");
int locals_count = info->scope()->num_stack_slots();
+ // Generators allocate locals, if any, in context slots.
+ ASSERT(!info->function()->is_generator() || locals_count == 0);
if (locals_count == 1) {
__ push(Immediate(isolate()->factory()->undefined_value()));
} else if (locals_count > 1) {
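
[Note] Hedged aside, not part of the diff: the new assertion encodes the invariant that a generator keeps all of its locals in heap context slots, since they must survive suspension at a yield, so the prologue never has stack locals to allocate. A stand-alone C++ model of that invariant (types and field names are hypothetical, not V8's):

    #include <cassert>

    // Hypothetical model: scope analysis assigns a generator's locals to
    // context slots so they outlive suspension, leaving zero stack slots.
    struct FunctionModel {
      bool is_generator;
      int stack_locals;    // slots the prologue would push onto the stack
      int context_locals;  // slots allocated in the heap context
    };

    void CheckLocalsInvariant(const FunctionModel& f) {
      // Mirrors: ASSERT(!info->function()->is_generator() || locals_count == 0);
      assert(!f.is_generator || f.stack_locals == 0);
    }
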
@@ -351,13 +353,6 @@ void FullCodeGenerator::EmitBackEdgeBookkeeping(IterationStatement* stmt,
// the deoptimization input data found in the optimized code.
RecordBackEdge(stmt->OsrEntryId());
- // Loop stack checks can be patched to perform on-stack replacement. In
- // order to decide whether or not to perform OSR we embed the loop depth
- // in a test instruction after the call so we can extract it from the OSR
- // builtin.
- ASSERT(loop_depth() > 0);
- __ test(eax, Immediate(Min(loop_depth(), Code::kMaxLoopNestingMarker)));
-
EmitProfilingCounterReset();
__ bind(&ok);
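
[Note] Hedged aside: the deleted instructions embedded the clamped loop depth in the immediate of a `test eax` instruction emitted right after the back-edge call, so the on-stack-replacement builtin could read it back out of the instruction stream when deciding whether to OSR. The clamping itself, distilled into plain C++ (the marker cap value here is illustrative only):

    #include <algorithm>

    // Cap on the loop-depth marker; illustrative constant, not V8's.
    const int kMaxLoopNestingMarker = 6;

    // The value the removed code placed in the test instruction's immediate.
    int LoopNestingMarker(int loop_depth) {
      return std::min(loop_depth, kMaxLoopNestingMarker);
    }
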
@@ -1214,7 +1209,7 @@ void FullCodeGenerator::EmitNewClosure(Handle<SharedFunctionInfo> info,
!pretenure &&
scope()->is_function_scope() &&
info->num_literals() == 0) {
- FastNewClosureStub stub(info->language_mode());
+ FastNewClosureStub stub(info->language_mode(), info->is_generator());
__ push(Immediate(info));
__ CallStub(&stub);
} else {
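
[Note] Hedged aside: the stub constructor gains an is_generator flag so that generator and non-generator closures produce distinct stub instances rather than sharing one keyed on language mode alone. A hypothetical sketch of how such a flag might fold into a stub's cache key (not V8's actual encoding):

    enum LanguageMode { CLASSIC_MODE, STRICT_MODE };

    // Hypothetical model: both properties must feed the stub's key,
    // or a generator closure could wrongly reuse a non-generator stub.
    struct FastNewClosureStubModel {
      LanguageMode language_mode;
      bool is_generator;
      FastNewClosureStubModel(LanguageMode mode, bool generator)
          : language_mode(mode), is_generator(generator) {}
      int MinorKey() const {
        return static_cast<int>(language_mode) | (is_generator ? 1 << 1 : 0);
      }
    };
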
@@ -1670,24 +1665,33 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
Handle<FixedArrayBase> constant_elements_values(
FixedArrayBase::cast(constant_elements->get(1)));
- __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
- __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
- __ push(Immediate(Smi::FromInt(expr->literal_index())));
- __ push(Immediate(constant_elements));
Heap* heap = isolate()->heap();
if (has_constant_fast_elements &&
constant_elements_values->map() == heap->fixed_cow_array_map()) {
// If the elements are already FAST_*_ELEMENTS, the boilerplate cannot
// change, so it's possible to specialize the stub in advance.
__ IncrementCounter(isolate()->counters()->cow_arrays_created_stub(), 1);
+ __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
+ __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
+ __ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(
FastCloneShallowArrayStub::COPY_ON_WRITE_ELEMENTS,
DONT_TRACK_ALLOCATION_SITE,
length);
__ CallStub(&stub);
} else if (expr->depth() > 1) {
+ __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
+ __ push(Immediate(Smi::FromInt(expr->literal_index())));
+ __ push(Immediate(constant_elements));
__ CallRuntime(Runtime::kCreateArrayLiteral, 3);
- } else if (length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+ } else if (Serializer::enabled() ||
+ length > FastCloneShallowArrayStub::kMaximumClonedLength) {
+ __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ push(FieldOperand(ebx, JSFunction::kLiteralsOffset));
+ __ push(Immediate(Smi::FromInt(expr->literal_index())));
+ __ push(Immediate(constant_elements));
__ CallRuntime(Runtime::kCreateArrayLiteralShallow, 3);
} else {
ASSERT(IsFastSmiOrObjectElementsKind(constant_elements_kind) ||
@@ -1704,6 +1708,10 @@ void FullCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) {
allocation_site_mode = DONT_TRACK_ALLOCATION_SITE;
}
+ __ mov(ebx, Operand(ebp, JavaScriptFrameConstants::kFunctionOffset));
+ __ mov(eax, FieldOperand(ebx, JSFunction::kLiteralsOffset));
+ __ mov(ebx, Immediate(Smi::FromInt(expr->literal_index())));
+ __ mov(ecx, Immediate(constant_elements));
FastCloneShallowArrayStub stub(mode, allocation_site_mode, length);
__ CallStub(&stub);
}
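
[Note] Hedged aside: this hunk moves the argument setup into each branch because the FastCloneShallowArrayStub paths now take the literals array, literal index, and constant elements in eax/ebx/ecx registers, while the two runtime-call paths still push them on the stack; it also routes serializer-enabled builds to the runtime. The strategy selection, distilled into a stand-alone C++ function (enum and parameter names are illustrative):

    enum CloneStrategy {
      kCowStub,         // FastCloneShallowArrayStub, COPY_ON_WRITE_ELEMENTS
      kRuntimeDeep,     // Runtime::kCreateArrayLiteral
      kRuntimeShallow,  // Runtime::kCreateArrayLiteralShallow
      kFastStub         // FastCloneShallowArrayStub, clone-elements mode
    };

    CloneStrategy Select(bool constant_cow_elements, int depth,
                         bool serializer_enabled, int length,
                         int max_cloned_length) {
      if (constant_cow_elements) return kCowStub;  // boilerplate cannot change
      if (depth > 1) return kRuntimeDeep;          // nested literals need the runtime
      if (serializer_enabled || length > max_cloned_length)
        return kRuntimeShallow;                    // stub not usable here
      return kFastStub;
    }
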