summary | refs | log | tree | commit | diff
path: root/deps/v8/src/ia32
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/ia32')
-rw-r--r--  deps/v8/src/ia32/assembler-ia32.cc       |  54
-rw-r--r--  deps/v8/src/ia32/assembler-ia32.h        |  17
-rw-r--r--  deps/v8/src/ia32/code-stubs-ia32.cc      |  20
-rw-r--r--  deps/v8/src/ia32/codegen-ia32.cc         |  90
-rw-r--r--  deps/v8/src/ia32/codegen-ia32.h          |   5
-rw-r--r--  deps/v8/src/ia32/full-codegen-ia32.cc    | 106
-rw-r--r--  deps/v8/src/ia32/ic-ia32.cc              |   1
-rw-r--r--  deps/v8/src/ia32/macro-assembler-ia32.h  |   2
-rw-r--r--  deps/v8/src/ia32/stub-cache-ia32.cc      | 178
9 files changed, 274 insertions, 199 deletions
diff --git a/deps/v8/src/ia32/assembler-ia32.cc b/deps/v8/src/ia32/assembler-ia32.cc
index 125f503bec..019f478adc 100644
--- a/deps/v8/src/ia32/assembler-ia32.cc
+++ b/deps/v8/src/ia32/assembler-ia32.cc
@@ -298,8 +298,7 @@ static void InitCoverageLog();
// Spare buffer.
byte* Assembler::spare_buffer_ = NULL;
-Assembler::Assembler(void* buffer, int buffer_size)
- : positions_recorder_(this) {
+Assembler::Assembler(void* buffer, int buffer_size) {
if (buffer == NULL) {
// Do our own buffer management.
if (buffer_size <= kMinimalBufferSize) {
@@ -340,6 +339,10 @@ Assembler::Assembler(void* buffer, int buffer_size)
reloc_info_writer.Reposition(buffer_ + buffer_size, pc_);
last_pc_ = NULL;
+ current_statement_position_ = RelocInfo::kNoPosition;
+ current_position_ = RelocInfo::kNoPosition;
+ written_statement_position_ = current_statement_position_;
+ written_position_ = current_position_;
#ifdef GENERATED_CODE_COVERAGE
InitCoverageLog();
#endif
@@ -1578,7 +1581,7 @@ void Assembler::call(const Operand& adr) {
void Assembler::call(Handle<Code> code, RelocInfo::Mode rmode) {
- positions_recorder()->WriteRecordedPositions();
+ WriteRecordedPositions();
EnsureSpace ensure_space(this);
last_pc_ = pc_;
ASSERT(RelocInfo::IsCodeTarget(rmode));
@@ -2461,14 +2464,14 @@ void Assembler::Print() {
void Assembler::RecordJSReturn() {
- positions_recorder()->WriteRecordedPositions();
+ WriteRecordedPositions();
EnsureSpace ensure_space(this);
RecordRelocInfo(RelocInfo::JS_RETURN);
}
void Assembler::RecordDebugBreakSlot() {
- positions_recorder()->WriteRecordedPositions();
+ WriteRecordedPositions();
EnsureSpace ensure_space(this);
RecordRelocInfo(RelocInfo::DEBUG_BREAK_SLOT);
}
@@ -2482,6 +2485,47 @@ void Assembler::RecordComment(const char* msg) {
}
+void Assembler::RecordPosition(int pos) {
+ ASSERT(pos != RelocInfo::kNoPosition);
+ ASSERT(pos >= 0);
+ current_position_ = pos;
+}
+
+
+void Assembler::RecordStatementPosition(int pos) {
+ ASSERT(pos != RelocInfo::kNoPosition);
+ ASSERT(pos >= 0);
+ current_statement_position_ = pos;
+}
+
+
+bool Assembler::WriteRecordedPositions() {
+ bool written = false;
+
+ // Write the statement position if it is different from what was written last
+ // time.
+ if (current_statement_position_ != written_statement_position_) {
+ EnsureSpace ensure_space(this);
+ RecordRelocInfo(RelocInfo::STATEMENT_POSITION, current_statement_position_);
+ written_statement_position_ = current_statement_position_;
+ written = true;
+ }
+
+ // Write the position if it is different from what was written last time and
+ // also different from the written statement position.
+ if (current_position_ != written_position_ &&
+ current_position_ != written_statement_position_) {
+ EnsureSpace ensure_space(this);
+ RecordRelocInfo(RelocInfo::POSITION, current_position_);
+ written_position_ = current_position_;
+ written = true;
+ }
+
+ // Return whether something was written.
+ return written;
+}
+
+
void Assembler::GrowBuffer() {
ASSERT(overflow());
if (!own_buffer_) FATAL("external code buffer is too small");
diff --git a/deps/v8/src/ia32/assembler-ia32.h b/deps/v8/src/ia32/assembler-ia32.h
index 79637a1901..5286788fa7 100644
--- a/deps/v8/src/ia32/assembler-ia32.h
+++ b/deps/v8/src/ia32/assembler-ia32.h
@@ -521,6 +521,7 @@ class Assembler : public Malloced {
void push(const Immediate& x);
void push(Register src);
void push(const Operand& src);
+ void push(Label* label, RelocInfo::Mode relocation_mode);
void pop(Register dst);
void pop(const Operand& dst);
@@ -846,11 +847,17 @@ class Assembler : public Malloced {
// Use --debug_code to enable.
void RecordComment(const char* msg);
+ void RecordPosition(int pos);
+ void RecordStatementPosition(int pos);
+ bool WriteRecordedPositions();
+
// Writes a single word of data in the code stream.
// Used for inline tables, e.g., jump-tables.
void dd(uint32_t data, RelocInfo::Mode reloc_info);
int pc_offset() const { return pc_ - buffer_; }
+ int current_statement_position() const { return current_statement_position_; }
+ int current_position() const { return current_position_; }
// Check if there is less than kGap bytes available in the buffer.
// If this is the case, we need to grow the buffer before emitting
@@ -862,8 +869,6 @@ class Assembler : public Malloced {
static bool IsNop(Address addr) { return *addr == 0x90; }
- PositionsRecorder* positions_recorder() { return &positions_recorder_; }
-
// Avoid overflows for displacements etc.
static const int kMaximalBufferSize = 512*MB;
static const int kMinimalBufferSize = 4*KB;
@@ -942,9 +947,11 @@ class Assembler : public Malloced {
// push-pop elimination
byte* last_pc_;
- PositionsRecorder positions_recorder_;
-
- friend class PositionsRecorder;
+ // source position information
+ int current_statement_position_;
+ int current_position_;
+ int written_statement_position_;
+ int written_position_;
};
diff --git a/deps/v8/src/ia32/code-stubs-ia32.cc b/deps/v8/src/ia32/code-stubs-ia32.cc
index a7d658bdce..b2b73926b9 100644
--- a/deps/v8/src/ia32/code-stubs-ia32.cc
+++ b/deps/v8/src/ia32/code-stubs-ia32.cc
@@ -3067,26 +3067,6 @@ void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
}
-void ApiCallEntryStub::Generate(MacroAssembler* masm) {
- __ PrepareCallApiFunction(kStackSpace, kArgc);
- STATIC_ASSERT(kArgc == 5);
-
- // Allocate the v8::Arguments structure in the arguments' space since
- // it's not controlled by GC.
- __ mov(ApiParameterOperand(1), eax); // v8::Arguments::implicit_args_.
- __ mov(ApiParameterOperand(2), ebx); // v8::Arguments::values_.
- __ mov(ApiParameterOperand(3), edx); // v8::Arguments::length_.
- // v8::Arguments::is_construct_call_.
- __ mov(ApiParameterOperand(4), Immediate(0));
-
- // v8::InvocationCallback's argument.
- __ lea(eax, ApiParameterOperand(1));
- __ mov(ApiParameterOperand(0), eax);
-
- __ CallApiFunctionAndReturn(fun(), kArgc);
-}
-
-
void CEntryStub::GenerateCore(MacroAssembler* masm,
Label* throw_normal_exception,
Label* throw_termination_exception,
diff --git a/deps/v8/src/ia32/codegen-ia32.cc b/deps/v8/src/ia32/codegen-ia32.cc
index 6f4ef87e85..6d23dd7df9 100644
--- a/deps/v8/src/ia32/codegen-ia32.cc
+++ b/deps/v8/src/ia32/codegen-ia32.cc
@@ -3734,7 +3734,7 @@ void CodeGenerator::VisitReturnStatement(ReturnStatement* node) {
CodeForStatementPosition(node);
Load(node->expression());
Result return_value = frame_->Pop();
- masm()->positions_recorder()->WriteRecordedPositions();
+ masm()->WriteRecordedPositions();
if (function_return_is_shadowed_) {
function_return_.Jump(&return_value);
} else {
@@ -7292,6 +7292,88 @@ void CodeGenerator::GenerateRegExpConstructResult(ZoneList<Expression*>* args) {
}
+void CodeGenerator::GenerateRegExpCloneResult(ZoneList<Expression*>* args) {
+ ASSERT_EQ(1, args->length());
+
+ Load(args->at(0));
+ Result object_result = frame_->Pop();
+ object_result.ToRegister(eax);
+ object_result.Unuse();
+ {
+ VirtualFrame::SpilledScope spilled_scope;
+
+ Label done;
+
+ __ test(eax, Immediate(kSmiTagMask));
+ __ j(zero, &done);
+
+ // Load JSRegExpResult map into edx.
+ // Arguments to this function should be results of calling RegExp exec,
+ // which is either an unmodified JSRegExpResult or null. Anything not having
+ // the unmodified JSRegExpResult map is returned unmodified.
+ // This also ensures that elements are fast.
+ __ mov(edx, ContextOperand(esi, Context::GLOBAL_INDEX));
+ __ mov(edx, FieldOperand(edx, GlobalObject::kGlobalContextOffset));
+ __ mov(edx, ContextOperand(edx, Context::REGEXP_RESULT_MAP_INDEX));
+ __ cmp(edx, FieldOperand(eax, HeapObject::kMapOffset));
+ __ j(not_equal, &done);
+
+ if (FLAG_debug_code) {
+ // Check that object really has empty properties array, as the map
+ // should guarantee.
+ __ cmp(FieldOperand(eax, JSObject::kPropertiesOffset),
+ Immediate(Factory::empty_fixed_array()));
+ __ Check(equal, "JSRegExpResult: default map but non-empty properties.");
+ }
+
+ DeferredAllocateInNewSpace* allocate_fallback =
+ new DeferredAllocateInNewSpace(JSRegExpResult::kSize,
+ ebx,
+ edx.bit() | eax.bit());
+
+ // All set, copy the contents to a new object.
+ __ AllocateInNewSpace(JSRegExpResult::kSize,
+ ebx,
+ ecx,
+ no_reg,
+ allocate_fallback->entry_label(),
+ TAG_OBJECT);
+ __ bind(allocate_fallback->exit_label());
+
+ // Copy all fields from eax to ebx.
+ STATIC_ASSERT(JSRegExpResult::kSize % (2 * kPointerSize) == 0);
+ // There is an even number of fields, so unroll the loop once
+ // for efficiency.
+ for (int i = 0; i < JSRegExpResult::kSize; i += 2 * kPointerSize) {
+ STATIC_ASSERT(JSObject::kMapOffset % (2 * kPointerSize) == 0);
+ if (i != JSObject::kMapOffset) {
+ // The map was already loaded into edx.
+ __ mov(edx, FieldOperand(eax, i));
+ }
+ __ mov(ecx, FieldOperand(eax, i + kPointerSize));
+
+ STATIC_ASSERT(JSObject::kElementsOffset % (2 * kPointerSize) == 0);
+ if (i == JSObject::kElementsOffset) {
+ // If the elements array isn't empty, make it copy-on-write
+ // before copying it.
+ Label empty;
+ __ cmp(Operand(edx), Immediate(Factory::empty_fixed_array()));
+ __ j(equal, &empty);
+ __ mov(FieldOperand(edx, HeapObject::kMapOffset),
+ Immediate(Factory::fixed_cow_array_map()));
+ __ bind(&empty);
+ }
+ __ mov(FieldOperand(ebx, i), edx);
+ __ mov(FieldOperand(ebx, i + kPointerSize), ecx);
+ }
+ __ mov(eax, ebx);
+
+ __ bind(&done);
+ }
+ frame_->Push(eax);
+}
+
+
class DeferredSearchCache: public DeferredCode {
public:
DeferredSearchCache(Register dst, Register cache, Register key)
@@ -8578,11 +8660,9 @@ void CodeGenerator::Int32BinaryOperation(BinaryOperation* node) {
}
right.Unuse();
frame_->Push(&left);
- if (!node->to_int32() || op == Token::MUL) {
- // If ToInt32 is called on the result of ADD, SUB, we don't
+ if (!node->to_int32()) {
+ // If ToInt32 is called on the result of ADD, SUB, or MUL, we don't
// care about overflows.
- // Result of MUL can be non-representable precisely in double so
- // we have to check for overflow.
unsafe_bailout_->Branch(overflow);
}
break;
diff --git a/deps/v8/src/ia32/codegen-ia32.h b/deps/v8/src/ia32/codegen-ia32.h
index 5a12e10ea5..4594b19ddd 100644
--- a/deps/v8/src/ia32/codegen-ia32.h
+++ b/deps/v8/src/ia32/codegen-ia32.h
@@ -697,6 +697,11 @@ class CodeGenerator: public AstVisitor {
// Construct a RegExp exec result with two in-object properties.
void GenerateRegExpConstructResult(ZoneList<Expression*>* args);
+ // Clone the result of a regexp function.
+ // Must be an object created by GenerateRegExpConstructResult with
+ // no extra properties.
+ void GenerateRegExpCloneResult(ZoneList<Expression*>* args);
+
// Support for fast native caches.
void GenerateGetFromCache(ZoneList<Expression*>* args);
diff --git a/deps/v8/src/ia32/full-codegen-ia32.cc b/deps/v8/src/ia32/full-codegen-ia32.cc
index 1ea719d713..ee4e6458ae 100644
--- a/deps/v8/src/ia32/full-codegen-ia32.cc
+++ b/deps/v8/src/ia32/full-codegen-ia32.cc
@@ -1996,14 +1996,12 @@ void FullCodeGenerator::EmitCallWithIC(Call* expr,
// Code common for calls using the IC.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
- { PreserveStatementPositionScope scope(masm()->positions_recorder());
- for (int i = 0; i < arg_count; i++) {
- VisitForStackValue(args->at(i));
- }
- __ Set(ecx, Immediate(name));
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
}
+ __ Set(ecx, Immediate(name));
// Record source position of the IC call.
- SetSourcePosition(expr->position(), FORCED_POSITION);
+ SetSourcePosition(expr->position());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count, in_loop);
EmitCallIC(ic, mode);
@@ -2019,15 +2017,13 @@ void FullCodeGenerator::EmitKeyedCallWithIC(Call* expr,
// Code common for calls using the IC.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
- { PreserveStatementPositionScope scope(masm()->positions_recorder());
- for (int i = 0; i < arg_count; i++) {
- VisitForStackValue(args->at(i));
- }
- VisitForAccumulatorValue(key);
- __ mov(ecx, eax);
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
}
+ VisitForAccumulatorValue(key);
+ __ mov(ecx, eax);
// Record source position of the IC call.
- SetSourcePosition(expr->position(), FORCED_POSITION);
+ SetSourcePosition(expr->position());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
Handle<Code> ic = CodeGenerator::ComputeKeyedCallInitialize(
arg_count, in_loop);
@@ -2042,13 +2038,11 @@ void FullCodeGenerator::EmitCallWithStub(Call* expr) {
// Code common for calls using the call stub.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
- { PreserveStatementPositionScope scope(masm()->positions_recorder());
- for (int i = 0; i < arg_count; i++) {
- VisitForStackValue(args->at(i));
- }
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
}
// Record source position for debugger.
- SetSourcePosition(expr->position(), FORCED_POSITION);
+ SetSourcePosition(expr->position());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
__ CallStub(&stub);
@@ -2068,39 +2062,37 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// resolve the function we need to call and the receiver of the
// call. Then we call the resolved function using the given
// arguments.
+ VisitForStackValue(fun);
+ __ push(Immediate(Factory::undefined_value())); // Reserved receiver slot.
+
+ // Push the arguments.
ZoneList<Expression*>* args = expr->arguments();
int arg_count = args->length();
- { PreserveStatementPositionScope pos_scope(masm()->positions_recorder());
- VisitForStackValue(fun);
- // Reserved receiver slot.
- __ push(Immediate(Factory::undefined_value()));
+ for (int i = 0; i < arg_count; i++) {
+ VisitForStackValue(args->at(i));
+ }
- // Push the arguments.
- for (int i = 0; i < arg_count; i++) {
- VisitForStackValue(args->at(i));
- }
+ // Push copy of the function - found below the arguments.
+ __ push(Operand(esp, (arg_count + 1) * kPointerSize));
- // Push copy of the function - found below the arguments.
- __ push(Operand(esp, (arg_count + 1) * kPointerSize));
+ // Push copy of the first argument or undefined if it doesn't exist.
+ if (arg_count > 0) {
+ __ push(Operand(esp, arg_count * kPointerSize));
+ } else {
+ __ push(Immediate(Factory::undefined_value()));
+ }
- // Push copy of the first argument or undefined if it doesn't exist.
- if (arg_count > 0) {
- __ push(Operand(esp, arg_count * kPointerSize));
- } else {
- __ push(Immediate(Factory::undefined_value()));
- }
+ // Push the receiver of the enclosing function and do runtime call.
+ __ push(Operand(ebp, (2 + scope()->num_parameters()) * kPointerSize));
+ __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
- // Push the receiver of the enclosing function and do runtime call.
- __ push(Operand(ebp, (2 + scope()->num_parameters()) * kPointerSize));
- __ CallRuntime(Runtime::kResolvePossiblyDirectEval, 3);
+ // The runtime call returns a pair of values in eax (function) and
+ // edx (receiver). Touch up the stack with the right values.
+ __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
+ __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
- // The runtime call returns a pair of values in eax (function) and
- // edx (receiver). Touch up the stack with the right values.
- __ mov(Operand(esp, (arg_count + 0) * kPointerSize), edx);
- __ mov(Operand(esp, (arg_count + 1) * kPointerSize), eax);
- }
// Record source position for debugger.
- SetSourcePosition(expr->position(), FORCED_POSITION);
+ SetSourcePosition(expr->position());
InLoopFlag in_loop = (loop_depth() > 0) ? IN_LOOP : NOT_IN_LOOP;
CallFunctionStub stub(arg_count, in_loop, RECEIVER_MIGHT_BE_VALUE);
__ CallStub(&stub);
@@ -2116,14 +2108,12 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Call to a lookup slot (dynamically introduced variable).
Label slow, done;
- { PreserveStatementPositionScope scope(masm()->positions_recorder());
- // Generate code for loading from variables potentially shadowed
- // by eval-introduced variables.
- EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
- NOT_INSIDE_TYPEOF,
- &slow,
- &done);
- }
+ // Generate code for loading from variables potentially shadowed
+ // by eval-introduced variables.
+ EmitDynamicLoadFromSlotFastCase(var->AsSlot(),
+ NOT_INSIDE_TYPEOF,
+ &slow,
+ &done);
__ bind(&slow);
// Call the runtime to find the function to call (returned in eax)
@@ -2162,15 +2152,11 @@ void FullCodeGenerator::VisitCall(Call* expr) {
// Call to a keyed property.
// For a synthetic property use keyed load IC followed by function call,
// for a regular property use keyed EmitCallIC.
- { PreserveStatementPositionScope scope(masm()->positions_recorder());
- VisitForStackValue(prop->obj());
- }
+ VisitForStackValue(prop->obj());
if (prop->is_synthetic()) {
- { PreserveStatementPositionScope scope(masm()->positions_recorder());
- VisitForAccumulatorValue(prop->key());
- }
+ VisitForAccumulatorValue(prop->key());
// Record source code position for IC call.
- SetSourcePosition(prop->position(), FORCED_POSITION);
+ SetSourcePosition(prop->position());
__ pop(edx); // We do not need to keep the receiver.
Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize));
@@ -2195,9 +2181,7 @@ void FullCodeGenerator::VisitCall(Call* expr) {
loop_depth() == 0) {
lit->set_try_full_codegen(true);
}
- { PreserveStatementPositionScope scope(masm()->positions_recorder());
- VisitForStackValue(fun);
- }
+ VisitForStackValue(fun);
// Load global receiver object.
__ mov(ebx, CodeGenerator::GlobalObject());
__ push(FieldOperand(ebx, GlobalObject::kGlobalReceiverOffset));
diff --git a/deps/v8/src/ia32/ic-ia32.cc b/deps/v8/src/ia32/ic-ia32.cc
index a0bc086d87..b5f4deefeb 100644
--- a/deps/v8/src/ia32/ic-ia32.cc
+++ b/deps/v8/src/ia32/ic-ia32.cc
@@ -33,6 +33,7 @@
#include "ic-inl.h"
#include "runtime.h"
#include "stub-cache.h"
+#include "utils.h"
namespace v8 {
namespace internal {
diff --git a/deps/v8/src/ia32/macro-assembler-ia32.h b/deps/v8/src/ia32/macro-assembler-ia32.h
index d65eebbc20..7b9b843939 100644
--- a/deps/v8/src/ia32/macro-assembler-ia32.h
+++ b/deps/v8/src/ia32/macro-assembler-ia32.h
@@ -488,7 +488,7 @@ class MacroAssembler: public Assembler {
// stored in ApiParameterOperand(0), ApiParameterOperand(1) etc.
void PrepareCallApiFunction(int stack_space, int argc);
- // Calls an API function. Allocates HandleScope, extracts
+ // Tail call an API function (jump). Allocates HandleScope, extracts
// returned value from handle and propagates exceptions.
// Clobbers ebx, esi, edi and caller-save registers.
void CallApiFunctionAndReturn(ApiFunction* function, int argc);
diff --git a/deps/v8/src/ia32/stub-cache-ia32.cc b/deps/v8/src/ia32/stub-cache-ia32.cc
index f59928fe2b..e387088359 100644
--- a/deps/v8/src/ia32/stub-cache-ia32.cc
+++ b/deps/v8/src/ia32/stub-cache-ia32.cc
@@ -413,10 +413,6 @@ static void CompileCallLoadPropertyWithInterceptor(MacroAssembler* masm,
}
-// Number of pointers to be reserved on stack for fast API call.
-static const int kFastApiCallArguments = 3;
-
-
// Reserves space for the extra arguments to FastHandleApiCall in the
// caller's frame.
//
@@ -427,9 +423,10 @@ static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
// -- esp[4] : last argument in the internal frame of the caller
// -----------------------------------
__ pop(scratch);
- for (int i = 0; i < kFastApiCallArguments; i++) {
- __ push(Immediate(Smi::FromInt(0)));
- }
+ __ push(Immediate(Smi::FromInt(0)));
+ __ push(Immediate(Smi::FromInt(0)));
+ __ push(Immediate(Smi::FromInt(0)));
+ __ push(Immediate(Smi::FromInt(0)));
__ push(scratch);
}
@@ -437,81 +434,75 @@ static void ReserveSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
// Undoes the effects of ReserveSpaceForFastApiCall.
static void FreeSpaceForFastApiCall(MacroAssembler* masm, Register scratch) {
// ----------- S t a t e -------------
- // -- esp[0] : return address.
- // -- esp[4] : last fast api call extra argument.
+ // -- esp[0] : return address
+ // -- esp[4] : last fast api call extra argument
// -- ...
- // -- esp[kFastApiCallArguments * 4] : first fast api call extra argument.
- // -- esp[kFastApiCallArguments * 4 + 4] : last argument in the internal
- // frame.
+ // -- esp[16] : first fast api call extra argument
+ // -- esp[20] : last argument in the internal frame
// -----------------------------------
__ pop(scratch);
- __ add(Operand(esp), Immediate(kPointerSize * kFastApiCallArguments));
+ __ add(Operand(esp), Immediate(kPointerSize * 4));
__ push(scratch);
}
// Generates call to FastHandleApiCall builtin.
-static bool GenerateFastApiCall(MacroAssembler* masm,
+static void GenerateFastApiCall(MacroAssembler* masm,
const CallOptimization& optimization,
- int argc,
- Failure** failure) {
+ int argc) {
// ----------- S t a t e -------------
// -- esp[0] : return address
// -- esp[4] : object passing the type check
// (last fast api call extra argument,
// set by CheckPrototypes)
- // -- esp[8] : api function
+ // -- esp[8] : api call data
+ // -- esp[12] : api callback
+ // -- esp[16] : api function
// (first fast api call extra argument)
- // -- esp[12] : api call data
- // -- esp[16] : last argument
+ // -- esp[20] : last argument
// -- ...
- // -- esp[(argc + 3) * 4] : first argument
- // -- esp[(argc + 4) * 4] : receiver
+ // -- esp[(argc + 5) * 4] : first argument
+ // -- esp[(argc + 6) * 4] : receiver
// -----------------------------------
+
// Get the function and setup the context.
JSFunction* function = optimization.constant_function();
__ mov(edi, Immediate(Handle<JSFunction>(function)));
__ mov(esi, FieldOperand(edi, JSFunction::kContextOffset));
// Pass the additional arguments FastHandleApiCall expects.
- __ mov(Operand(esp, 2 * kPointerSize), edi);
+ __ mov(Operand(esp, 4 * kPointerSize), edi);
+ bool info_loaded = false;
+ Object* callback = optimization.api_call_info()->callback();
+ if (Heap::InNewSpace(callback)) {
+ info_loaded = true;
+ __ mov(ecx, Handle<CallHandlerInfo>(optimization.api_call_info()));
+ __ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kCallbackOffset));
+ __ mov(Operand(esp, 3 * kPointerSize), ebx);
+ } else {
+ __ mov(Operand(esp, 3 * kPointerSize), Immediate(Handle<Object>(callback)));
+ }
Object* call_data = optimization.api_call_info()->data();
- Handle<CallHandlerInfo> api_call_info_handle(optimization.api_call_info());
if (Heap::InNewSpace(call_data)) {
- __ mov(ecx, api_call_info_handle);
+ if (!info_loaded) {
+ __ mov(ecx, Handle<CallHandlerInfo>(optimization.api_call_info()));
+ }
__ mov(ebx, FieldOperand(ecx, CallHandlerInfo::kDataOffset));
- __ mov(Operand(esp, 3 * kPointerSize), ebx);
+ __ mov(Operand(esp, 2 * kPointerSize), ebx);
} else {
- __ mov(Operand(esp, 3 * kPointerSize),
+ __ mov(Operand(esp, 2 * kPointerSize),
Immediate(Handle<Object>(call_data)));
}
- // Prepare arguments for ApiCallEntryStub.
- __ lea(eax, Operand(esp, 3 * kPointerSize));
- __ lea(ebx, Operand(esp, (argc + 3) * kPointerSize));
- __ Set(edx, Immediate(argc));
-
- Object* callback = optimization.api_call_info()->callback();
- Address api_function_address = v8::ToCData<Address>(callback);
- ApiFunction fun(api_function_address);
-
- ApiCallEntryStub stub(api_call_info_handle, &fun);
-
- __ EnterInternalFrame();
+ // Set the number of arguments.
+ __ mov(eax, Immediate(argc + 4));
- // Emitting a stub call may try to allocate (if the code is not
- // already generated). Do not allow the assembler to perform a
- // garbage collection but instead return the allocation failure
- // object.
- MaybeObject* result = masm->TryCallStub(&stub);
- if (result->IsFailure()) {
- *failure = Failure::cast(result);
- return false;
- }
-
- __ LeaveInternalFrame();
- __ ret((argc + 4) * kPointerSize);
- return true;
+ // Jump to the fast api call builtin (tail call).
+ Handle<Code> code = Handle<Code>(
+ Builtins::builtin(Builtins::FastHandleApiCall));
+ ParameterCount expected(0);
+ __ InvokeCode(code, expected, expected,
+ RelocInfo::CODE_TARGET, JUMP_FUNCTION);
}
@@ -524,7 +515,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
arguments_(arguments),
name_(name) {}
- bool Compile(MacroAssembler* masm,
+ void Compile(MacroAssembler* masm,
JSObject* object,
JSObject* holder,
String* name,
@@ -533,8 +524,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
Register scratch1,
Register scratch2,
Register scratch3,
- Label* miss,
- Failure** failure) {
+ Label* miss) {
ASSERT(holder->HasNamedInterceptor());
ASSERT(!holder->GetNamedInterceptor()->getter()->IsUndefined());
@@ -545,18 +535,17 @@ class CallInterceptorCompiler BASE_EMBEDDED {
CallOptimization optimization(lookup);
if (optimization.is_constant_call()) {
- return CompileCacheable(masm,
- object,
- receiver,
- scratch1,
- scratch2,
- scratch3,
- holder,
- lookup,
- name,
- optimization,
- miss,
- failure);
+ CompileCacheable(masm,
+ object,
+ receiver,
+ scratch1,
+ scratch2,
+ scratch3,
+ holder,
+ lookup,
+ name,
+ optimization,
+ miss);
} else {
CompileRegular(masm,
object,
@@ -567,12 +556,11 @@ class CallInterceptorCompiler BASE_EMBEDDED {
name,
holder,
miss);
- return true;
}
}
private:
- bool CompileCacheable(MacroAssembler* masm,
+ void CompileCacheable(MacroAssembler* masm,
JSObject* object,
Register receiver,
Register scratch1,
@@ -582,8 +570,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
LookupResult* lookup,
String* name,
const CallOptimization& optimization,
- Label* miss_label,
- Failure** failure) {
+ Label* miss_label) {
ASSERT(optimization.is_constant_call());
ASSERT(!lookup->holder()->IsGlobalObject());
@@ -645,11 +632,7 @@ class CallInterceptorCompiler BASE_EMBEDDED {
// Invoke function.
if (can_do_fast_api_call) {
- bool success = GenerateFastApiCall(masm, optimization,
- arguments_.immediate(), failure);
- if (!success) {
- return false;
- }
+ GenerateFastApiCall(masm, optimization, arguments_.immediate());
} else {
__ InvokeFunction(optimization.constant_function(), arguments_,
JUMP_FUNCTION);
@@ -667,8 +650,6 @@ class CallInterceptorCompiler BASE_EMBEDDED {
if (can_do_fast_api_call) {
FreeSpaceForFastApiCall(masm, scratch1);
}
-
- return true;
}
void CompileRegular(MacroAssembler* masm,
@@ -924,7 +905,7 @@ Register StubCompiler::CheckPrototypes(JSObject* object,
MaybeObject* maybe_lookup_result = Heap::LookupSymbol(name);
Object* lookup_result = NULL; // Initialization to please compiler.
if (!maybe_lookup_result->ToObject(&lookup_result)) {
- set_failure(Failure::cast(maybe_lookup_result));
+ set_failure(Failure::cast(lookup_result));
return reg;
}
name = String::cast(lookup_result);
@@ -1065,7 +1046,8 @@ bool StubCompiler::GenerateLoadCallback(JSObject* object,
__ EnterInternalFrame();
// Push the stack address where the list of arguments ends.
- __ lea(scratch2, Operand(esp, -2 * kPointerSize));
+ __ mov(scratch2, esp);
+ __ sub(Operand(scratch2), Immediate(2 * kPointerSize));
__ push(scratch2);
__ push(receiver); // receiver
__ push(reg); // holder
@@ -1079,11 +1061,12 @@ bool StubCompiler::GenerateLoadCallback(JSObject* object,
__ push(name_reg); // name
// Save a pointer to where we pushed the arguments pointer.
// This will be passed as the const AccessorInfo& to the C++ callback.
- STATIC_ASSERT(ApiGetterEntryStub::kStackSpace == 5);
- __ lea(eax, Operand(esp, 4 * kPointerSize));
+ __ mov(eax, esp);
+ __ add(Operand(eax), Immediate(4 * kPointerSize));
__ mov(ebx, esp);
// Do call through the api.
+ ASSERT_EQ(5, ApiGetterEntryStub::kStackSpace);
Address getter_address = v8::ToCData<Address>(callback->getter());
ApiFunction fun(getter_address);
ApiGetterEntryStub stub(callback_handle, &fun);
@@ -1094,7 +1077,7 @@ bool StubCompiler::GenerateLoadCallback(JSObject* object,
Object* result = NULL; // Initialization to please compiler.
{ MaybeObject* try_call_result = masm()->TryCallStub(&stub);
if (!try_call_result->ToObject(&result)) {
- *failure = Failure::cast(try_call_result);
+ *failure = Failure::cast(result);
return false;
}
}
@@ -2225,11 +2208,7 @@ MaybeObject* CallStubCompiler::CompileCallConstant(Object* object,
}
if (depth != kInvalidProtoDepth) {
- Failure* failure;
- bool success = GenerateFastApiCall(masm(), optimization, argc, &failure);
- if (!success) {
- return failure;
- }
+ GenerateFastApiCall(masm(), optimization, argc);
} else {
__ InvokeFunction(function, arguments(), JUMP_FUNCTION);
}
@@ -2274,21 +2253,16 @@ MaybeObject* CallStubCompiler::CompileCallInterceptor(JSObject* object,
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));
CallInterceptorCompiler compiler(this, arguments(), ecx);
- Failure* failure;
- bool success = compiler.Compile(masm(),
- object,
- holder,
- name,
- &lookup,
- edx,
- ebx,
- edi,
- eax,
- &miss,
- &failure);
- if (!success) {
- return false;
- }
+ compiler.Compile(masm(),
+ object,
+ holder,
+ name,
+ &lookup,
+ edx,
+ ebx,
+ edi,
+ eax,
+ &miss);
// Restore receiver.
__ mov(edx, Operand(esp, (argc + 1) * kPointerSize));