Diffstat (limited to 'deps/v8/src/x64')

 deps/v8/src/x64/assembler-x64-inl.h            |    8
 deps/v8/src/x64/assembler-x64.cc               |   21
 deps/v8/src/x64/assembler-x64.h                |   58
 deps/v8/src/x64/builtins-x64.cc                |   41
 deps/v8/src/x64/codegen-x64.cc                 |  221
 deps/v8/src/x64/codegen-x64.h                  |   54
 deps/v8/src/x64/disasm-x64.cc                  |    6
 deps/v8/src/x64/fast-codegen-x64.cc            | 1255
 deps/v8/src/x64/frames-x64.cc                  |   14
 deps/v8/src/x64/frames-x64.h                   |    2
 deps/v8/src/x64/ic-x64.cc                      |    4
 deps/v8/src/x64/macro-assembler-x64.cc         |   35
 deps/v8/src/x64/macro-assembler-x64.h          |    8
 deps/v8/src/x64/regexp-macro-assembler-x64.cc  |   14
 deps/v8/src/x64/simulator-x64.h                |    9
 deps/v8/src/x64/stub-cache-x64.cc              |    2
 deps/v8/src/x64/virtual-frame-x64.cc           |   17

 17 files changed, 1264 insertions(+), 505 deletions(-)
diff --git a/deps/v8/src/x64/assembler-x64-inl.h b/deps/v8/src/x64/assembler-x64-inl.h
index 8f078ff236..9c7f9b618d 100644
--- a/deps/v8/src/x64/assembler-x64-inl.h
+++ b/deps/v8/src/x64/assembler-x64-inl.h
@@ -176,7 +176,7 @@ Address Assembler::target_address_at(Address pc) {
 
 void Assembler::set_target_address_at(Address pc, Address target) {
-  Memory::int32_at(pc) = target - pc - 4;
+  Memory::int32_at(pc) = static_cast<int32_t>(target - pc - 4);
   CPU::FlushICache(pc, sizeof(int32_t));
 }
 
@@ -191,13 +191,13 @@ Handle<Object> Assembler::code_target_object_handle_at(Address pc) {
 void RelocInfo::apply(intptr_t delta) {
   if (IsInternalReference(rmode_)) {
     // absolute code pointer inside code object moves with the code object.
-    Memory::Address_at(pc_) += delta;
+    Memory::Address_at(pc_) += static_cast<int32_t>(delta);
   } else if (IsCodeTarget(rmode_)) {
-    Memory::int32_at(pc_) -= delta;
+    Memory::int32_at(pc_) -= static_cast<int32_t>(delta);
   } else if (rmode_ == JS_RETURN && IsPatchedReturnSequence()) {
     // Special handling of js_return when a break point is set (call
     // instruction has been inserted).
-    Memory::int32_at(pc_ + 1) -= delta;  // relocate entry
+    Memory::int32_at(pc_ + 1) -= static_cast<int32_t>(delta);  // relocate entry
   }
 }
diff --git a/deps/v8/src/x64/assembler-x64.cc b/deps/v8/src/x64/assembler-x64.cc
index 61e8753618..2d524eaf4b 100644
--- a/deps/v8/src/x64/assembler-x64.cc
+++ b/deps/v8/src/x64/assembler-x64.cc
@@ -80,11 +80,15 @@ XMMRegister xmm15 = { 15 };
 // fpu, tsc, cx8, cmov, mmx, sse, sse2, fxsr, syscall
 uint64_t CpuFeatures::supported_ = kDefaultCpuFeatures;
 uint64_t CpuFeatures::enabled_ = 0;
+uint64_t CpuFeatures::found_by_runtime_probing_ = 0;
 
 void CpuFeatures::Probe() {
   ASSERT(Heap::HasBeenSetup());
   ASSERT(supported_ == kDefaultCpuFeatures);
-  if (Serializer::enabled()) return;  // No features if we might serialize.
+  if (Serializer::enabled()) {
+    supported_ |= OS::CpuFeaturesImpliedByPlatform();
+    return;  // No features if we might serialize.
+  }
 
   Assembler assm(NULL, 0);
   Label cpuid, done;
@@ -160,6 +164,11 @@ void CpuFeatures::Probe() {
   typedef uint64_t (*F0)();
   F0 probe = FUNCTION_CAST<F0>(Code::cast(code)->entry());
   supported_ = probe();
+  found_by_runtime_probing_ = supported_;
+  found_by_runtime_probing_ &= ~kDefaultCpuFeatures;
+  uint64_t os_guarantees = OS::CpuFeaturesImpliedByPlatform();
+  supported_ |= os_guarantees;
+  found_by_runtime_probing_ &= ~os_guarantees;
   // SSE2 and CMOV must be available on an X64 CPU.
   ASSERT(IsSupported(CPUID));
   ASSERT(IsSupported(SSE2));
@@ -337,7 +346,8 @@ void Assembler::GetCode(CodeDesc* desc) {
   desc->buffer_size = buffer_size_;
   desc->instr_size = pc_offset();
   ASSERT(desc->instr_size > 0);  // Zero-size code objects upset the system.
-  desc->reloc_size = (buffer_ + buffer_size_) - reloc_info_writer.pos();
+  desc->reloc_size =
+      static_cast<int>((buffer_ + buffer_size_) - reloc_info_writer.pos());
   desc->origin = this;
 
   Counters::reloc_info_size.Increment(desc->reloc_size);
@@ -400,7 +410,8 @@ void Assembler::GrowBuffer() {
   // setup new buffer
   desc.buffer = NewArray<byte>(desc.buffer_size);
   desc.instr_size = pc_offset();
-  desc.reloc_size = (buffer_ + buffer_size_) - (reloc_info_writer.pos());
+  desc.reloc_size =
+      static_cast<int>((buffer_ + buffer_size_) - (reloc_info_writer.pos()));
 
   // Clear the buffer in debug mode. Use 'int3' instructions to make
   // sure to get into problems if we ever run uninitialized code.
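The probing change above splits feature detection into three buckets: baseline features assumed on every x64 CPU, features the OS platform guarantees, and features found only by running the CPUID probe at startup. Only the last bucket is unsafe to bake into a serialized snapshot. A minimal standalone sketch of that bookkeeping, with hypothetical ProbeCpu/OsGuarantees stand-ins for the generated probe and OS::CpuFeaturesImpliedByPlatform:

    #include <cstdint>

    // Hypothetical stand-ins for the runtime CPUID probe and the OS query.
    uint64_t ProbeCpu() { return (1ull << 26) | (1ull << 15) | (1ull << 32); }
    uint64_t OsGuarantees() { return 0; }

    const uint64_t kDefault = (1ull << 26) | (1ull << 15);  // SSE2 | CMOV

    struct FeatureSets {
      uint64_t supported;
      uint64_t found_by_runtime_probing;
    };

    // Anything implied by the platform or enabled by default is subtracted
    // from the runtime-probed set, since only truly probed features must be
    // kept out of portable snapshots.
    FeatureSets Classify() {
      FeatureSets s;
      s.supported = ProbeCpu();
      s.found_by_runtime_probing = s.supported & ~kDefault;
      uint64_t os = OsGuarantees();
      s.supported |= os;
      s.found_by_runtime_probing &= ~os;
      return s;
    }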
@@ -887,7 +898,7 @@ void Assembler::cmpb_al(Immediate imm8) {
 
 void Assembler::cpuid() {
-  ASSERT(CpuFeatures::IsEnabled(CpuFeatures::CPUID));
+  ASSERT(CpuFeatures::IsEnabled(CPUID));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit(0x0F);
@@ -2045,7 +2056,7 @@ void Assembler::fistp_s(const Operand& adr) {
 
 void Assembler::fisttp_s(const Operand& adr) {
-  ASSERT(CpuFeatures::IsEnabled(CpuFeatures::SSE3));
+  ASSERT(CpuFeatures::IsEnabled(SSE3));
   EnsureSpace ensure_space(this);
   last_pc_ = pc_;
   emit_optional_rex_32(adr);
diff --git a/deps/v8/src/x64/assembler-x64.h b/deps/v8/src/x64/assembler-x64.h
index 617f092bb4..50f4e0e454 100644
--- a/deps/v8/src/x64/assembler-x64.h
+++ b/deps/v8/src/x64/assembler-x64.h
@@ -37,6 +37,8 @@
 #ifndef V8_X64_ASSEMBLER_X64_H_
 #define V8_X64_ASSEMBLER_X64_H_
 
+#include "serialize.h"
+
 namespace v8 {
 namespace internal {
 
@@ -362,20 +364,11 @@ class Operand BASE_EMBEDDED {
 // }
 class CpuFeatures : public AllStatic {
  public:
-  // Feature flags bit positions. They are mostly based on the CPUID spec.
-  // (We assign CPUID itself to one of the currently reserved bits --
-  // feel free to change this if needed.)
-  enum Feature { SSE3 = 32,
-                 SSE2 = 26,
-                 CMOV = 15,
-                 RDTSC = 4,
-                 CPUID = 10,
-                 SAHF = 0 };
   // Detect features of the target CPU. Set safe defaults if the serializer
   // is enabled (snapshots must be portable).
   static void Probe();
   // Check whether a feature is supported by the target CPU.
-  static bool IsSupported(Feature f) {
+  static bool IsSupported(CpuFeature f) {
     if (f == SSE2 && !FLAG_enable_sse2) return false;
     if (f == SSE3 && !FLAG_enable_sse3) return false;
     if (f == CMOV && !FLAG_enable_cmov) return false;
@@ -384,33 +377,35 @@ class CpuFeatures : public AllStatic {
     return (supported_ & (V8_UINT64_C(1) << f)) != 0;
   }
   // Check whether a feature is currently enabled.
-  static bool IsEnabled(Feature f) {
+  static bool IsEnabled(CpuFeature f) {
     return (enabled_ & (V8_UINT64_C(1) << f)) != 0;
   }
   // Enable a specified feature within a scope.
   class Scope BASE_EMBEDDED {
 #ifdef DEBUG
    public:
-    explicit Scope(Feature f) {
+    explicit Scope(CpuFeature f) {
+      uint64_t mask = (V8_UINT64_C(1) << f);
       ASSERT(CpuFeatures::IsSupported(f));
+      ASSERT(!Serializer::enabled() ||
+             (found_by_runtime_probing_ & mask) == 0);
       old_enabled_ = CpuFeatures::enabled_;
-      CpuFeatures::enabled_ |= (V8_UINT64_C(1) << f);
+      CpuFeatures::enabled_ |= mask;
     }
     ~Scope() { CpuFeatures::enabled_ = old_enabled_; }
    private:
     uint64_t old_enabled_;
 #else
    public:
-    explicit Scope(Feature f) {}
+    explicit Scope(CpuFeature f) {}
 #endif
   };
 private:
  // Safe defaults include SSE2 and CMOV for X64. It is always available, if
  // anyone checks, but they shouldn't need to check.
-  static const uint64_t kDefaultCpuFeatures =
-      (1 << CpuFeatures::SSE2 | 1 << CpuFeatures::CMOV);
+  static const uint64_t kDefaultCpuFeatures = (1 << SSE2 | 1 << CMOV);
  static uint64_t supported_;
  static uint64_t enabled_;
+  static uint64_t found_by_runtime_probing_;
 };
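CpuFeatures::Scope is an RAII guard: it ORs the feature bit into enabled_ for the dynamic extent of a block and restores the previous mask in its destructor, and the new assert refuses to enable a runtime-probed feature while the serializer is active. A reduced sketch of the same pattern outside V8's types:

    #include <cassert>
    #include <cstdint>

    struct FeatureScope {
      static uint64_t enabled;   // models CpuFeatures::enabled_
      uint64_t old_enabled;
      FeatureScope(int f, bool serializing, uint64_t probed) {
        uint64_t mask = 1ull << f;
        // A snapshot must not depend on features found only on this CPU.
        assert(!serializing || (probed & mask) == 0);
        old_enabled = enabled;
        enabled |= mask;
      }
      ~FeatureScope() { enabled = old_enabled; }  // restore on scope exit
    };
    uint64_t FeatureScope::enabled = 0;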
@@ -458,14 +453,25 @@ class Assembler : public Malloced {
   // the relative displacements stored in the code.
   static inline Address target_address_at(Address pc);
   static inline void set_target_address_at(Address pc, Address target);
+  // This sets the branch destination (which is in the instruction on x64).
+  // This is for calls and branches within generated code.
   inline static void set_target_at(Address instruction_payload,
                                    Address target) {
     set_target_address_at(instruction_payload, target);
   }
+
+  // This sets the branch destination (which is a load instruction on x64).
+  // This is for calls and branches to runtime code.
+  inline static void set_external_target_at(Address instruction_payload,
+                                            Address target) {
+    *reinterpret_cast<Address*>(instruction_payload) = target;
+  }
+
   inline Handle<Object> code_target_object_handle_at(Address pc);
   // Number of bytes taken up by the branch target in the code.
-  static const int kCallTargetSize = 4;  // Use 32-bit displacement.
+  static const int kCallTargetSize = 4;  // Use 32-bit displacement.
+  static const int kExternalTargetSize = 8;  // Use 64-bit absolute.
   // Distance between the address of the code target in the call instruction
   // and the return address pushed on the stack.
   static const int kCallTargetAddressOffset = 4;  // Use 32-bit displacement.
@@ -836,12 +842,12 @@ class Assembler : public Malloced {
   }
 
   // Shifts dst right, duplicating sign bit, by cl % 64 bits.
-  void sar(Register dst) {
+  void sar_cl(Register dst) {
     shift(dst, 0x7);
   }
 
   // Shifts dst right, duplicating sign bit, by cl % 64 bits.
-  void sarl(Register dst) {
+  void sarl_cl(Register dst) {
     shift_32(dst, 0x7);
   }
 
@@ -849,11 +855,11 @@ class Assembler : public Malloced {
     shift(dst, shift_amount, 0x4);
   }
 
-  void shl(Register dst) {
+  void shl_cl(Register dst) {
     shift(dst, 0x4);
   }
 
-  void shll(Register dst) {
+  void shll_cl(Register dst) {
     shift_32(dst, 0x4);
   }
 
@@ -865,11 +871,11 @@ class Assembler : public Malloced {
     shift(dst, shift_amount, 0x5);
   }
 
-  void shr(Register dst) {
+  void shr_cl(Register dst) {
     shift(dst, 0x5);
   }
 
-  void shrl(Register dst) {
+  void shrl_cl(Register dst) {
     shift_32(dst, 0x5);
   }
 
@@ -1120,7 +1126,7 @@ class Assembler : public Malloced {
   void RecordStatementPosition(int pos);
   void WriteRecordedPositions();
 
-  int pc_offset() const { return pc_ - buffer_; }
+  int pc_offset() const { return static_cast<int>(pc_ - buffer_); }
   int current_statement_position() const { return current_statement_position_; }
   int current_position() const { return current_position_; }
 
@@ -1132,7 +1138,9 @@ class Assembler : public Malloced {
   }
 
   // Get the number of bytes available in the buffer.
-  inline int available_space() const { return reloc_info_writer.pos() - pc_; }
+  inline int available_space() const {
+    return static_cast<int>(reloc_info_writer.pos() - pc_);
+  }
 
   // Avoid overflows for displacements etc.
   static const int kMaximalBufferSize = 512*MB;
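The header now distinguishes two patchable call encodings: intra-code targets use a 4-byte displacement relative to the end of the operand (kCallTargetSize), while external runtime targets are full 8-byte absolute addresses (kExternalTargetSize), which is why set_external_target_at can store the pointer directly. A sketch of the two patching schemes with raw byte pointers:

    #include <cstdint>
    #include <cstring>

    // rel32 patching, as in set_target_address_at: the displacement is
    // relative to the first byte after the 4-byte operand. The narrowing
    // cast must be explicit on x64, which is exactly what the diff adds.
    void PatchRelative(uint8_t* operand, uint8_t* target) {
      int32_t disp = static_cast<int32_t>(target - operand - 4);
      std::memcpy(operand, &disp, sizeof(disp));
    }

    // Absolute patching, as in set_external_target_at: write all 8 bytes.
    void PatchAbsolute(uint8_t* operand, uint8_t* target) {
      std::memcpy(operand, &target, sizeof(target));
    }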
diff --git a/deps/v8/src/x64/builtins-x64.cc b/deps/v8/src/x64/builtins-x64.cc
index 8590365a17..f444d2cf85 100644
--- a/deps/v8/src/x64/builtins-x64.cc
+++ b/deps/v8/src/x64/builtins-x64.cc
@@ -320,42 +320,23 @@ void Builtins::Generate_FunctionApply(MacroAssembler* masm) {
   __ push(Operand(rbp, kArgumentsOffset));
   __ InvokeBuiltin(Builtins::APPLY_PREPARE, CALL_FUNCTION);
 
-  // Check the stack for overflow or a break request.
-  // We need to catch preemptions right here, otherwise an unlucky preemption
-  // could show up as a failed apply.
-  Label retry_preemption;
-  Label no_preemption;
-  __ bind(&retry_preemption);
-  ExternalReference stack_guard_limit =
-      ExternalReference::address_of_stack_guard_limit();
-  __ movq(kScratchRegister, stack_guard_limit);
-  __ movq(rcx, rsp);
-  __ subq(rcx, Operand(kScratchRegister, 0));
-  // rcx contains the difference between the stack limit and the stack top.
-  // We use it below to check that there is enough room for the arguments.
-  __ j(above, &no_preemption);
-
-  // Preemption!
-  // Because runtime functions always remove the receiver from the stack, we
-  // have to fake one to avoid underflowing the stack.
-  __ push(rax);
-  __ Push(Smi::FromInt(0));
-
-  // Do call to runtime routine.
-  __ CallRuntime(Runtime::kStackGuard, 1);
-  __ pop(rax);
-  __ jmp(&retry_preemption);
-
-  __ bind(&no_preemption);
-
+  // Check the stack for overflow. We are not trying to catch interruptions
+  // (e.g. debug break and preemption) here, so the "real stack limit" is
+  // checked.
   Label okay;
+  __ LoadRoot(kScratchRegister, Heap::kRealStackLimitRootIndex);
+  __ movq(rcx, rsp);
+  // Make rcx the space we have left. The stack might already be overflowed
+  // here which will cause rcx to become negative.
+  __ subq(rcx, kScratchRegister);
   // Make rdx the space we need for the array when it is unrolled onto the
   // stack.
   __ PositiveSmiTimesPowerOfTwoToInteger64(rdx, rax, kPointerSizeLog2);
+  // Check if the arguments will overflow the stack.
   __ cmpq(rcx, rdx);
-  __ j(greater, &okay);
+  __ j(greater, &okay);  // Signed comparison.
 
-  // Too bad: Out of stack space.
+  // Out of stack space.
   __ push(Operand(rbp, kFunctionOffset));
   __ push(rax);
   __ InvokeBuiltin(Builtins::APPLY_OVERFLOW, CALL_FUNCTION);
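The rewritten check drops the stack-guard/preemption dance entirely: it compares the headroom below rsp against the real stack limit, and the "signed comparison" comment is load-bearing, since an already-overflowed stack makes the headroom negative. The same arithmetic in plain C++:

    #include <cstdint>

    // True when n_args pointers can be pushed without crossing the real
    // stack limit. If sp is already below the limit, space_left is negative
    // and the signed comparison correctly routes to the overflow path.
    bool ArgumentsFit(intptr_t sp, intptr_t real_stack_limit, intptr_t n_args) {
      intptr_t space_left = sp - real_stack_limit;  // may be negative
      intptr_t space_needed = n_args * 8;           // kPointerSize on x64
      return space_left > space_needed;
    }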
diff --git a/deps/v8/src/x64/codegen-x64.cc b/deps/v8/src/x64/codegen-x64.cc
index 95f30d842d..e2296d9bd9 100644
--- a/deps/v8/src/x64/codegen-x64.cc
+++ b/deps/v8/src/x64/codegen-x64.cc
@@ -29,6 +29,7 @@
 
 #include "bootstrapper.h"
 #include "codegen-inl.h"
+#include "compiler.h"
 #include "debug.h"
 #include "ic-inl.h"
 #include "parser.h"
@@ -74,7 +75,6 @@ void DeferredCode::RestoreRegisters() {
 
 CodeGenState::CodeGenState(CodeGenerator* owner)
     : owner_(owner),
-      typeof_state_(NOT_INSIDE_TYPEOF),
      destination_(NULL),
      previous_(NULL) {
  owner_->set_state(this);
@@ -82,10 +82,8 @@ CodeGenState::CodeGenState(CodeGenerator* owner)
 
 CodeGenState::CodeGenState(CodeGenerator* owner,
-                           TypeofState typeof_state,
                            ControlDestination* destination)
     : owner_(owner),
-      typeof_state_(typeof_state),
       destination_(destination),
       previous_(owner->state()) {
   owner_->set_state(this);
@@ -643,27 +641,6 @@ void DeferredReferenceSetKeyedValue::Generate() {
 }
 
-class CallFunctionStub: public CodeStub {
- public:
-  CallFunctionStub(int argc, InLoopFlag in_loop)
-      : argc_(argc), in_loop_(in_loop) { }
-
-  void Generate(MacroAssembler* masm);
-
- private:
-  int argc_;
-  InLoopFlag in_loop_;
-
-#ifdef DEBUG
-  void Print() { PrintF("CallFunctionStub (args %d)\n", argc_); }
-#endif
-
-  Major MajorKey() { return CallFunction; }
-  int MinorKey() { return argc_; }
-  InLoopFlag InLoop() { return in_loop_; }
-};
-
-
 void CodeGenerator::CallApplyLazy(Property* apply,
                                   Expression* receiver,
                                   VariableProxy* arguments,
@@ -676,7 +653,7 @@ void CodeGenerator::CallApplyLazy(Property* apply,
   // Load the apply function onto the stack. This will usually
   // give us a megamorphic load site. Not super, but it works.
   Reference ref(this, apply);
-  ref.GetValue(NOT_INSIDE_TYPEOF);
+  ref.GetValue();
   ASSERT(ref.type() == Reference::NAMED);
 
   // Load the receiver and the existing arguments object onto the
@@ -1001,7 +978,7 @@ void CodeGenerator::VisitIfStatement(IfStatement* node) {
     JumpTarget then;
     JumpTarget else_;
     ControlDestination dest(&then, &else_, true);
-    LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, true);
+    LoadCondition(node->condition(), &dest, true);
 
     if (dest.false_was_fall_through()) {
       // The else target was bound, so we compile the else part first.
@@ -1028,7 +1005,7 @@ void CodeGenerator::VisitIfStatement(IfStatement* node) {
     ASSERT(!has_else_stm);
     JumpTarget then;
     ControlDestination dest(&then, &exit, true);
-    LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, true);
+    LoadCondition(node->condition(), &dest, true);
 
     if (dest.false_was_fall_through()) {
       // The exit label was bound. We may have dangling jumps to the
@@ -1048,7 +1025,7 @@ void CodeGenerator::VisitIfStatement(IfStatement* node) {
     ASSERT(!has_then_stm);
     JumpTarget else_;
     ControlDestination dest(&exit, &else_, false);
-    LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, true);
+    LoadCondition(node->condition(), &dest, true);
 
     if (dest.true_was_fall_through()) {
       // The exit label was bound. We may have dangling jumps to the
@@ -1070,7 +1047,7 @@ void CodeGenerator::VisitIfStatement(IfStatement* node) {
     // or control flow effect). LoadCondition is called without
     // forcing control flow.
     ControlDestination dest(&exit, &exit, true);
-    LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, false);
+    LoadCondition(node->condition(), &dest, false);
     if (!dest.is_used()) {
       // We got a value on the frame rather than (or in addition to)
       // control flow.
@@ -1341,8 +1318,10 @@ void CodeGenerator::VisitDoWhileStatement(DoWhileStatement* node) {
       node->continue_target()->Bind();
     }
     if (has_valid_frame()) {
+      Comment cmnt(masm_, "[ DoWhileCondition");
+      CodeForDoWhileConditionPosition(node);
       ControlDestination dest(&body, node->break_target(), false);
-      LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true);
+      LoadCondition(node->cond(), &dest, true);
     }
     if (node->break_target()->is_linked()) {
       node->break_target()->Bind();
@@ -1399,7 +1378,7 @@ void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
       // Compile the test with the body as the true target and preferred
       // fall-through and with the break target as the false target.
       ControlDestination dest(&body, node->break_target(), true);
-      LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true);
+      LoadCondition(node->cond(), &dest, true);
 
       if (dest.false_was_fall_through()) {
         // If we got the break target as fall-through, the test may have
@@ -1446,7 +1425,7 @@ void CodeGenerator::VisitWhileStatement(WhileStatement* node) {
         // The break target is the fall-through (body is a backward
         // jump from here and thus an invalid fall-through).
         ControlDestination dest(&body, node->break_target(), false);
-        LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true);
+        LoadCondition(node->cond(), &dest, true);
       }
     } else {
       // If we have chosen not to recompile the test at the
@@ -1538,7 +1517,7 @@ void CodeGenerator::VisitForStatement(ForStatement* node) {
       // Compile the test with the body as the true target and preferred
       // fall-through and with the break target as the false target.
       ControlDestination dest(&body, node->break_target(), true);
-      LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true);
+      LoadCondition(node->cond(), &dest, true);
 
       if (dest.false_was_fall_through()) {
         // If we got the break target as fall-through, the test may have
@@ -1608,7 +1587,7 @@ void CodeGenerator::VisitForStatement(ForStatement* node) {
         // The break target is the fall-through (body is a backward
         // jump from here).
         ControlDestination dest(&body, node->break_target(), false);
-        LoadCondition(node->cond(), NOT_INSIDE_TYPEOF, &dest, true);
+        LoadCondition(node->cond(), &dest, true);
       }
     } else {
       // Otherwise, jump back to the test at the top.
@@ -2188,7 +2167,8 @@ void CodeGenerator::VisitFunctionLiteral(FunctionLiteral* node) {
   Comment cmnt(masm_, "[ FunctionLiteral");
 
   // Build the function boilerplate and instantiate it.
-  Handle<JSFunction> boilerplate = BuildBoilerplate(node);
+  Handle<JSFunction> boilerplate =
+      Compiler::BuildBoilerplate(node, script_, this);
   // Check for stack-overflow exception.
   if (HasStackOverflow()) return;
   InstantiateBoilerplate(boilerplate);
@@ -2208,25 +2188,25 @@ void CodeGenerator::VisitConditional(Conditional* node) {
   JumpTarget else_;
   JumpTarget exit;
   ControlDestination dest(&then, &else_, true);
-  LoadCondition(node->condition(), NOT_INSIDE_TYPEOF, &dest, true);
+  LoadCondition(node->condition(), &dest, true);
 
   if (dest.false_was_fall_through()) {
     // The else target was bound, so we compile the else part first.
-    Load(node->else_expression(), typeof_state());
+    Load(node->else_expression());
 
     if (then.is_linked()) {
       exit.Jump();
       then.Bind();
-      Load(node->then_expression(), typeof_state());
+      Load(node->then_expression());
     }
   } else {
     // The then target was bound, so we compile the then part first.
-    Load(node->then_expression(), typeof_state());
+    Load(node->then_expression());
 
     if (else_.is_linked()) {
       exit.Jump();
       else_.Bind();
-      Load(node->else_expression(), typeof_state());
+      Load(node->else_expression());
     }
   }
 
@@ -2236,7 +2216,7 @@ void CodeGenerator::VisitConditional(Conditional* node) {
 
 void CodeGenerator::VisitSlot(Slot* node) {
   Comment cmnt(masm_, "[ Slot");
-  LoadFromSlotCheckForArguments(node, typeof_state());
+  LoadFromSlotCheckForArguments(node, NOT_INSIDE_TYPEOF);
 }
 
@@ -2249,7 +2229,7 @@ void CodeGenerator::VisitVariableProxy(VariableProxy* node) {
   } else {
     ASSERT(var->is_global());
     Reference ref(this, node);
-    ref.GetValue(typeof_state());
+    ref.GetValue();
   }
 }
 
@@ -2640,9 +2620,9 @@ void CodeGenerator::VisitAssignment(Assignment* node) {
     // the target, with an implicit promise that it will be written to again
     // before it is read.
     if (literal != NULL || (right_var != NULL && right_var != var)) {
-      target.TakeValue(NOT_INSIDE_TYPEOF);
+      target.TakeValue();
     } else {
-      target.GetValue(NOT_INSIDE_TYPEOF);
+      target.GetValue();
     }
     Load(node->value());
     GenericBinaryOperation(node->binary_op(),
@@ -2690,7 +2670,7 @@ void CodeGenerator::VisitThrow(Throw* node) {
 
 void CodeGenerator::VisitProperty(Property* node) {
   Comment cmnt(masm_, "[ Property");
   Reference property(this, node);
-  property.GetValue(typeof_state());
+  property.GetValue();
 }
 
@@ -2876,7 +2856,7 @@ void CodeGenerator::VisitCall(Call* node) {
       // Load the function to call from the property through a reference.
       Reference ref(this, property);
-      ref.GetValue(NOT_INSIDE_TYPEOF);
+      ref.GetValue();
 
       // Pass receiver to called function.
       if (property->is_synthetic()) {
@@ -2982,9 +2962,6 @@ void CodeGenerator::VisitCallRuntime(CallRuntime* node) {
 
 void CodeGenerator::VisitUnaryOperation(UnaryOperation* node) {
-  // Note that because of NOT and an optimization in comparison of a typeof
-  // expression to a literal string, this function can fail to leave a value
-  // on top of the frame or in the cc register.
   Comment cmnt(masm_, "[ UnaryOperation");
 
   Token::Value op = node->op();
 
   if (op == Token::NOT) {
     // Swap the true and false targets but keep the same actual label
     // as the fall through.
     destination()->Invert();
-    LoadCondition(node->expression(), NOT_INSIDE_TYPEOF, destination(), true);
+    LoadCondition(node->expression(), destination(), true);
     // Swap the labels back.
     destination()->Invert();
@@ -3233,7 +3210,7 @@ void CodeGenerator::VisitCountOperation(CountOperation* node) {
     if (!is_postfix) frame_->Push(Smi::FromInt(0));
     return;
   }
-  target.TakeValue(NOT_INSIDE_TYPEOF);
+  target.TakeValue();
 
   Result new_value = frame_->Pop();
   new_value.ToRegister();
@@ -3291,9 +3268,6 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
   // TODO(X64): This code was copied verbatim from codegen-ia32.
   //     Either find a reason to change it or move it to a shared location.
 
-  // Note that due to an optimization in comparison operations (typeof
-  // compared to a string literal), we can evaluate a binary expression such
-  // as AND or OR and not leave a value on the frame or in the cc register.
   Comment cmnt(masm_, "[ BinaryOperation");
   Token::Value op = node->op();
 
@@ -3309,7 +3283,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
   if (op == Token::AND) {
     JumpTarget is_true;
     ControlDestination dest(&is_true, destination()->false_target(), true);
-    LoadCondition(node->left(), NOT_INSIDE_TYPEOF, &dest, false);
+    LoadCondition(node->left(), &dest, false);
 
     if (dest.false_was_fall_through()) {
       // The current false target was used as the fall-through. If
@@ -3328,7 +3302,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
         is_true.Bind();
         // The left subexpression compiled to control flow, so the
         // right one is free to do so as well.
-        LoadCondition(node->right(), NOT_INSIDE_TYPEOF, destination(), false);
+        LoadCondition(node->right(), destination(), false);
       } else {
         // We have actually just jumped to or bound the current false
         // target but the current control destination is not marked as
@@ -3339,7 +3313,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
     } else if (dest.is_used()) {
       // The left subexpression compiled to control flow (and is_true
       // was just bound), so the right is free to do so as well.
-      LoadCondition(node->right(), NOT_INSIDE_TYPEOF, destination(), false);
+      LoadCondition(node->right(), destination(), false);
     } else {
       // We have a materialized value on the frame, so we exit with
@@ -3372,7 +3346,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
   } else if (op == Token::OR) {
     JumpTarget is_false;
     ControlDestination dest(destination()->true_target(), &is_false, false);
-    LoadCondition(node->left(), NOT_INSIDE_TYPEOF, &dest, false);
+    LoadCondition(node->left(), &dest, false);
 
     if (dest.true_was_fall_through()) {
       // The current true target was used as the fall-through. If
@@ -3391,7 +3365,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
         is_false.Bind();
         // The left subexpression compiled to control flow, so the
         // right one is free to do so as well.
-        LoadCondition(node->right(), NOT_INSIDE_TYPEOF, destination(), false);
+        LoadCondition(node->right(), destination(), false);
       } else {
         // We have just jumped to or bound the current true target but
         // the current control destination is not marked as used.
@@ -3401,7 +3375,7 @@ void CodeGenerator::VisitBinaryOperation(BinaryOperation* node) {
     } else if (dest.is_used()) {
       // The left subexpression compiled to control flow (and is_false
       // was just bound), so the right is free to do so as well.
-      LoadCondition(node->right(), NOT_INSIDE_TYPEOF, destination(), false);
+      LoadCondition(node->right(), destination(), false);
     } else {
       // We have a materialized value on the frame, so we exit with
@@ -3523,6 +3497,9 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       destination()->false_target()->Branch(is_smi);
       frame_->Spill(answer.reg());
       __ CmpObjectType(answer.reg(), JS_FUNCTION_TYPE, answer.reg());
+      destination()->true_target()->Branch(equal);
+      // Regular expressions are callable so typeof == 'function'.
+      __ CmpInstanceType(answer.reg(), JS_REGEXP_TYPE);
       answer.Unuse();
       destination()->Split(equal);
 
@@ -3532,9 +3509,11 @@ void CodeGenerator::VisitCompareOperation(CompareOperation* node) {
       __ CompareRoot(answer.reg(), Heap::kNullValueRootIndex);
       destination()->true_target()->Branch(equal);
 
+      // Regular expressions are typeof == 'function', not 'object'.
+      __ CmpObjectType(answer.reg(), JS_REGEXP_TYPE, kScratchRegister);
+      destination()->false_target()->Branch(equal);
+
       // It can be an undetectable object.
-      __ movq(kScratchRegister,
-              FieldOperand(answer.reg(), HeapObject::kMapOffset));
       __ testb(FieldOperand(kScratchRegister, Map::kBitFieldOffset),
                Immediate(1 << Map::kIsUndetectable));
       destination()->false_target()->Branch(not_zero);
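Both hunks encode the same quirk of this V8 vintage: regular expression objects are callable, so typeof answers 'function' for them, and they must be excluded from the 'object' branch. A compact model of the decision the generated code makes:

    #include <string>

    enum InstanceType { JS_FUNCTION_TYPE, JS_REGEXP_TYPE, JS_OBJECT_TYPE };

    // Mirrors the compare-operation fast path above; the undetectable flag
    // models Map::kIsUndetectable.
    std::string TypeofName(InstanceType t, bool undetectable) {
      if (t == JS_FUNCTION_TYPE || t == JS_REGEXP_TYPE) return "function";
      if (undetectable) return "undefined";
      return "object";
    }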
@@ -3679,7 +3658,6 @@ void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
   Label slow_case;
   Label end;
   Label not_a_flat_string;
-  Label a_cons_string;
   Label try_again_with_new_string;
   Label ascii_string;
   Label got_char_code;
@@ -3758,7 +3736,7 @@ void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
   __ addq(rcx, Immediate(String::kLongLengthShift));
   // Fetch the length field into the temporary register.
   __ movl(temp.reg(), FieldOperand(object.reg(), String::kLengthOffset));
-  __ shrl(temp.reg());  // The shift amount in ecx is implicit operand.
+  __ shrl_cl(temp.reg());
   // Check for index out of range.
   __ cmpl(index.reg(), temp.reg());
   __ j(greater_equal, &slow_case);
@@ -3767,10 +3745,11 @@ void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
   __ movzxbl(temp.reg(), FieldOperand(temp.reg(), Map::kInstanceTypeOffset));
 
   // We need special handling for non-flat strings.
-  ASSERT(kSeqStringTag == 0);
+  ASSERT_EQ(0, kSeqStringTag);
   __ testb(temp.reg(), Immediate(kStringRepresentationMask));
   __ j(not_zero, &not_a_flat_string);
   // Check for 1-byte or 2-byte string.
+  ASSERT_EQ(0, kTwoByteStringTag);
   __ testb(temp.reg(), Immediate(kStringEncodingMask));
   __ j(not_zero, &ascii_string);
 
@@ -3797,21 +3776,16 @@ void CodeGenerator::GenerateFastCharCodeAt(ZoneList<Expression*>* args) {
   __ bind(&not_a_flat_string);
   __ and_(temp.reg(), Immediate(kStringRepresentationMask));
   __ cmpb(temp.reg(), Immediate(kConsStringTag));
-  __ j(equal, &a_cons_string);
-  __ cmpb(temp.reg(), Immediate(kSlicedStringTag));
   __ j(not_equal, &slow_case);
 
-  // SlicedString.
-  // Add the offset to the index and trigger the slow case on overflow.
-  __ addl(index.reg(), FieldOperand(object.reg(), SlicedString::kStartOffset));
-  __ j(overflow, &slow_case);
-  // Getting the underlying string is done by running the cons string code.
-
-  // ConsString.
-  __ bind(&a_cons_string);
-  // Get the first of the two strings. Both sliced and cons strings
-  // store their source string at the same offset.
-  ASSERT(SlicedString::kBufferOffset == ConsString::kFirstOffset);
+  // Check that the right hand side is the empty string (i.e. if this is
+  // really a flat string in a cons string). If that is not the case we would
+  // rather go to the runtime system now, to flatten the string.
+  __ movq(temp.reg(), FieldOperand(object.reg(), ConsString::kSecondOffset));
+  __ CompareRoot(temp.reg(), Heap::kEmptyStringRootIndex);
+  __ j(not_equal, &slow_case);
+  // Get the first of the two strings.
   __ movq(object.reg(), FieldOperand(object.reg(), ConsString::kFirstOffset));
   __ jmp(&try_again_with_new_string);
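With sliced strings removed, the only non-flat shape the fast path still handles is a cons string whose second half is the empty string, i.e. a cons that has already been flattened in place; anything else goes to the runtime to be flattened. A sketch of that unwrapping rule with toy string shapes:

    // Toy model of the cons-string check in GenerateFastCharCodeAt.
    struct Str { bool is_cons; Str* first; Str* second; bool is_empty; };

    // Returns the string to retry with, or null to take the slow case.
    Str* UnwrapFlattenedCons(Str* s) {
      if (!s->is_cons) return s;
      if (!s->second->is_empty) return nullptr;  // runtime must flatten
      return s->first;  // payload lives entirely in the first component
    }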
@@ -4122,18 +4096,17 @@ void CodeGenerator::GenerateValueOf(ZoneList<Expression*>* args) {
 // -----------------------------------------------------------------------------
 // CodeGenerator implementation of Expressions
 
-void CodeGenerator::LoadAndSpill(Expression* expression,
-                                 TypeofState typeof_state) {
+void CodeGenerator::LoadAndSpill(Expression* expression) {
   // TODO(x64): No architecture specific code. Move to shared location.
   ASSERT(in_spilled_code());
   set_in_spilled_code(false);
-  Load(expression, typeof_state);
+  Load(expression);
   frame_->SpillAll();
   set_in_spilled_code(true);
 }
 
-void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
+void CodeGenerator::Load(Expression* expr) {
 #ifdef DEBUG
   int original_height = frame_->height();
 #endif
   JumpTarget true_target;
   JumpTarget false_target;
   ControlDestination dest(&true_target, &false_target, true);
-  LoadCondition(x, typeof_state, &dest, false);
+  LoadCondition(expr, &dest, false);
 
   if (dest.false_was_fall_through()) {
     // The false target was just bound.
@@ -4201,13 +4174,12 @@ void CodeGenerator::Load(Expression* x, TypeofState typeof_state) {
 // partially compiled) into control flow to the control destination.
 // If force_control is true, control flow is forced.
 void CodeGenerator::LoadCondition(Expression* x,
-                                  TypeofState typeof_state,
                                   ControlDestination* dest,
                                   bool force_control) {
   ASSERT(!in_spilled_code());
   int original_height = frame_->height();
 
-  { CodeGenState new_state(this, typeof_state, dest);
+  { CodeGenState new_state(this, dest);
     Visit(x);
 
     // If we hit a stack overflow, we may not have actually visited
@@ -4835,23 +4807,25 @@ Result CodeGenerator::StoreArgumentsObject(bool initial) {
 }
 
-// TODO(1241834): Get rid of this function in favor of just using Load, now
-// that we have the INSIDE_TYPEOF typeof state. => Need to handle global
-// variables w/o reference errors elsewhere.
-void CodeGenerator::LoadTypeofExpression(Expression* x) {
-  Variable* variable = x->AsVariableProxy()->AsVariable();
+void CodeGenerator::LoadTypeofExpression(Expression* expr) {
+  // Special handling of identifiers as subexpressions of typeof.
+  Variable* variable = expr->AsVariableProxy()->AsVariable();
   if (variable != NULL && !variable->is_this() && variable->is_global()) {
-    // NOTE: This is somewhat nasty. We force the compiler to load
-    // the variable as if through '<global>.<variable>' to make sure we
-    // do not get reference errors.
+    // For a global variable we build the property reference
+    // <global>.<variable> and perform a (regular non-contextual) property
+    // load to make sure we do not get reference errors.
    Slot global(variable, Slot::CONTEXT, Context::GLOBAL_INDEX);
    Literal key(variable->name());
-    // TODO(1241834): Fetch the position from the variable instead of using
-    // no position.
    Property property(&global, &key, RelocInfo::kNoPosition);
-    Load(&property);
+    Reference ref(this, &property);
+    ref.GetValue();
+  } else if (variable != NULL && variable->slot() != NULL) {
+    // For a variable that rewrites to a slot, we signal it is the immediate
+    // subexpression of a typeof.
+    LoadFromSlotCheckForArguments(variable->slot(), INSIDE_TYPEOF);
   } else {
-    Load(x, INSIDE_TYPEOF);
+    // Anything else can be handled normally.
+    Load(expr);
   }
 }
 
@@ -5746,7 +5720,7 @@ Handle<String> Reference::GetName() {
 }
 
-void Reference::GetValue(TypeofState typeof_state) {
+void Reference::GetValue() {
   ASSERT(!cgen_->in_spilled_code());
   ASSERT(cgen_->HasValidEntryRegisters());
   ASSERT(!is_illegal());
@@ -5763,17 +5737,11 @@ void Reference::GetValue(TypeofState typeof_state) {
       Comment cmnt(masm, "[ Load from Slot");
       Slot* slot = expression_->AsVariableProxy()->AsVariable()->slot();
       ASSERT(slot != NULL);
-      cgen_->LoadFromSlotCheckForArguments(slot, typeof_state);
+      cgen_->LoadFromSlotCheckForArguments(slot, NOT_INSIDE_TYPEOF);
       break;
     }
 
     case NAMED: {
-      // TODO(1241834): Make sure that it is safe to ignore the
-      // distinction between expressions in a typeof and not in a
-      // typeof. If there is a chance that reference errors can be
-      // thrown below, we must distinguish between the two kinds of
-      // loads (typeof expression loads must not throw a reference
-      // error).
       Variable* var = expression_->AsVariableProxy()->AsVariable();
       bool is_global = var != NULL;
       ASSERT(!is_global || var->is_global());
@@ -5855,8 +5823,6 @@ void Reference::GetValue(TypeofState typeof_state) {
     }
 
     case KEYED: {
-      // TODO(1241834): Make sure that this it is safe to ignore the
-      // distinction between expressions in a typeof and not in a typeof.
       Comment cmnt(masm, "[ Load from keyed Property");
       Variable* var = expression_->AsVariableProxy()->AsVariable();
       bool is_global = var != NULL;
@@ -5978,7 +5944,7 @@ void Reference::GetValue(TypeofState typeof_state) {
 }
 
-void Reference::TakeValue(TypeofState typeof_state) {
+void Reference::TakeValue() {
   // TODO(X64): This function is completely architecture independent. Move
   // it somewhere shared.
@@ -5987,7 +5953,7 @@ void Reference::TakeValue(TypeofState typeof_state) {
   ASSERT(!cgen_->in_spilled_code());
   ASSERT(!is_illegal());
   if (type_ != SLOT) {
-    GetValue(typeof_state);
+    GetValue();
     return;
   }
@@ -5997,7 +5963,7 @@ void Reference::TakeValue(TypeofState typeof_state) {
       slot->type() == Slot::CONTEXT ||
       slot->var()->mode() == Variable::CONST ||
       slot->is_arguments()) {
-    GetValue(typeof_state);
+    GetValue();
     return;
   }
 
@@ -6601,11 +6567,11 @@ void InstanceofStub::Generate(MacroAssembler* masm) {
   __ jmp(&loop);
 
   __ bind(&is_instance);
-  __ xor_(rax, rax);
+  __ xorl(rax, rax);
   __ ret(2 * kPointerSize);
 
   __ bind(&is_not_instance);
-  __ Move(rax, Smi::FromInt(1));
+  __ movl(rax, Immediate(1));
   __ ret(2 * kPointerSize);
 
   // Slow-case: Go through the JavaScript implementation.
@@ -6771,7 +6737,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
                               Label* throw_normal_exception,
                               Label* throw_termination_exception,
                               Label* throw_out_of_memory_exception,
-                              StackFrame::Type frame_type,
+                              ExitFrame::Mode mode,
                               bool do_gc,
                               bool always_allocate_scope) {
   // rax: result parameter for PerformGC, if any.
@@ -6854,7 +6820,7 @@ void CEntryStub::GenerateCore(MacroAssembler* masm,
   __ j(zero, &failure_returned);
 
   // Exit the JavaScript to C++ exit frame.
-  __ LeaveExitFrame(frame_type, result_size_);
+  __ LeaveExitFrame(mode, result_size_);
   __ ret(0);
 
   // Handling of failure.
@@ -6984,12 +6950,12 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
   // this by performing a garbage collection and retrying the
   // builtin once.
 
-  StackFrame::Type frame_type = is_debug_break ?
-      StackFrame::EXIT_DEBUG :
-      StackFrame::EXIT;
+  ExitFrame::Mode mode = is_debug_break ?
+      ExitFrame::MODE_DEBUG :
+      ExitFrame::MODE_NORMAL;
 
   // Enter the exit frame that transitions from JavaScript to C++.
-  __ EnterExitFrame(frame_type, result_size_);
+  __ EnterExitFrame(mode, result_size_);
 
   // rax: Holds the context at this point, but should not be used.
   //      On entry to code generated by GenerateCore, it must hold
@@ -7012,7 +6978,7 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
                &throw_normal_exception,
                &throw_termination_exception,
                &throw_out_of_memory_exception,
-               frame_type,
+               mode,
                false,
                false);
 
@@ -7021,7 +6987,7 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
                &throw_normal_exception,
                &throw_termination_exception,
                &throw_out_of_memory_exception,
-               frame_type,
+               mode,
                true,
                false);
 
@@ -7032,7 +6998,7 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
                &throw_normal_exception,
                &throw_termination_exception,
                &throw_out_of_memory_exception,
-               frame_type,
+               mode,
                true,
                true);
 
@@ -7047,6 +7013,11 @@ void CEntryStub::GenerateBody(MacroAssembler* masm, bool is_debug_break) {
 }
 
+void ApiGetterEntryStub::Generate(MacroAssembler* masm) {
+  UNREACHABLE();
+}
+
+
 void JSEntryStub::GenerateBody(MacroAssembler* masm, bool is_construct) {
   Label invoke, exit;
 #ifdef ENABLE_LOGGING_AND_PROFILING
@@ -7604,7 +7575,7 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
       if (use_sse3_) {
         // Truncate the operands to 32-bit integers and check for
         // exceptions in doing so.
-        CpuFeatures::Scope scope(CpuFeatures::SSE3);
+        CpuFeatures::Scope scope(SSE3);
         __ fisttp_s(Operand(rsp, 0 * kPointerSize));
         __ fisttp_s(Operand(rsp, 1 * kPointerSize));
         __ fnstsw_ax();
@@ -7633,9 +7604,9 @@ void GenericBinaryOpStub::Generate(MacroAssembler* masm) {
       case Token::BIT_OR: __ orl(rax, rcx); break;
       case Token::BIT_AND: __ andl(rax, rcx); break;
       case Token::BIT_XOR: __ xorl(rax, rcx); break;
-      case Token::SAR: __ sarl(rax); break;
-      case Token::SHL: __ shll(rax); break;
-      case Token::SHR: __ shrl(rax); break;
+      case Token::SAR: __ sarl_cl(rax); break;
+      case Token::SHL: __ shll_cl(rax); break;
+      case Token::SHR: __ shrl_cl(rax); break;
      default: UNREACHABLE();
     }
     if (op_ == Token::SHR) {
@@ -7797,7 +7768,7 @@ ModuloFunction CreateModuloFunction() {
                                                  &actual_size,
                                                  true));
   CHECK(buffer);
-  Assembler masm(buffer, actual_size);
+  Assembler masm(buffer, static_cast<int>(actual_size));
   // Generated code is put into a fixed, unmovable, buffer, and not into
   // the V8 heap. We can't, and don't, refer to any relocatable addresses
   // (e.g. the JavaScript nan-object).
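The refactor threads an ExitFrame::Mode through GenerateCore instead of overloading StackFrame::Type with EXIT vs EXIT_DEBUG. A sketch of the shape of the new parameter; the comment about what debug mode does is a general statement about debug exit frames, not quoted from this patch:

    struct ExitFrame {
      enum Mode { MODE_NORMAL, MODE_DEBUG };
    };

    // Stand-in for EnterExitFrame: debug mode additionally preserves state
    // the debugger wants to inspect; normal mode skips that bookkeeping.
    void EnterExitFrame(ExitFrame::Mode mode, int result_size) {
      if (mode == ExitFrame::MODE_DEBUG) {
        // save extra register state for the debugger (omitted)
      }
      (void)result_size;
    }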
diff --git a/deps/v8/src/x64/codegen-x64.h b/deps/v8/src/x64/codegen-x64.h
index 0721d5228f..0301daf3d7 100644
--- a/deps/v8/src/x64/codegen-x64.h
+++ b/deps/v8/src/x64/codegen-x64.h
@@ -77,12 +77,12 @@ class Reference BASE_EMBEDDED {
   // Generate code to push the value of the reference on top of the
   // expression stack.  The reference is expected to be already on top of
   // the expression stack, and it is left in place with its value above it.
-  void GetValue(TypeofState typeof_state);
+  void GetValue();
 
   // Like GetValue except that the slot is expected to be written to before
   // being read from again.  The value of the reference may be invalidated,
   // causing subsequent attempts to read it to fail.
-  void TakeValue(TypeofState typeof_state);
+  void TakeValue();
 
@@ -241,28 +241,20 @@ class CodeGenState BASE_EMBEDDED {
   explicit CodeGenState(CodeGenerator* owner);
 
   // Create a code generator state based on a code generator's current
-  // state.  The new state may or may not be inside a typeof, and has its
-  // own control destination.
-  CodeGenState(CodeGenerator* owner,
-               TypeofState typeof_state,
-               ControlDestination* destination);
+  // state.  The new state has its own control destination.
+  CodeGenState(CodeGenerator* owner, ControlDestination* destination);
 
   // Destroy a code generator state and restore the owning code generator's
   // previous state.
   ~CodeGenState();
 
   // Accessors for the state.
-  TypeofState typeof_state() const { return typeof_state_; }
   ControlDestination* destination() const { return destination_; }
 
  private:
   // The owning code generator.
   CodeGenerator* owner_;
 
-  // A flag indicating whether we are compiling the immediate subexpression
-  // of a typeof expression.
-  TypeofState typeof_state_;
-
   // A control destination in case the expression has a control-flow
   // effect.
   ControlDestination* destination_;
@@ -307,17 +299,12 @@ class CodeGenerator: public AstVisitor {
   static bool ShouldGenerateLog(Expression* type);
 #endif
 
-  static void SetFunctionInfo(Handle<JSFunction> fun,
-                              FunctionLiteral* lit,
-                              bool is_toplevel,
-                              Handle<Script> script);
-
   static void RecordPositions(MacroAssembler* masm, int pos);
 
   // Accessors
   MacroAssembler* masm() { return masm_; }
-
   VirtualFrame* frame() const { return frame_; }
+  Handle<Script> script() { return script_; }
 
   bool has_valid_frame() const { return frame_ != NULL; }
 
@@ -353,7 +340,6 @@ class CodeGenerator: public AstVisitor {
   bool is_eval() { return is_eval_; }
 
   // State
-  TypeofState typeof_state() const { return state_->typeof_state(); }
   ControlDestination* destination() const { return state_->destination(); }
 
   // Track loop nesting level.
@@ -414,18 +400,16 @@ class CodeGenerator: public AstVisitor {
   }
 
   void LoadCondition(Expression* x,
-                     TypeofState typeof_state,
                      ControlDestination* destination,
                      bool force_control);
-  void Load(Expression* x, TypeofState typeof_state = NOT_INSIDE_TYPEOF);
+  void Load(Expression* expr);
   void LoadGlobal();
   void LoadGlobalReceiver();
 
   // Generate code to push the value of an expression on top of the frame
   // and then spill the frame fully to memory.  This function is used
   // temporarily while the code generator is being transformed.
-  void LoadAndSpill(Expression* expression,
-                    TypeofState typeof_state = NOT_INSIDE_TYPEOF);
+  void LoadAndSpill(Expression* expression);
 
   // Read a value from a slot and leave it on top of the expression stack.
   void LoadFromSlot(Slot* slot, TypeofState typeof_state);
@@ -511,8 +495,6 @@ class CodeGenerator: public AstVisitor {
   static bool PatchInlineRuntimeEntry(Handle<String> name,
                                       const InlineRuntimeLUT& new_entry,
                                       InlineRuntimeLUT* old_entry);
-  static Handle<Code> ComputeLazyCompile(int argc);
-  Handle<JSFunction> BuildBoilerplate(FunctionLiteral* node);
   void ProcessDeclarations(ZoneList<Declaration*>* declarations);
 
   static Handle<Code> ComputeCallInitialize(int argc, InLoopFlag in_loop);
@@ -574,6 +556,7 @@ class CodeGenerator: public AstVisitor {
   void CodeForFunctionPosition(FunctionLiteral* fun);
   void CodeForReturnPosition(FunctionLiteral* fun);
   void CodeForStatementPosition(Statement* node);
+  void CodeForDoWhileConditionPosition(DoWhileStatement* stmt);
   void CodeForSourcePosition(int pos);
 
 #ifdef DEBUG
@@ -633,6 +616,25 @@ class CodeGenerator: public AstVisitor {
 // times by generated code to perform common tasks, often the slow
 // case of a JavaScript operation.  They are all subclasses of CodeStub,
 // which is declared in code-stubs.h.
+class CallFunctionStub: public CodeStub {
+ public:
+  CallFunctionStub(int argc, InLoopFlag in_loop)
+      : argc_(argc), in_loop_(in_loop) { }
+
+  void Generate(MacroAssembler* masm);
+
+ private:
+  int argc_;
+  InLoopFlag in_loop_;
+
+#ifdef DEBUG
+  void Print() { PrintF("CallFunctionStub (args %d)\n", argc_); }
+#endif
+
+  Major MajorKey() { return CallFunction; }
+  int MinorKey() { return argc_; }
+  InLoopFlag InLoop() { return in_loop_; }
+};
 
 class ToBooleanStub: public CodeStub {
@@ -664,7 +666,7 @@ class GenericBinaryOpStub: public CodeStub {
         flags_(flags),
         args_in_registers_(false),
         args_reversed_(false) {
-    use_sse3_ = CpuFeatures::IsSupported(CpuFeatures::SSE3);
+    use_sse3_ = CpuFeatures::IsSupported(SSE3);
     ASSERT(OpBits::is_valid(Token::NUM_TOKENS));
   }
diff --git a/deps/v8/src/x64/disasm-x64.cc b/deps/v8/src/x64/disasm-x64.cc
index 9fd581df39..0b43e766e6 100644
--- a/deps/v8/src/x64/disasm-x64.cc
+++ b/deps/v8/src/x64/disasm-x64.cc
@@ -1069,7 +1069,7 @@ int DisassemblerX64::TwoByteOpcodeInstruction(byte* data) {
   } else {
     UnimplementedInstruction();
   }
-  return current - data;
+  return static_cast<int>(current - data);
 }
 
@@ -1474,7 +1474,7 @@ int DisassemblerX64::InstructionDecode(v8::internal::Vector<char> out_buffer,
     tmp_buffer_[tmp_buffer_pos_] = '\0';
   }
 
-  int instr_len = data - instr;
+  int instr_len = static_cast<int>(data - instr);
   ASSERT(instr_len > 0);  // Ensure progress.
 
   int outp = 0;
@@ -1586,7 +1586,7 @@ void Disassembler::Disassemble(FILE* f, byte* begin, byte* end) {
     for (byte* bp = prev_pc; bp < pc; bp++) {
       fprintf(f, "%02x", *bp);
     }
-    for (int i = 6 - (pc - prev_pc); i >= 0; i--) {
+    for (int i = 6 - static_cast<int>(pc - prev_pc); i >= 0; i--) {
       fprintf(f, "  ");
    }
    fprintf(f, "  %s\n", buffer.start());
diff --git a/deps/v8/src/x64/fast-codegen-x64.cc b/deps/v8/src/x64/fast-codegen-x64.cc
index b938119cd3..bb85ef5d69 100644
--- a/deps/v8/src/x64/fast-codegen-x64.cc
+++ b/deps/v8/src/x64/fast-codegen-x64.cc
@@ -28,6 +28,7 @@
 #include "v8.h"
 
 #include "codegen-inl.h"
+#include "compiler.h"
 #include "debug.h"
 #include "fast-codegen.h"
 #include "parser.h"
@@ -61,9 +62,65 @@ void FastCodeGenerator::Generate(FunctionLiteral* fun) {
   { Comment cmnt(masm_, "[ Allocate locals");
     int locals_count = fun->scope()->num_stack_slots();
-    for (int i = 0; i < locals_count; i++) {
-      __ PushRoot(Heap::kUndefinedValueRootIndex);
+    if (locals_count <= 1) {
+      if (locals_count > 0) {
+        __ PushRoot(Heap::kUndefinedValueRootIndex);
+      }
+    } else {
+      __ LoadRoot(rdx, Heap::kUndefinedValueRootIndex);
+      for (int i = 0; i < locals_count; i++) {
+        __ push(rdx);
+      }
     }
   }
+
+  bool function_in_register = true;
+
+  Variable* arguments = fun->scope()->arguments()->AsVariable();
+  if (arguments != NULL) {
+    // Function uses arguments object.
+    Comment cmnt(masm_, "[ Allocate arguments object");
+    __ push(rdi);
+    // The receiver is just before the parameters on the caller's stack.
+    __ lea(rdx, Operand(rbp, StandardFrameConstants::kCallerSPOffset +
+                                 fun->num_parameters() * kPointerSize));
+    __ push(rdx);
+    __ Push(Smi::FromInt(fun->num_parameters()));
+    // Arguments to ArgumentsAccessStub:
+    //   function, receiver address, parameter count.
+    // The stub will rewrite receiver and parameter count if the previous
+    // stack frame was an arguments adapter frame.
+    ArgumentsAccessStub stub(ArgumentsAccessStub::NEW_OBJECT);
+    __ CallStub(&stub);
+    // Store new arguments object in both "arguments" and ".arguments" slots.
+    __ movq(Operand(rbp, SlotOffset(arguments->slot())), rax);
+    Slot* dot_arguments_slot =
+        fun->scope()->arguments_shadow()->AsVariable()->slot();
+    __ movq(Operand(rbp, SlotOffset(dot_arguments_slot)), rax);
+    function_in_register = false;
+  }
+
+  // Possibly allocate a local context.
+  if (fun->scope()->num_heap_slots() > 0) {
+    Comment cmnt(masm_, "[ Allocate local context");
+    if (function_in_register) {
+      // Argument to NewContext is the function, still in rdi.
+      __ push(rdi);
+    } else {
+      // Argument to NewContext is the function, no longer in rdi.
+      __ push(Operand(rbp, JavaScriptFrameConstants::kFunctionOffset));
+    }
+    __ CallRuntime(Runtime::kNewContext, 1);
+    // Context is returned in both rax and rsi.  It replaces the context
+    // passed to us.  It's saved in the stack and kept live in rsi.
+    __ movq(Operand(rbp, StandardFrameConstants::kContextOffset), rsi);
+#ifdef DEBUG
+    // Assert we do not have to copy any parameters into the context.
+    for (int i = 0, len = fun->scope()->num_parameters(); i < len; i++) {
+      Slot* slot = fun->scope()->parameter(i)->slot();
+      ASSERT(slot != NULL && slot->type() != Slot::CONTEXT);
+    }
+#endif
+  }
 
   { Comment cmnt(masm_, "[ Stack check");
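The arguments-object allocation passes the stub a pointer to the receiver, computed from the frame pointer: the receiver sits just beyond the parameters on the caller's side of the frame. The address arithmetic, with kCallerSPOffset assumed to be two slots (saved frame pointer plus return address) in the spirit of StandardFrameConstants:

    #include <cstdint>

    intptr_t ReceiverAddress(intptr_t rbp_value, int num_parameters) {
      const int kPointerSize = 8;                    // x64
      const int kCallerSPOffset = 2 * kPointerSize;  // assumed layout
      return rbp_value + kCallerSPOffset + num_parameters * kPointerSize;
    }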
@@ -84,25 +141,41 @@ void FastCodeGenerator::Generate(FunctionLiteral* fun) {
   }
 
   { Comment cmnt(masm_, "[ Body");
+    ASSERT(loop_depth() == 0);
     VisitStatements(fun->body());
+    ASSERT(loop_depth() == 0);
   }
 
   { Comment cmnt(masm_, "[ return <undefined>;");
-    // Emit a 'return undefined' in case control fell off the end of the
-    // body.
+    // Emit a 'return undefined' in case control fell off the end of the body.
     __ LoadRoot(rax, Heap::kUndefinedValueRootIndex);
-    SetReturnPosition(fun);
+    EmitReturnSequence(function_->end_position());
+  }
+}
+
+
+void FastCodeGenerator::EmitReturnSequence(int position) {
+  Comment cmnt(masm_, "[ Return sequence");
+  if (return_label_.is_bound()) {
+    __ jmp(&return_label_);
+  } else {
+    __ bind(&return_label_);
     if (FLAG_trace) {
       __ push(rax);
       __ CallRuntime(Runtime::kTraceExit, 1);
     }
+#ifdef DEBUG
+    // Add a label for checking the size of the code used for returning.
+    Label check_exit_codesize;
+    masm_->bind(&check_exit_codesize);
+#endif
+    CodeGenerator::RecordPositions(masm_, position);
     __ RecordJSReturn();
-
     // Do not use the leave instruction here because it is too short to
     // patch with the code required by the debugger.
     __ movq(rsp, rbp);
     __ pop(rbp);
-    __ ret((fun->scope()->num_parameters() + 1) * kPointerSize);
+    __ ret((function_->scope()->num_parameters() + 1) * kPointerSize);
 #ifdef ENABLE_DEBUGGER_SUPPORT
     // Add padding that will be overwritten by a debugger breakpoint.  We
     // have just generated "movq rsp, rbp; pop rbp; ret k" with length 7
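The emitted epilogue is exactly 7 bytes (movq rsp,rbp is 3, pop rbp is 1, ret k is 3) and is padded with int3 so the debugger can overwrite a fixed-size window. The padding arithmetic, with a sequence length of 13 chosen only for illustration since the real constant is Debug::kX64JSReturnSequenceLength:

    #include <cassert>
    #include <vector>

    std::vector<unsigned char> PadReturnSequence() {
      const int kEmitted = 7;                // 3 + 1 + 3 bytes
      const int kReturnSequenceLength = 13;  // assumed debugger window
      std::vector<unsigned char> code(kEmitted, 0x90);  // placeholder bytes
      for (int i = kEmitted; i < kReturnSequenceLength; ++i)
        code.push_back(0xCC);                // int3 padding
      assert(code.size() == static_cast<size_t>(kReturnSequenceLength));
      return code;
    }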
@@ -111,59 +184,229 @@ void FastCodeGenerator::Generate(FunctionLiteral* fun) {
     for (int i = 0; i < kPadding; ++i) {
       masm_->int3();
     }
+    // Check that the size of the code used for returning matches what is
+    // expected by the debugger.
+    ASSERT_EQ(Debug::kX64JSReturnSequenceLength,
+              masm_->SizeOfCodeGeneratedSince(&check_exit_codesize));
 #endif
   }
 }
 
-void FastCodeGenerator::Move(Location destination, Slot* source) {
-  switch (destination.type()) {
-    case Location::kUninitialized:
+void FastCodeGenerator::Move(Expression::Context context, Register source) {
+  switch (context) {
+    case Expression::kUninitialized:
       UNREACHABLE();
-    case Location::kEffect:
+    case Expression::kEffect:
       break;
-    case Location::kValue:
-      __ push(Operand(rbp, SlotOffset(source)));
+    case Expression::kValue:
+      __ push(source);
       break;
+    case Expression::kTest:
+      TestAndBranch(source, true_label_, false_label_);
+      break;
+    case Expression::kValueTest: {
+      Label discard;
+      __ push(source);
+      TestAndBranch(source, true_label_, &discard);
+      __ bind(&discard);
+      __ addq(rsp, Immediate(kPointerSize));
+      __ jmp(false_label_);
+      break;
+    }
+    case Expression::kTestValue: {
+      Label discard;
+      __ push(source);
+      TestAndBranch(source, &discard, false_label_);
+      __ bind(&discard);
+      __ addq(rsp, Immediate(kPointerSize));
+      __ jmp(true_label_);
+      break;
+    }
   }
 }
 
-void FastCodeGenerator::Move(Location destination, Literal* expr) {
-  switch (destination.type()) {
-    case Location::kUninitialized:
+void FastCodeGenerator::Move(Expression::Context context, Slot* source) {
+  switch (context) {
+    case Expression::kUninitialized:
       UNREACHABLE();
-    case Location::kEffect:
+    case Expression::kEffect:
       break;
-    case Location::kValue:
-      __ Push(expr->handle());
+    case Expression::kValue:
+      __ push(Operand(rbp, SlotOffset(source)));
+      break;
+    case Expression::kTest:  // Fall through.
+    case Expression::kValueTest:  // Fall through.
+    case Expression::kTestValue:
+      __ movq(rax, Operand(rbp, SlotOffset(source)));
+      Move(context, rax);
       break;
   }
 }
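The old two-valued Location (effect or value) grows into the five-way Expression::Context that all the rewritten Move/DropAndMove helpers switch over; the two hybrid contexts explain the "discard" label pattern, where the value is kept on one branch edge and popped on the other. The enum, reconstructed from the cases used throughout this file:

    struct Expression {
      enum Context {
        kUninitialized,  // invalid
        kEffect,         // result unused; drop it
        kValue,          // leave the result on the stack
        kTest,           // compile straight into a branch
        kValueTest,      // value wanted only if the test is true
        kTestValue       // value wanted only if the test is false
      };
    };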
 
-void FastCodeGenerator::Move(Slot* destination, Location source) {
-  switch (source.type()) {
-    case Location::kUninitialized:  // Fall through.
-    case Location::kEffect:
+void FastCodeGenerator::Move(Expression::Context context, Literal* expr) {
+  switch (context) {
+    case Expression::kUninitialized:
       UNREACHABLE();
-    case Location::kValue:
-      __ pop(Operand(rbp, SlotOffset(destination)));
+    case Expression::kEffect:
+      break;
+    case Expression::kValue:
+      __ Push(expr->handle());
+      break;
+    case Expression::kTest:  // Fall through.
+    case Expression::kValueTest:  // Fall through.
+    case Expression::kTestValue:
+      __ Move(rax, expr->handle());
+      Move(context, rax);
       break;
   }
 }
 
-void FastCodeGenerator::DropAndMove(Location destination, Register source) {
-  switch (destination.type()) {
-    case Location::kUninitialized:
+void FastCodeGenerator::DropAndMove(Expression::Context context,
+                                    Register source) {
+  switch (context) {
+    case Expression::kUninitialized:
       UNREACHABLE();
-    case Location::kEffect:
+    case Expression::kEffect:
       __ addq(rsp, Immediate(kPointerSize));
       break;
-    case Location::kValue:
+    case Expression::kValue:
       __ movq(Operand(rsp, 0), source);
       break;
+    case Expression::kTest:
+      ASSERT(!source.is(rsp));
+      __ addq(rsp, Immediate(kPointerSize));
+      TestAndBranch(source, true_label_, false_label_);
+      break;
+    case Expression::kValueTest: {
+      Label discard;
+      __ movq(Operand(rsp, 0), source);
+      TestAndBranch(source, true_label_, &discard);
+      __ bind(&discard);
+      __ addq(rsp, Immediate(kPointerSize));
+      __ jmp(false_label_);
+      break;
+    }
+    case Expression::kTestValue: {
+      Label discard;
+      __ movq(Operand(rsp, 0), source);
+      TestAndBranch(source, &discard, false_label_);
+      __ bind(&discard);
+      __ addq(rsp, Immediate(kPointerSize));
+      __ jmp(true_label_);
+      break;
+    }
+  }
+}
+
+
+void FastCodeGenerator::TestAndBranch(Register source,
+                                      Label* true_label,
+                                      Label* false_label) {
+  ASSERT_NE(NULL, true_label);
+  ASSERT_NE(NULL, false_label);
+  // Use the shared ToBoolean stub to compile the value in the register into
+  // control flow to the code generator's true and false labels.  Perform
+  // the fast checks assumed by the stub.
+
+  // The undefined value is false.
+  __ CompareRoot(source, Heap::kUndefinedValueRootIndex);
+  __ j(equal, false_label);
+  __ CompareRoot(source, Heap::kTrueValueRootIndex);  // True is true.
+  __ j(equal, true_label);
+  __ CompareRoot(source, Heap::kFalseValueRootIndex);  // False is false.
+  __ j(equal, false_label);
+  ASSERT_EQ(0, kSmiTag);
+  __ SmiCompare(source, Smi::FromInt(0));  // The smi zero is false.
+  __ j(equal, false_label);
+  Condition is_smi = masm_->CheckSmi(source);  // All other smis are true.
+  __ j(is_smi, true_label);
+
+  // Call the stub for all other cases.
+  __ push(source);
+  ToBooleanStub stub;
+  __ CallStub(&stub);
+  __ testq(rax, rax);  // The stub returns nonzero for true.
+  __ j(not_zero, true_label);
+  __ jmp(false_label);
+}
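TestAndBranch inlines the common ToBoolean answers before calling the stub, and the order matters because every failed check falls through to the next. The same decision ladder as a plain function:

    enum Outcome { BRANCH_TRUE, BRANCH_FALSE, CALL_STUB };

    // Mirrors the inlined checks above: undefined, true, false, and the
    // smi zero are decided inline; other smis are true; everything else
    // (strings, heap numbers, objects) goes to ToBooleanStub.
    Outcome FastToBoolean(bool is_undefined, bool is_true, bool is_false,
                          bool is_smi, long long smi_value) {
      if (is_undefined) return BRANCH_FALSE;
      if (is_true) return BRANCH_TRUE;
      if (is_false) return BRANCH_FALSE;
      if (is_smi) return smi_value == 0 ? BRANCH_FALSE : BRANCH_TRUE;
      return CALL_STUB;
    }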
 
 
+void FastCodeGenerator::VisitDeclaration(Declaration* decl) {
+  Comment cmnt(masm_, "[ Declaration");
+  Variable* var = decl->proxy()->var();
+  ASSERT(var != NULL);  // Must have been resolved.
+  Slot* slot = var->slot();
+  ASSERT(slot != NULL);  // No global declarations here.
+
+  // We have 3 cases for slots: LOOKUP, LOCAL, CONTEXT.
+  switch (slot->type()) {
+    case Slot::LOOKUP: {
+      __ push(rsi);
+      __ Push(var->name());
+      // Declaration nodes are always introduced in one of two modes.
+      ASSERT(decl->mode() == Variable::VAR || decl->mode() == Variable::CONST);
+      PropertyAttributes attr = decl->mode() == Variable::VAR ?
+          NONE : READ_ONLY;
+      __ Push(Smi::FromInt(attr));
+      // Push initial value, if any.
+      // Note: For variables we must not push an initial value (such as
+      // 'undefined') because we may have a (legal) redeclaration and we
+      // must not destroy the current value.
+      if (decl->mode() == Variable::CONST) {
+        __ Push(Factory::the_hole_value());
+      } else if (decl->fun() != NULL) {
+        Visit(decl->fun());
+      } else {
+        __ Push(Smi::FromInt(0));  // no initial value!
+      }
+      __ CallRuntime(Runtime::kDeclareContextSlot, 4);
+      break;
+    }
+    case Slot::LOCAL:
+      if (decl->mode() == Variable::CONST) {
+        __ Move(Operand(rbp, SlotOffset(var->slot())),
+                Factory::the_hole_value());
+      } else if (decl->fun() != NULL) {
+        Visit(decl->fun());
+        __ pop(Operand(rbp, SlotOffset(var->slot())));
+      }
+      break;
+    case Slot::CONTEXT:
+      // The variable in the decl always resides in the current context.
+      ASSERT(function_->scope()->ContextChainLength(slot->var()->scope()) == 0);
+      if (decl->mode() == Variable::CONST) {
+        __ Move(rax, Factory::the_hole_value());
+        if (FLAG_debug_code) {
+          // Check if we have the correct context pointer.
+          __ movq(rbx, CodeGenerator::ContextOperand(
+              rsi, Context::FCONTEXT_INDEX));
+          __ cmpq(rbx, rsi);
+          __ Check(equal, "Unexpected declaration in current context.");
+        }
+        __ movq(CodeGenerator::ContextOperand(rsi, slot->index()), rax);
+        // No write barrier since the_hole_value is in old space.
+        ASSERT(!Heap::InNewSpace(*Factory::the_hole_value()));
+      } else if (decl->fun() != NULL) {
+        Visit(decl->fun());
+        __ pop(rax);
+        if (FLAG_debug_code) {
+          // Check if we have the correct context pointer.
+          __ movq(rbx, CodeGenerator::ContextOperand(
+              rsi, Context::FCONTEXT_INDEX));
+          __ cmpq(rbx, rsi);
+          __ Check(equal, "Unexpected declaration in current context.");
+        }
+        __ movq(CodeGenerator::ContextOperand(rsi, slot->index()), rax);
+        int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize;
+        __ RecordWrite(rsi, offset, rax, rcx);
+      }
+      break;
+    default:
+      UNREACHABLE();
+  }
+}
 
@@ -180,36 +423,15 @@ void FastCodeGenerator::DeclareGlobals(Handle<FixedArray> pairs) {
 
 void FastCodeGenerator::VisitReturnStatement(ReturnStatement* stmt) {
   Comment cmnt(masm_, "[ ReturnStatement");
-  SetStatementPosition(stmt);
   Expression* expr = stmt->expression();
-  // Complete the statement based on the type of the subexpression.
   if (expr->AsLiteral() != NULL) {
     __ Move(rax, expr->AsLiteral()->handle());
   } else {
     Visit(expr);
-    Move(rax, expr->location());
-  }
-
-  if (FLAG_trace) {
-    __ push(rax);
-    __ CallRuntime(Runtime::kTraceExit, 1);
-  }
-
-  __ RecordJSReturn();
-  // Do not use the leave instruction here because it is too short to
-  // patch with the code required by the debugger.
-  __ movq(rsp, rbp);
-  __ pop(rbp);
-  __ ret((function_->scope()->num_parameters() + 1) * kPointerSize);
-#ifdef ENABLE_DEBUGGER_SUPPORT
-  // Add padding that will be overwritten by a debugger breakpoint.  We
-  // have just generated "movq rsp, rbp; pop rbp; ret k" with length 7
-  // (3 + 1 + 3).
-  const int kPadding = Debug::kX64JSReturnSequenceLength - 7;
-  for (int i = 0; i < kPadding; ++i) {
-    masm_->int3();
+    ASSERT_EQ(Expression::kValue, expr->context());
+    __ pop(rax);
   }
-#endif
+  EmitReturnSequence(stmt->statement_pos());
 }
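In the CONTEXT case above, storing a closure into a context slot is followed by RecordWrite, while storing the hole is explicitly not, because the hole lives in old space and a generational collector only needs to track pointers from old space into new space. The rule in one predicate, with a stand-in for Heap::InNewSpace:

    struct Obj { bool in_new_space; };

    // Whether a store into an old-space holder needs a remembered-set entry.
    bool NeedsWriteBarrier(const Obj& stored_value) {
      return stored_value.in_new_space;  // old->new edges must be recorded
    }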
- Handle<JSFunction> boilerplate = BuildBoilerplate(expr); + Handle<JSFunction> boilerplate = + Compiler::BuildBoilerplate(expr, script_, this); if (HasStackOverflow()) return; ASSERT(boilerplate->IsBoilerplate()); @@ -226,7 +449,7 @@ void FastCodeGenerator::VisitFunctionLiteral(FunctionLiteral* expr) { __ push(rsi); __ Push(boilerplate); __ CallRuntime(Runtime::kNewClosure, 2); - Move(expr->location(), rax); + Move(expr->context(), rax); } @@ -234,6 +457,7 @@ void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) { Comment cmnt(masm_, "[ VariableProxy"); Expression* rewrite = expr->var()->rewrite(); if (rewrite == NULL) { + ASSERT(expr->var()->is_global()); Comment cmnt(masm_, "Global variable"); // Use inline caching. Variable name is passed in rcx and the global // object on the stack. @@ -241,20 +465,67 @@ void FastCodeGenerator::VisitVariableProxy(VariableProxy* expr) { __ Move(rcx, expr->name()); Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); __ Call(ic, RelocInfo::CODE_TARGET_CONTEXT); - // A test rax instruction following the call is used by the IC to // indicate that the inobject property case was inlined. Ensure there // is no test rax instruction here. - DropAndMove(expr->location(), rax); + __ nop(); + + DropAndMove(expr->context(), rax); + } else if (rewrite->AsSlot() != NULL) { + Slot* slot = rewrite->AsSlot(); + switch (slot->type()) { + case Slot::LOCAL: + case Slot::PARAMETER: { + Comment cmnt(masm_, "Stack slot"); + Move(expr->context(), slot); + break; + } + + case Slot::CONTEXT: { + Comment cmnt(masm_, "Context slot"); + int chain_length = + function_->scope()->ContextChainLength(slot->var()->scope()); + if (chain_length > 0) { + // Move up the chain of contexts to the context containing the slot. + __ movq(rax, + Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX))); + // Load the function context (which is the incoming, outer context). + __ movq(rax, FieldOperand(rax, JSFunction::kContextOffset)); + for (int i = 1; i < chain_length; i++) { + __ movq(rax, + Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX))); + __ movq(rax, FieldOperand(rax, JSFunction::kContextOffset)); + } + // The context may be an intermediate context, not a function context. + __ movq(rax, + Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX))); + } else { // Slot is in the current function context. + // The context may be an intermediate context, not a function context. + __ movq(rax, + Operand(rsi, Context::SlotOffset(Context::FCONTEXT_INDEX))); + } + __ movq(rax, Operand(rax, Context::SlotOffset(slot->index()))); + Move(expr->context(), rax); + break; + } + + case Slot::LOOKUP: + UNREACHABLE(); + break; + } } else { - Comment cmnt(masm_, "Stack slot"); - Move(expr->location(), rewrite->AsSlot()); + // The parameter variable has been rewritten into an explicit access to + // the arguments object. + Property* property = rewrite->AsProperty(); + ASSERT_NOT_NULL(property); + ASSERT_EQ(expr->context(), property->context()); + Visit(property); } } void FastCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { - Comment cmnt(masm_, "[ RegExp Literal"); + Comment cmnt(masm_, "[ RegExpLiteral"); Label done; // Registers will be used as follows: // rdi = JS function.
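// ---- Editor's annotation: illustrative sketch, not part of this patch ----
// The Slot::CONTEXT case in VisitVariableProxy above resolves chain_length
// at compile time and hops the context chain at run time. A hedged C++
// rendering of what the emitted movq sequence computes (names illustrative):
//
//   Context* ctx = current_context;            // in rsi
//   for (int i = 0; i < chain_length; i++) {
//     ctx = ctx->closure()->context();         // CLOSURE_INDEX, kContextOffset
//   }
//   ctx = ctx->fcontext();                     // FCONTEXT_INDEX: normalize an
//                                              // intermediate context to the
//                                              // enclosing function context
//   Object* value = ctx->slot_at(slot->index());
// ---- end annotation ----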
@@ -276,7 +547,7 @@ void FastCodeGenerator::VisitRegExpLiteral(RegExpLiteral* expr) { __ CallRuntime(Runtime::kMaterializeRegExpLiteral, 4); // Label done: __ bind(&done); - Move(expr->location(), rax); + Move(expr->context(), rax); } @@ -329,7 +600,7 @@ void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { case ObjectLiteral::Property::COMPUTED: if (key->handle()->IsSymbol()) { Visit(value); - ASSERT(value->location().is_value()); + ASSERT_EQ(Expression::kValue, value->context()); __ pop(rax); __ Move(rcx, key->handle()); Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); @@ -341,9 +612,9 @@ void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { case ObjectLiteral::Property::PROTOTYPE: __ push(rax); Visit(key); - ASSERT(key->location().is_value()); + ASSERT_EQ(Expression::kValue, key->context()); Visit(value); - ASSERT(value->location().is_value()); + ASSERT_EQ(Expression::kValue, value->context()); __ CallRuntime(Runtime::kSetProperty, 3); __ movq(rax, Operand(rsp, 0)); // Restore result into rax. break; @@ -351,27 +622,49 @@ void FastCodeGenerator::VisitObjectLiteral(ObjectLiteral* expr) { case ObjectLiteral::Property::GETTER: __ push(rax); Visit(key); - ASSERT(key->location().is_value()); + ASSERT_EQ(Expression::kValue, key->context()); __ Push(property->kind() == ObjectLiteral::Property::SETTER ? Smi::FromInt(1) : Smi::FromInt(0)); Visit(value); - ASSERT(value->location().is_value()); + ASSERT_EQ(Expression::kValue, value->context()); __ CallRuntime(Runtime::kDefineAccessor, 4); __ movq(rax, Operand(rsp, 0)); // Restore result into rax. break; default: UNREACHABLE(); } } - switch (expr->location().type()) { - case Location::kUninitialized: + switch (expr->context()) { + case Expression::kUninitialized: UNREACHABLE(); - case Location::kEffect: + case Expression::kEffect: if (result_saved) __ addq(rsp, Immediate(kPointerSize)); break; - case Location::kValue: + case Expression::kValue: + if (!result_saved) __ push(rax); + break; + case Expression::kTest: + if (result_saved) __ pop(rax); + TestAndBranch(rax, true_label_, false_label_); + break; + case Expression::kValueTest: { + Label discard; + if (!result_saved) __ push(rax); + TestAndBranch(rax, true_label_, &discard); + __ bind(&discard); + __ addq(rsp, Immediate(kPointerSize)); + __ jmp(false_label_); + break; + } + case Expression::kTestValue: { + Label discard; if (!result_saved) __ push(rax); + TestAndBranch(rax, &discard, false_label_); + __ bind(&discard); + __ addq(rsp, Immediate(kPointerSize)); + __ jmp(true_label_); break; + } } } @@ -424,7 +717,7 @@ void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { result_saved = true; } Visit(subexpr); - ASSERT(subexpr->location().is_value()); + ASSERT_EQ(Expression::kValue, subexpr->context()); // Store the subexpression value in the array's elements. __ pop(rax); // Subexpression value. 
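// ---- Editor's annotation, not part of this patch ----
// The five-way switch that recurs throughout this file encodes what the
// enclosing expression expects of a subexpression's result:
//
//   kEffect:     discard the value
//   kValue:      leave the value on the stack
//   kTest:       compile the value into a branch to true_label_/false_label_
//   kValueTest:  keep the value only on the true edge, then branch
//   kTestValue:  keep the value only on the false edge, then branch
//
// The kValueTest/kTestValue pair serves short-circuit operators: in
// `a || b`, for example, `a` is needed as the result only when it is true.
// ---- end annotation ----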
@@ -437,81 +730,218 @@ void FastCodeGenerator::VisitArrayLiteral(ArrayLiteral* expr) { __ RecordWrite(rbx, offset, rax, rcx); } - switch (expr->location().type()) { - case Location::kUninitialized: + switch (expr->context()) { + case Expression::kUninitialized: UNREACHABLE(); - case Location::kEffect: + case Expression::kEffect: if (result_saved) __ addq(rsp, Immediate(kPointerSize)); break; - case Location::kValue: + case Expression::kValue: + if (!result_saved) __ push(rax); + break; + case Expression::kTest: + if (result_saved) __ pop(rax); + TestAndBranch(rax, true_label_, false_label_); + break; + case Expression::kValueTest: { + Label discard; + if (!result_saved) __ push(rax); + TestAndBranch(rax, true_label_, &discard); + __ bind(&discard); + __ addq(rsp, Immediate(kPointerSize)); + __ jmp(false_label_); + break; + } + case Expression::kTestValue: { + Label discard; if (!result_saved) __ push(rax); + TestAndBranch(rax, &discard, false_label_); + __ bind(&discard); + __ addq(rsp, Immediate(kPointerSize)); + __ jmp(true_label_); break; + } } } -void FastCodeGenerator::VisitAssignment(Assignment* expr) { - Comment cmnt(masm_, "[ Assignment"); - ASSERT(expr->op() == Token::ASSIGN || expr->op() == Token::INIT_VAR); - - // Left-hand side can only be a global or a (parameter or local) slot. +void FastCodeGenerator::EmitVariableAssignment(Assignment* expr) { Variable* var = expr->target()->AsVariableProxy()->AsVariable(); ASSERT(var != NULL); - ASSERT(var->is_global() || var->slot() != NULL); - Expression* rhs = expr->value(); - Location destination = expr->location(); if (var->is_global()) { - // Assignment to a global variable, use inline caching. Right-hand-side - // value is passed in rax, variable name in rcx, and the global object - // on the stack. - - // Code for the right-hand-side expression depends on its type. - if (rhs->AsLiteral() != NULL) { - __ Move(rax, rhs->AsLiteral()->handle()); - } else { - ASSERT(rhs->location().is_value()); - Visit(rhs); - __ pop(rax); - } + // Assignment to a global variable. Use inline caching for the + // assignment. Right-hand-side value is passed in rax, variable name in + // rcx, and the global object on the stack. + __ pop(rax); __ Move(rcx, var->name()); __ push(CodeGenerator::GlobalObject()); Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); __ Call(ic, RelocInfo::CODE_TARGET); // Overwrite the global object on the stack with the result if needed. - DropAndMove(expr->location(), rax); + DropAndMove(expr->context(), rax); + } else { - // Local or parameter assignment. - - // Code for the right-hand-side expression depends on its type. - if (rhs->AsLiteral() != NULL) { - // Two cases: 'temp <- (var = constant)', or 'var = constant' with a - // discarded result. Always perform the assignment. - __ Move(kScratchRegister, rhs->AsLiteral()->handle()); - __ movq(Operand(rbp, SlotOffset(var->slot())), kScratchRegister); - Move(expr->location(), kScratchRegister); - } else { - ASSERT(rhs->location().is_value()); - Visit(rhs); - switch (expr->location().type()) { - case Location::kUninitialized: - UNREACHABLE(); - case Location::kEffect: - // Case 'var = temp'. Discard right-hand-side temporary. - Move(var->slot(), rhs->location()); - break; - case Location::kValue: - // Case 'temp1 <- (var = temp0)'. Preserve right-hand-side - // temporary on the stack. 
- __ movq(kScratchRegister, Operand(rsp, 0)); - __ movq(Operand(rbp, SlotOffset(var->slot())), kScratchRegister); - break; + Slot* slot = var->slot(); + ASSERT_NOT_NULL(slot); // Variables rewritten as properties not handled. + switch (slot->type()) { + case Slot::LOCAL: + case Slot::PARAMETER: { + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + case Expression::kEffect: + // Perform assignment and discard value. + __ pop(Operand(rbp, SlotOffset(var->slot()))); + break; + case Expression::kValue: + // Perform assignment and preserve value. + __ movq(rax, Operand(rsp, 0)); + __ movq(Operand(rbp, SlotOffset(var->slot())), rax); + break; + case Expression::kTest: + // Perform assignment and test (and discard) value. + __ pop(rax); + __ movq(Operand(rbp, SlotOffset(var->slot())), rax); + TestAndBranch(rax, true_label_, false_label_); + break; + case Expression::kValueTest: { + Label discard; + __ movq(rax, Operand(rsp, 0)); + __ movq(Operand(rbp, SlotOffset(var->slot())), rax); + TestAndBranch(rax, true_label_, &discard); + __ bind(&discard); + __ addq(rsp, Immediate(kPointerSize)); + __ jmp(false_label_); + break; + } + case Expression::kTestValue: { + Label discard; + __ movq(rax, Operand(rsp, 0)); + __ movq(Operand(rbp, SlotOffset(var->slot())), rax); + TestAndBranch(rax, &discard, false_label_); + __ bind(&discard); + __ addq(rsp, Immediate(kPointerSize)); + __ jmp(true_label_); + break; + } + } + break; } + + case Slot::CONTEXT: { + int chain_length = + function_->scope()->ContextChainLength(slot->var()->scope()); + if (chain_length > 0) { + // Move up the context chain to the context containing the slot. + __ movq(rax, + Operand(rsi, Context::SlotOffset(Context::CLOSURE_INDEX))); + // Load the function context (which is the incoming, outer context). + __ movq(rax, FieldOperand(rax, JSFunction::kContextOffset)); + for (int i = 1; i < chain_length; i++) { + __ movq(rax, + Operand(rax, Context::SlotOffset(Context::CLOSURE_INDEX))); + __ movq(rax, FieldOperand(rax, JSFunction::kContextOffset)); + } + } else { // Slot is in the current context. Generate optimized code. + __ movq(rax, rsi); // RecordWrite destroys the object register. + } + if (FLAG_debug_code) { + __ cmpq(rax, + Operand(rax, Context::SlotOffset(Context::FCONTEXT_INDEX))); + __ Check(equal, "Context Slot chain length wrong."); + } + __ pop(rcx); + __ movq(Operand(rax, Context::SlotOffset(slot->index())), rcx); + + // RecordWrite may destroy all its register arguments. + if (expr->context() == Expression::kValue) { + __ push(rcx); + } else if (expr->context() != Expression::kEffect) { + __ movq(rdx, rcx); + } + int offset = FixedArray::kHeaderSize + slot->index() * kPointerSize; + __ RecordWrite(rax, offset, rcx, rbx); + if (expr->context() != Expression::kEffect && + expr->context() != Expression::kValue) { + Move(expr->context(), rdx); + } + break; + } + + case Slot::LOOKUP: + UNREACHABLE(); + break; } } } +void FastCodeGenerator::EmitNamedPropertyAssignment(Assignment* expr) { + // Assignment to a property, using a named store IC. + Property* prop = expr->target()->AsProperty(); + ASSERT(prop != NULL); + ASSERT(prop->key()->AsLiteral() != NULL); + + // If the assignment starts a block of assignments to the same object, + // change to slow case to avoid the quadratic behavior of repeatedly + // adding fast properties. + if (expr->starts_initialization_block()) { + __ push(Operand(rsp, kPointerSize)); // Receiver is under value. 
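// ---- Editor's annotation, not part of this patch ----
// starts/ends_initialization_block bracket a run of stores to one object,
// e.g. the constructor pattern (JavaScript, illustrative):
//
//   this.a = 1; this.b = 2; this.c = 3;
//
// Dropping the receiver to slow (dictionary) properties for the run and
// restoring fast properties afterwards avoids the quadratic cost of
// growing a fast-properties object one field at a time.
// ---- end annotation ----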
+ __ CallRuntime(Runtime::kToSlowProperties, 1); + } + + __ pop(rax); + __ Move(rcx, prop->key()->AsLiteral()->handle()); + Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); + __ Call(ic, RelocInfo::CODE_TARGET); + + // If the assignment ends an initialization block, revert to fast case. + if (expr->ends_initialization_block()) { + __ push(rax); // Result of assignment, saved even if not needed. + __ push(Operand(rsp, kPointerSize)); // Receiver is under value. + __ CallRuntime(Runtime::kToFastProperties, 1); + __ pop(rax); + } + + DropAndMove(expr->context(), rax); +} + + +void FastCodeGenerator::EmitKeyedPropertyAssignment(Assignment* expr) { + // Assignment to a property, using a keyed store IC. + + // If the assignment starts a block of assignments to the same object, + // change to slow case to avoid the quadratic behavior of repeatedly + // adding fast properties. + if (expr->starts_initialization_block()) { + // Receiver is under the key and value. + __ push(Operand(rsp, 2 * kPointerSize)); + __ CallRuntime(Runtime::kToSlowProperties, 1); + } + + __ pop(rax); + Handle<Code> ic(Builtins::builtin(Builtins::KeyedStoreIC_Initialize)); + __ Call(ic, RelocInfo::CODE_TARGET); + // This nop signals to the IC that there is no inlined code at the call + // site for it to patch. + __ nop(); + + // If the assignment ends an initialization block, revert to fast case. + if (expr->ends_initialization_block()) { + __ push(rax); // Result of assignment, saved even if not needed. + // Receiver is under the key and value. + __ push(Operand(rsp, 2 * kPointerSize)); + __ CallRuntime(Runtime::kToFastProperties, 1); + __ pop(rax); + } + + // Receiver and key are still on stack. + __ addq(rsp, Immediate(2 * kPointerSize)); + Move(expr->context(), rax); +} + + void FastCodeGenerator::VisitProperty(Property* expr) { Comment cmnt(masm_, "[ Property"); Expression* key = expr->key(); @@ -523,6 +953,7 @@ void FastCodeGenerator::VisitProperty(Property* expr) { // Evaluate receiver. Visit(expr->obj()); + if (key->AsLiteral() != NULL && key->AsLiteral()->handle()->IsSymbol() && !String::cast(*(key->AsLiteral()->handle()))->AsArrayIndex(&dummy)) { // Do a NAMED property load. @@ -530,7 +961,7 @@ void FastCodeGenerator::VisitProperty(Property* expr) { __ Move(rcx, key->AsLiteral()->handle()); Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); __ call(ic, RelocInfo::CODE_TARGET); - // By emitting a nop we make sure that we do not have a "test eax,..." + // By emitting a nop we make sure that we do not have a "test rax,..." // instruction after the call, as it is treated specially by the LoadIC code. __ nop(); } else { // Do a KEYED property load. Visit(expr->key()); Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); __ call(ic, RelocInfo::CODE_TARGET); - // By emitting a nop we make sure that we do not have a "test ..." + // By emitting a nop we make sure that we do not have a "test rax,..." // instruction after the call, as it is treated specially by the LoadIC code. __ nop(); // Drop key left on the stack by IC.
__ addq(rsp, Immediate(kPointerSize)); } - switch (expr->location().type()) { - case Location::kUninitialized: - UNREACHABLE(); - case Location::kValue: - __ movq(Operand(rsp, 0), rax); - break; - case Location::kEffect: - __ addq(rsp, Immediate(kPointerSize)); - break; - } + DropAndMove(expr->context(), rax); } -void FastCodeGenerator::VisitCall(Call* expr) { - Expression* fun = expr->expression(); +void FastCodeGenerator::EmitCallWithIC(Call* expr, RelocInfo::Mode reloc_info) { + // Code common for calls using the IC. ZoneList<Expression*>* args = expr->arguments(); - Variable* var = fun->AsVariableProxy()->AsVariable(); - ASSERT(var != NULL && !var->is_this() && var->is_global()); - ASSERT(!var->is_possibly_eval()); - - __ Push(var->name()); - // Push global object (receiver). - __ push(CodeGenerator::GlobalObject()); int arg_count = args->length(); for (int i = 0; i < arg_count; i++) { Visit(args->at(i)); - ASSERT(args->at(i)->location().is_value()); + ASSERT_EQ(Expression::kValue, args->at(i)->context()); } - // Record source position for debugger + // Record source position for debugger. SetSourcePosition(expr->position()); // Call the IC initialization code. Handle<Code> ic = CodeGenerator::ComputeCallInitialize(arg_count, NOT_IN_LOOP); - __ call(ic, RelocInfo::CODE_TARGET_CONTEXT); + __ call(ic, reloc_info); // Restore context register. __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); // Discard the function left on TOS. - DropAndMove(expr->location(), rax); + DropAndMove(expr->context(), rax); } -void FastCodeGenerator::VisitCallNew(CallNew* node) { +void FastCodeGenerator::EmitCallWithStub(Call* expr) { + // Code common for calls using the call stub. + ZoneList<Expression*>* args = expr->arguments(); + int arg_count = args->length(); + for (int i = 0; i < arg_count; i++) { + Visit(args->at(i)); + } + // Record source position for debugger. + SetSourcePosition(expr->position()); + CallFunctionStub stub(arg_count, NOT_IN_LOOP); + __ CallStub(&stub); + // Restore context register. + __ movq(rsi, Operand(rbp, StandardFrameConstants::kContextOffset)); + // Discard the function left on TOS. + DropAndMove(expr->context(), rax); +} + + +void FastCodeGenerator::VisitCall(Call* expr) { + Comment cmnt(masm_, "[ Call"); + Expression* fun = expr->expression(); + Variable* var = fun->AsVariableProxy()->AsVariable(); + + if (var != NULL && var->is_possibly_eval()) { + // Call to the identifier 'eval'. + UNREACHABLE(); + } else if (var != NULL && !var->is_this() && var->is_global()) { + // Call to a global variable. + __ Push(var->name()); + // Push global object as receiver for the call IC lookup. + __ push(CodeGenerator::GlobalObject()); + EmitCallWithIC(expr, RelocInfo::CODE_TARGET_CONTEXT); + } else if (var != NULL && var->slot() != NULL && + var->slot()->type() == Slot::LOOKUP) { + // Call to a lookup slot. + UNREACHABLE(); + } else if (fun->AsProperty() != NULL) { + // Call to an object property. + Property* prop = fun->AsProperty(); + Literal* key = prop->key()->AsLiteral(); + if (key != NULL && key->handle()->IsSymbol()) { + // Call to a named property, use call IC. + __ Push(key->handle()); + Visit(prop->obj()); + EmitCallWithIC(expr, RelocInfo::CODE_TARGET); + } else { + // Call to a keyed property, use keyed load IC followed by function + // call. + Visit(prop->obj()); + Visit(prop->key()); + // Record source code position for IC call. 
+ SetSourcePosition(prop->position()); + Handle<Code> ic(Builtins::builtin(Builtins::KeyedLoadIC_Initialize)); + __ call(ic, RelocInfo::CODE_TARGET); + // By emitting a nop we make sure that we do not have a "test rax,..." + // instruction after the call, as it is treated specially by the LoadIC code. + __ nop(); + // Drop key left on the stack by IC. + __ addq(rsp, Immediate(kPointerSize)); + // Pop receiver. + __ pop(rbx); + // Push result (function). + __ push(rax); + // Push receiver object on stack. + if (prop->is_synthetic()) { + __ push(CodeGenerator::GlobalObject()); + } else { + __ push(rbx); + } + EmitCallWithStub(expr); + } + } else { + // Call to some other expression. If the expression is an anonymous + // function literal not called in a loop, mark it as one that should + // also use the fast code generator. + FunctionLiteral* lit = fun->AsFunctionLiteral(); + if (lit != NULL && + lit->name()->Equals(Heap::empty_string()) && + loop_depth() == 0) { + lit->set_try_fast_codegen(true); + } + Visit(fun); + // Load global receiver object. + __ movq(rbx, CodeGenerator::GlobalObject()); + __ push(FieldOperand(rbx, GlobalObject::kGlobalReceiverOffset)); + // Emit function call. + EmitCallWithStub(expr); + } +} + + +void FastCodeGenerator::VisitCallNew(CallNew* expr) { Comment cmnt(masm_, "[ CallNew"); // According to ECMA-262, section 11.2.2, page 44, the function // expression in new calls must be evaluated before the // arguments. // Push function on the stack. - Visit(node->expression()); - ASSERT(node->expression()->location().is_value()); + Visit(expr->expression()); + ASSERT_EQ(Expression::kValue, expr->expression()->context()); // If location is value, it is already on the stack, so nothing to do here. // Push global object (receiver). __ push(CodeGenerator::GlobalObject()); // Push the arguments ("left-to-right") on the stack. - ZoneList<Expression*>* args = node->arguments(); + ZoneList<Expression*>* args = expr->arguments(); int arg_count = args->length(); for (int i = 0; i < arg_count; i++) { Visit(args->at(i)); - ASSERT(args->at(i)->location().is_value()); + ASSERT_EQ(Expression::kValue, args->at(i)->context()); // If location is value, it is already on the stack, // so nothing to do here. } // Call the construct call builtin that handles allocation and // constructor invocation. - SetSourcePosition(node->position()); + SetSourcePosition(expr->position()); // Load function, arg_count into rdi and rax. __ Set(rax, arg_count); @@ -621,7 +1127,7 @@ void FastCodeGenerator::VisitCallNew(CallNew* node) { __ Call(construct_builtin, RelocInfo::CONSTRUCT_CALL); // Replace function on TOS with result in rax, or pop it.
- DropAndMove(node->location(), rax); + DropAndMove(expr->context(), rax); } @@ -636,19 +1142,220 @@ void FastCodeGenerator::VisitCallRuntime(CallRuntime* expr) { int arg_count = args->length(); for (int i = 0; i < arg_count; i++) { Visit(args->at(i)); - ASSERT(args->at(i)->location().is_value()); + ASSERT_EQ(Expression::kValue, args->at(i)->context()); } __ CallRuntime(function, arg_count); - Move(expr->location(), rax); + Move(expr->context(), rax); +} + +void FastCodeGenerator::VisitCountOperation(CountOperation* expr) { + Comment cmnt(masm_, "[ CountOperation"); + VariableProxy* proxy = expr->expression()->AsVariableProxy(); + ASSERT(proxy->AsVariable() != NULL); + ASSERT(proxy->AsVariable()->is_global()); + + Visit(proxy); + __ InvokeBuiltin(Builtins::TO_NUMBER, CALL_FUNCTION); + + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + case Expression::kValue: // Fall through + case Expression::kTest: // Fall through + case Expression::kTestValue: // Fall through + case Expression::kValueTest: + // Duplicate the result on the stack. + __ push(rax); + break; + case Expression::kEffect: + // Do not save result. + break; + } + // Call runtime for +1/-1. + __ push(rax); + __ Push(Smi::FromInt(1)); + if (expr->op() == Token::INC) { + __ CallRuntime(Runtime::kNumberAdd, 2); + } else { + __ CallRuntime(Runtime::kNumberSub, 2); + } + // Call Store IC. + __ Move(rcx, proxy->AsVariable()->name()); + __ push(CodeGenerator::GlobalObject()); + Handle<Code> ic(Builtins::builtin(Builtins::StoreIC_Initialize)); + __ call(ic, RelocInfo::CODE_TARGET); + // Restore the stack after the store IC call. + __ addq(rsp, Immediate(kPointerSize)); + + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + case Expression::kEffect: // Fall through + case Expression::kValue: + // Do nothing. The result is either on the stack for value context + // or discarded for effect context. + break; + case Expression::kTest: + __ pop(rax); + TestAndBranch(rax, true_label_, false_label_); + break; + case Expression::kValueTest: { + Label discard; + __ movq(rax, Operand(rsp, 0)); + TestAndBranch(rax, true_label_, &discard); + __ bind(&discard); + __ addq(rsp, Immediate(kPointerSize)); + __ jmp(false_label_); + break; + } + case Expression::kTestValue: { + Label discard; + __ movq(rax, Operand(rsp, 0)); + TestAndBranch(rax, &discard, false_label_); + __ bind(&discard); + __ addq(rsp, Immediate(kPointerSize)); + __ jmp(true_label_); + break; + } + } +} + + +void FastCodeGenerator::VisitUnaryOperation(UnaryOperation* expr) { + switch (expr->op()) { + case Token::VOID: { + Comment cmnt(masm_, "[ UnaryOperation (VOID)"); + Visit(expr->expression()); + ASSERT_EQ(Expression::kEffect, expr->expression()->context()); + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + break; + case Expression::kEffect: + break; + case Expression::kValue: + __ PushRoot(Heap::kUndefinedValueRootIndex); + break; + case Expression::kTestValue: + // The value (undefined) is false, so it is needed on the stack. + __ PushRoot(Heap::kUndefinedValueRootIndex); + // Fall through. + case Expression::kTest: // Fall through.
+ case Expression::kValueTest: + __ jmp(false_label_); + break; + } + break; + } + + case Token::NOT: { + Comment cmnt(masm_, "[ UnaryOperation (NOT)"); + ASSERT_EQ(Expression::kTest, expr->expression()->context()); + + Label push_true; + Label push_false; + Label done; + Label* saved_true = true_label_; + Label* saved_false = false_label_; + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + break; + + case Expression::kValue: + true_label_ = &push_false; + false_label_ = &push_true; + Visit(expr->expression()); + __ bind(&push_true); + __ PushRoot(Heap::kTrueValueRootIndex); + __ jmp(&done); + __ bind(&push_false); + __ PushRoot(Heap::kFalseValueRootIndex); + __ bind(&done); + break; + + case Expression::kEffect: + true_label_ = &done; + false_label_ = &done; + Visit(expr->expression()); + __ bind(&done); + break; + + case Expression::kTest: + true_label_ = saved_false; + false_label_ = saved_true; + Visit(expr->expression()); + break; + + case Expression::kValueTest: + true_label_ = saved_false; + false_label_ = &push_true; + Visit(expr->expression()); + __ bind(&push_true); + __ PushRoot(Heap::kTrueValueRootIndex); + __ jmp(saved_true); + break; + + case Expression::kTestValue: + true_label_ = &push_false; + false_label_ = saved_true; + Visit(expr->expression()); + __ bind(&push_false); + __ PushRoot(Heap::kFalseValueRootIndex); + __ jmp(saved_false); + break; + } + true_label_ = saved_true; + false_label_ = saved_false; + break; + } + + case Token::TYPEOF: { + Comment cmnt(masm_, "[ UnaryOperation (TYPEOF)"); + ASSERT_EQ(Expression::kValue, expr->expression()->context()); + + VariableProxy* proxy = expr->expression()->AsVariableProxy(); + if (proxy != NULL && + !proxy->var()->is_this() && + proxy->var()->is_global()) { + Comment cmnt(masm_, "Global variable"); + __ push(CodeGenerator::GlobalObject()); + __ Move(rcx, proxy->name()); + Handle<Code> ic(Builtins::builtin(Builtins::LoadIC_Initialize)); + // Use a regular load, not a contextual load, to avoid a reference + // error. + __ Call(ic, RelocInfo::CODE_TARGET); + __ movq(Operand(rsp, 0), rax); + } else if (proxy != NULL && + proxy->var()->slot() != NULL && + proxy->var()->slot()->type() == Slot::LOOKUP) { + __ push(rsi); + __ Push(proxy->name()); + __ CallRuntime(Runtime::kLoadContextSlotNoReferenceError, 2); + __ push(rax); + } else { + // This expression cannot throw a reference error at the top level. 
+ Visit(expr->expression()); + } + + __ CallRuntime(Runtime::kTypeof, 1); + Move(expr->context(), rax); + break; + } + + default: + UNREACHABLE(); + } } void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { + Comment cmnt(masm_, "[ BinaryOperation"); switch (expr->op()) { case Token::COMMA: - ASSERT(expr->left()->location().is_effect()); - ASSERT_EQ(expr->right()->location().type(), expr->location().type()); + ASSERT_EQ(Expression::kEffect, expr->left()->context()); + ASSERT_EQ(expr->context(), expr->right()->context()); Visit(expr->left()); Visit(expr->right()); break; @@ -669,8 +1376,8 @@ void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { case Token::SHL: case Token::SHR: case Token::SAR: { - ASSERT(expr->left()->location().is_value()); - ASSERT(expr->right()->location().is_value()); + ASSERT_EQ(Expression::kValue, expr->left()->context()); + ASSERT_EQ(Expression::kValue, expr->right()->context()); Visit(expr->left()); Visit(expr->right()); @@ -678,7 +1385,7 @@ void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { NO_OVERWRITE, NO_GENERIC_BINARY_FLAGS); __ CallStub(&stub); - Move(expr->location(), rax); + Move(expr->context(), rax); break; } @@ -688,93 +1395,163 @@ void FastCodeGenerator::VisitBinaryOperation(BinaryOperation* expr) { } -void FastCodeGenerator::EmitLogicalOperation(BinaryOperation* expr) { - // Compile a short-circuited boolean operation in a non-test context. +void FastCodeGenerator::VisitCompareOperation(CompareOperation* expr) { + Comment cmnt(masm_, "[ CompareOperation"); + ASSERT_EQ(Expression::kValue, expr->left()->context()); + ASSERT_EQ(Expression::kValue, expr->right()->context()); + Visit(expr->left()); + Visit(expr->right()); - // Compile (e0 || e1) as if it were - // (let (temp = e0) temp ? temp : e1). - // Compile (e0 && e1) as if it were - // (let (temp = e0) !temp ? temp : e1). + // Convert current context to test context: Pre-test code. + Label push_true; + Label push_false; + Label done; + Label* saved_true = true_label_; + Label* saved_false = false_label_; + switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + break; - Label eval_right, done; - Label *left_true, *left_false; // Where to branch to if lhs has that value. - if (expr->op() == Token::OR) { - left_true = &done; - left_false = &eval_right; - } else { - left_true = &eval_right; - left_false = &done; - } - Location destination = expr->location(); - Expression* left = expr->left(); - Expression* right = expr->right(); - - // Use the shared ToBoolean stub to find the boolean value of the - // left-hand subexpression. Load the value into rax to perform some - // inlined checks assumed by the stub. - - // Compile the left-hand value into rax. Put it on the stack if we may - // need it as the value of the whole expression. - if (left->AsLiteral() != NULL) { - __ Move(rax, left->AsLiteral()->handle()); - if (destination.is_value()) __ push(rax); - } else { - Visit(left); - ASSERT(left->location().is_value()); - switch (destination.type()) { - case Location::kUninitialized: - UNREACHABLE(); - case Location::kEffect: - // Pop the left-hand value into rax because we will not need it as the - // final result. - __ pop(rax); - break; - case Location::kValue: - // Copy the left-hand value into rax because we may need it as the - // final result. - __ movq(rax, Operand(rsp, 0)); - break; - } - } - // The left-hand value is in rax. It is also on the stack iff the - // destination location is value. 
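// ---- Editor's annotation: illustrative sketch, not part of this patch ----
// The removed EmitLogicalOperation materialized the left operand and
// tested it inline. The replacement pattern (NOT above, the comparisons
// below) compiles boolean expressions into pure control flow by
// retargeting the label pair before visiting the operand; for `!e` in a
// test context the entire compilation is a label swap:
//
//   true_label_ = saved_false;    // e true  => !e false
//   false_label_ = saved_true;    // e false => !e true
//   Visit(expr->expression());    // the operand's branches do the rest
//
// A boolean object is pushed only when a value context demands one.
// ---- end annotation ----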
+ case Expression::kValue: + true_label_ = &push_true; + false_label_ = &push_false; + break; - // Perform fast checks assumed by the stub. - // The undefined value is false. - __ CompareRoot(rax, Heap::kUndefinedValueRootIndex); - __ j(equal, left_false); - __ CompareRoot(rax, Heap::kTrueValueRootIndex); // True is true. - __ j(equal, left_true); - __ CompareRoot(rax, Heap::kFalseValueRootIndex); // False is false. - __ j(equal, left_false); - ASSERT(kSmiTag == 0); - __ SmiCompare(rax, Smi::FromInt(0)); // The smi zero is false. - __ j(equal, left_false); - Condition is_smi = masm_->CheckSmi(rax); // All other smis are true. - __ j(is_smi, left_true); + case Expression::kEffect: + true_label_ = &done; + false_label_ = &done; + break; - // Call the stub for all other cases. - __ push(rax); - ToBooleanStub stub; - __ CallStub(&stub); - __ testq(rax, rax); // The stub returns nonzero for true. - if (expr->op() == Token::OR) { - __ j(not_zero, &done); - } else { - __ j(zero, &done); + case Expression::kTest: + break; + + case Expression::kValueTest: + true_label_ = &push_true; + break; + + case Expression::kTestValue: + false_label_ = &push_false; + break; } + // Convert current context to test context: End pre-test code. - __ bind(&eval_right); - // Discard the left-hand value if present on the stack. - if (destination.is_value()) { - __ addq(rsp, Immediate(kPointerSize)); + switch (expr->op()) { + case Token::IN: { + __ InvokeBuiltin(Builtins::IN, CALL_FUNCTION); + __ CompareRoot(rax, Heap::kTrueValueRootIndex); + __ j(equal, true_label_); + __ jmp(false_label_); + break; + } + + case Token::INSTANCEOF: { + InstanceofStub stub; + __ CallStub(&stub); + __ testq(rax, rax); + __ j(zero, true_label_); // The stub returns 0 for true. + __ jmp(false_label_); + break; + } + + default: { + Condition cc = no_condition; + bool strict = false; + switch (expr->op()) { + case Token::EQ_STRICT: + strict = true; + // Fall through + case Token::EQ: + cc = equal; + __ pop(rax); + __ pop(rdx); + break; + case Token::LT: + cc = less; + __ pop(rax); + __ pop(rdx); + break; + case Token::GT: + // Reverse left and right sides to obtain ECMA-262 conversion order. + cc = less; + __ pop(rdx); + __ pop(rax); + break; + case Token::LTE: + // Reverse left and right sides to obtain ECMA-262 conversion order. + cc = greater_equal; + __ pop(rdx); + __ pop(rax); + break; + case Token::GTE: + cc = greater_equal; + __ pop(rax); + __ pop(rdx); + break; + case Token::IN: + case Token::INSTANCEOF: + default: + UNREACHABLE(); + } + + // The comparison stub expects the smi vs. smi case to be handled + // before it is called. + Label slow_case; + __ JumpIfNotBothSmi(rax, rdx, &slow_case); + __ SmiCompare(rdx, rax); + __ j(cc, true_label_); + __ jmp(false_label_); + + __ bind(&slow_case); + CompareStub stub(cc, strict); + __ CallStub(&stub); + __ testq(rax, rax); + __ j(cc, true_label_); + __ jmp(false_label_); + } } - // Save or discard the right-hand value as needed. - Visit(right); - ASSERT_EQ(destination.type(), right->location().type()); - __ bind(&done); + // Convert current context to test context: Post-test code.
+ switch (expr->context()) { + case Expression::kUninitialized: + UNREACHABLE(); + break; + + case Expression::kValue: + __ bind(&push_true); + __ PushRoot(Heap::kTrueValueRootIndex); + __ jmp(&done); + __ bind(&push_false); + __ PushRoot(Heap::kFalseValueRootIndex); + __ bind(&done); + break; + + case Expression::kEffect: + __ bind(&done); + break; + + case Expression::kTest: + break; + + case Expression::kValueTest: + __ bind(&push_true); + __ PushRoot(Heap::kTrueValueRootIndex); + __ jmp(saved_true); + break; + + case Expression::kTestValue: + __ bind(&push_false); + __ PushRoot(Heap::kFalseValueRootIndex); + __ jmp(saved_false); + break; + } + true_label_ = saved_true; + false_label_ = saved_false; + // Convert current context to test context: End post-test code. } +#undef __ + + } } // namespace v8::internal diff --git a/deps/v8/src/x64/frames-x64.cc b/deps/v8/src/x64/frames-x64.cc index fe224ad998..6a0527cf6d 100644 --- a/deps/v8/src/x64/frames-x64.cc +++ b/deps/v8/src/x64/frames-x64.cc @@ -57,11 +57,7 @@ StackFrame::Type ExitFrame::GetStateForFramePointer(Address fp, State* state) { state->sp = sp; state->pc_address = reinterpret_cast<Address*>(sp - 1 * kPointerSize); // Determine frame type. - if (Memory::Address_at(fp + ExitFrameConstants::kDebugMarkOffset) != 0) { - return EXIT_DEBUG; - } else { - return EXIT; - } + return EXIT; } int JavaScriptFrame::GetProvidedParametersCount() const { @@ -69,10 +65,10 @@ int JavaScriptFrame::GetProvidedParametersCount() const { } -void ExitFrame::Iterate(ObjectVisitor* a) const { - // Exit frames on X64 do not contain any pointers. The arguments - // are traversed as part of the expression stack of the calling - // frame. +void ExitFrame::Iterate(ObjectVisitor* v) const { + v->VisitPointer(&code_slot()); + // The arguments are traversed as part of the expression stack of + // the calling frame. } byte* InternalFrame::GetCallerStackPointer() const { diff --git a/deps/v8/src/x64/frames-x64.h b/deps/v8/src/x64/frames-x64.h index eefaa0aeb5..a92b248d88 100644 --- a/deps/v8/src/x64/frames-x64.h +++ b/deps/v8/src/x64/frames-x64.h @@ -63,7 +63,7 @@ class EntryFrameConstants : public AllStatic { class ExitFrameConstants : public AllStatic { public: - static const int kDebugMarkOffset = -2 * kPointerSize; + static const int kCodeOffset = -2 * kPointerSize; static const int kSPOffset = -1 * kPointerSize; static const int kCallerFPOffset = +0 * kPointerSize; diff --git a/deps/v8/src/x64/ic-x64.cc b/deps/v8/src/x64/ic-x64.cc index 2812df1561..1642a0490e 100644 --- a/deps/v8/src/x64/ic-x64.cc +++ b/deps/v8/src/x64/ic-x64.cc @@ -313,7 +313,7 @@ void KeyedLoadIC::GenerateGeneric(MacroAssembler* masm) { __ cmpl(rax, FieldOperand(rcx, PixelArray::kLengthOffset)); __ j(above_equal, &slow); __ movq(rcx, FieldOperand(rcx, PixelArray::kExternalPointerOffset)); - __ movb(rax, Operand(rcx, rax, times_1, 0)); + __ movzxbq(rax, Operand(rcx, rax, times_1, 0)); __ Integer32ToSmi(rax, rax); __ ret(0); @@ -790,6 +790,8 @@ void KeyedStoreIC::GenerateExternalArray(MacroAssembler* masm, // top of FPU stack: value if (array_type == kExternalFloatArray) { __ fstp_s(Operand(rcx, rbx, times_4, 0)); + __ movq(rax, rdx); // Return the original value. + __ ret(0); } else { // Need to perform float-to-int conversion. // Test the top of the FP stack for NaN. 
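// ---- Editor's annotation: hedged layout sketch, not part of this patch ----
// The frames-x64 changes above and the macro-assembler changes below
// replace the EXIT_DEBUG frame type with an ExitFrame::Mode and store a
// code object (or 0 in debug mode) where the debug mark used to live.
// Exit-frame slots relative to rbp, in words, per ExitFrameConstants:
//
//   rbp + 0 : saved caller rbp        (kCallerFPOffset)
//   rbp - 1 : saved entry sp          (kSPOffset)
//   rbp - 2 : code object or 0        (kCodeOffset, was kDebugMarkOffset)
//
// ExitFrame::Iterate can therefore visit exactly one pointer, the code
// slot, instead of none.
// ---- end annotation ----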
diff --git a/deps/v8/src/x64/macro-assembler-x64.cc b/deps/v8/src/x64/macro-assembler-x64.cc index b2f69bb7a9..9dea616718 100644 --- a/deps/v8/src/x64/macro-assembler-x64.cc +++ b/deps/v8/src/x64/macro-assembler-x64.cc @@ -286,7 +286,7 @@ void MacroAssembler::Abort(const char* msg) { movq(kScratchRegister, p0, RelocInfo::NONE); push(kScratchRegister); movq(kScratchRegister, - reinterpret_cast<intptr_t>(Smi::FromInt(p1 - p0)), + reinterpret_cast<intptr_t>(Smi::FromInt(static_cast<int>(p1 - p0))), RelocInfo::NONE); push(kScratchRegister); CallRuntime(Runtime::kAbort, 2); @@ -402,9 +402,9 @@ void MacroAssembler::Set(Register dst, int64_t x) { if (x == 0) { xor_(dst, dst); } else if (is_int32(x)) { - movq(dst, Immediate(x)); + movq(dst, Immediate(static_cast<int32_t>(x))); } else if (is_uint32(x)) { - movl(dst, Immediate(x)); + movl(dst, Immediate(static_cast<uint32_t>(x))); } else { movq(dst, x, RelocInfo::NONE); } @@ -416,9 +416,9 @@ void MacroAssembler::Set(const Operand& dst, int64_t x) { xor_(kScratchRegister, kScratchRegister); movq(dst, kScratchRegister); } else if (is_int32(x)) { - movq(dst, Immediate(x)); + movq(dst, Immediate(static_cast<int32_t>(x))); } else if (is_uint32(x)) { - movl(dst, Immediate(x)); + movl(dst, Immediate(static_cast<uint32_t>(x))); } else { movq(kScratchRegister, x, RelocInfo::NONE); movq(dst, kScratchRegister); @@ -1078,7 +1078,7 @@ void MacroAssembler::SmiShiftLeft(Register dst, SmiToInteger32(rcx, src2); // Shift amount specified by lower 5 bits, not six as the shl opcode. and_(rcx, Immediate(0x1f)); - shl(dst); + shl_cl(dst); } @@ -1099,7 +1099,7 @@ void MacroAssembler::SmiShiftLogicalRight(Register dst, } SmiToInteger32(rcx, src2); orl(rcx, Immediate(kSmiShift)); - shr(dst); // Shift is rcx modulo 0x1f + 32. + shr_cl(dst); // Shift is rcx modulo 0x1f + 32. shl(dst, Immediate(kSmiShift)); testq(dst, dst); if (src1.is(rcx) || src2.is(rcx)) { @@ -1135,7 +1135,7 @@ void MacroAssembler::SmiShiftArithmeticRight(Register dst, } SmiToInteger32(rcx, src2); orl(rcx, Immediate(kSmiShift)); - sar(dst); // Shift 32 + original rcx & 0x1f. + sar_cl(dst); // Shift 32 + original rcx & 0x1f. shl(dst, Immediate(kSmiShift)); if (src1.is(rcx)) { movq(src1, kScratchRegister); @@ -1787,9 +1787,7 @@ void MacroAssembler::LeaveFrame(StackFrame::Type type) { } -void MacroAssembler::EnterExitFrame(StackFrame::Type type, int result_size) { - ASSERT(type == StackFrame::EXIT || type == StackFrame::EXIT_DEBUG); - +void MacroAssembler::EnterExitFrame(ExitFrame::Mode mode, int result_size) { // Setup the frame structure on the stack. // All constants are relative to the frame pointer of the exit frame. ASSERT(ExitFrameConstants::kCallerSPDisplacement == +2 * kPointerSize); @@ -1801,7 +1799,12 @@ void MacroAssembler::EnterExitFrame(StackFrame::Type type, int result_size) { // Reserve room for entry stack pointer and push the debug marker. ASSERT(ExitFrameConstants::kSPOffset == -1 * kPointerSize); push(Immediate(0)); // saved entry sp, patched before call - push(Immediate(type == StackFrame::EXIT_DEBUG ? 1 : 0)); + if (mode == ExitFrame::MODE_DEBUG) { + push(Immediate(0)); + } else { + movq(kScratchRegister, CodeObject(), RelocInfo::EMBEDDED_OBJECT); + push(kScratchRegister); + } // Save the frame pointer and the context in top. 
ExternalReference c_entry_fp_address(Top::k_c_entry_fp_address); @@ -1821,7 +1824,7 @@ void MacroAssembler::EnterExitFrame(StackFrame::Type type, int result_size) { #ifdef ENABLE_DEBUGGER_SUPPORT // Save the state of all registers to the stack from the memory // location. This is needed to allow nested break points. - if (type == StackFrame::EXIT_DEBUG) { + if (mode == ExitFrame::MODE_DEBUG) { // TODO(1243899): This should be symmetric to // CopyRegistersFromStackToMemory() but it isn't! esp is assumed // correct here, but computed for the other call. Very error @@ -1860,17 +1863,17 @@ void MacroAssembler::EnterExitFrame(StackFrame::Type type, int result_size) { } -void MacroAssembler::LeaveExitFrame(StackFrame::Type type, int result_size) { +void MacroAssembler::LeaveExitFrame(ExitFrame::Mode mode, int result_size) { // Registers: // r15 : argv #ifdef ENABLE_DEBUGGER_SUPPORT // Restore the memory copy of the registers by digging them out from // the stack. This is needed to allow nested break points. - if (type == StackFrame::EXIT_DEBUG) { + if (mode == ExitFrame::MODE_DEBUG) { // It's okay to clobber register rbx below because we don't need // the function pointer after this. const int kCallerSavedSize = kNumJSCallerSaved * kPointerSize; - int kOffset = ExitFrameConstants::kDebugMarkOffset - kCallerSavedSize; + int kOffset = ExitFrameConstants::kCodeOffset - kCallerSavedSize; lea(rbx, Operand(rbp, kOffset)); CopyRegistersFromStackToMemory(rbx, rcx, kJSCallerSaved); } diff --git a/deps/v8/src/x64/macro-assembler-x64.h b/deps/v8/src/x64/macro-assembler-x64.h index 4c2f35bd9c..11cdfc3c4c 100644 --- a/deps/v8/src/x64/macro-assembler-x64.h +++ b/deps/v8/src/x64/macro-assembler-x64.h @@ -106,16 +106,16 @@ class MacroAssembler: public Assembler { void EnterConstructFrame() { EnterFrame(StackFrame::CONSTRUCT); } void LeaveConstructFrame() { LeaveFrame(StackFrame::CONSTRUCT); } - // Enter specific kind of exit frame; either EXIT or - // EXIT_DEBUG. Expects the number of arguments in register rax and + // Enter specific kind of exit frame; either in normal or + // debug mode. Expects the number of arguments in register rax and // sets up the number of arguments in register rdi and the pointer // to the first argument in register rsi. - void EnterExitFrame(StackFrame::Type type, int result_size = 1); + void EnterExitFrame(ExitFrame::Mode mode, int result_size = 1); // Leave the current exit frame. Expects/provides the return value in // register rax:rdx (untouched) and the pointer to the first // argument in register rsi. - void LeaveExitFrame(StackFrame::Type type, int result_size = 1); + void LeaveExitFrame(ExitFrame::Mode mode, int result_size = 1); // --------------------------------------------------------------------------- diff --git a/deps/v8/src/x64/regexp-macro-assembler-x64.cc b/deps/v8/src/x64/regexp-macro-assembler-x64.cc index 88636f843e..639f5e95b6 100644 --- a/deps/v8/src/x64/regexp-macro-assembler-x64.cc +++ b/deps/v8/src/x64/regexp-macro-assembler-x64.cc @@ -643,10 +643,10 @@ Handle<Object> RegExpMacroAssemblerX64::GetCode(Handle<String> source) { Label stack_limit_hit; Label stack_ok; - ExternalReference stack_guard_limit = - ExternalReference::address_of_stack_guard_limit(); + ExternalReference stack_limit = + ExternalReference::address_of_stack_limit(); __ movq(rcx, rsp); - __ movq(kScratchRegister, stack_guard_limit); + __ movq(kScratchRegister, stack_limit); __ subq(rcx, Operand(kScratchRegister, 0)); // Handle it if the stack pointer is already below the stack limit. 
__ j(below_equal, &stack_limit_hit); @@ -1079,7 +1079,7 @@ int RegExpMacroAssemblerX64::CheckStackGuardState(Address* return_address, // If there is a difference, update the object pointer and start and end // addresses in the RegExp stack frame to match the new value. const byte* end_address = frame_entry<const byte* >(re_frame, kInputEnd); - int byte_length = end_address - start_address; + int byte_length = static_cast<int>(end_address - start_address); frame_entry<const String*>(re_frame, kInputString) = *subject; frame_entry<const byte*>(re_frame, kInputStart) = new_address; frame_entry<const byte*>(re_frame, kInputEnd) = new_address + byte_length; @@ -1196,9 +1196,9 @@ void RegExpMacroAssemblerX64::Drop() { void RegExpMacroAssemblerX64::CheckPreemption() { // Check for preemption. Label no_preempt; - ExternalReference stack_guard_limit = - ExternalReference::address_of_stack_guard_limit(); - __ load_rax(stack_guard_limit); + ExternalReference stack_limit = + ExternalReference::address_of_stack_limit(); + __ load_rax(stack_limit); __ cmpq(rsp, rax); __ j(above, &no_preempt); diff --git a/deps/v8/src/x64/simulator-x64.h b/deps/v8/src/x64/simulator-x64.h index 998c9095e7..c4f3a85af5 100644 --- a/deps/v8/src/x64/simulator-x64.h +++ b/deps/v8/src/x64/simulator-x64.h @@ -44,6 +44,12 @@ class SimulatorStack : public v8::internal::AllStatic { static inline uintptr_t JsLimitFromCLimit(uintptr_t c_limit) { return c_limit; } + + static inline uintptr_t RegisterCTryCatch(uintptr_t try_catch_address) { + return try_catch_address; + } + + static inline void UnregisterCTryCatch() { } }; // Call the generated regexp code directly. The entry function pointer should @@ -51,4 +57,7 @@ class SimulatorStack : public v8::internal::AllStatic { #define CALL_GENERATED_REGEXP_CODE(entry, p0, p1, p2, p3, p4, p5, p6) \ entry(p0, p1, p2, p3, p4, p5, p6) +#define TRY_CATCH_FROM_ADDRESS(try_catch_address) \ + reinterpret_cast<TryCatch*>(try_catch_address) + #endif // V8_X64_SIMULATOR_X64_H_ diff --git a/deps/v8/src/x64/stub-cache-x64.cc b/deps/v8/src/x64/stub-cache-x64.cc index 58a3e0f6a2..584fd2b214 100644 --- a/deps/v8/src/x64/stub-cache-x64.cc +++ b/deps/v8/src/x64/stub-cache-x64.cc @@ -327,7 +327,7 @@ void StubCompiler::GenerateLoadStringLength(MacroAssembler* masm, __ movl(rax, FieldOperand(receiver, String::kLengthOffset)); // rcx is also the receiver. __ lea(rcx, Operand(scratch, String::kLongLengthShift)); - __ shr(rax); // rcx is implicit shift register. + __ shr_cl(rax); __ Integer32ToSmi(rax, rax); __ ret(0); diff --git a/deps/v8/src/x64/virtual-frame-x64.cc b/deps/v8/src/x64/virtual-frame-x64.cc index 781efd14b9..fe65d34a08 100644 --- a/deps/v8/src/x64/virtual-frame-x64.cc +++ b/deps/v8/src/x64/virtual-frame-x64.cc @@ -893,16 +893,15 @@ void VirtualFrame::SyncRange(int begin, int end) { // on the stack. int start = Min(begin, stack_pointer_ + 1); - // Emit normal 'push' instructions for elements above stack pointer - // and use mov instructions if we are below stack pointer. + // If positive we have to adjust the stack pointer. + int delta = end - stack_pointer_; + if (delta > 0) { + stack_pointer_ = end; + __ subq(rsp, Immediate(delta * kPointerSize)); + } + for (int i = start; i <= end; i++) { - if (!elements_[i].is_synced()) { - if (i <= stack_pointer_) { - SyncElementBelowStackPointer(i); - } else { - SyncElementByPushing(i); - } - } + if (!elements_[i].is_synced()) SyncElementBelowStackPointer(i); } } |
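// ---- Editor's annotation: illustrative sketch, not part of this patch ----
// The SyncRange rewrite above trades per-element push instructions for a
// single stack-pointer adjustment followed by plain stores. Hedged
// before/after comparison (pseudo-C++):
//
//   // before: rsp moved once per unsynced element above the pointer
//   for (int i = start; i <= end; i++) push_or_store(i);
//
//   // after: one rsp adjustment puts every element below the stack
//   // pointer, so each write is a simple movq
//   int delta = end - stack_pointer_;
//   if (delta > 0) {
//     stack_pointer_ = end;
//     subq(rsp, Immediate(delta * kPointerSize));
//   }
//   for (int i = start; i <= end; i++) store(i);
// ---- end annotation ----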