Diffstat (limited to 'deps/v8/src/x64/lithium-codegen-x64.cc')
-rw-r--r-- | deps/v8/src/x64/lithium-codegen-x64.cc | 496 |
1 file changed, 254 insertions, 242 deletions
diff --git a/deps/v8/src/x64/lithium-codegen-x64.cc b/deps/v8/src/x64/lithium-codegen-x64.cc index f423133cf1..de43f86a3d 100644 --- a/deps/v8/src/x64/lithium-codegen-x64.cc +++ b/deps/v8/src/x64/lithium-codegen-x64.cc @@ -27,7 +27,7 @@ #include "v8.h" -#if defined(V8_TARGET_ARCH_X64) +#if V8_TARGET_ARCH_X64 #include "x64/lithium-codegen-x64.h" #include "code-stubs.h" @@ -67,7 +67,7 @@ class SafepointGenerator : public CallWrapper { #define __ masm()-> bool LCodeGen::GenerateCode() { - HPhase phase("Z_Code generation", chunk()); + LPhase phase("Z_Code generation", chunk()); ASSERT(is_unused()); status_ = GENERATING; @@ -92,20 +92,7 @@ void LCodeGen::FinishCode(Handle<Code> code) { RegisterDependentCodeForEmbeddedMaps(code); } PopulateDeoptimizationData(code); - for (int i = 0 ; i < prototype_maps_.length(); i++) { - prototype_maps_.at(i)->AddDependentCode( - DependentCode::kPrototypeCheckGroup, code); - } - for (int i = 0 ; i < transition_maps_.length(); i++) { - transition_maps_.at(i)->AddDependentCode( - DependentCode::kTransitionGroup, code); - } - if (graph()->depends_on_empty_array_proto_elements()) { - isolate()->initial_object_prototype()->map()->AddDependentCode( - DependentCode::kElementsCantBeAddedGroup, code); - isolate()->initial_array_prototype()->map()->AddDependentCode( - DependentCode::kElementsCantBeAddedGroup, code); - } + info()->CommitDependencies(code); } @@ -482,27 +469,15 @@ Operand LCodeGen::ToOperand(LOperand* op) const { void LCodeGen::WriteTranslation(LEnvironment* environment, - Translation* translation, - int* pushed_arguments_index, - int* pushed_arguments_count) { + Translation* translation) { if (environment == NULL) return; // The translation includes one command per value in the environment. - int translation_size = environment->values()->length(); + int translation_size = environment->translation_size(); // The output frame height does not include the parameters. int height = translation_size - environment->parameter_count(); - // Function parameters are arguments to the outermost environment. The - // arguments index points to the first element of a sequence of tagged - // values on the stack that represent the arguments. This needs to be - // kept in sync with the LArgumentsElements implementation. - *pushed_arguments_index = -environment->parameter_count(); - *pushed_arguments_count = environment->parameter_count(); - - WriteTranslation(environment->outer(), - translation, - pushed_arguments_index, - pushed_arguments_count); + WriteTranslation(environment->outer(), translation); bool has_closure_id = !info()->closure().is_null() && !info()->closure().is_identical_to(environment->closure()); int closure_id = has_closure_id @@ -534,60 +509,29 @@ void LCodeGen::WriteTranslation(LEnvironment* environment, break; } - // Inlined frames which push their arguments cause the index to be - // bumped and another stack area to be used for materialization, - // otherwise actual argument values are unknown for inlined frames. - bool arguments_known = true; - int arguments_index = *pushed_arguments_index; - int arguments_count = *pushed_arguments_count; - if (environment->entry() != NULL) { - arguments_known = environment->entry()->arguments_pushed(); - arguments_index = arguments_index < 0 - ? 
GetStackSlotCount() : arguments_index + arguments_count; - arguments_count = environment->entry()->arguments_count() + 1; - if (environment->entry()->arguments_pushed()) { - *pushed_arguments_index = arguments_index; - *pushed_arguments_count = arguments_count; - } - } - for (int i = 0; i < translation_size; ++i) { LOperand* value = environment->values()->at(i); - // spilled_registers_ and spilled_double_registers_ are either - // both NULL or both set. - if (environment->spilled_registers() != NULL && value != NULL) { - if (value->IsRegister() && - environment->spilled_registers()[value->index()] != NULL) { - translation->MarkDuplicate(); + + // TODO(mstarzinger): Introduce marker operands to indicate that this value + // is not present and must be reconstructed from the deoptimizer. Currently + // this is only used for the arguments object. + if (value == NULL) { + int arguments_count = environment->values()->length() - translation_size; + translation->BeginArgumentsObject(arguments_count); + for (int i = 0; i < arguments_count; ++i) { + LOperand* value = environment->values()->at(translation_size + i); AddToTranslation(translation, - environment->spilled_registers()[value->index()], - environment->HasTaggedValueAt(i), - environment->HasUint32ValueAt(i), - arguments_known, - arguments_index, - arguments_count); - } else if ( - value->IsDoubleRegister() && - environment->spilled_double_registers()[value->index()] != NULL) { - translation->MarkDuplicate(); - AddToTranslation( - translation, - environment->spilled_double_registers()[value->index()], - false, - false, - arguments_known, - arguments_index, - arguments_count); + value, + environment->HasTaggedValueAt(translation_size + i), + environment->HasUint32ValueAt(translation_size + i)); } + continue; } AddToTranslation(translation, value, environment->HasTaggedValueAt(i), - environment->HasUint32ValueAt(i), - arguments_known, - arguments_index, - arguments_count); + environment->HasUint32ValueAt(i)); } } @@ -595,17 +539,8 @@ void LCodeGen::WriteTranslation(LEnvironment* environment, void LCodeGen::AddToTranslation(Translation* translation, LOperand* op, bool is_tagged, - bool is_uint32, - bool arguments_known, - int arguments_index, - int arguments_count) { - if (op == NULL) { - // TODO(twuerthinger): Introduce marker operands to indicate that this value - // is not present and must be reconstructed from the deoptimizer. Currently - // this is only used for the arguments object. 
- translation->StoreArgumentsObject( - arguments_known, arguments_index, arguments_count); - } else if (op->IsStackSlot()) { + bool is_uint32) { + if (op->IsStackSlot()) { if (is_tagged) { translation->StoreStackSlot(op->index()); } else if (is_uint32) { @@ -710,8 +645,6 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, int frame_count = 0; int jsframe_count = 0; - int args_index = 0; - int args_count = 0; for (LEnvironment* e = environment; e != NULL; e = e->outer()) { ++frame_count; if (e->frame_type() == JS_FUNCTION) { @@ -719,7 +652,7 @@ void LCodeGen::RegisterEnvironmentForDeoptimization(LEnvironment* environment, } } Translation translation(&translations_, frame_count, jsframe_count, zone()); - WriteTranslation(environment, &translation, &args_index, &args_count); + WriteTranslation(environment, &translation); int deoptimization_index = deoptimizations_.length(); int pc_offset = masm()->pc_offset(); environment->Register(deoptimization_index, @@ -746,7 +679,7 @@ void LCodeGen::DeoptimizeIf(Condition cc, ASSERT(FLAG_deopt_every_n_times == 0); // Not yet implemented on x64. - if (FLAG_trap_on_deopt) { + if (FLAG_trap_on_deopt && info()->IsOptimizing()) { Label done; if (cc != no_condition) { __ j(NegateCondition(cc), &done, Label::kNear); @@ -1040,7 +973,8 @@ void LCodeGen::DoCallStub(LCallStub* instr) { void LCodeGen::DoUnknownOSRValue(LUnknownOSRValue* instr) { - // Nothing to do. + // Record the address of the first unknown OSR value as the place to enter. + if (osr_pc_offset_ == -1) osr_pc_offset_ = masm()->pc_offset(); } @@ -1074,12 +1008,12 @@ void LCodeGen::DoModI(LModI* instr) { __ andl(left_reg, Immediate(divisor - 1)); __ bind(&done); - } else if (hmod->has_fixed_right_arg()) { + } else if (hmod->fixed_right_arg().has_value) { Register left_reg = ToRegister(instr->left()); ASSERT(left_reg.is(ToRegister(instr->result()))); Register right_reg = ToRegister(instr->right()); - int32_t divisor = hmod->fixed_right_arg_value(); + int32_t divisor = hmod->fixed_right_arg().value; ASSERT(IsPowerOf2(divisor)); // Check if our assumption of a fixed right operand still holds. @@ -1619,13 +1553,6 @@ void LCodeGen::DoConstantT(LConstantT* instr) { } -void LCodeGen::DoFixedArrayBaseLength(LFixedArrayBaseLength* instr) { - Register result = ToRegister(instr->result()); - Register array = ToRegister(instr->value()); - __ movq(result, FieldOperand(array, FixedArrayBase::kLengthOffset)); -} - - void LCodeGen::DoMapEnumLength(LMapEnumLength* instr) { Register result = ToRegister(instr->result()); Register map = ToRegister(instr->value()); @@ -1652,8 +1579,11 @@ void LCodeGen::DoValueOf(LValueOf* instr) { Register result = ToRegister(instr->result()); ASSERT(input.is(result)); Label done; - // If the object is a smi return the object. - __ JumpIfSmi(input, &done, Label::kNear); + + if (!instr->hydrogen()->value()->IsHeapObject()) { + // If the object is a smi return the object. + __ JumpIfSmi(input, &done, Label::kNear); + } // If the object is not a value type, return the object. 
__ CmpObjectType(input, JS_VALUE_TYPE, kScratchRegister); @@ -1757,10 +1687,11 @@ void LCodeGen::DoAddI(LAddI* instr) { if (LAddI::UseLea(instr->hydrogen()) && !left->Equals(instr->result())) { if (right->IsConstantOperand()) { int32_t offset = ToInteger32(LConstantOperand::cast(right)); - __ lea(ToRegister(instr->result()), MemOperand(ToRegister(left), offset)); + __ leal(ToRegister(instr->result()), + MemOperand(ToRegister(left), offset)); } else { Operand address(ToRegister(left), ToRegister(right), times_1, 0); - __ lea(ToRegister(instr->result()), address); + __ leal(ToRegister(instr->result()), address); } } else { if (right->IsConstantOperand()) { @@ -1901,10 +1832,12 @@ int LCodeGen::GetNextEmittedBlock() const { } -void LCodeGen::EmitBranch(int left_block, int right_block, Condition cc) { +template<class InstrType> +void LCodeGen::EmitBranch(InstrType instr, Condition cc) { + int right_block = instr->FalseDestination(chunk_); + int left_block = instr->TrueDestination(chunk_); + int next_block = GetNextEmittedBlock(); - right_block = chunk_->LookupDestination(right_block); - left_block = chunk_->LookupDestination(left_block); if (right_block == left_block) { EmitGoto(left_block); @@ -1927,26 +1860,23 @@ void LCodeGen::DoDebugBreak(LDebugBreak* instr) { void LCodeGen::DoBranch(LBranch* instr) { - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); - Representation r = instr->hydrogen()->value()->representation(); if (r.IsInteger32()) { ASSERT(!info()->IsStub()); Register reg = ToRegister(instr->value()); __ testl(reg, reg); - EmitBranch(true_block, false_block, not_zero); + EmitBranch(instr, not_zero); } else if (r.IsSmi()) { ASSERT(!info()->IsStub()); Register reg = ToRegister(instr->value()); __ testq(reg, reg); - EmitBranch(true_block, false_block, not_zero); + EmitBranch(instr, not_zero); } else if (r.IsDouble()) { ASSERT(!info()->IsStub()); XMMRegister reg = ToDoubleRegister(instr->value()); __ xorps(xmm0, xmm0); __ ucomisd(reg, xmm0); - EmitBranch(true_block, false_block, not_equal); + EmitBranch(instr, not_equal); } else { ASSERT(r.IsTagged()); Register reg = ToRegister(instr->value()); @@ -1954,43 +1884,52 @@ void LCodeGen::DoBranch(LBranch* instr) { if (type.IsBoolean()) { ASSERT(!info()->IsStub()); __ CompareRoot(reg, Heap::kTrueValueRootIndex); - EmitBranch(true_block, false_block, equal); + EmitBranch(instr, equal); } else if (type.IsSmi()) { ASSERT(!info()->IsStub()); __ SmiCompare(reg, Smi::FromInt(0)); - EmitBranch(true_block, false_block, not_equal); + EmitBranch(instr, not_equal); + } else if (type.IsJSArray()) { + ASSERT(!info()->IsStub()); + EmitBranch(instr, no_condition); + } else if (type.IsHeapNumber()) { + ASSERT(!info()->IsStub()); + __ xorps(xmm0, xmm0); + __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); + EmitBranch(instr, not_equal); + } else if (type.IsString()) { + ASSERT(!info()->IsStub()); + __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0)); + EmitBranch(instr, not_equal); } else { - Label* true_label = chunk_->GetAssemblyLabel(true_block); - Label* false_label = chunk_->GetAssemblyLabel(false_block); - ToBooleanStub::Types expected = instr->hydrogen()->expected_input_types(); // Avoid deopts in the case where we've never executed this path before. 
- if (expected.IsEmpty()) expected = ToBooleanStub::all_types(); + if (expected.IsEmpty()) expected = ToBooleanStub::Types::Generic(); if (expected.Contains(ToBooleanStub::UNDEFINED)) { // undefined -> false. __ CompareRoot(reg, Heap::kUndefinedValueRootIndex); - __ j(equal, false_label); + __ j(equal, instr->FalseLabel(chunk_)); } if (expected.Contains(ToBooleanStub::BOOLEAN)) { // true -> true. __ CompareRoot(reg, Heap::kTrueValueRootIndex); - __ j(equal, true_label); + __ j(equal, instr->TrueLabel(chunk_)); // false -> false. __ CompareRoot(reg, Heap::kFalseValueRootIndex); - __ j(equal, false_label); + __ j(equal, instr->FalseLabel(chunk_)); } if (expected.Contains(ToBooleanStub::NULL_TYPE)) { // 'null' -> false. __ CompareRoot(reg, Heap::kNullValueRootIndex); - __ j(equal, false_label); + __ j(equal, instr->FalseLabel(chunk_)); } if (expected.Contains(ToBooleanStub::SMI)) { // Smis: 0 -> false, all other -> true. __ Cmp(reg, Smi::FromInt(0)); - __ j(equal, false_label); - __ JumpIfSmi(reg, true_label); + __ j(equal, instr->FalseLabel(chunk_)); + __ JumpIfSmi(reg, instr->TrueLabel(chunk_)); } else if (expected.NeedsMap()) { // If we need a map later and have a Smi -> deopt. __ testb(reg, Immediate(kSmiTagMask)); @@ -2005,14 +1944,14 @@ void LCodeGen::DoBranch(LBranch* instr) { // Undetectable -> false. __ testb(FieldOperand(map, Map::kBitFieldOffset), Immediate(1 << Map::kIsUndetectable)); - __ j(not_zero, false_label); + __ j(not_zero, instr->FalseLabel(chunk_)); } } if (expected.Contains(ToBooleanStub::SPEC_OBJECT)) { // spec object -> true. __ CmpInstanceType(map, FIRST_SPEC_OBJECT_TYPE); - __ j(above_equal, true_label); + __ j(above_equal, instr->TrueLabel(chunk_)); } if (expected.Contains(ToBooleanStub::STRING)) { @@ -2021,8 +1960,8 @@ void LCodeGen::DoBranch(LBranch* instr) { __ CmpInstanceType(map, FIRST_NONSTRING_TYPE); __ j(above_equal, &not_string, Label::kNear); __ cmpq(FieldOperand(reg, String::kLengthOffset), Immediate(0)); - __ j(not_zero, true_label); - __ jmp(false_label); + __ j(not_zero, instr->TrueLabel(chunk_)); + __ jmp(instr->FalseLabel(chunk_)); __ bind(&not_string); } @@ -2033,13 +1972,16 @@ void LCodeGen::DoBranch(LBranch* instr) { __ j(not_equal, &not_heap_number, Label::kNear); __ xorps(xmm0, xmm0); __ ucomisd(xmm0, FieldOperand(reg, HeapNumber::kValueOffset)); - __ j(zero, false_label); - __ jmp(true_label); + __ j(zero, instr->FalseLabel(chunk_)); + __ jmp(instr->TrueLabel(chunk_)); __ bind(&not_heap_number); } - // We've seen something for the first time -> deopt. - DeoptimizeIf(no_condition, instr->environment()); + if (!expected.IsGeneric()) { + // We've seen something for the first time -> deopt. + // This can only happen if we are not generic already. + DeoptimizeIf(no_condition, instr->environment()); + } } } } @@ -2088,24 +2030,21 @@ inline Condition LCodeGen::TokenToCondition(Token::Value op, bool is_unsigned) { void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) { LOperand* left = instr->left(); LOperand* right = instr->right(); - int false_block = chunk_->LookupDestination(instr->false_block_id()); - int true_block = chunk_->LookupDestination(instr->true_block_id()); Condition cc = TokenToCondition(instr->op(), instr->is_double()); if (left->IsConstantOperand() && right->IsConstantOperand()) { // We can statically evaluate the comparison. double left_val = ToDouble(LConstantOperand::cast(left)); double right_val = ToDouble(LConstantOperand::cast(right)); - int next_block = - EvalComparison(instr->op(), left_val, right_val) ?
true_block - : false_block; + int next_block = EvalComparison(instr->op(), left_val, right_val) ? + instr->TrueDestination(chunk_) : instr->FalseDestination(chunk_); EmitGoto(next_block); } else { if (instr->is_double()) { // Don't base result on EFLAGS when a NaN is involved. Instead // jump to the false block. __ ucomisd(ToDoubleRegister(left), ToDoubleRegister(right)); - __ j(parity_even, chunk_->GetAssemblyLabel(false_block)); + __ j(parity_even, instr->FalseLabel(chunk_)); } else { int32_t value; if (right->IsConstantOperand()) { @@ -2144,15 +2083,13 @@ void LCodeGen::DoCmpIDAndBranch(LCmpIDAndBranch* instr) { } } } - EmitBranch(true_block, false_block, cc); + EmitBranch(instr, cc); } } void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { Register left = ToRegister(instr->left()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); - int true_block = chunk_->LookupDestination(instr->true_block_id()); if (instr->right()->IsConstantOperand()) { Handle<Object> right = ToHandle(LConstantOperand::cast(instr->right())); @@ -2161,17 +2098,15 @@ void LCodeGen::DoCmpObjectEqAndBranch(LCmpObjectEqAndBranch* instr) { Register right = ToRegister(instr->right()); __ cmpq(left, right); } - EmitBranch(true_block, false_block, equal); + EmitBranch(instr, equal); } void LCodeGen::DoCmpConstantEqAndBranch(LCmpConstantEqAndBranch* instr) { Register left = ToRegister(instr->left()); - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); __ cmpq(left, Immediate(instr->hydrogen()->right())); - EmitBranch(true_block, false_block, equal); + EmitBranch(instr, equal); } @@ -2203,21 +2138,21 @@ Condition LCodeGen::EmitIsObject(Register input, void LCodeGen::DoIsObjectAndBranch(LIsObjectAndBranch* instr) { Register reg = ToRegister(instr->value()); - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); - Label* true_label = chunk_->GetAssemblyLabel(true_block); - Label* false_label = chunk_->GetAssemblyLabel(false_block); + Condition true_cond = EmitIsObject( + reg, instr->FalseLabel(chunk_), instr->TrueLabel(chunk_)); - Condition true_cond = EmitIsObject(reg, false_label, true_label); - - EmitBranch(true_block, false_block, true_cond); + EmitBranch(instr, true_cond); } Condition LCodeGen::EmitIsString(Register input, Register temp1, - Label* is_not_string) { - __ JumpIfSmi(input, is_not_string); + Label* is_not_string, + SmiCheck check_needed = INLINE_SMI_CHECK) { + if (check_needed == INLINE_SMI_CHECK) { + __ JumpIfSmi(input, is_not_string); + } + Condition cond = masm_->IsObjectStringType(input, temp1, temp1); return cond; @@ -2228,20 +2163,18 @@ void LCodeGen::DoIsStringAndBranch(LIsStringAndBranch* instr) { Register reg = ToRegister(instr->value()); Register temp = ToRegister(instr->temp()); - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); - Label* false_label = chunk_->GetAssemblyLabel(false_block); + SmiCheck check_needed = + instr->hydrogen()->value()->IsHeapObject() + ? 
OMIT_SMI_CHECK : INLINE_SMI_CHECK; - Condition true_cond = EmitIsString(reg, temp, false_label); + Condition true_cond = EmitIsString( + reg, temp, instr->FalseLabel(chunk_), check_needed); - EmitBranch(true_block, false_block, true_cond); + EmitBranch(instr, true_cond); } void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); - Condition is_smi; if (instr->value()->IsRegister()) { Register input = ToRegister(instr->value()); @@ -2250,7 +2183,7 @@ void LCodeGen::DoIsSmiAndBranch(LIsSmiAndBranch* instr) { Operand input = ToOperand(instr->value()); is_smi = masm()->CheckSmi(input); } - EmitBranch(true_block, false_block, is_smi); + EmitBranch(instr, is_smi); } @@ -2258,21 +2191,18 @@ void LCodeGen::DoIsUndetectableAndBranch(LIsUndetectableAndBranch* instr) { Register input = ToRegister(instr->value()); Register temp = ToRegister(instr->temp()); - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); - - __ JumpIfSmi(input, chunk_->GetAssemblyLabel(false_block)); + if (!instr->hydrogen()->value()->IsHeapObject()) { + __ JumpIfSmi(input, instr->FalseLabel(chunk_)); + } __ movq(temp, FieldOperand(input, HeapObject::kMapOffset)); __ testb(FieldOperand(temp, Map::kBitFieldOffset), Immediate(1 << Map::kIsUndetectable)); - EmitBranch(true_block, false_block, not_zero); + EmitBranch(instr, not_zero); } void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { Token::Value op = instr->op(); - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); Handle<Code> ic = CompareIC::GetUninitialized(isolate(), op); CallCode(ic, RelocInfo::CODE_TARGET, instr); @@ -2280,7 +2210,7 @@ void LCodeGen::DoStringCompareAndBranch(LStringCompareAndBranch* instr) { Condition condition = TokenToCondition(op, false); __ testq(rax, rax); - EmitBranch(true_block, false_block, condition); + EmitBranch(instr, condition); } @@ -2307,15 +2237,12 @@ static Condition BranchCondition(HHasInstanceTypeAndBranch* instr) { void LCodeGen::DoHasInstanceTypeAndBranch(LHasInstanceTypeAndBranch* instr) { Register input = ToRegister(instr->value()); - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); - - Label* false_label = chunk_->GetAssemblyLabel(false_block); - - __ JumpIfSmi(input, false_label); + if (!instr->hydrogen()->value()->IsHeapObject()) { + __ JumpIfSmi(input, instr->FalseLabel(chunk_)); + } __ CmpObjectType(input, TestType(instr->hydrogen()), kScratchRegister); - EmitBranch(true_block, false_block, BranchCondition(instr->hydrogen())); + EmitBranch(instr, BranchCondition(instr->hydrogen())); } @@ -2335,12 +2262,9 @@ void LCodeGen::DoHasCachedArrayIndexAndBranch( LHasCachedArrayIndexAndBranch* instr) { Register input = ToRegister(instr->value()); - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); - __ testl(FieldOperand(input, String::kHashFieldOffset), Immediate(String::kContainsCachedArrayIndexMask)); - EmitBranch(true_block, false_block, equal); + EmitBranch(instr, equal); } @@ -2418,25 +2342,18 @@ void LCodeGen::DoClassOfTestAndBranch(LClassOfTestAndBranch* instr) { Register temp2 = ToRegister(instr->temp2()); 
Handle<String> class_name = instr->hydrogen()->class_name(); - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); + EmitClassOfTest(instr->TrueLabel(chunk_), instr->FalseLabel(chunk_), + class_name, input, temp, temp2); - Label* true_label = chunk_->GetAssemblyLabel(true_block); - Label* false_label = chunk_->GetAssemblyLabel(false_block); - - EmitClassOfTest(true_label, false_label, class_name, input, temp, temp2); - - EmitBranch(true_block, false_block, equal); + EmitBranch(instr, equal); } void LCodeGen::DoCmpMapAndBranch(LCmpMapAndBranch* instr) { Register reg = ToRegister(instr->value()); - int true_block = instr->true_block_id(); - int false_block = instr->false_block_id(); __ Cmp(FieldOperand(reg, HeapObject::kMapOffset), instr->map()); - EmitBranch(true_block, false_block, equal); + EmitBranch(instr, equal); } @@ -2490,9 +2407,8 @@ void LCodeGen::DoInstanceOfKnownGlobal(LInstanceOfKnownGlobal* instr) { Register map = ToRegister(instr->temp()); __ movq(map, FieldOperand(object, HeapObject::kMapOffset)); __ bind(deferred->map_check()); // Label for calculating code patching. - Handle<JSGlobalPropertyCell> cache_cell = - factory()->NewJSGlobalPropertyCell(factory()->the_hole_value()); - __ movq(kScratchRegister, cache_cell, RelocInfo::GLOBAL_PROPERTY_CELL); + Handle<Cell> cache_cell = factory()->NewCell(factory()->the_hole_value()); + __ movq(kScratchRegister, cache_cell, RelocInfo::CELL); __ cmpq(map, Operand(kScratchRegister, 0)); __ j(not_equal, &cache_miss, Label::kNear); // Patched to load either true or false. @@ -2661,7 +2577,7 @@ void LCodeGen::DoLoadGlobalGeneric(LLoadGlobalGeneric* instr) { void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { Register value = ToRegister(instr->value()); - Handle<JSGlobalPropertyCell> cell_handle = instr->hydrogen()->cell(); + Handle<Cell> cell_handle = instr->hydrogen()->cell(); // If the cell we are storing to contains the hole it could have // been deleted from the property dictionary. In that case, we need @@ -2671,14 +2587,14 @@ void LCodeGen::DoStoreGlobalCell(LStoreGlobalCell* instr) { // We have a temp because CompareRoot might clobber kScratchRegister. Register cell = ToRegister(instr->temp()); ASSERT(!value.is(cell)); - __ movq(cell, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL); + __ movq(cell, cell_handle, RelocInfo::CELL); __ CompareRoot(Operand(cell, 0), Heap::kTheHoleValueRootIndex); DeoptimizeIf(equal, instr->environment()); // Store the value. __ movq(Operand(cell, 0), value); } else { // Store the value. - __ movq(kScratchRegister, cell_handle, RelocInfo::GLOBAL_PROPERTY_CELL); + __ movq(kScratchRegister, cell_handle, RelocInfo::CELL); __ movq(Operand(kScratchRegister, 0), value); } // Cells are always rescanned, so no write barrier here. @@ -2733,9 +2649,9 @@ void LCodeGen::DoStoreContextSlot(LStoreContextSlot* instr) { __ movq(target, value); if (instr->hydrogen()->NeedsWriteBarrier()) { - HType type = instr->hydrogen()->value()->type(); SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; + instr->hydrogen()->value()->IsHeapObject() + ? 
OMIT_SMI_CHECK : INLINE_SMI_CHECK; int offset = Context::SlotOffset(instr->slot_index()); Register scratch = ToRegister(instr->temp()); __ RecordWriteContextSlot(context, @@ -3939,11 +3855,9 @@ void LCodeGen::DoCallNew(LCallNew* instr) { ASSERT(ToRegister(instr->result()).is(rax)); __ Set(rax, instr->arity()); - if (FLAG_optimize_constructed_arrays) { - // No cell in ebx for construct type feedback in optimized code - Handle<Object> undefined_value(isolate()->factory()->undefined_value()); - __ Move(rbx, undefined_value); - } + // No cell in ebx for construct type feedback in optimized code + Handle<Object> undefined_value(isolate()->factory()->undefined_value()); + __ Move(rbx, undefined_value); CallConstructStub stub(NO_CALL_FUNCTION_FLAGS); CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); } @@ -3952,22 +3866,42 @@ void LCodeGen::DoCallNew(LCallNew* instr) { void LCodeGen::DoCallNewArray(LCallNewArray* instr) { ASSERT(ToRegister(instr->constructor()).is(rdi)); ASSERT(ToRegister(instr->result()).is(rax)); - ASSERT(FLAG_optimize_constructed_arrays); __ Set(rax, instr->arity()); __ Move(rbx, instr->hydrogen()->property_cell()); ElementsKind kind = instr->hydrogen()->elements_kind(); - bool disable_allocation_sites = - (AllocationSiteInfo::GetMode(kind) == TRACK_ALLOCATION_SITE); + AllocationSiteOverrideMode override_mode = + (AllocationSiteInfo::GetMode(kind) == TRACK_ALLOCATION_SITE) + ? DISABLE_ALLOCATION_SITES + : DONT_OVERRIDE; + ContextCheckMode context_mode = CONTEXT_CHECK_NOT_REQUIRED; if (instr->arity() == 0) { - ArrayNoArgumentConstructorStub stub(kind, disable_allocation_sites); + ArrayNoArgumentConstructorStub stub(kind, context_mode, override_mode); CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); } else if (instr->arity() == 1) { - ArraySingleArgumentConstructorStub stub(kind, disable_allocation_sites); + Label done; + if (IsFastPackedElementsKind(kind)) { + Label packed_case; + // We might need a change here + // look at the first argument + __ movq(rcx, Operand(rsp, 0)); + __ testq(rcx, rcx); + __ j(zero, &packed_case); + + ElementsKind holey_kind = GetHoleyElementsKind(kind); + ArraySingleArgumentConstructorStub stub(holey_kind, context_mode, + override_mode); + CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + __ jmp(&done); + __ bind(&packed_case); + } + + ArraySingleArgumentConstructorStub stub(kind, context_mode, override_mode); CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); + __ bind(&done); } else { - ArrayNArgumentsConstructorStub stub(kind, disable_allocation_sites); + ArrayNArgumentsConstructorStub stub(kind, context_mode, override_mode); CallCode(stub.GetCode(isolate()), RelocInfo::CONSTRUCT_CALL, instr); } } @@ -4025,9 +3959,6 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { } if (!transition.is_null()) { - if (transition->CanBeDeprecated()) { - transition_maps_.Add(transition, info()->zone()); - } if (!instr->hydrogen()->NeedsWriteBarrierForMap()) { __ Move(FieldOperand(object, HeapObject::kMapOffset), transition); } else { @@ -4046,9 +3977,9 @@ void LCodeGen::DoStoreNamedField(LStoreNamedField* instr) { } // Do the store. - HType type = instr->hydrogen()->value()->type(); SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; + instr->hydrogen()->value()->IsHeapObject() + ? 
OMIT_SMI_CHECK : INLINE_SMI_CHECK; Register write_register = object; if (!access.IsInobject()) { @@ -4287,9 +4218,9 @@ void LCodeGen::DoStoreKeyedFixedArray(LStoreKeyed* instr) { ASSERT(instr->value()->IsRegister()); Register value = ToRegister(instr->value()); ASSERT(!instr->key()->IsConstantOperand()); - HType type = instr->hydrogen()->value()->type(); SmiCheck check_needed = - type.IsHeapObject() ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; + instr->hydrogen()->value()->IsHeapObject() + ? OMIT_SMI_CHECK : INLINE_SMI_CHECK; // Compute address of modified element and store it into key register. Register key_reg(ToRegister(key)); __ lea(key_reg, operand); @@ -4955,9 +4886,11 @@ void LCodeGen::DoCheckSmi(LCheckSmi* instr) { void LCodeGen::DoCheckNonSmi(LCheckNonSmi* instr) { - LOperand* input = instr->value(); - Condition cc = masm()->CheckSmi(ToRegister(input)); - DeoptimizeIf(cc, instr->environment()); + if (!instr->hydrogen()->value()->IsHeapObject()) { + LOperand* input = instr->value(); + Condition cc = masm()->CheckSmi(ToRegister(input)); + DeoptimizeIf(cc, instr->environment()); + } } @@ -5100,11 +5033,7 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { ASSERT(prototypes->length() == maps->length()); - if (instr->hydrogen()->CanOmitPrototypeChecks()) { - for (int i = 0; i < maps->length(); i++) { - prototype_maps_.Add(maps->at(i), info()->zone()); - } - } else { + if (!instr->hydrogen()->CanOmitPrototypeChecks()) { for (int i = 0; i < prototypes->length(); i++) { __ LoadHeapObject(reg, prototypes->at(i)); DoCheckMapCommon(reg, maps->at(i), instr); @@ -5113,6 +5042,94 @@ void LCodeGen::DoCheckPrototypeMaps(LCheckPrototypeMaps* instr) { } +void LCodeGen::DoAllocateObject(LAllocateObject* instr) { + class DeferredAllocateObject: public LDeferredCode { + public: + DeferredAllocateObject(LCodeGen* codegen, LAllocateObject* instr) + : LDeferredCode(codegen), instr_(instr) { } + virtual void Generate() { codegen()->DoDeferredAllocateObject(instr_); } + virtual LInstruction* instr() { return instr_; } + private: + LAllocateObject* instr_; + }; + + DeferredAllocateObject* deferred = + new(zone()) DeferredAllocateObject(this, instr); + + Register result = ToRegister(instr->result()); + Register scratch = ToRegister(instr->temp()); + Handle<JSFunction> constructor = instr->hydrogen()->constructor(); + Handle<Map> initial_map = instr->hydrogen()->constructor_initial_map(); + int instance_size = initial_map->instance_size(); + ASSERT(initial_map->pre_allocated_property_fields() + + initial_map->unused_property_fields() - + initial_map->inobject_properties() == 0); + + __ Allocate(instance_size, result, no_reg, scratch, deferred->entry(), + TAG_OBJECT); + + __ bind(deferred->exit()); + if (FLAG_debug_code) { + Label is_in_new_space; + __ JumpIfInNewSpace(result, scratch, &is_in_new_space); + __ Abort("Allocated object is not in new-space"); + __ bind(&is_in_new_space); + } + + // Load the initial map. 
+ Register map = scratch; + __ LoadHeapObject(scratch, constructor); + __ movq(map, FieldOperand(scratch, JSFunction::kPrototypeOrInitialMapOffset)); + + if (FLAG_debug_code) { + __ AssertNotSmi(map); + __ cmpb(FieldOperand(map, Map::kInstanceSizeOffset), + Immediate(instance_size >> kPointerSizeLog2)); + __ Assert(equal, "Unexpected instance size"); + __ cmpb(FieldOperand(map, Map::kPreAllocatedPropertyFieldsOffset), + Immediate(initial_map->pre_allocated_property_fields())); + __ Assert(equal, "Unexpected pre-allocated property fields count"); + __ cmpb(FieldOperand(map, Map::kUnusedPropertyFieldsOffset), + Immediate(initial_map->unused_property_fields())); + __ Assert(equal, "Unexpected unused property fields count"); + __ cmpb(FieldOperand(map, Map::kInObjectPropertiesOffset), + Immediate(initial_map->inobject_properties())); + __ Assert(equal, "Unexpected in-object property fields count"); + } + + // Initialize map and fields of the newly allocated object. + ASSERT(initial_map->instance_type() == JS_OBJECT_TYPE); + __ movq(FieldOperand(result, JSObject::kMapOffset), map); + __ LoadRoot(scratch, Heap::kEmptyFixedArrayRootIndex); + __ movq(FieldOperand(result, JSObject::kElementsOffset), scratch); + __ movq(FieldOperand(result, JSObject::kPropertiesOffset), scratch); + if (initial_map->inobject_properties() != 0) { + __ LoadRoot(scratch, Heap::kUndefinedValueRootIndex); + for (int i = 0; i < initial_map->inobject_properties(); i++) { + int property_offset = JSObject::kHeaderSize + i * kPointerSize; + __ movq(FieldOperand(result, property_offset), scratch); + } + } +} + + +void LCodeGen::DoDeferredAllocateObject(LAllocateObject* instr) { + Register result = ToRegister(instr->result()); + Handle<Map> initial_map = instr->hydrogen()->constructor_initial_map(); + int instance_size = initial_map->instance_size(); + + // TODO(3095996): Get rid of this. For now, we need to make the + // result register contain a valid pointer because it is already + // contained in the register pointer map. 
+ __ Set(result, 0); + + PushSafepointRegistersScope scope(this); + __ Push(Smi::FromInt(instance_size)); + CallRuntimeFromDeferred(Runtime::kAllocateInNewSpace, 1, instr); + __ StoreToSafepointRegisterSlot(result, rax); +} + + void LCodeGen::DoAllocate(LAllocate* instr) { class DeferredAllocate: public LDeferredCode { public: @@ -5288,15 +5305,12 @@ void LCodeGen::EmitPushTaggedOperand(LOperand* operand) { void LCodeGen::DoTypeofIsAndBranch(LTypeofIsAndBranch* instr) { Register input = ToRegister(instr->value()); - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); - Label* true_label = chunk_->GetAssemblyLabel(true_block); - Label* false_label = chunk_->GetAssemblyLabel(false_block); Condition final_branch_condition = - EmitTypeofIs(true_label, false_label, input, instr->type_literal()); + EmitTypeofIs(instr->TrueLabel(chunk_), + instr->FalseLabel(chunk_), input, instr->type_literal()); if (final_branch_condition != no_condition) { - EmitBranch(true_block, false_block, final_branch_condition); + EmitBranch(instr, final_branch_condition); } } @@ -5379,11 +5393,9 @@ Condition LCodeGen::EmitTypeofIs(Label* true_label, void LCodeGen::DoIsConstructCallAndBranch(LIsConstructCallAndBranch* instr) { Register temp = ToRegister(instr->temp()); - int true_block = chunk_->LookupDestination(instr->true_block_id()); - int false_block = chunk_->LookupDestination(instr->false_block_id()); EmitIsConstructCall(temp); - EmitBranch(true_block, false_block, equal); + EmitBranch(instr, equal); } @@ -5535,15 +5547,15 @@ void LCodeGen::DoOsrEntry(LOsrEntry* instr) { // properly registered for deoptimization and records the assembler's PC // offset. LEnvironment* environment = instr->environment(); - environment->SetSpilledRegisters(instr->SpilledRegisterArray(), - instr->SpilledDoubleRegisterArray()); // If the environment were already registered, we would have no way of // backpatching it with the spill slot operands. ASSERT(!environment->HasBeenRegistered()); RegisterEnvironmentForDeoptimization(environment, Safepoint::kNoLazyDeopt); - ASSERT(osr_pc_offset_ == -1); - osr_pc_offset_ = masm()->pc_offset(); + + // Normally we record the first unknown OSR value as the entrypoint to the OSR + // code, but if there were none, record the entrypoint here. + if (osr_pc_offset_ == -1) osr_pc_offset_ = masm()->pc_offset(); }
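
The bulk of this diff applies one mechanical refactoring: branch-style instructions no longer receive pre-resolved block ids through EmitBranch(true_block, false_block, cc); instead the call site passes the instruction itself to a templated EmitBranch(instr, cc), which asks it for instr->TrueDestination(chunk_) / instr->FalseDestination(chunk_) (or TrueLabel/FalseLabel for direct jumps). The sketch below is a minimal, self-contained model of that shape only; FakeChunk, BranchInstr, and CodeGen are invented stand-ins for illustration, not the real V8 classes, and the emission logic is deliberately simplified (the real EmitBranch also reorders jumps around the next emitted block).

#include <cstdio>

// Illustrative stand-ins only; not the actual V8 Lithium classes.
enum Condition { no_condition, equal, not_equal, not_zero };

struct FakeChunk {
  // Stand-in for LChunk::LookupDestination(), which maps a block id to the
  // block that will actually be emitted.
  int LookupDestination(int block_id) const { return block_id; }
};

// After the refactoring, a control-flow instruction knows its own successors.
struct BranchInstr {
  int true_block_id;
  int false_block_id;
  int TrueDestination(const FakeChunk* chunk) const {
    return chunk->LookupDestination(true_block_id);
  }
  int FalseDestination(const FakeChunk* chunk) const {
    return chunk->LookupDestination(false_block_id);
  }
};

struct CodeGen {
  FakeChunk* chunk_;

  void EmitGoto(int block) { std::printf("jmp B%d\n", block); }

  // New-style EmitBranch: both destinations come from the instruction, so
  // call sites shrink to EmitBranch(instr, cc).
  template <class InstrType>
  void EmitBranch(InstrType* instr, Condition cc) {
    int left_block = instr->TrueDestination(chunk_);
    int right_block = instr->FalseDestination(chunk_);
    if (right_block == left_block || cc == no_condition) {
      EmitGoto(left_block);
    } else {
      std::printf("j(cc=%d) -> B%d, else jmp B%d\n",
                  static_cast<int>(cc), left_block, right_block);
    }
  }
};

int main() {
  FakeChunk chunk;
  CodeGen cgen{&chunk};
  BranchInstr branch{3, 7};
  // Old call sites looked like EmitBranch(true_block, false_block, equal);
  // after the refactoring they are simply:
  cgen.EmitBranch(&branch, equal);
  return 0;
}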