Diffstat (limited to 'deps/v8/src/builtins/arm/builtins-arm.cc')
-rw-r--r-- | deps/v8/src/builtins/arm/builtins-arm.cc | 156
1 file changed, 66 insertions(+), 90 deletions(-)
diff --git a/deps/v8/src/builtins/arm/builtins-arm.cc b/deps/v8/src/builtins/arm/builtins-arm.cc
index 6103971787..240d271b2b 100644
--- a/deps/v8/src/builtins/arm/builtins-arm.cc
+++ b/deps/v8/src/builtins/arm/builtins-arm.cc
@@ -326,11 +326,11 @@ void Builtins::Generate_NumberConstructor_ConstructStub(MacroAssembler* masm) {
   __ bind(&new_object);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    FastNewObjectStub stub(masm->isolate());
     __ SmiTag(r6);
     __ EnterBuiltinFrame(cp, r1, r6);
     __ Push(r2);  // first argument
-    __ CallStub(&stub);
+    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
+            RelocInfo::CODE_TARGET);
     __ Pop(r2);
     __ LeaveBuiltinFrame(cp, r1, r6);
     __ SmiUntag(r6);
@@ -474,11 +474,11 @@ void Builtins::Generate_StringConstructor_ConstructStub(MacroAssembler* masm) {
   __ bind(&new_object);
   {
     FrameScope scope(masm, StackFrame::MANUAL);
-    FastNewObjectStub stub(masm->isolate());
     __ SmiTag(r6);
     __ EnterBuiltinFrame(cp, r1, r6);
     __ Push(r2);  // first argument
-    __ CallStub(&stub);
+    __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
+            RelocInfo::CODE_TARGET);
     __ Pop(r2);
     __ LeaveBuiltinFrame(cp, r1, r6);
     __ SmiUntag(r6);
@@ -574,8 +574,8 @@ void Generate_JSConstructStubHelper(MacroAssembler* masm, bool is_api_function,
     if (create_implicit_receiver) {
       // Allocate the new receiver object.
       __ Push(r1, r3);
-      FastNewObjectStub stub(masm->isolate());
-      __ CallStub(&stub);
+      __ Call(CodeFactory::FastNewObject(masm->isolate()).code(),
+              RelocInfo::CODE_TARGET);
       __ mov(r4, r0);
       __ Pop(r1, r3);
 
@@ -737,19 +737,18 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
   __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kResumeModeOffset));
 
   // Load suspended function and context.
-  __ ldr(cp, FieldMemOperand(r1, JSGeneratorObject::kContextOffset));
   __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
+  __ ldr(cp, FieldMemOperand(r4, JSFunction::kContextOffset));
 
   // Flood function if we are stepping.
   Label prepare_step_in_if_stepping, prepare_step_in_suspended_generator;
   Label stepping_prepared;
-  ExternalReference last_step_action =
-      ExternalReference::debug_last_step_action_address(masm->isolate());
-  STATIC_ASSERT(StepFrame > StepIn);
-  __ mov(ip, Operand(last_step_action));
+  ExternalReference debug_hook =
+      ExternalReference::debug_hook_on_function_call_address(masm->isolate());
+  __ mov(ip, Operand(debug_hook));
   __ ldrsb(ip, MemOperand(ip));
-  __ cmp(ip, Operand(StepIn));
-  __ b(ge, &prepare_step_in_if_stepping);
+  __ cmp(ip, Operand(0));
+  __ b(ne, &prepare_step_in_if_stepping);
 
   // Flood function if we need to continue stepping in the suspended generator.
   ExternalReference debug_suspended_generator =
@@ -790,14 +789,15 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
     __ bind(&done_loop);
   }
 
-  // Dispatch on the kind of generator object.
-  Label old_generator;
-  __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
-  __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
-  __ CompareObjectType(r3, r3, r3, BYTECODE_ARRAY_TYPE);
-  __ b(ne, &old_generator);
+  // Underlying function needs to have bytecode available.
+  if (FLAG_debug_code) {
+    __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
+    __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kFunctionDataOffset));
+    __ CompareObjectType(r3, r3, r3, BYTECODE_ARRAY_TYPE);
+    __ Assert(eq, kMissingBytecodeArray);
+  }
 
-  // New-style (ignition/turbofan) generator object
+  // Resume (Ignition/TurboFan) generator object.
   {
     __ ldr(r0, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
     __ ldr(r0, FieldMemOperand(
@@ -812,54 +812,11 @@ void Builtins::Generate_ResumeGeneratorTrampoline(MacroAssembler* masm) {
     __ Jump(r5);
   }
 
-  // Old-style (full-codegen) generator object
-  __ bind(&old_generator);
-  {
-    // Enter a new JavaScript frame, and initialize its slots as they were when
-    // the generator was suspended.
-    DCHECK(!FLAG_enable_embedded_constant_pool);
-    FrameScope scope(masm, StackFrame::MANUAL);
-    __ Push(lr, fp);
-    __ Move(fp, sp);
-    __ Push(cp, r4);
-
-    // Restore the operand stack.
-    __ ldr(r0, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
-    __ ldr(r3, FieldMemOperand(r0, FixedArray::kLengthOffset));
-    __ add(r0, r0, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
-    __ add(r3, r0, Operand(r3, LSL, kPointerSizeLog2 - 1));
-    {
-      Label done_loop, loop;
-      __ bind(&loop);
-      __ cmp(r0, r3);
-      __ b(eq, &done_loop);
-      __ ldr(ip, MemOperand(r0, kPointerSize, PostIndex));
-      __ Push(ip);
-      __ b(&loop);
-      __ bind(&done_loop);
-    }
-
-    // Reset operand stack so we don't leak.
-    __ LoadRoot(ip, Heap::kEmptyFixedArrayRootIndex);
-    __ str(ip, FieldMemOperand(r1, JSGeneratorObject::kOperandStackOffset));
-
-    // Resume the generator function at the continuation.
-    __ ldr(r3, FieldMemOperand(r4, JSFunction::kSharedFunctionInfoOffset));
-    __ ldr(r3, FieldMemOperand(r3, SharedFunctionInfo::kCodeOffset));
-    __ add(r3, r3, Operand(Code::kHeaderSize - kHeapObjectTag));
-    __ ldr(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
-    __ add(r3, r3, Operand(r2, ASR, 1));
-    __ mov(r2, Operand(Smi::FromInt(JSGeneratorObject::kGeneratorExecuting)));
-    __ str(r2, FieldMemOperand(r1, JSGeneratorObject::kContinuationOffset));
-    __ Move(r0, r1);  // Continuation expects generator object in r0.
-    __ Jump(r3);
-  }
-
   __ bind(&prepare_step_in_if_stepping);
   {
     FrameAndConstantPoolScope scope(masm, StackFrame::INTERNAL);
     __ Push(r1, r2, r4);
-    __ CallRuntime(Runtime::kDebugPrepareStepInIfStepping);
+    __ CallRuntime(Runtime::kDebugOnFunctionCall);
     __ Pop(r1, r2);
     __ ldr(r4, FieldMemOperand(r1, JSGeneratorObject::kFunctionOffset));
   }
@@ -1078,6 +1035,11 @@ void Builtins::Generate_InterpreterEntryTrampoline(MacroAssembler* masm) {
     __ Assert(eq, kFunctionDataShouldBeBytecodeArrayOnInterpreterEntry);
   }
 
+  // Reset code age.
+  __ mov(r9, Operand(BytecodeArray::kNoAgeBytecodeAge));
+  __ strb(r9, FieldMemOperand(kInterpreterBytecodeArrayRegister,
+                              BytecodeArray::kBytecodeAgeOffset));
+
   // Load the initial bytecode offset.
   __ mov(kInterpreterBytecodeOffsetRegister,
          Operand(BytecodeArray::kHeaderSize - kHeapObjectTag));
@@ -1407,12 +1369,6 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
   __ ldr(temp, FieldMemOperand(temp, WeakCell::kValueOffset));
   __ cmp(temp, native_context);
   __ b(ne, &loop_bottom);
-  // OSR id set to none?
-  __ ldr(temp, FieldMemOperand(array_pointer,
-                               SharedFunctionInfo::kOffsetToPreviousOsrAstId));
-  const int bailout_id = BailoutId::None().ToInt();
-  __ cmp(temp, Operand(Smi::FromInt(bailout_id)));
-  __ b(ne, &loop_bottom);
   // Literals available?
   __ ldr(temp, FieldMemOperand(array_pointer,
                                SharedFunctionInfo::kOffsetToPreviousLiterals));
@@ -1485,14 +1441,14 @@ void Builtins::Generate_CompileLazy(MacroAssembler* masm) {
                                  SharedFunctionInfo::kMarkedForTierUpByteOffset));
   __ tst(r5, Operand(1 << SharedFunctionInfo::kMarkedForTierUpBitWithinByte));
   __ b(ne, &gotta_call_runtime_no_stack);
-  // Is the full code valid?
+
+  // If SFI points to anything other than CompileLazy, install that.
   __ ldr(entry, FieldMemOperand(entry, SharedFunctionInfo::kCodeOffset));
-  __ ldr(r5, FieldMemOperand(entry, Code::kFlagsOffset));
-  __ and_(r5, r5, Operand(Code::KindField::kMask));
-  __ mov(r5, Operand(r5, LSR, Code::KindField::kShift));
-  __ cmp(r5, Operand(Code::BUILTIN));
+  __ Move(r5, masm->CodeObject());
+  __ cmp(entry, r5);
   __ b(eq, &gotta_call_runtime_no_stack);
-  // Yes, install the full code.
+
+  // Install the SFI's code entry.
   __ add(entry, entry, Operand(Code::kHeaderSize - kHeapObjectTag));
   __ str(entry, FieldMemOperand(closure, JSFunction::kCodeEntryOffset));
   __ RecordWriteCodeEntryField(closure, entry, r5);
@@ -1609,14 +1565,9 @@ static void GenerateMakeCodeYoungAgainCommon(MacroAssembler* masm) {
   __ mov(pc, r0);
 }
 
-#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                  \
-  void Builtins::Generate_Make##C##CodeYoungAgainEvenMarking( \
-      MacroAssembler* masm) {                                 \
-    GenerateMakeCodeYoungAgainCommon(masm);                   \
-  }                                                           \
-  void Builtins::Generate_Make##C##CodeYoungAgainOddMarking(  \
-      MacroAssembler* masm) {                                 \
-    GenerateMakeCodeYoungAgainCommon(masm);                   \
+#define DEFINE_CODE_AGE_BUILTIN_GENERATOR(C)                              \
+  void Builtins::Generate_Make##C##CodeYoungAgain(MacroAssembler* masm) { \
+    GenerateMakeCodeYoungAgainCommon(masm);                               \
   }
 CODE_AGE_LIST(DEFINE_CODE_AGE_BUILTIN_GENERATOR)
 #undef DEFINE_CODE_AGE_BUILTIN_GENERATOR
@@ -2158,7 +2109,8 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
 
   // Create the list of arguments from the array-like argumentsList.
   {
-    Label create_arguments, create_array, create_runtime, done_create;
+    Label create_arguments, create_array, create_holey_array, create_runtime,
+        done_create;
     __ JumpIfSmi(r0, &create_runtime);
 
     // Load the map of argumentsList into r2.
@@ -2202,17 +2154,37 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
     __ mov(r0, r4);
     __ b(&done_create);
 
+    // For holey JSArrays we need to check that the array prototype chain
+    // protector is intact and our prototype is the Array.prototype actually.
+    __ bind(&create_holey_array);
+    __ ldr(r2, FieldMemOperand(r2, Map::kPrototypeOffset));
+    __ ldr(r4, ContextMemOperand(r4, Context::INITIAL_ARRAY_PROTOTYPE_INDEX));
+    __ cmp(r2, r4);
+    __ b(ne, &create_runtime);
+    __ LoadRoot(r4, Heap::kArrayProtectorRootIndex);
+    __ ldr(r2, FieldMemOperand(r4, PropertyCell::kValueOffset));
+    __ cmp(r2, Operand(Smi::FromInt(Isolate::kProtectorValid)));
+    __ b(ne, &create_runtime);
+    __ ldr(r2, FieldMemOperand(r0, JSArray::kLengthOffset));
+    __ ldr(r0, FieldMemOperand(r0, JSArray::kElementsOffset));
+    __ SmiUntag(r2);
+    __ b(&done_create);
+
     // Try to create the list from a JSArray object.
+    // -- r2 and r4 must be preserved till bne create_holey_array.
     __ bind(&create_array);
-    __ ldr(r2, FieldMemOperand(r2, Map::kBitField2Offset));
-    __ DecodeField<Map::ElementsKindBits>(r2);
+    __ ldr(r5, FieldMemOperand(r2, Map::kBitField2Offset));
+    __ DecodeField<Map::ElementsKindBits>(r5);
     STATIC_ASSERT(FAST_SMI_ELEMENTS == 0);
     STATIC_ASSERT(FAST_HOLEY_SMI_ELEMENTS == 1);
     STATIC_ASSERT(FAST_ELEMENTS == 2);
-    __ cmp(r2, Operand(FAST_ELEMENTS));
+    STATIC_ASSERT(FAST_HOLEY_ELEMENTS == 3);
+    __ cmp(r5, Operand(FAST_HOLEY_ELEMENTS));
     __ b(hi, &create_runtime);
-    __ cmp(r2, Operand(FAST_HOLEY_SMI_ELEMENTS));
-    __ b(eq, &create_runtime);
+    // Only FAST_XXX after this point, FAST_HOLEY_XXX are odd values.
+    __ tst(r5, Operand(1));
+    __ b(ne, &create_holey_array);
+    // FAST_SMI_ELEMENTS or FAST_ELEMENTS after this point.
     __ ldr(r2, FieldMemOperand(r0, JSArray::kLengthOffset));
     __ ldr(r0, FieldMemOperand(r0, JSArray::kElementsOffset));
    __ SmiUntag(r2);
@@ -2247,12 +2219,16 @@ void Builtins::Generate_Apply(MacroAssembler* masm) {
   // Push arguments onto the stack (thisArgument is already on the stack).
   {
     __ mov(r4, Operand(0));
+    __ LoadRoot(r5, Heap::kTheHoleValueRootIndex);
+    __ LoadRoot(r6, Heap::kUndefinedValueRootIndex);
     Label done, loop;
     __ bind(&loop);
     __ cmp(r4, r2);
     __ b(eq, &done);
     __ add(ip, r0, Operand(r4, LSL, kPointerSizeLog2));
     __ ldr(ip, FieldMemOperand(ip, FixedArray::kHeaderSize));
+    __ cmp(r5, ip);
+    __ mov(ip, r6, LeaveCC, eq);
     __ Push(ip);
     __ add(r4, r4, Operand(1));
     __ b(&loop);
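The last two hunks extend Generate_Apply so that holey fast arrays can be spread without falling back to the runtime: the new create_holey_array path checks that the array's prototype is still the initial Array.prototype and that the array protector cell is valid, and the push loop substitutes undefined for any hole it loads from the backing store. A small TypeScript sketch of the observable behaviour this fast path has to preserve (an illustration of the JS semantics only, not part of the patch; f is just an example function):

function f(a?: unknown, b?: unknown, c?: unknown) {
  return [a, b, c];
}

// With an untouched Array.prototype the protector is intact, so a hole in the
// sparse argumentsList simply reads as undefined.
console.log(Reflect.apply(f, undefined, [1, , 3]));  // [ 1, undefined, 3 ]

// Once Array.prototype gains an element, holes must be resolved through the
// prototype chain, so the builtin has to bail out to the slower generic path.
Array.prototype[1] = 42;
console.log(Reflect.apply(f, undefined, [1, , 3]));  // [ 1, 42, 3 ]

Either way the result matches the generic spec behaviour; the protector check only decides whether the builtin may take the shortcut.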