Diffstat (limited to 'deps/v8/src/mips/code-stubs-mips.cc')
-rw-r--r--  deps/v8/src/mips/code-stubs-mips.cc | 806
1 file changed, 19 insertions(+), 787 deletions(-)
diff --git a/deps/v8/src/mips/code-stubs-mips.cc b/deps/v8/src/mips/code-stubs-mips.cc
index 04a0b9f012..e29da4cb70 100644
--- a/deps/v8/src/mips/code-stubs-mips.cc
+++ b/deps/v8/src/mips/code-stubs-mips.cc
@@ -235,8 +235,6 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
__ Branch(slow, greater, t4, Operand(FIRST_JS_RECEIVER_TYPE));
// Call runtime on identical symbols since we need to throw a TypeError.
__ Branch(slow, eq, t4, Operand(SYMBOL_TYPE));
- // Call runtime on identical SIMD values since we must throw a TypeError.
- __ Branch(slow, eq, t4, Operand(SIMD128_VALUE_TYPE));
} else {
__ Branch(&heap_number, eq, t4, Operand(HEAP_NUMBER_TYPE));
// Comparing JS objects with <=, >= is complicated.
@@ -244,8 +242,6 @@ static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
__ Branch(slow, greater, t4, Operand(FIRST_JS_RECEIVER_TYPE));
// Call runtime on identical symbols since we need to throw a TypeError.
__ Branch(slow, eq, t4, Operand(SYMBOL_TYPE));
- // Call runtime on identical SIMD values since we must throw a TypeError.
- __ Branch(slow, eq, t4, Operand(SIMD128_VALUE_TYPE));
// Normally here we fall through to return_equal, but undefined is
// special: (undefined == undefined) == true, but
// (undefined <= undefined) == false! See ECMAScript 11.8.5.
@@ -1146,9 +1142,9 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
// We build an EntryFrame.
__ li(t3, Operand(-1)); // Push a bad frame pointer to fail if it is used.
- int marker = type();
- __ li(t2, Operand(Smi::FromInt(marker)));
- __ li(t1, Operand(Smi::FromInt(marker)));
+ StackFrame::Type marker = type();
+ __ li(t2, Operand(StackFrame::TypeToMarker(marker)));
+ __ li(t1, Operand(StackFrame::TypeToMarker(marker)));
__ li(t0, Operand(ExternalReference(Isolate::kCEntryFPAddress,
isolate)));
__ lw(t0, MemOperand(t0));
@@ -1179,12 +1175,12 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ lw(t2, MemOperand(t1));
__ Branch(&non_outermost_js, ne, t2, Operand(zero_reg));
__ sw(fp, MemOperand(t1));
- __ li(t0, Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+ __ li(t0, Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
Label cont;
__ b(&cont);
__ nop(); // Branch delay slot nop.
__ bind(&non_outermost_js);
- __ li(t0, Operand(Smi::FromInt(StackFrame::INNER_JSENTRY_FRAME)));
+ __ li(t0, Operand(StackFrame::INNER_JSENTRY_FRAME));
__ bind(&cont);
__ push(t0);
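Both JSEntryStub hunks above are fallout from the same change: stack-frame type markers are no longer Smi-tagged values, so the stub materializes them with StackFrame::TypeToMarker and compares the INNER/OUTERMOST sentinels as raw integers. A minimal sketch of the marker encoding this implies (the exact bit layout is an assumption; the real definitions live in src/frames.h):

#include <cassert>
#include <cstdint>

typedef int32_t StackFrameType;  // stand-in for StackFrame::Type

// Assumed encoding: shift the frame type left and set the low bit. The
// result is odd, so a marker read back off the stack can never be
// mistaken for a Smi (whose low tag bit is 0).
static int32_t TypeToMarker(StackFrameType type) {
  assert(type >= 0);
  return (type << 1) | 1;
}

static StackFrameType MarkerToType(intptr_t marker) {
  assert((marker & 1) == 1);  // must be a marker, not a Smi
  return static_cast<StackFrameType>(marker >> 1);
}

Under that reading, the OUTERMOST_JSENTRY_FRAME / INNER_JSENTRY_FRAME values pushed in these hunks no longer need Smi tagging, which is exactly what the li/Branch changes drop.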
@@ -1251,10 +1247,8 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
// Check if the current stack frame is marked as the outermost JS frame.
Label non_outermost_js_2;
__ pop(t1);
- __ Branch(&non_outermost_js_2,
- ne,
- t1,
- Operand(Smi::FromInt(StackFrame::OUTERMOST_JSENTRY_FRAME)));
+ __ Branch(&non_outermost_js_2, ne, t1,
+ Operand(StackFrame::OUTERMOST_JSENTRY_FRAME));
__ li(t1, Operand(ExternalReference(js_entry_sp)));
__ sw(zero_reg, MemOperand(t1));
__ bind(&non_outermost_js_2);
@@ -1277,50 +1271,6 @@ void JSEntryStub::Generate(MacroAssembler* masm) {
__ Jump(ra);
}
-
-void LoadIndexedStringStub::Generate(MacroAssembler* masm) {
- // Return address is in ra.
- Label miss;
-
- Register receiver = LoadDescriptor::ReceiverRegister();
- Register index = LoadDescriptor::NameRegister();
- Register scratch = t1;
- Register result = v0;
- DCHECK(!scratch.is(receiver) && !scratch.is(index));
- DCHECK(!scratch.is(LoadWithVectorDescriptor::VectorRegister()));
-
- StringCharAtGenerator char_at_generator(receiver, index, scratch, result,
- &miss, // When not a string.
- &miss, // When not a number.
- &miss, // When index out of range.
- RECEIVER_IS_STRING);
- char_at_generator.GenerateFast(masm);
- __ Ret();
-
- StubRuntimeCallHelper call_helper;
- char_at_generator.GenerateSlow(masm, PART_OF_IC_HANDLER, call_helper);
-
- __ bind(&miss);
- PropertyAccessCompiler::TailCallBuiltin(
- masm, PropertyAccessCompiler::MissBuiltin(Code::KEYED_LOAD_IC));
-}
-
-
-void FunctionPrototypeStub::Generate(MacroAssembler* masm) {
- Label miss;
- Register receiver = LoadDescriptor::ReceiverRegister();
- // Ensure that the vector and slot registers won't be clobbered before
- // calling the miss handler.
- DCHECK(!AreAliased(t0, t1, LoadWithVectorDescriptor::VectorRegister(),
- LoadWithVectorDescriptor::SlotRegister()));
-
- NamedLoadHandlerCompiler::GenerateLoadFunctionPrototype(masm, receiver, t0,
- t1, &miss);
- __ bind(&miss);
- PropertyAccessCompiler::TailCallBuiltin(
- masm, PropertyAccessCompiler::MissBuiltin(Code::LOAD_IC));
-}
-
void RegExpExecStub::Generate(MacroAssembler* masm) {
// Just jump directly to runtime if native RegExp is not selected at compile
// time or if regexp entry in generated code is turned off runtime switch or
@@ -1422,7 +1372,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// (6) External string. Make it, offset-wise, look like a sequential string.
// Go to (4).
// (7) Short external string or not a string? If yes, bail out to runtime.
- // (8) Sliced string. Replace subject with parent. Go to (1).
+ // (8) Sliced or thin string. Replace subject with parent. Go to (1).
Label seq_string /* 4 */, external_string /* 6 */, check_underlying /* 1 */,
not_seq_nor_cons /* 5 */, not_long_external /* 7 */;
@@ -1443,6 +1393,7 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
// (2) Sequential or cons? If not, go to (5).
STATIC_ASSERT(kConsStringTag < kExternalStringTag);
STATIC_ASSERT(kSlicedStringTag > kExternalStringTag);
+ STATIC_ASSERT(kThinStringTag > kExternalStringTag);
STATIC_ASSERT(kIsNotStringMask > kExternalStringTag);
STATIC_ASSERT(kShortExternalStringTag > kExternalStringTag);
// Go to (5).
@@ -1469,12 +1420,12 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ Branch(&runtime, ls, a3, Operand(a1));
__ sra(a1, a1, kSmiTagSize); // Untag the Smi.
- STATIC_ASSERT(kStringEncodingMask == 4);
- STATIC_ASSERT(kOneByteStringTag == 4);
+ STATIC_ASSERT(kStringEncodingMask == 8);
+ STATIC_ASSERT(kOneByteStringTag == 8);
STATIC_ASSERT(kTwoByteStringTag == 0);
__ And(a0, a0, Operand(kStringEncodingMask)); // Non-zero for one-byte.
__ lw(t9, FieldMemOperand(regexp_data, JSRegExp::kDataOneByteCodeOffset));
- __ sra(a3, a0, 2); // a3 is 1 for ASCII, 0 for UC16 (used below).
+ __ sra(a3, a0, 3); // a3 is 1 for ASCII, 0 for UC16 (used below).
__ lw(t1, FieldMemOperand(regexp_data, JSRegExp::kDataUC16CodeOffset));
__ Movz(t9, t1, a0); // If UC16 (a0 is 0), replace t9 w/kDataUC16CodeOffset.
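The updated STATIC_ASSERTs and the sra shift amount (2 becomes 3) both follow from ThinString consuming one more representation bit in the string instance type, which pushes the one-byte/two-byte encoding flag from bit 2 (0x4) to bit 3 (0x8). A sketch of the layout as I read it from this change (the constants live in src/objects.h; the values here are assumptions consistent with the asserts above):

#include <cstdint>

// Assumed string instance-type layout after ThinString:
// bits 0..2 = representation (now 3 bits), bit 3 = encoding.
constexpr uint32_t kSeqStringTag = 0x0;
constexpr uint32_t kConsStringTag = 0x1;
constexpr uint32_t kExternalStringTag = 0x2;
constexpr uint32_t kSlicedStringTag = 0x3;
constexpr uint32_t kThinStringTag = 0x5;       // new; sorts above external
constexpr uint32_t kStringRepresentationMask = 0x7;
constexpr uint32_t kStringEncodingMask = 0x8;  // was 0x4 before ThinString
constexpr uint32_t kOneByteStringTag = 0x8;    // was 0x4
constexpr uint32_t kTwoByteStringTag = 0x0;

// The masked-and-shifted value the stub leaves in a3: 1 for one-byte
// (ASCII), 0 for two-byte (UC16).
constexpr int EncodingBit(uint32_t instance_type) {
  return (instance_type & kStringEncodingMask) >> 3;
}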
@@ -1719,12 +1670,18 @@ void RegExpExecStub::Generate(MacroAssembler* masm) {
__ And(at, a1, Operand(kIsNotStringMask | kShortExternalStringMask));
__ Branch(&runtime, ne, at, Operand(zero_reg));
- // (8) Sliced string. Replace subject with parent. Go to (4).
+ // (8) Sliced or thin string. Replace subject with parent. Go to (1).
+ Label thin_string;
+ __ Branch(&thin_string, eq, a1, Operand(kThinStringTag));
// Load offset into t0 and replace subject string with parent.
__ lw(t0, FieldMemOperand(subject, SlicedString::kOffsetOffset));
__ sra(t0, t0, kSmiTagSize);
__ lw(subject, FieldMemOperand(subject, SlicedString::kParentOffset));
__ jmp(&check_underlying); // Go to (1).
+
+ __ bind(&thin_string);
+ __ lw(subject, FieldMemOperand(subject, ThinString::kActualOffset));
+ __ jmp(&check_underlying); // Go to (1).
#endif // V8_INTERPRETED_REGEXP
}
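The new step (8) handling amounts to an unwrapping loop over indirect string representations: a sliced string contributes an offset and defers to its parent, while a thin string is a pure forwarding wrapper whose payload sits at ThinString::kActualOffset and contributes no offset. A self-contained sketch (Str is a stand-in for V8's String hierarchy, not the real API):

// Stand-in for the string representations the stub distinguishes.
struct Str {
  enum Kind { kSeq, kSliced, kThin } kind = kSeq;
  Str* parent = nullptr;  // SlicedString parent / ThinString "actual" string
  int offset = 0;         // SlicedString offset, already untagged
};

// Mirrors the stub's control flow: each rewrite of `s` corresponds to the
// jump back to check_underlying (step 1) until a directly usable
// (sequential or external) string remains.
static Str* UnwrapSubject(Str* s, int* offset) {
  for (;;) {
    switch (s->kind) {
      case Str::kSliced:
        *offset += s->offset;  // slices view a range of their parent
        s = s->parent;
        break;
      case Str::kThin:
        s = s->parent;  // follow the forwarding pointer; offset unchanged
        break;
      default:
        return s;
    }
  }
}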
@@ -1886,187 +1843,6 @@ void CallConstructStub::Generate(MacroAssembler* masm) {
__ Jump(isolate()->builtins()->Construct(), RelocInfo::CODE_TARGET);
}
-// Note: feedback_vector and slot are clobbered after the call.
-static void IncrementCallCount(MacroAssembler* masm, Register feedback_vector,
- Register slot) {
- __ Lsa(at, feedback_vector, slot, kPointerSizeLog2 - kSmiTagSize);
- __ lw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
- __ Addu(slot, slot, Operand(Smi::FromInt(1)));
- __ sw(slot, FieldMemOperand(at, FixedArray::kHeaderSize + kPointerSize));
-}
-
-void CallICStub::HandleArrayCase(MacroAssembler* masm, Label* miss) {
- // a0 - number of arguments
- // a1 - function
- // a3 - slot id
- // a2 - vector
- // t0 - loaded from vector[slot]
- __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, at);
- __ Branch(miss, ne, a1, Operand(at));
-
- // Increment the call count for monomorphic function calls.
- IncrementCallCount(masm, a2, a3);
-
- __ mov(a2, t0);
- __ mov(a3, a1);
- ArrayConstructorStub stub(masm->isolate());
- __ TailCallStub(&stub);
-}
-
-
-void CallICStub::Generate(MacroAssembler* masm) {
- // a0 - number of arguments
- // a1 - function
- // a3 - slot id (Smi)
- // a2 - vector
- Label extra_checks_or_miss, call, call_function, call_count_incremented;
-
- // The checks. First, does a1 match the recorded monomorphic target?
- __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize);
- __ lw(t0, FieldMemOperand(t0, FixedArray::kHeaderSize));
-
- // We don't know that we have a weak cell. We might have a private symbol
- // or an AllocationSite, but the memory is safe to examine.
- // AllocationSite::kTransitionInfoOffset - contains a Smi or pointer to
- // FixedArray.
- // WeakCell::kValueOffset - contains a JSFunction or Smi(0)
- // Symbol::kHashFieldSlot - if the low bit is 1, then the hash is not
- // computed, meaning that it can't appear to be a pointer. If the low bit is
- // 0, then hash is computed, but the 0 bit prevents the field from appearing
- // to be a pointer.
- STATIC_ASSERT(WeakCell::kSize >= kPointerSize);
- STATIC_ASSERT(AllocationSite::kTransitionInfoOffset ==
- WeakCell::kValueOffset &&
- WeakCell::kValueOffset == Symbol::kHashFieldSlot);
-
- __ lw(t1, FieldMemOperand(t0, WeakCell::kValueOffset));
- __ Branch(&extra_checks_or_miss, ne, a1, Operand(t1));
-
- // The compare above could have been a SMI/SMI comparison. Guard against this
- // convincing us that we have a monomorphic JSFunction.
- __ JumpIfSmi(a1, &extra_checks_or_miss);
-
- __ bind(&call_function);
-
- // Increment the call count for monomorphic function calls.
- IncrementCallCount(masm, a2, a3);
-
- __ Jump(masm->isolate()->builtins()->CallFunction(convert_mode(),
- tail_call_mode()),
- RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg));
-
- __ bind(&extra_checks_or_miss);
- Label uninitialized, miss, not_allocation_site;
-
- __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
- __ Branch(&call, eq, t0, Operand(at));
-
- // Verify that t0 contains an AllocationSite
- __ lw(t1, FieldMemOperand(t0, HeapObject::kMapOffset));
- __ LoadRoot(at, Heap::kAllocationSiteMapRootIndex);
- __ Branch(&not_allocation_site, ne, t1, Operand(at));
-
- HandleArrayCase(masm, &miss);
-
- __ bind(&not_allocation_site);
-
- // The following cases attempt to handle MISS cases without going to the
- // runtime.
- if (FLAG_trace_ic) {
- __ Branch(&miss);
- }
-
- __ LoadRoot(at, Heap::kuninitialized_symbolRootIndex);
- __ Branch(&uninitialized, eq, t0, Operand(at));
-
- // We are going megamorphic. If the feedback is a JSFunction, it is fine
- // to handle it here. More complex cases are dealt with in the runtime.
- __ AssertNotSmi(t0);
- __ GetObjectType(t0, t1, t1);
- __ Branch(&miss, ne, t1, Operand(JS_FUNCTION_TYPE));
- __ Lsa(t0, a2, a3, kPointerSizeLog2 - kSmiTagSize);
- __ LoadRoot(at, Heap::kmegamorphic_symbolRootIndex);
- __ sw(at, FieldMemOperand(t0, FixedArray::kHeaderSize));
-
- __ bind(&call);
- IncrementCallCount(masm, a2, a3);
-
- __ bind(&call_count_incremented);
-
- __ Jump(masm->isolate()->builtins()->Call(convert_mode(), tail_call_mode()),
- RelocInfo::CODE_TARGET, al, zero_reg, Operand(zero_reg));
-
- __ bind(&uninitialized);
-
- // We are going monomorphic, provided we actually have a JSFunction.
- __ JumpIfSmi(a1, &miss);
-
- // Goto miss case if we do not have a function.
- __ GetObjectType(a1, t0, t0);
- __ Branch(&miss, ne, t0, Operand(JS_FUNCTION_TYPE));
-
- // Make sure the function is not the Array() function, which requires special
- // behavior on MISS.
- __ LoadNativeContextSlot(Context::ARRAY_FUNCTION_INDEX, t0);
- __ Branch(&miss, eq, a1, Operand(t0));
-
- // Make sure the function belongs to the same native context.
- __ lw(t0, FieldMemOperand(a1, JSFunction::kContextOffset));
- __ lw(t0, ContextMemOperand(t0, Context::NATIVE_CONTEXT_INDEX));
- __ lw(t1, NativeContextMemOperand());
- __ Branch(&miss, ne, t0, Operand(t1));
-
- // Store the function. Use a stub since we need a frame for allocation.
- // a2 - vector
- // a3 - slot
- // a1 - function
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- CreateWeakCellStub create_stub(masm->isolate());
- __ SmiTag(a0);
- __ Push(a0);
- __ Push(a2, a3);
- __ Push(cp, a1);
- __ CallStub(&create_stub);
- __ Pop(cp, a1);
- __ Pop(a2, a3);
- __ Pop(a0);
- __ SmiUntag(a0);
- }
-
- __ Branch(&call_function);
-
- // We are here because tracing is on or we encountered a MISS case we can't
- // handle here.
- __ bind(&miss);
- GenerateMiss(masm);
-
- __ Branch(&call_count_incremented);
-}
-
-
-void CallICStub::GenerateMiss(MacroAssembler* masm) {
- FrameScope scope(masm, StackFrame::INTERNAL);
-
- // Preserve the number of arguments as Smi.
- __ SmiTag(a0);
- __ Push(a0);
-
- // Push the receiver and the function and feedback info.
- __ Push(a1, a2, a3);
-
- // Call the entry.
- __ CallRuntime(Runtime::kCallIC_Miss);
-
- // Move result to a1 and exit the internal frame.
- __ mov(a1, v0);
-
- // Restore number of arguments.
- __ Pop(a0);
- __ SmiUntag(a0);
-}
-
-
// StringCharCodeAtGenerator.
void StringCharCodeAtGenerator::GenerateFast(MacroAssembler* masm) {
DCHECK(!t0.is(index_));
@@ -2164,51 +1940,6 @@ void StringCharCodeAtGenerator::GenerateSlow(
__ Abort(kUnexpectedFallthroughFromCharCodeAtSlowCase);
}
-
-// -------------------------------------------------------------------------
-// StringCharFromCodeGenerator
-
-void StringCharFromCodeGenerator::GenerateFast(MacroAssembler* masm) {
- // Fast case of Heap::LookupSingleCharacterStringFromCode.
-
- DCHECK(!t0.is(result_));
- DCHECK(!t0.is(code_));
-
- STATIC_ASSERT(kSmiTag == 0);
- STATIC_ASSERT(kSmiShiftSize == 0);
- DCHECK(base::bits::IsPowerOfTwo32(String::kMaxOneByteCharCodeU + 1));
- __ And(t0, code_, Operand(kSmiTagMask |
- ((~String::kMaxOneByteCharCodeU) << kSmiTagSize)));
- __ Branch(&slow_case_, ne, t0, Operand(zero_reg));
-
- __ LoadRoot(result_, Heap::kSingleCharacterStringCacheRootIndex);
- // At this point code register contains smi tagged one-byte char code.
- STATIC_ASSERT(kSmiTag == 0);
- __ Lsa(result_, result_, code_, kPointerSizeLog2 - kSmiTagSize);
- __ lw(result_, FieldMemOperand(result_, FixedArray::kHeaderSize));
- __ LoadRoot(t0, Heap::kUndefinedValueRootIndex);
- __ Branch(&slow_case_, eq, result_, Operand(t0));
- __ bind(&exit_);
-}
-
-
-void StringCharFromCodeGenerator::GenerateSlow(
- MacroAssembler* masm,
- const RuntimeCallHelper& call_helper) {
- __ Abort(kUnexpectedFallthroughToCharFromCodeSlowCase);
-
- __ bind(&slow_case_);
- call_helper.BeforeCall(masm);
- __ push(code_);
- __ CallRuntime(Runtime::kStringCharFromCode);
- __ Move(result_, v0);
-
- call_helper.AfterCall(masm);
- __ Branch(&exit_);
-
- __ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
-}
-
void StringHelper::GenerateFlatOneByteStringEquals(
MacroAssembler* masm, Register left, Register right, Register scratch1,
Register scratch2, Register scratch3) {
@@ -3127,12 +2858,6 @@ void StubFailureTrampolineStub::Generate(MacroAssembler* masm) {
__ Addu(sp, sp, a1);
}
-void CallICTrampolineStub::Generate(MacroAssembler* masm) {
- __ EmitLoadFeedbackVector(a2);
- CallICStub stub(isolate(), state());
- __ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
-}
-
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
ProfileEntryHookStub stub(masm->isolate());
@@ -3487,499 +3212,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
-void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- a1 : function
- // -- cp : context
- // -- fp : frame pointer
- // -- ra : return address
- // -----------------------------------
- __ AssertFunction(a1);
-
- // Make a2 point to the JavaScript frame.
- __ mov(a2, fp);
- if (skip_stub_frame()) {
- // For Ignition we need to skip the handler/stub frame to reach the
- // JavaScript frame for the function.
- __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
- }
- if (FLAG_debug_code) {
- Label ok;
- __ lw(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
- __ Branch(&ok, eq, a1, Operand(a3));
- __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
- __ bind(&ok);
- }
-
- // Check if we have rest parameters (only possible if we have an
- // arguments adaptor frame below the function frame).
- Label no_rest_parameters;
- __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
- __ lw(a3, MemOperand(a2, CommonFrameConstants::kContextOrFrameTypeOffset));
- __ Branch(&no_rest_parameters, ne, a3,
- Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
-
- // Check if the arguments adaptor frame contains more arguments than
- // specified by the function's internal formal parameter count.
- Label rest_parameters;
- __ lw(a0, MemOperand(a2, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ lw(a3, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
- __ lw(a3,
- FieldMemOperand(a3, SharedFunctionInfo::kFormalParameterCountOffset));
- __ Subu(a0, a0, Operand(a3));
- __ Branch(&rest_parameters, gt, a0, Operand(zero_reg));
-
- // Return an empty rest parameter array.
- __ bind(&no_rest_parameters);
- {
- // ----------- S t a t e -------------
- // -- cp : context
- // -- ra : return address
- // -----------------------------------
-
- // Allocate an empty rest parameter array.
- Label allocate, done_allocate;
- __ Allocate(JSArray::kSize, v0, a0, a1, &allocate, NO_ALLOCATION_FLAGS);
- __ bind(&done_allocate);
-
- // Setup the rest parameter array in v0.
- __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, a1);
- __ sw(a1, FieldMemOperand(v0, JSArray::kMapOffset));
- __ LoadRoot(a1, Heap::kEmptyFixedArrayRootIndex);
- __ sw(a1, FieldMemOperand(v0, JSArray::kPropertiesOffset));
- __ sw(a1, FieldMemOperand(v0, JSArray::kElementsOffset));
- __ Move(a1, Smi::kZero);
- __ Ret(USE_DELAY_SLOT);
- __ sw(a1, FieldMemOperand(v0, JSArray::kLengthOffset)); // In delay slot
- STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
-
- // Fall back to %AllocateInNewSpace.
- __ bind(&allocate);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(Smi::FromInt(JSArray::kSize));
- __ CallRuntime(Runtime::kAllocateInNewSpace);
- }
- __ jmp(&done_allocate);
- }
-
- __ bind(&rest_parameters);
- {
- // Compute the pointer to the first rest parameter (skipping the receiver).
- __ Lsa(a2, a2, a0, kPointerSizeLog2 - 1);
- __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
- 1 * kPointerSize));
-
- // ----------- S t a t e -------------
- // -- cp : context
- // -- a0 : number of rest parameters (tagged)
- // -- a1 : function
- // -- a2 : pointer to first rest parameters
- // -- ra : return address
- // -----------------------------------
-
- // Allocate space for the rest parameter array plus the backing store.
- Label allocate, done_allocate;
- __ li(t0, Operand(JSArray::kSize + FixedArray::kHeaderSize));
- __ Lsa(t0, t0, a0, kPointerSizeLog2 - 1);
- __ Allocate(t0, v0, a3, t1, &allocate, NO_ALLOCATION_FLAGS);
- __ bind(&done_allocate);
-
- // Setup the elements array in v0.
- __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
- __ sw(at, FieldMemOperand(v0, FixedArray::kMapOffset));
- __ sw(a0, FieldMemOperand(v0, FixedArray::kLengthOffset));
- __ Addu(a3, v0, Operand(FixedArray::kHeaderSize));
- {
- Label loop, done_loop;
- __ sll(at, a0, kPointerSizeLog2 - 1);
- __ Addu(a1, a3, at);
- __ bind(&loop);
- __ Branch(&done_loop, eq, a1, Operand(a3));
- __ lw(at, MemOperand(a2, 0 * kPointerSize));
- __ sw(at, FieldMemOperand(a3, 0 * kPointerSize));
- __ Subu(a2, a2, Operand(1 * kPointerSize));
- __ Addu(a3, a3, Operand(1 * kPointerSize));
- __ jmp(&loop);
- __ bind(&done_loop);
- }
-
- // Setup the rest parameter array in a3.
- __ LoadNativeContextSlot(Context::JS_ARRAY_FAST_ELEMENTS_MAP_INDEX, at);
- __ sw(at, FieldMemOperand(a3, JSArray::kMapOffset));
- __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
- __ sw(at, FieldMemOperand(a3, JSArray::kPropertiesOffset));
- __ sw(v0, FieldMemOperand(a3, JSArray::kElementsOffset));
- __ sw(a0, FieldMemOperand(a3, JSArray::kLengthOffset));
- STATIC_ASSERT(JSArray::kSize == 4 * kPointerSize);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a3); // In delay slot
-
- // Fall back to %AllocateInNewSpace (if not too big).
- Label too_big_for_new_space;
- __ bind(&allocate);
- __ Branch(&too_big_for_new_space, gt, t0,
- Operand(kMaxRegularHeapObjectSize));
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ SmiTag(t0);
- __ Push(a0, a2, t0);
- __ CallRuntime(Runtime::kAllocateInNewSpace);
- __ Pop(a0, a2);
- }
- __ jmp(&done_allocate);
-
- // Fall back to %NewStrictArguments.
- __ bind(&too_big_for_new_space);
- __ Push(a1);
- __ TailCallRuntime(Runtime::kNewStrictArguments);
- }
-}
-
-
-void FastNewSloppyArgumentsStub::Generate(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- a1 : function
- // -- cp : context
- // -- fp : frame pointer
- // -- ra : return address
- // -----------------------------------
- __ AssertFunction(a1);
-
- // Make t0 point to the JavaScript frame.
- __ mov(t0, fp);
- if (skip_stub_frame()) {
- // For Ignition we need to skip the handler/stub frame to reach the
- // JavaScript frame for the function.
- __ lw(t0, MemOperand(t0, StandardFrameConstants::kCallerFPOffset));
- }
- if (FLAG_debug_code) {
- Label ok;
- __ lw(a3, MemOperand(t0, StandardFrameConstants::kFunctionOffset));
- __ Branch(&ok, eq, a1, Operand(a3));
- __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
- __ bind(&ok);
- }
-
- // TODO(bmeurer): Cleanup to match the FastNewStrictArgumentsStub.
- __ lw(a2, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
- __ lw(a2,
- FieldMemOperand(a2, SharedFunctionInfo::kFormalParameterCountOffset));
- __ Lsa(a3, t0, a2, kPointerSizeLog2 - 1);
- __ Addu(a3, a3, Operand(StandardFrameConstants::kCallerSPOffset));
-
- // a1 : function
- // a2 : number of parameters (tagged)
- // a3 : parameters pointer
- // t0 : Javascript frame pointer
- // Registers used over whole function:
- // t1 : arguments count (tagged)
- // t2 : mapped parameter count (tagged)
-
- // Check if the calling frame is an arguments adaptor frame.
- Label adaptor_frame, try_allocate, runtime;
- __ lw(t0, MemOperand(t0, StandardFrameConstants::kCallerFPOffset));
- __ lw(a0, MemOperand(t0, CommonFrameConstants::kContextOrFrameTypeOffset));
- __ Branch(&adaptor_frame, eq, a0,
- Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
-
- // No adaptor, parameter count = argument count.
- __ mov(t1, a2);
- __ Branch(USE_DELAY_SLOT, &try_allocate);
- __ mov(t2, a2); // In delay slot.
-
- // We have an adaptor frame. Patch the parameters pointer.
- __ bind(&adaptor_frame);
- __ lw(t1, MemOperand(t0, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ Lsa(t0, t0, t1, 1);
- __ Addu(a3, t0, Operand(StandardFrameConstants::kCallerSPOffset));
-
- // t1 = argument count (tagged)
- // t2 = parameter count (tagged)
- // Compute the mapped parameter count = min(t2, t1) in t2.
- __ mov(t2, a2);
- __ Branch(&try_allocate, le, t2, Operand(t1));
- __ mov(t2, t1);
-
- __ bind(&try_allocate);
-
- // Compute the sizes of backing store, parameter map, and arguments object.
- // 1. Parameter map, has 2 extra words containing context and backing store.
- const int kParameterMapHeaderSize =
- FixedArray::kHeaderSize + 2 * kPointerSize;
- // If there are no mapped parameters, we do not need the parameter_map.
- Label param_map_size;
- DCHECK_EQ(static_cast<Smi*>(0), Smi::kZero);
- __ Branch(USE_DELAY_SLOT, &param_map_size, eq, t2, Operand(zero_reg));
- __ mov(t5, zero_reg); // In delay slot: param map size = 0 when t2 == 0.
- __ sll(t5, t2, 1);
- __ addiu(t5, t5, kParameterMapHeaderSize);
- __ bind(&param_map_size);
-
- // 2. Backing store.
- __ Lsa(t5, t5, t1, 1);
- __ Addu(t5, t5, Operand(FixedArray::kHeaderSize));
-
- // 3. Arguments object.
- __ Addu(t5, t5, Operand(JSSloppyArgumentsObject::kSize));
-
- // Do the allocation of all three objects in one go.
- __ Allocate(t5, v0, t5, t0, &runtime, NO_ALLOCATION_FLAGS);
-
- // v0 = address of new object(s) (tagged)
- // a2 = argument count (smi-tagged)
- // Get the arguments boilerplate from the current native context into t0.
- const int kNormalOffset =
- Context::SlotOffset(Context::SLOPPY_ARGUMENTS_MAP_INDEX);
- const int kAliasedOffset =
- Context::SlotOffset(Context::FAST_ALIASED_ARGUMENTS_MAP_INDEX);
-
- __ lw(t0, NativeContextMemOperand());
- Label skip2_ne, skip2_eq;
- __ Branch(&skip2_ne, ne, t2, Operand(zero_reg));
- __ lw(t0, MemOperand(t0, kNormalOffset));
- __ bind(&skip2_ne);
-
- __ Branch(&skip2_eq, eq, t2, Operand(zero_reg));
- __ lw(t0, MemOperand(t0, kAliasedOffset));
- __ bind(&skip2_eq);
-
- // v0 = address of new object (tagged)
- // a2 = argument count (smi-tagged)
- // t0 = address of arguments map (tagged)
- // t2 = mapped parameter count (tagged)
- __ sw(t0, FieldMemOperand(v0, JSObject::kMapOffset));
- __ LoadRoot(t5, Heap::kEmptyFixedArrayRootIndex);
- __ sw(t5, FieldMemOperand(v0, JSObject::kPropertiesOffset));
- __ sw(t5, FieldMemOperand(v0, JSObject::kElementsOffset));
-
- // Set up the callee in-object property.
- __ AssertNotSmi(a1);
- __ sw(a1, FieldMemOperand(v0, JSSloppyArgumentsObject::kCalleeOffset));
-
- // Use the length (smi tagged) and set that as an in-object property too.
- __ AssertSmi(t1);
- __ sw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
-
- // Set up the elements pointer in the allocated arguments object.
- // If we allocated a parameter map, t0 will point there, otherwise
- // it will point to the backing store.
- __ Addu(t0, v0, Operand(JSSloppyArgumentsObject::kSize));
- __ sw(t0, FieldMemOperand(v0, JSObject::kElementsOffset));
-
- // v0 = address of new object (tagged)
- // a2 = argument count (tagged)
- // t0 = address of parameter map or backing store (tagged)
- // t2 = mapped parameter count (tagged)
- // Initialize parameter map. If there are no mapped arguments, we're done.
- Label skip_parameter_map;
- Label skip3;
- __ Branch(&skip3, ne, t2, Operand(Smi::kZero));
- // Move backing store address to a1, because it is
- // expected there when filling in the unmapped arguments.
- __ mov(a1, t0);
- __ bind(&skip3);
-
- __ Branch(&skip_parameter_map, eq, t2, Operand(Smi::kZero));
-
- __ LoadRoot(t1, Heap::kSloppyArgumentsElementsMapRootIndex);
- __ sw(t1, FieldMemOperand(t0, FixedArray::kMapOffset));
- __ Addu(t1, t2, Operand(Smi::FromInt(2)));
- __ sw(t1, FieldMemOperand(t0, FixedArray::kLengthOffset));
- __ sw(cp, FieldMemOperand(t0, FixedArray::kHeaderSize + 0 * kPointerSize));
- __ Lsa(t1, t0, t2, 1);
- __ Addu(t1, t1, Operand(kParameterMapHeaderSize));
- __ sw(t1, FieldMemOperand(t0, FixedArray::kHeaderSize + 1 * kPointerSize));
-
- // Copy the parameter slots and the holes in the arguments.
- // We need to fill in mapped_parameter_count slots. They index the context,
- // where parameters are stored in reverse order, at
- // MIN_CONTEXT_SLOTS .. MIN_CONTEXT_SLOTS+parameter_count-1
- // The mapped parameters thus need to get indices
- // MIN_CONTEXT_SLOTS+parameter_count-1 ..
- // MIN_CONTEXT_SLOTS+parameter_count-mapped_parameter_count
- // We loop from right to left.
- Label parameters_loop, parameters_test;
- __ mov(t1, t2);
- __ Addu(t5, a2, Operand(Smi::FromInt(Context::MIN_CONTEXT_SLOTS)));
- __ Subu(t5, t5, Operand(t2));
- __ LoadRoot(t3, Heap::kTheHoleValueRootIndex);
- __ Lsa(a1, t0, t1, 1);
- __ Addu(a1, a1, Operand(kParameterMapHeaderSize));
-
- // a1 = address of backing store (tagged)
- // t0 = address of parameter map (tagged)
- // a0 = temporary scratch (e.g., for address calculation)
- // t1 = loop variable (tagged)
- // t3 = the hole value
- __ jmp(&parameters_test);
-
- __ bind(&parameters_loop);
- __ Subu(t1, t1, Operand(Smi::FromInt(1)));
- __ sll(a0, t1, 1);
- __ Addu(a0, a0, Operand(kParameterMapHeaderSize - kHeapObjectTag));
- __ Addu(t6, t0, a0);
- __ sw(t5, MemOperand(t6));
- __ Subu(a0, a0, Operand(kParameterMapHeaderSize - FixedArray::kHeaderSize));
- __ Addu(t6, a1, a0);
- __ sw(t3, MemOperand(t6));
- __ Addu(t5, t5, Operand(Smi::FromInt(1)));
- __ bind(&parameters_test);
- __ Branch(&parameters_loop, ne, t1, Operand(Smi::kZero));
-
- // t1 = argument count (tagged).
- __ lw(t1, FieldMemOperand(v0, JSSloppyArgumentsObject::kLengthOffset));
-
- __ bind(&skip_parameter_map);
- // v0 = address of new object (tagged)
- // a1 = address of backing store (tagged)
- // t1 = argument count (tagged)
- // t2 = mapped parameter count (tagged)
- // t5 = scratch
- // Copy arguments header and remaining slots (if there are any).
- __ LoadRoot(t5, Heap::kFixedArrayMapRootIndex);
- __ sw(t5, FieldMemOperand(a1, FixedArray::kMapOffset));
- __ sw(t1, FieldMemOperand(a1, FixedArray::kLengthOffset));
-
- Label arguments_loop, arguments_test;
- __ sll(t6, t2, 1);
- __ Subu(a3, a3, Operand(t6));
- __ jmp(&arguments_test);
-
- __ bind(&arguments_loop);
- __ Subu(a3, a3, Operand(kPointerSize));
- __ lw(t0, MemOperand(a3, 0));
- __ Lsa(t5, a1, t2, 1);
- __ sw(t0, FieldMemOperand(t5, FixedArray::kHeaderSize));
- __ Addu(t2, t2, Operand(Smi::FromInt(1)));
-
- __ bind(&arguments_test);
- __ Branch(&arguments_loop, lt, t2, Operand(t1));
-
- // Return.
- __ Ret();
-
- // Do the runtime call to allocate the arguments object.
- // t1 = argument count (tagged)
- __ bind(&runtime);
- __ Push(a1, a3, t1);
- __ TailCallRuntime(Runtime::kNewSloppyArguments);
-}
-
-
-void FastNewStrictArgumentsStub::Generate(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- a1 : function
- // -- cp : context
- // -- fp : frame pointer
- // -- ra : return address
- // -----------------------------------
- __ AssertFunction(a1);
-
- // Make a2 point to the JavaScript frame.
- __ mov(a2, fp);
- if (skip_stub_frame()) {
- // For Ignition we need to skip the handler/stub frame to reach the
- // JavaScript frame for the function.
- __ lw(a2, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
- }
- if (FLAG_debug_code) {
- Label ok;
- __ lw(a3, MemOperand(a2, StandardFrameConstants::kFunctionOffset));
- __ Branch(&ok, eq, a1, Operand(a3));
- __ Abort(kInvalidFrameForFastNewRestArgumentsStub);
- __ bind(&ok);
- }
-
- // Check if we have an arguments adaptor frame below the function frame.
- Label arguments_adaptor, arguments_done;
- __ lw(a3, MemOperand(a2, StandardFrameConstants::kCallerFPOffset));
- __ lw(a0, MemOperand(a3, CommonFrameConstants::kContextOrFrameTypeOffset));
- __ Branch(&arguments_adaptor, eq, a0,
- Operand(Smi::FromInt(StackFrame::ARGUMENTS_ADAPTOR)));
- {
- __ lw(t0, FieldMemOperand(a1, JSFunction::kSharedFunctionInfoOffset));
- __ lw(a0,
- FieldMemOperand(t0, SharedFunctionInfo::kFormalParameterCountOffset));
- __ Lsa(a2, a2, a0, kPointerSizeLog2 - 1);
- __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
- 1 * kPointerSize));
- }
- __ Branch(&arguments_done);
- __ bind(&arguments_adaptor);
- {
- __ lw(a0, MemOperand(a3, ArgumentsAdaptorFrameConstants::kLengthOffset));
- __ Lsa(a2, a3, a0, kPointerSizeLog2 - 1);
- __ Addu(a2, a2, Operand(StandardFrameConstants::kCallerSPOffset -
- 1 * kPointerSize));
- }
- __ bind(&arguments_done);
-
- // ----------- S t a t e -------------
- // -- cp : context
- // -- a0 : number of rest parameters (tagged)
- // -- a1 : function
- // -- a2 : pointer to first rest parameters
- // -- ra : return address
- // -----------------------------------
-
- // Allocate space for the strict arguments object plus the backing store.
- Label allocate, done_allocate;
- __ li(t0, Operand(JSStrictArgumentsObject::kSize + FixedArray::kHeaderSize));
- __ Lsa(t0, t0, a0, kPointerSizeLog2 - 1);
- __ Allocate(t0, v0, a3, t1, &allocate, NO_ALLOCATION_FLAGS);
- __ bind(&done_allocate);
-
- // Setup the elements array in v0.
- __ LoadRoot(at, Heap::kFixedArrayMapRootIndex);
- __ sw(at, FieldMemOperand(v0, FixedArray::kMapOffset));
- __ sw(a0, FieldMemOperand(v0, FixedArray::kLengthOffset));
- __ Addu(a3, v0, Operand(FixedArray::kHeaderSize));
- {
- Label loop, done_loop;
- __ sll(at, a0, kPointerSizeLog2 - 1);
- __ Addu(a1, a3, at);
- __ bind(&loop);
- __ Branch(&done_loop, eq, a1, Operand(a3));
- __ lw(at, MemOperand(a2, 0 * kPointerSize));
- __ sw(at, FieldMemOperand(a3, 0 * kPointerSize));
- __ Subu(a2, a2, Operand(1 * kPointerSize));
- __ Addu(a3, a3, Operand(1 * kPointerSize));
- __ Branch(&loop);
- __ bind(&done_loop);
- }
-
- // Setup the strict arguments object in a3.
- __ LoadNativeContextSlot(Context::STRICT_ARGUMENTS_MAP_INDEX, at);
- __ sw(at, FieldMemOperand(a3, JSStrictArgumentsObject::kMapOffset));
- __ LoadRoot(at, Heap::kEmptyFixedArrayRootIndex);
- __ sw(at, FieldMemOperand(a3, JSStrictArgumentsObject::kPropertiesOffset));
- __ sw(v0, FieldMemOperand(a3, JSStrictArgumentsObject::kElementsOffset));
- __ sw(a0, FieldMemOperand(a3, JSStrictArgumentsObject::kLengthOffset));
- STATIC_ASSERT(JSStrictArgumentsObject::kSize == 4 * kPointerSize);
- __ Ret(USE_DELAY_SLOT);
- __ mov(v0, a3); // In delay slot
-
- // Fall back to %AllocateInNewSpace (if not too big).
- Label too_big_for_new_space;
- __ bind(&allocate);
- __ Branch(&too_big_for_new_space, gt, t0, Operand(kMaxRegularHeapObjectSize));
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ SmiTag(t0);
- __ Push(a0, a2, t0);
- __ CallRuntime(Runtime::kAllocateInNewSpace);
- __ Pop(a0, a2);
- }
- __ jmp(&done_allocate);
-
- // Fall back to %NewStrictArguments.
- __ bind(&too_big_for_new_space);
- __ Push(a1);
- __ TailCallRuntime(Runtime::kNewStrictArguments);
-}
-
-
static int AddressOffset(ExternalReference ref0, ExternalReference ref1) {
return ref0.address() - ref1.address();
}