Diffstat (limited to 'deps/v8/src/mips64/code-stubs-mips64.cc')
-rw-r--r--  deps/v8/src/mips64/code-stubs-mips64.cc | 491
1 file changed, 5 insertions, 486 deletions
diff --git a/deps/v8/src/mips64/code-stubs-mips64.cc b/deps/v8/src/mips64/code-stubs-mips64.cc
index 97f5b73f9c..e570447e8f 100644
--- a/deps/v8/src/mips64/code-stubs-mips64.cc
+++ b/deps/v8/src/mips64/code-stubs-mips64.cc
@@ -32,17 +32,6 @@ void ArrayNArgumentsConstructorStub::Generate(MacroAssembler* masm) {
__ TailCallRuntime(Runtime::kNewArray);
}
-void FastArrayPushStub::InitializeDescriptor(CodeStubDescriptor* descriptor) {
- Address deopt_handler = Runtime::FunctionForId(Runtime::kArrayPush)->entry;
- descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
-}
-
-void FastFunctionBindStub::InitializeDescriptor(
- CodeStubDescriptor* descriptor) {
- Address deopt_handler = Runtime::FunctionForId(Runtime::kFunctionBind)->entry;
- descriptor->Initialize(a0, deopt_handler, -1, JS_FUNCTION_STUB_MODE);
-}
-
static void EmitIdenticalObjectComparison(MacroAssembler* masm, Label* slow,
Condition cc);
static void EmitSmiNonsmiComparison(MacroAssembler* masm,
@@ -681,8 +670,11 @@ void CompareICStub::GenerateGeneric(MacroAssembler* masm) {
if (cc == eq) {
{
FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(lhs, rhs);
- __ CallRuntime(strict() ? Runtime::kStrictEqual : Runtime::kEqual);
+ __ Push(cp);
+ __ Call(strict() ? isolate()->builtins()->StrictEqual()
+ : isolate()->builtins()->Equal(),
+ RelocInfo::CODE_TARGET);
+ __ Pop(cp);
}
// Turn true into 0 and false into some non-zero value.
STATIC_ASSERT(EQUAL == 0);
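// Hedged sketch (not part of the patch): the hunk above replaces the
// Runtime::kStrictEqual/kEqual call with a direct call to the
// StrictEqual/Equal builtins. The builtins take lhs/rhs in registers
// rather than on the stack, so only the context register needs saving,
// hence the Push(cp)/Pop(cp) pair. A minimal C++ model of the contract
// ("Value" and the function pointers are stand-ins, not V8 types):
struct Value { int tag; double num; };

int GenericCompareResult(bool strict,
                         bool (*equal)(Value, Value),
                         bool (*strict_equal)(Value, Value),
                         Value lhs, Value rhs) {
  bool eq = strict ? strict_equal(lhs, rhs) : equal(lhs, rhs);
  // "Turn true into 0 and false into some non-zero value": the stub
  // encodes EQUAL as 0 (see the STATIC_ASSERT above).
  return eq ? 0 : 1;
}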
@@ -913,7 +905,6 @@ void CodeStub::GenerateFPStubs(Isolate* isolate) {
SaveFPRegsMode mode = kSaveFPRegs;
CEntryStub(isolate, 1, mode).GetCode();
StoreBufferOverflowStub(isolate, mode).GetCode();
- isolate->set_fp_stubs_generated(true);
}
@@ -2221,51 +2212,6 @@ void StringCharFromCodeGenerator::GenerateSlow(
__ Abort(kUnexpectedFallthroughFromCharFromCodeSlowCase);
}
-
-enum CopyCharactersFlags { COPY_ONE_BYTE = 1, DEST_ALWAYS_ALIGNED = 2 };
-
-
-void StringHelper::GenerateCopyCharacters(MacroAssembler* masm,
- Register dest,
- Register src,
- Register count,
- Register scratch,
- String::Encoding encoding) {
- if (FLAG_debug_code) {
- // Check that destination is word aligned.
- __ And(scratch, dest, Operand(kPointerAlignmentMask));
- __ Check(eq,
- kDestinationOfCopyNotAligned,
- scratch,
- Operand(zero_reg));
- }
-
- // Assumes word reads and writes are little endian.
- // Nothing to do for zero characters.
- Label done;
-
- if (encoding == String::TWO_BYTE_ENCODING) {
- __ Daddu(count, count, count);
- }
-
- Register limit = count; // Read until dest equals this.
- __ Daddu(limit, dest, Operand(count));
-
- Label loop_entry, loop;
- // Copy bytes from src to dest until dest hits limit.
- __ Branch(&loop_entry);
- __ bind(&loop);
- __ lbu(scratch, MemOperand(src));
- __ daddiu(src, src, 1);
- __ sb(scratch, MemOperand(dest));
- __ daddiu(dest, dest, 1);
- __ bind(&loop_entry);
- __ Branch(&loop, lt, dest, Operand(limit));
-
- __ bind(&done);
-}
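// Hedged sketch (not part of the patch): the removed GenerateCopyCharacters
// is a plain byte-copy loop. For TWO_BYTE_ENCODING the count is doubled up
// front (Daddu(count, count, count)) so the byte-wide loop covers both
// bytes of each UTF-16 unit. Equivalent C++ model (names are stand-ins):
#include <cstdint>
#include <cstddef>

enum Encoding { ONE_BYTE_ENCODING, TWO_BYTE_ENCODING };

void CopyCharacters(uint8_t* dest, const uint8_t* src, size_t count,
                    Encoding encoding) {
  if (encoding == TWO_BYTE_ENCODING) count += count;  // chars -> bytes
  uint8_t* const limit = dest + count;  // "Read until dest equals this."
  while (dest < limit) *dest++ = *src++;
}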
-
-
void StringHelper::GenerateFlatOneByteStringEquals(
MacroAssembler* masm, Register left, Register right, Register scratch1,
Register scratch2, Register scratch3) {
@@ -2893,84 +2839,6 @@ void NameDictionaryLookupStub::GenerateNegativeLookup(MacroAssembler* masm,
__ Branch(miss, ne, at, Operand(zero_reg));
}
-
-// Probe the name dictionary in the |elements| register. Jump to the
-// |done| label if a property with the given name is found. Jump to
-// the |miss| label otherwise.
-// If lookup was successful |scratch2| will be equal to elements + 4 * index.
-void NameDictionaryLookupStub::GeneratePositiveLookup(MacroAssembler* masm,
- Label* miss,
- Label* done,
- Register elements,
- Register name,
- Register scratch1,
- Register scratch2) {
- DCHECK(!elements.is(scratch1));
- DCHECK(!elements.is(scratch2));
- DCHECK(!name.is(scratch1));
- DCHECK(!name.is(scratch2));
-
- __ AssertName(name);
-
- // Compute the capacity mask.
- __ ld(scratch1, FieldMemOperand(elements, kCapacityOffset));
- __ SmiUntag(scratch1);
- __ Dsubu(scratch1, scratch1, Operand(1));
-
- // Generate an unrolled loop that performs a few probes before
- // giving up. Measurements done on Gmail indicate that 2 probes
- // cover ~93% of loads from dictionaries.
- for (int i = 0; i < kInlinedProbes; i++) {
- // Compute the masked index: (hash + i + i * i) & mask.
- __ lwu(scratch2, FieldMemOperand(name, Name::kHashFieldOffset));
- if (i > 0) {
- // Add the probe offset (i + i * i) left shifted to avoid right shifting
- // the hash in a separate instruction. The value hash + i + i * i is right
- // shifted in the following and instruction.
- DCHECK(NameDictionary::GetProbeOffset(i) <
- 1 << (32 - Name::kHashFieldOffset));
- __ Daddu(scratch2, scratch2, Operand(
- NameDictionary::GetProbeOffset(i) << Name::kHashShift));
- }
- __ dsrl(scratch2, scratch2, Name::kHashShift);
- __ And(scratch2, scratch1, scratch2);
-
- // Scale the index by multiplying by the entry size.
- STATIC_ASSERT(NameDictionary::kEntrySize == 3);
- // scratch2 = scratch2 * 3.
- __ Dlsa(scratch2, scratch2, scratch2, 1);
-
- // Check if the key is identical to the name.
- __ Dlsa(scratch2, elements, scratch2, kPointerSizeLog2);
- __ ld(at, FieldMemOperand(scratch2, kElementsStartOffset));
- __ Branch(done, eq, name, Operand(at));
- }
-
- const int spill_mask =
- (ra.bit() | a6.bit() | a5.bit() | a4.bit() |
- a3.bit() | a2.bit() | a1.bit() | a0.bit() | v0.bit()) &
- ~(scratch1.bit() | scratch2.bit());
-
- __ MultiPush(spill_mask);
- if (name.is(a0)) {
- DCHECK(!elements.is(a1));
- __ Move(a1, name);
- __ Move(a0, elements);
- } else {
- __ Move(a0, elements);
- __ Move(a1, name);
- }
- NameDictionaryLookupStub stub(masm->isolate(), POSITIVE_LOOKUP);
- __ CallStub(&stub);
- __ mov(scratch2, a2);
- __ mov(at, v0);
- __ MultiPop(spill_mask);
-
- __ Branch(done, ne, at, Operand(zero_reg));
- __ Branch(miss, eq, at, Operand(zero_reg));
-}
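// Hedged sketch (not part of the patch): the removed lookup inlines a few
// quadratic probes before falling back to the full stub. The probe offset
// for attempt i is i + i*i, masked by capacity-1, and each dictionary
// entry spans kEntrySize == 3 pointers (the Dlsa shift-and-add computes
// index * 3). C++ model of the index math (names are stand-ins):
#include <cstdint>

const int kEntrySize = 3;  // NameDictionary::kEntrySize

uint32_t ProbeEntryIndex(uint32_t hash, uint32_t capacity, int i) {
  uint32_t mask = capacity - 1;                // capacity is a power of 2
  uint32_t index = (hash + i + i * i) & mask;  // masked probe index
  return index * kEntrySize;                   // element index of entry
}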
-
-
void NameDictionaryLookupStub::Generate(MacroAssembler* masm) {
// This stub overrides SometimesSetsUpAFrame() to return false. That means
// we cannot call anything that could cause a GC from this stub.
@@ -3269,233 +3137,6 @@ void CallICTrampolineStub::Generate(MacroAssembler* masm) {
__ Jump(stub.GetCode(), RelocInfo::CODE_TARGET);
}
-
-static void HandleArrayCases(MacroAssembler* masm, Register feedback,
- Register receiver_map, Register scratch1,
- Register scratch2, bool is_polymorphic,
- Label* miss) {
- // feedback initially contains the feedback array
- Label next_loop, prepare_next;
- Label start_polymorphic;
-
- Register cached_map = scratch1;
-
- __ ld(cached_map,
- FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(0)));
- __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
- __ Branch(&start_polymorphic, ne, receiver_map, Operand(cached_map));
- // found, now call handler.
- Register handler = feedback;
- __ ld(handler, FieldMemOperand(feedback, FixedArray::OffsetOfElementAt(1)));
- __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(t9);
-
- Register length = scratch2;
- __ bind(&start_polymorphic);
- __ ld(length, FieldMemOperand(feedback, FixedArray::kLengthOffset));
- if (!is_polymorphic) {
- // If the IC could be monomorphic we have to make sure we don't go past the
- // end of the feedback array.
- __ Branch(miss, eq, length, Operand(Smi::FromInt(2)));
- }
-
- Register too_far = length;
- Register pointer_reg = feedback;
-
- // +-----+------+------+-----+-----+ ... ----+
- // | map | len | wm0 | h0 | wm1 | hN |
- // +-----+------+------+-----+-----+ ... ----+
- // 0 1 2 len-1
- // ^ ^
- // | |
- // pointer_reg too_far
- // aka feedback scratch2
- // also need receiver_map
- // use cached_map (scratch1) to look in the weak map values.
- __ SmiScale(too_far, length, kPointerSizeLog2);
- __ Daddu(too_far, feedback, Operand(too_far));
- __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ Daddu(pointer_reg, feedback,
- Operand(FixedArray::OffsetOfElementAt(2) - kHeapObjectTag));
-
- __ bind(&next_loop);
- __ ld(cached_map, MemOperand(pointer_reg));
- __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
- __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map));
- __ ld(handler, MemOperand(pointer_reg, kPointerSize));
- __ Daddu(t9, handler, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(t9);
-
- __ bind(&prepare_next);
- __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 2));
- __ Branch(&next_loop, lt, pointer_reg, Operand(too_far));
-
- // We exhausted our array of map handler pairs.
- __ Branch(miss);
-}
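// Hedged sketch (not part of the patch): HandleArrayCases scans a feedback
// FixedArray of (weak map, handler) pairs, starting at element 2 after a
// possible fast hit on the pair at elements 0/1, and tail-calls the first
// handler whose cached map matches the receiver's map. C++ model (a
// cleared weak cell is modeled as a null map; types are stand-ins):
#include <vector>

struct Map {};
struct Handler {};
struct MapHandlerPair { const Map* cached_map; Handler* handler; };

Handler* FindHandler(const std::vector<MapHandlerPair>& feedback,
                     const Map* receiver_map) {
  for (const MapHandlerPair& p : feedback) {
    if (p.cached_map == receiver_map) return p.handler;  // found: call it
  }
  return nullptr;  // exhausted the map/handler pairs -> miss
}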
-
-
-static void HandleMonomorphicCase(MacroAssembler* masm, Register receiver,
- Register receiver_map, Register feedback,
- Register vector, Register slot,
- Register scratch, Label* compare_map,
- Label* load_smi_map, Label* try_array) {
- __ JumpIfSmi(receiver, load_smi_map);
- __ ld(receiver_map, FieldMemOperand(receiver, HeapObject::kMapOffset));
- __ bind(compare_map);
- Register cached_map = scratch;
- // Move the weak map into the weak_cell register.
- __ ld(cached_map, FieldMemOperand(feedback, WeakCell::kValueOffset));
- __ Branch(try_array, ne, cached_map, Operand(receiver_map));
- Register handler = feedback;
- __ SmiScale(handler, slot, kPointerSizeLog2);
- __ Daddu(handler, vector, Operand(handler));
- __ ld(handler,
- FieldMemOperand(handler, FixedArray::kHeaderSize + kPointerSize));
- __ Daddu(t9, handler, Code::kHeaderSize - kHeapObjectTag);
- __ Jump(t9);
-}
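// Hedged sketch (not part of the patch): the monomorphic fast path checks
// a weak cell holding the expected map; on a match the handler is loaded
// from the vector slot right after the weak cell and jumped to. C++ model
// (types are stand-ins; a cleared cell simply fails the comparison):
struct WeakCell { const void* value; };

const void* MonomorphicHandler(const WeakCell* feedback,
                               const void* receiver_map,
                               const void* const* vector, int slot) {
  if (feedback->value != receiver_map) return nullptr;  // -> try_array
  return vector[slot + 1];  // handler lives in the following slot
}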
-
-void KeyedStoreICTrampolineStub::Generate(MacroAssembler* masm) {
- __ EmitLoadTypeFeedbackVector(StoreWithVectorDescriptor::VectorRegister());
- KeyedStoreICStub stub(isolate(), state());
- stub.GenerateForTrampoline(masm);
-}
-
-void KeyedStoreICStub::Generate(MacroAssembler* masm) {
- GenerateImpl(masm, false);
-}
-
-void KeyedStoreICStub::GenerateForTrampoline(MacroAssembler* masm) {
- GenerateImpl(masm, true);
-}
-
-
-static void HandlePolymorphicStoreCase(MacroAssembler* masm, Register feedback,
- Register receiver_map, Register scratch1,
- Register scratch2, Label* miss) {
- // feedback initially contains the feedback array
- Label next_loop, prepare_next;
- Label start_polymorphic;
- Label transition_call;
-
- Register cached_map = scratch1;
- Register too_far = scratch2;
- Register pointer_reg = feedback;
-
- __ ld(too_far, FieldMemOperand(feedback, FixedArray::kLengthOffset));
-
- // +-----+------+------+-----+-----+-----+ ... ----+
- // | map | len | wm0 | wt0 | h0 | wm1 | hN |
- // +-----+------+------+-----+-----+ ----+ ... ----+
- // 0 1 2 len-1
- // ^ ^
- // | |
- // pointer_reg too_far
- // aka feedback scratch2
- // also need receiver_map
- // use cached_map (scratch1) to look in the weak map values.
- __ SmiScale(too_far, too_far, kPointerSizeLog2);
- __ Daddu(too_far, feedback, Operand(too_far));
- __ Daddu(too_far, too_far, Operand(FixedArray::kHeaderSize - kHeapObjectTag));
- __ Daddu(pointer_reg, feedback,
- Operand(FixedArray::OffsetOfElementAt(0) - kHeapObjectTag));
-
- __ bind(&next_loop);
- __ ld(cached_map, MemOperand(pointer_reg));
- __ ld(cached_map, FieldMemOperand(cached_map, WeakCell::kValueOffset));
- __ Branch(&prepare_next, ne, receiver_map, Operand(cached_map));
- // Is it a transitioning store?
- __ ld(too_far, MemOperand(pointer_reg, kPointerSize));
- __ LoadRoot(at, Heap::kUndefinedValueRootIndex);
- __ Branch(&transition_call, ne, too_far, Operand(at));
-
- __ ld(pointer_reg, MemOperand(pointer_reg, kPointerSize * 2));
- __ Daddu(t9, pointer_reg, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(t9);
-
- __ bind(&transition_call);
- __ ld(too_far, FieldMemOperand(too_far, WeakCell::kValueOffset));
- __ JumpIfSmi(too_far, miss);
-
- __ ld(receiver_map, MemOperand(pointer_reg, kPointerSize * 2));
- // Load the map into the correct register.
- DCHECK(feedback.is(StoreTransitionDescriptor::MapRegister()));
- __ Move(feedback, too_far);
- __ Daddu(t9, receiver_map, Operand(Code::kHeaderSize - kHeapObjectTag));
- __ Jump(t9);
-
- __ bind(&prepare_next);
- __ Daddu(pointer_reg, pointer_reg, Operand(kPointerSize * 3));
- __ Branch(&next_loop, lt, pointer_reg, Operand(too_far));
-
- // We exhausted our array of map handler pairs.
- __ Branch(miss);
-}
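// Hedged sketch (not part of the patch): the store-IC variant walks
// triplets of (weak map, transition map or undefined, handler), stepping
// by 3 pointers per iteration. A non-undefined transition marks a
// transitioning store, whose new map is handed to the handler. C++ model
// (nullptr models "undefined"; types are stand-ins):
struct StoreMap {};
struct StoreHandler {};
struct StoreEntry {
  const StoreMap* cached_map;
  const StoreMap* transition;  // nullptr: plain (non-transitioning) store
  StoreHandler* handler;
};

StoreHandler* FindStoreHandler(const StoreEntry* entries, int count,
                               const StoreMap* receiver_map,
                               const StoreMap** transition_out) {
  for (int i = 0; i < count; i++) {
    if (entries[i].cached_map != receiver_map) continue;
    *transition_out = entries[i].transition;  // may be nullptr
    return entries[i].handler;
  }
  return nullptr;  // miss
}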
-
-void KeyedStoreICStub::GenerateImpl(MacroAssembler* masm, bool in_frame) {
- Register receiver = StoreWithVectorDescriptor::ReceiverRegister(); // a1
- Register key = StoreWithVectorDescriptor::NameRegister(); // a2
- Register vector = StoreWithVectorDescriptor::VectorRegister(); // a3
- Register slot = StoreWithVectorDescriptor::SlotRegister(); // a4
- DCHECK(StoreWithVectorDescriptor::ValueRegister().is(a0)); // a0
- Register feedback = a5;
- Register receiver_map = a6;
- Register scratch1 = a7;
-
- __ SmiScale(scratch1, slot, kPointerSizeLog2);
- __ Daddu(feedback, vector, Operand(scratch1));
- __ ld(feedback, FieldMemOperand(feedback, FixedArray::kHeaderSize));
-
- // Try to quickly handle the monomorphic case without knowing for sure
- // if we have a weak cell in feedback. We do know it's safe to look
- // at WeakCell::kValueOffset.
- Label try_array, load_smi_map, compare_map;
- Label not_array, miss;
- HandleMonomorphicCase(masm, receiver, receiver_map, feedback, vector, slot,
- scratch1, &compare_map, &load_smi_map, &try_array);
-
- __ bind(&try_array);
- // Is it a fixed array?
- __ ld(scratch1, FieldMemOperand(feedback, HeapObject::kMapOffset));
- __ Branch(&not_array, ne, scratch1, Heap::kFixedArrayMapRootIndex);
-
- // We have a polymorphic element handler.
- Label try_poly_name;
-
- Register scratch2 = t0;
-
- HandlePolymorphicStoreCase(masm, feedback, receiver_map, scratch1, scratch2,
- &miss);
-
- __ bind(&not_array);
- // Is it generic?
- __ Branch(&try_poly_name, ne, feedback, Heap::kmegamorphic_symbolRootIndex);
- Handle<Code> megamorphic_stub =
- KeyedStoreIC::ChooseMegamorphicStub(masm->isolate(), GetExtraICState());
- __ Jump(megamorphic_stub, RelocInfo::CODE_TARGET);
-
- __ bind(&try_poly_name);
- // We might have a name in feedback, and a fixed array in the next slot.
- __ Branch(&miss, ne, key, Operand(feedback));
- // If the name comparison succeeded, we know we have a fixed array with
- // at least one map/handler pair.
- __ SmiScale(scratch1, slot, kPointerSizeLog2);
- __ Daddu(feedback, vector, Operand(scratch1));
- __ ld(feedback,
- FieldMemOperand(feedback, FixedArray::kHeaderSize + kPointerSize));
- HandleArrayCases(masm, feedback, receiver_map, scratch1, scratch2, false,
- &miss);
-
- __ bind(&miss);
- KeyedStoreIC::GenerateMiss(masm);
-
- __ bind(&load_smi_map);
- __ Branch(USE_DELAY_SLOT, &compare_map);
- __ LoadRoot(receiver_map, Heap::kHeapNumberMapRootIndex); // In delay slot.
-}
-
-
void ProfileEntryHookStub::MaybeCallEntryHook(MacroAssembler* masm) {
if (masm->isolate()->function_entry_hook() != NULL) {
ProfileEntryHookStub stub(masm->isolate());
@@ -3850,128 +3491,6 @@ void InternalArrayConstructorStub::Generate(MacroAssembler* masm) {
GenerateCase(masm, FAST_ELEMENTS);
}
-
-void FastNewObjectStub::Generate(MacroAssembler* masm) {
- // ----------- S t a t e -------------
- // -- a1 : target
- // -- a3 : new target
- // -- cp : context
- // -- ra : return address
- // -----------------------------------
- __ AssertFunction(a1);
- __ AssertReceiver(a3);
-
- // Verify that the new target is a JSFunction.
- Label new_object;
- __ GetObjectType(a3, a2, a2);
- __ Branch(&new_object, ne, a2, Operand(JS_FUNCTION_TYPE));
-
- // Load the initial map and verify that it's in fact a map.
- __ ld(a2, FieldMemOperand(a3, JSFunction::kPrototypeOrInitialMapOffset));
- __ JumpIfSmi(a2, &new_object);
- __ GetObjectType(a2, a0, a0);
- __ Branch(&new_object, ne, a0, Operand(MAP_TYPE));
-
- // Fall back to runtime if the target differs from the new target's
- // initial map constructor.
- __ ld(a0, FieldMemOperand(a2, Map::kConstructorOrBackPointerOffset));
- __ Branch(&new_object, ne, a0, Operand(a1));
-
- // Allocate the JSObject on the heap.
- Label allocate, done_allocate;
- __ lbu(a4, FieldMemOperand(a2, Map::kInstanceSizeOffset));
- __ Allocate(a4, v0, a5, a0, &allocate, SIZE_IN_WORDS);
- __ bind(&done_allocate);
-
- // Initialize the JSObject fields.
- __ sd(a2, FieldMemOperand(v0, JSObject::kMapOffset));
- __ LoadRoot(a3, Heap::kEmptyFixedArrayRootIndex);
- __ sd(a3, FieldMemOperand(v0, JSObject::kPropertiesOffset));
- __ sd(a3, FieldMemOperand(v0, JSObject::kElementsOffset));
- STATIC_ASSERT(JSObject::kHeaderSize == 3 * kPointerSize);
- __ Daddu(a1, v0, Operand(JSObject::kHeaderSize - kHeapObjectTag));
-
- // ----------- S t a t e -------------
- // -- v0 : result (tagged)
- // -- a1 : result fields (untagged)
- // -- a5 : result end (untagged)
- // -- a2 : initial map
- // -- cp : context
- // -- ra : return address
- // -----------------------------------
-
- // Perform in-object slack tracking if requested.
- Label slack_tracking;
- STATIC_ASSERT(Map::kNoSlackTracking == 0);
- __ lwu(a3, FieldMemOperand(a2, Map::kBitField3Offset));
- __ And(at, a3, Operand(Map::ConstructionCounter::kMask));
- __ Branch(USE_DELAY_SLOT, &slack_tracking, ne, at, Operand(zero_reg));
- __ LoadRoot(a0, Heap::kUndefinedValueRootIndex); // In delay slot.
- {
- // Initialize all in-object fields with undefined.
- __ InitializeFieldsWithFiller(a1, a5, a0);
- __ Ret();
- }
- __ bind(&slack_tracking);
- {
- // Decrease generous allocation count.
- STATIC_ASSERT(Map::ConstructionCounter::kNext == 32);
- __ Subu(a3, a3, Operand(1 << Map::ConstructionCounter::kShift));
- __ sw(a3, FieldMemOperand(a2, Map::kBitField3Offset));
-
- // Initialize the in-object fields with undefined.
- __ lbu(a4, FieldMemOperand(a2, Map::kUnusedPropertyFieldsOffset));
- __ dsll(a4, a4, kPointerSizeLog2);
- __ Dsubu(a4, a5, a4);
- __ InitializeFieldsWithFiller(a1, a4, a0);
-
- // Initialize the remaining (reserved) fields with one pointer filler map.
- __ LoadRoot(a0, Heap::kOnePointerFillerMapRootIndex);
- __ InitializeFieldsWithFiller(a1, a5, a0);
-
- // Check if we can finalize the instance size.
- Label finalize;
- STATIC_ASSERT(Map::kSlackTrackingCounterEnd == 1);
- __ And(a3, a3, Operand(Map::ConstructionCounter::kMask));
- __ Branch(&finalize, eq, a3, Operand(zero_reg));
- __ Ret();
-
- // Finalize the instance size.
- __ bind(&finalize);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- __ Push(v0, a2);
- __ CallRuntime(Runtime::kFinalizeInstanceSize);
- __ Pop(v0);
- }
- __ Ret();
- }
-
- // Fall back to %AllocateInNewSpace.
- __ bind(&allocate);
- {
- FrameScope scope(masm, StackFrame::INTERNAL);
- STATIC_ASSERT(kSmiTag == 0);
- STATIC_ASSERT(kSmiTagSize == 1);
- __ dsll(a4, a4, kPointerSizeLog2 + kSmiShiftSize + kSmiTagSize);
- __ SmiTag(a4);
- __ Push(a2, a4);
- __ CallRuntime(Runtime::kAllocateInNewSpace);
- __ Pop(a2);
- }
- __ lbu(a5, FieldMemOperand(a2, Map::kInstanceSizeOffset));
- __ Dlsa(a5, v0, a5, kPointerSizeLog2);
- STATIC_ASSERT(kHeapObjectTag == 1);
- __ Dsubu(a5, a5, Operand(kHeapObjectTag));
- __ jmp(&done_allocate);
-
- // Fall back to %NewObject.
- __ bind(&new_object);
- __ Push(a1, a3);
- __ TailCallRuntime(Runtime::kNewObject);
-}
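// Hedged sketch (not part of the patch): the removed FastNewObjectStub
// performs in-object slack tracking. While the map's construction counter
// is nonzero it is decremented, used fields are filled with undefined and
// the reserved tail with a one-pointer filler so the instance can later
// shrink; when the counter runs out the size is finalized via
// Runtime::kFinalizeInstanceSize. C++ model (field names are stand-ins):
void InitializeFields(void** fields, int size_in_words,
                      int* construction_counter, int unused_fields,
                      void* undefined, void* one_pointer_filler) {
  if (*construction_counter == 0) {  // kNoSlackTracking: plain fill
    for (int i = 0; i < size_in_words; i++) fields[i] = undefined;
    return;
  }
  (*construction_counter)--;  // decrease generous allocation count
  int used = size_in_words - unused_fields;
  for (int i = 0; i < used; i++) fields[i] = undefined;
  for (int i = used; i < size_in_words; i++) fields[i] = one_pointer_filler;
  // When the counter reaches its end the instance size is finalized
  // (the stub calls Runtime::kFinalizeInstanceSize).
}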
-
-
void FastNewRestParameterStub::Generate(MacroAssembler* masm) {
// ----------- S t a t e -------------
// -- a1 : function