diff options
author | Michaël Zasso <targos@protonmail.com> | 2016-09-06 22:49:51 +0200 |
---|---|---|
committer | Michaël Zasso <targos@protonmail.com> | 2016-09-22 09:51:19 +0200 |
commit | ec02b811a8a5c999bab4de312be2d732b7d9d50b (patch) | |
tree | ca3068017254f238cf413a451c57a803572983a4 /deps/v8/src/ic/x87 | |
parent | d2eb7ce0105369a9cad82787cb33a665e9bd00ad (diff) | |
download | node-new-ec02b811a8a5c999bab4de312be2d732b7d9d50b.tar.gz |
deps: update V8 to 5.4.500.27
Pick up latest commit from the 5.4-lkgr branch.
deps: edit V8 gitignore to allow trace event copy
deps: update V8 trace event to 315bf1e2d45be7d53346c31cfcc37424a32c30c8
deps: edit V8 gitignore to allow gtest_prod.h copy
deps: update V8 gtest to 6f8a66431cb592dad629028a50b3dd418a408c87
PR-URL: https://github.com/nodejs/node/pull/8317
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Reviewed-By: Ali Ijaz Sheikh <ofrobots@google.com>
Diffstat (limited to 'deps/v8/src/ic/x87')
-rw-r--r-- | deps/v8/src/ic/x87/access-compiler-x87.cc | 8 | ||||
-rw-r--r-- | deps/v8/src/ic/x87/handler-compiler-x87.cc | 168 | ||||
-rw-r--r-- | deps/v8/src/ic/x87/ic-x87.cc | 44 | ||||
-rw-r--r-- | deps/v8/src/ic/x87/stub-cache-x87.cc | 53 |
4 files changed, 75 insertions, 198 deletions
diff --git a/deps/v8/src/ic/x87/access-compiler-x87.cc b/deps/v8/src/ic/x87/access-compiler-x87.cc index 2c1b942756..e528de65ba 100644 --- a/deps/v8/src/ic/x87/access-compiler-x87.cc +++ b/deps/v8/src/ic/x87/access-compiler-x87.cc @@ -18,19 +18,19 @@ void PropertyAccessCompiler::GenerateTailCall(MacroAssembler* masm, Register* PropertyAccessCompiler::load_calling_convention() { - // receiver, name, scratch1, scratch2, scratch3, scratch4. + // receiver, name, scratch1, scratch2, scratch3. Register receiver = LoadDescriptor::ReceiverRegister(); Register name = LoadDescriptor::NameRegister(); - static Register registers[] = {receiver, name, ebx, eax, edi, no_reg}; + static Register registers[] = {receiver, name, ebx, eax, edi}; return registers; } Register* PropertyAccessCompiler::store_calling_convention() { - // receiver, name, scratch1, scratch2, scratch3. + // receiver, name, scratch1, scratch2. Register receiver = StoreDescriptor::ReceiverRegister(); Register name = StoreDescriptor::NameRegister(); - static Register registers[] = {receiver, name, ebx, edi, no_reg}; + static Register registers[] = {receiver, name, ebx, edi}; return registers; } diff --git a/deps/v8/src/ic/x87/handler-compiler-x87.cc b/deps/v8/src/ic/x87/handler-compiler-x87.cc index 281faba3c7..4bf0af2569 100644 --- a/deps/v8/src/ic/x87/handler-compiler-x87.cc +++ b/deps/v8/src/ic/x87/handler-compiler-x87.cc @@ -199,7 +199,7 @@ void PropertyHandlerCompiler::GenerateApiAccessorCall( Handle<CallHandlerInfo> api_call_info = optimization.api_call_info(); bool call_data_undefined = false; // Put call data in place. 
- if (api_call_info->data()->IsUndefined()) { + if (api_call_info->data()->IsUndefined(isolate)) { call_data_undefined = true; __ mov(data, Immediate(isolate->factory()->undefined_value())); } else { @@ -236,13 +236,14 @@ void PropertyHandlerCompiler::GenerateApiAccessorCall( void PropertyHandlerCompiler::GenerateCheckPropertyCell( MacroAssembler* masm, Handle<JSGlobalObject> global, Handle<Name> name, Register scratch, Label* miss) { - Handle<PropertyCell> cell = JSGlobalObject::EnsurePropertyCell(global, name); - DCHECK(cell->value()->IsTheHole()); - Factory* factory = masm->isolate()->factory(); - Handle<WeakCell> weak_cell = factory->NewWeakCell(cell); + Handle<PropertyCell> cell = JSGlobalObject::EnsureEmptyPropertyCell( + global, name, PropertyCellType::kInvalidated); + Isolate* isolate = masm->isolate(); + DCHECK(cell->value()->IsTheHole(isolate)); + Handle<WeakCell> weak_cell = isolate->factory()->NewWeakCell(cell); __ LoadWeakValue(scratch, weak_cell, miss); __ cmp(FieldOperand(scratch, PropertyCell::kValueOffset), - Immediate(factory->the_hole_value())); + Immediate(isolate->factory()->the_hole_value())); __ j(not_equal, miss); } @@ -320,8 +321,8 @@ static void StoreIC_PushArgs(MacroAssembler* masm) { Register receiver = StoreDescriptor::ReceiverRegister(); Register name = StoreDescriptor::NameRegister(); Register value = StoreDescriptor::ValueRegister(); - Register slot = VectorStoreICDescriptor::SlotRegister(); - Register vector = VectorStoreICDescriptor::VectorRegister(); + Register slot = StoreWithVectorDescriptor::SlotRegister(); + Register vector = StoreWithVectorDescriptor::VectorRegister(); __ xchg(receiver, Operand(esp, 0)); __ push(name); @@ -332,15 +333,6 @@ static void StoreIC_PushArgs(MacroAssembler* masm) { } -void NamedStoreHandlerCompiler::GenerateSlow(MacroAssembler* masm) { - // Return address is on the stack. - StoreIC_PushArgs(masm); - - // Do tail-call to runtime routine. 
- __ TailCallRuntime(Runtime::kStoreIC_Slow); -} - - void ElementHandlerCompiler::GenerateStoreSlow(MacroAssembler* masm) { // Return address is on the stack. StoreIC_PushArgs(masm); @@ -439,28 +431,25 @@ Register PropertyHandlerCompiler::CheckPrototypes( DCHECK(!scratch2.is(object_reg) && !scratch2.is(holder_reg) && !scratch2.is(scratch1)); - if (FLAG_eliminate_prototype_chain_checks) { - Handle<Cell> validity_cell = - Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate()); - if (!validity_cell.is_null()) { - DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid), - validity_cell->value()); - // Operand::ForCell(...) points to the cell's payload! - __ cmp(Operand::ForCell(validity_cell), - Immediate(Smi::FromInt(Map::kPrototypeChainValid))); - __ j(not_equal, miss); - } + Handle<Cell> validity_cell = + Map::GetOrCreatePrototypeChainValidityCell(receiver_map, isolate()); + if (!validity_cell.is_null()) { + DCHECK_EQ(Smi::FromInt(Map::kPrototypeChainValid), validity_cell->value()); + // Operand::ForCell(...) points to the cell's payload! + __ cmp(Operand::ForCell(validity_cell), + Immediate(Smi::FromInt(Map::kPrototypeChainValid))); + __ j(not_equal, miss); + } - // The prototype chain of primitives (and their JSValue wrappers) depends - // on the native context, which can't be guarded by validity cells. - // |object_reg| holds the native context specific prototype in this case; - // we need to check its map. - if (check == CHECK_ALL_MAPS) { - __ mov(scratch1, FieldOperand(object_reg, HeapObject::kMapOffset)); - Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map); - __ CmpWeakValue(scratch1, cell, scratch2); - __ j(not_equal, miss); - } + // The prototype chain of primitives (and their JSValue wrappers) depends + // on the native context, which can't be guarded by validity cells. + // |object_reg| holds the native context specific prototype in this case; + // we need to check its map. 
+ if (check == CHECK_ALL_MAPS) { + __ mov(scratch1, FieldOperand(object_reg, HeapObject::kMapOffset)); + Handle<WeakCell> cell = Map::WeakCellForMap(receiver_map); + __ CmpWeakValue(scratch1, cell, scratch2); + __ j(not_equal, miss); } // Keep track of the current object in register reg. @@ -496,8 +485,10 @@ Register PropertyHandlerCompiler::CheckPrototypes( !current_map->is_access_check_needed()); prototype = handle(JSObject::cast(current_map->prototype())); - if (current_map->is_dictionary_map() && - !current_map->IsJSGlobalObjectMap()) { + if (current_map->IsJSGlobalObjectMap()) { + GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current), + name, scratch2, miss); + } else if (current_map->is_dictionary_map()) { DCHECK(!current_map->IsJSGlobalProxyMap()); // Proxy maps are fast. if (!name->IsUniqueName()) { DCHECK(name->IsString()); @@ -507,34 +498,12 @@ Register PropertyHandlerCompiler::CheckPrototypes( current->property_dictionary()->FindEntry(name) == NameDictionary::kNotFound); - if (FLAG_eliminate_prototype_chain_checks && depth > 1) { + if (depth > 1) { // TODO(jkummerow): Cache and re-use weak cell. 
__ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss); } GenerateDictionaryNegativeLookup(masm(), miss, reg, name, scratch1, scratch2); - - if (!FLAG_eliminate_prototype_chain_checks) { - __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); - __ mov(holder_reg, FieldOperand(scratch1, Map::kPrototypeOffset)); - } - } else { - Register map_reg = scratch1; - if (!FLAG_eliminate_prototype_chain_checks) { - __ mov(map_reg, FieldOperand(reg, HeapObject::kMapOffset)); - } - if (current_map->IsJSGlobalObjectMap()) { - GenerateCheckPropertyCell(masm(), Handle<JSGlobalObject>::cast(current), - name, scratch2, miss); - } else if (!FLAG_eliminate_prototype_chain_checks && - (depth != 1 || check == CHECK_ALL_MAPS)) { - Handle<WeakCell> cell = Map::WeakCellForMap(current_map); - __ CmpWeakValue(map_reg, cell, scratch2); - __ j(not_equal, miss); - } - if (!FLAG_eliminate_prototype_chain_checks) { - __ mov(holder_reg, FieldOperand(map_reg, Map::kPrototypeOffset)); - } } reg = holder_reg; // From now on the object will be in holder_reg. @@ -548,17 +517,8 @@ Register PropertyHandlerCompiler::CheckPrototypes( // Log the check depth. LOG(isolate(), IntEvent("check-maps-depth", depth + 1)); - if (!FLAG_eliminate_prototype_chain_checks && - (depth != 0 || check == CHECK_ALL_MAPS)) { - // Check the holder map. 
- __ mov(scratch1, FieldOperand(reg, HeapObject::kMapOffset)); - Handle<WeakCell> cell = Map::WeakCellForMap(current_map); - __ CmpWeakValue(scratch1, cell, scratch2); - __ j(not_equal, miss); - } - bool return_holder = return_what == RETURN_HOLDER; - if (FLAG_eliminate_prototype_chain_checks && return_holder && depth != 0) { + if (return_holder && depth != 0) { __ LoadWeakValue(reg, isolate()->factory()->NewWeakCell(current), miss); } @@ -594,58 +554,6 @@ void NamedStoreHandlerCompiler::FrontendFooter(Handle<Name> name, Label* miss) { } -void NamedLoadHandlerCompiler::GenerateLoadCallback( - Register reg, Handle<AccessorInfo> callback) { - DCHECK(!AreAliased(scratch2(), scratch3(), receiver())); - DCHECK(!AreAliased(scratch2(), scratch3(), reg)); - - // Insert additional parameters into the stack frame above return address. - __ pop(scratch3()); // Get return address to place it below. - - // Build v8::PropertyCallbackInfo::args_ array on the stack and push property - // name below the exit frame to make GC aware of them. - STATIC_ASSERT(PropertyCallbackArguments::kShouldThrowOnErrorIndex == 0); - STATIC_ASSERT(PropertyCallbackArguments::kHolderIndex == 1); - STATIC_ASSERT(PropertyCallbackArguments::kIsolateIndex == 2); - STATIC_ASSERT(PropertyCallbackArguments::kReturnValueDefaultValueIndex == 3); - STATIC_ASSERT(PropertyCallbackArguments::kReturnValueOffset == 4); - STATIC_ASSERT(PropertyCallbackArguments::kDataIndex == 5); - STATIC_ASSERT(PropertyCallbackArguments::kThisIndex == 6); - STATIC_ASSERT(PropertyCallbackArguments::kArgsLength == 7); - - __ push(receiver()); // receiver - // Push data from AccessorInfo. - Handle<Object> data(callback->data(), isolate()); - if (data->IsUndefined() || data->IsSmi()) { - __ push(Immediate(data)); - } else { - Handle<WeakCell> cell = - isolate()->factory()->NewWeakCell(Handle<HeapObject>::cast(data)); - // The callback is alive if this instruction is executed, - // so the weak cell is not cleared and points to data. 
- __ GetWeakValue(scratch2(), cell); - __ push(scratch2()); - } - __ push(Immediate(isolate()->factory()->undefined_value())); // ReturnValue - // ReturnValue default value - __ push(Immediate(isolate()->factory()->undefined_value())); - __ push(Immediate(reinterpret_cast<int>(isolate()))); - __ push(reg); // holder - __ push(Immediate(Smi::FromInt(0))); // should_throw_on_error -> false - - __ push(name()); // name - __ push(scratch3()); // Restore return address. - - // Abi for CallApiGetter - Register getter_address = ApiGetterDescriptor::function_address(); - Address function_address = v8::ToCData<Address>(callback->getter()); - __ mov(getter_address, Immediate(function_address)); - - CallApiGetterStub stub(isolate()); - __ TailCallStub(&stub); -} - - void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) { // Return the constant value. __ LoadObject(eax, value); @@ -656,7 +564,7 @@ void NamedLoadHandlerCompiler::GenerateLoadConstant(Handle<Object> value) { void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup( LookupIterator* it, Register holder_reg) { DCHECK(holder()->HasNamedInterceptor()); - DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined()); + DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate())); // Compile the interceptor call, followed by inline code to load the // property from further up the prototype chain if the call fails. @@ -723,7 +631,7 @@ void NamedLoadHandlerCompiler::GenerateLoadInterceptorWithFollowup( void NamedLoadHandlerCompiler::GenerateLoadInterceptor(Register holder_reg) { DCHECK(holder()->HasNamedInterceptor()); - DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined()); + DCHECK(!holder()->GetNamedInterceptor()->getter()->IsUndefined(isolate())); // Call the runtime system to load the interceptor. 
__ pop(scratch2()); // save old return address PushInterceptorArguments(masm(), receiver(), holder_reg, this->name(), @@ -744,7 +652,7 @@ Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback( __ push(holder_reg); // If the callback cannot leak, then push the callback directly, // otherwise wrap it in a weak cell. - if (callback->data()->IsUndefined() || callback->data()->IsSmi()) { + if (callback->data()->IsUndefined(isolate()) || callback->data()->IsSmi()) { __ Push(callback); } else { Handle<WeakCell> cell = isolate()->factory()->NewWeakCell(callback); @@ -759,7 +667,7 @@ Handle<Code> NamedStoreHandlerCompiler::CompileStoreCallback( __ TailCallRuntime(Runtime::kStoreCallbackProperty); // Return the generated code. - return GetCode(kind(), Code::FAST, name); + return GetCode(kind(), name); } @@ -801,7 +709,7 @@ Handle<Code> NamedLoadHandlerCompiler::CompileLoadGlobal( FrontendFooter(name, &miss); // Return the generated code. - return GetCode(kind(), Code::NORMAL, name); + return GetCode(kind(), name); } diff --git a/deps/v8/src/ic/x87/ic-x87.cc b/deps/v8/src/ic/x87/ic-x87.cc index b51045bee8..76933f01bb 100644 --- a/deps/v8/src/ic/x87/ic-x87.cc +++ b/deps/v8/src/ic/x87/ic-x87.cc @@ -336,10 +336,8 @@ void KeyedLoadIC::GenerateMegamorphic(MacroAssembler* masm) { __ push(Immediate(Smi::FromInt(slot))); __ push(Immediate(dummy_vector)); - Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( - Code::ComputeHandlerFlags(Code::LOAD_IC)); - masm->isolate()->stub_cache()->GenerateProbe(masm, Code::KEYED_LOAD_IC, flags, - receiver, key, ebx, edi); + masm->isolate()->load_stub_cache()->GenerateProbe(masm, receiver, key, ebx, + edi); __ pop(LoadWithVectorDescriptor::VectorRegister()); __ pop(LoadDescriptor::SlotRegister()); @@ -519,10 +517,10 @@ void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm, __ JumpIfSmi(receiver, &slow); // Get the map from the receiver. 
__ mov(edi, FieldOperand(receiver, HeapObject::kMapOffset)); - // Check that the receiver does not require access checks and is not observed. - // The generic stub does not perform map checks or handle observed objects. + // Check that the receiver does not require access checks. + // The generic stub does not perform map checks. __ test_b(FieldOperand(edi, Map::kBitFieldOffset), - Immediate(1 << Map::kIsAccessCheckNeeded | 1 << Map::kIsObserved)); + Immediate(1 << Map::kIsAccessCheckNeeded)); __ j(not_zero, &slow); // Check that the key is a smi. __ JumpIfNotSmi(key, &maybe_name_key); @@ -563,13 +561,11 @@ void KeyedStoreIC::GenerateMegamorphic(MacroAssembler* masm, __ push(Immediate(Smi::FromInt(slot))); __ push(Immediate(dummy_vector)); - Code::Flags flags = Code::RemoveTypeAndHolderFromFlags( - Code::ComputeHandlerFlags(Code::STORE_IC)); - masm->isolate()->stub_cache()->GenerateProbe(masm, Code::STORE_IC, flags, - receiver, key, edi, no_reg); + masm->isolate()->store_stub_cache()->GenerateProbe(masm, receiver, key, edi, + no_reg); - __ pop(VectorStoreICDescriptor::VectorRegister()); - __ pop(VectorStoreICDescriptor::SlotRegister()); + __ pop(StoreWithVectorDescriptor::VectorRegister()); + __ pop(StoreWithVectorDescriptor::SlotRegister()); // Cache miss. __ jmp(&miss); @@ -708,21 +704,12 @@ void KeyedLoadIC::GenerateRuntimeGetProperty(MacroAssembler* masm) { __ TailCallRuntime(Runtime::kKeyedGetProperty); } - -void StoreIC::GenerateMegamorphic(MacroAssembler* masm) { - // This shouldn't be called. - // TODO(mvstanton): remove this method. 
- __ int3(); - return; -} - - static void StoreIC_PushArgs(MacroAssembler* masm) { Register receiver = StoreDescriptor::ReceiverRegister(); Register name = StoreDescriptor::NameRegister(); Register value = StoreDescriptor::ValueRegister(); - Register slot = VectorStoreICDescriptor::SlotRegister(); - Register vector = VectorStoreICDescriptor::VectorRegister(); + Register slot = StoreWithVectorDescriptor::SlotRegister(); + Register vector = StoreWithVectorDescriptor::VectorRegister(); __ xchg(receiver, Operand(esp, 0)); __ push(name); @@ -747,8 +734,8 @@ void StoreIC::GenerateNormal(MacroAssembler* masm) { Register receiver = StoreDescriptor::ReceiverRegister(); Register name = StoreDescriptor::NameRegister(); Register value = StoreDescriptor::ValueRegister(); - Register vector = VectorStoreICDescriptor::VectorRegister(); - Register slot = VectorStoreICDescriptor::SlotRegister(); + Register vector = StoreWithVectorDescriptor::VectorRegister(); + Register slot = StoreWithVectorDescriptor::SlotRegister(); // A lot of registers are needed for storing to slow case // objects. Push and restore receiver but rely on @@ -836,8 +823,9 @@ void PatchInlinedSmiCode(Isolate* isolate, Address address, // condition code uses at the patched jump. uint8_t delta = *reinterpret_cast<uint8_t*>(delta_address); if (FLAG_trace_ic) { - PrintF("[ patching ic at %p, test=%p, delta=%d\n", address, - test_instruction_address, delta); + PrintF("[ patching ic at %p, test=%p, delta=%d\n", + static_cast<void*>(address), + static_cast<void*>(test_instruction_address), delta); } // Patch with a short conditional jump. 
Enabling means switching from a short jump-if-carry/not-carry to jump-if-zero/not-zero, whereas disabling is the reverse operation of that. diff --git a/deps/v8/src/ic/x87/stub-cache-x87.cc b/deps/v8/src/ic/x87/stub-cache-x87.cc index dfc0ef6c66..e0656f7cff 100644 --- a/deps/v8/src/ic/x87/stub-cache-x87.cc +++ b/deps/v8/src/ic/x87/stub-cache-x87.cc @@ -14,19 +14,19 @@ namespace internal { #define __ ACCESS_MASM(masm) - -static void ProbeTable(Isolate* isolate, MacroAssembler* masm, - Code::Kind ic_kind, Code::Flags flags, +static void ProbeTable(StubCache* stub_cache, MacroAssembler* masm, StubCache::Table table, Register name, Register receiver, - // Number of the cache entry pointer-size scaled. + // The offset is scaled by 4, based on + // kCacheIndexShift, which is two bits Register offset, Register extra) { - ExternalReference key_offset(isolate->stub_cache()->key_reference(table)); - ExternalReference value_offset(isolate->stub_cache()->value_reference(table)); - ExternalReference map_offset(isolate->stub_cache()->map_reference(table)); + ExternalReference key_offset(stub_cache->key_reference(table)); + ExternalReference value_offset(stub_cache->value_reference(table)); + ExternalReference map_offset(stub_cache->map_reference(table)); ExternalReference virtual_register = ExternalReference::virtual_handler_register(masm->isolate()); Label miss; + Code::Kind ic_kind = stub_cache->ic_kind(); bool is_vector_store = IC::ICUseVector(ic_kind) && (ic_kind == Code::STORE_IC || ic_kind == Code::KEYED_STORE_IC); @@ -47,12 +47,6 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm, __ cmp(offset, FieldOperand(receiver, HeapObject::kMapOffset)); __ j(not_equal, &miss); - // Check that the flags match what we're looking for. 
- __ mov(offset, FieldOperand(extra, Code::kFlagsOffset)); - __ and_(offset, ~Code::kFlagsNotUsedInLookup); - __ cmp(offset, flags); - __ j(not_equal, &miss); - #ifdef DEBUG if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { __ jmp(&miss); @@ -65,8 +59,8 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm, // probe, and need to be dropped before calling the handler. if (is_vector_store) { // The overlap here is rather embarrassing. One does what one must. - Register vector = VectorStoreICDescriptor::VectorRegister(); - DCHECK(extra.is(VectorStoreICDescriptor::SlotRegister())); + Register vector = StoreWithVectorDescriptor::VectorRegister(); + DCHECK(extra.is(StoreWithVectorDescriptor::SlotRegister())); __ add(extra, Immediate(Code::kHeaderSize - kHeapObjectTag)); __ pop(vector); __ mov(Operand::StaticVariable(virtual_register), extra); @@ -102,12 +96,6 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm, // Get the code entry from the cache. __ mov(offset, Operand::StaticArray(offset, times_1, value_offset)); - // Check that the flags match what we're looking for. - __ mov(offset, FieldOperand(offset, Code::kFlagsOffset)); - __ and_(offset, ~Code::kFlagsNotUsedInLookup); - __ cmp(offset, flags); - __ j(not_equal, &miss); - #ifdef DEBUG if (FLAG_test_secondary_stub_cache && table == StubCache::kPrimary) { __ jmp(&miss); @@ -124,8 +112,8 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm, if (is_vector_store) { // The vector and slot were pushed onto the stack before starting the // probe, and need to be dropped before calling the handler. 
- Register vector = VectorStoreICDescriptor::VectorRegister(); - DCHECK(offset.is(VectorStoreICDescriptor::SlotRegister())); + Register vector = StoreWithVectorDescriptor::VectorRegister(); + DCHECK(offset.is(StoreWithVectorDescriptor::SlotRegister())); __ add(offset, Immediate(Code::kHeaderSize - kHeapObjectTag)); __ mov(Operand::StaticVariable(virtual_register), offset); __ pop(vector); @@ -142,9 +130,7 @@ static void ProbeTable(Isolate* isolate, MacroAssembler* masm, } } - -void StubCache::GenerateProbe(MacroAssembler* masm, Code::Kind ic_kind, - Code::Flags flags, Register receiver, +void StubCache::GenerateProbe(MacroAssembler* masm, Register receiver, Register name, Register scratch, Register extra, Register extra2, Register extra3) { Label miss; @@ -153,9 +139,6 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Code::Kind ic_kind, // being 12. DCHECK(sizeof(Entry) == 12); - // Assert the flags do not name a specific type. - DCHECK(Code::ExtractTypeFromFlags(flags) == 0); - // Assert that there are no register conflicts. DCHECK(!scratch.is(receiver)); DCHECK(!scratch.is(name)); @@ -180,7 +163,7 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Code::Kind ic_kind, // Get the map of the receiver and compute the hash. __ mov(offset, FieldOperand(name, Name::kHashFieldOffset)); __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset)); - __ xor_(offset, flags); + __ xor_(offset, kPrimaryMagic); // We mask out the last two bits because they are not part of the hash and // they are always 01 for maps. Also in the two 'and' instructions below. __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift); @@ -189,21 +172,19 @@ void StubCache::GenerateProbe(MacroAssembler* masm, Code::Kind ic_kind, DCHECK(kCacheIndexShift == kPointerSizeLog2); // Probe the primary table. 
- ProbeTable(isolate(), masm, ic_kind, flags, kPrimary, name, receiver, offset, - extra); + ProbeTable(this, masm, kPrimary, name, receiver, offset, extra); // Primary miss: Compute hash for secondary probe. __ mov(offset, FieldOperand(name, Name::kHashFieldOffset)); __ add(offset, FieldOperand(receiver, HeapObject::kMapOffset)); - __ xor_(offset, flags); + __ xor_(offset, kPrimaryMagic); __ and_(offset, (kPrimaryTableSize - 1) << kCacheIndexShift); __ sub(offset, name); - __ add(offset, Immediate(flags)); + __ add(offset, Immediate(kSecondaryMagic)); __ and_(offset, (kSecondaryTableSize - 1) << kCacheIndexShift); // Probe the secondary table. - ProbeTable(isolate(), masm, ic_kind, flags, kSecondary, name, receiver, - offset, extra); + ProbeTable(this, masm, kSecondary, name, receiver, offset, extra); // Cache miss: Fall-through and let caller handle the miss by // entering the runtime system. |