Diffstat (limited to 'deps/v8/src/heap/heap.cc')
-rw-r--r-- | deps/v8/src/heap/heap.cc | 397
1 file changed, 261 insertions(+), 136 deletions(-)
diff --git a/deps/v8/src/heap/heap.cc b/deps/v8/src/heap/heap.cc
index 2059dae6b7..478be1f03a 100644
--- a/deps/v8/src/heap/heap.cc
+++ b/deps/v8/src/heap/heap.cc
@@ -20,6 +20,7 @@
 #include "src/global-handles.h"
 #include "src/heap/array-buffer-tracker-inl.h"
 #include "src/heap/code-stats.h"
+#include "src/heap/embedder-tracing.h"
 #include "src/heap/gc-idle-time-handler.h"
 #include "src/heap/gc-tracer.h"
 #include "src/heap/incremental-marking.h"
@@ -80,6 +81,7 @@ Heap::Heap()
       max_semi_space_size_(8 * (kPointerSize / 4) * MB),
       initial_semispace_size_(MB),
       max_old_generation_size_(700ul * (kPointerSize / 4) * MB),
+      initial_max_old_generation_size_(max_old_generation_size_),
       initial_old_generation_size_(max_old_generation_size_ /
                                    kInitalOldGenerationLimitFactor),
       old_generation_size_configured_(false),
@@ -93,6 +95,8 @@ Heap::Heap()
       survived_last_scavenge_(0),
       always_allocate_scope_count_(0),
       memory_pressure_level_(MemoryPressureLevel::kNone),
+      out_of_memory_callback_(nullptr),
+      out_of_memory_callback_data_(nullptr),
       contexts_disposed_(0),
       number_of_disposed_maps_(0),
       global_ic_age_(0),
@@ -155,7 +159,7 @@ Heap::Heap()
       deserialization_complete_(false),
       strong_roots_list_(NULL),
       heap_iterator_depth_(0),
-      embedder_heap_tracer_(nullptr),
+      local_embedder_heap_tracer_(nullptr),
       force_oom_(false),
       delay_sweeper_tasks_for_testing_(false) {
   // Allow build-time customization of the max semispace size. Building
@@ -292,6 +296,9 @@ GarbageCollector Heap::SelectGarbageCollector(AllocationSpace space,
   return YoungGenerationCollector();
 }
 
+void Heap::SetGCState(HeapState state) {
+  gc_state_ = state;
+}
 
 // TODO(1238405): Combine the infrastructure for --heap-stats and
 // --log-gc to avoid the complicated preprocessor and flag testing.
@@ -442,7 +449,6 @@ void Heap::GarbageCollectionPrologue() {
   }
   CheckNewSpaceExpansionCriteria();
   UpdateNewSpaceAllocationCounter();
-  store_buffer()->MoveAllEntriesToRememberedSet();
 }
 
 size_t Heap::SizeOfObjects() {
@@ -510,6 +516,22 @@ void Heap::MergeAllocationSitePretenuringFeedback(
   }
 }
 
+class Heap::SkipStoreBufferScope {
+ public:
+  explicit SkipStoreBufferScope(StoreBuffer* store_buffer)
+      : store_buffer_(store_buffer) {
+    store_buffer_->MoveAllEntriesToRememberedSet();
+    store_buffer_->SetMode(StoreBuffer::IN_GC);
+  }
+
+  ~SkipStoreBufferScope() {
+    DCHECK(store_buffer_->Empty());
+    store_buffer_->SetMode(StoreBuffer::NOT_IN_GC);
+  }
+
+ private:
+  StoreBuffer* store_buffer_;
+};
 
 class Heap::PretenuringScope {
  public:
@@ -861,6 +883,10 @@ void Heap::CollectAllAvailableGarbage(GarbageCollectionReason gc_reason) {
   // Note: as weak callbacks can execute arbitrary code, we cannot
   // hope that eventually there will be no weak callbacks invocations.
   // Therefore stop recollecting after several attempts.
+  if (gc_reason == GarbageCollectionReason::kLastResort) {
+    InvokeOutOfMemoryCallback();
+  }
+  RuntimeCallTimerScope(isolate(), &RuntimeCallStats::GC_AllAvailableGarbage);
   if (isolate()->concurrent_recompilation_enabled()) {
     // The optimizing compiler may be unnecessarily holding on to memory.
     DisallowHeapAllocation no_recursive_gc;
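The SkipStoreBufferScope added above is an RAII guard: entering GC flushes the store buffer into the remembered set and switches it to IN_GC mode, and the destructor checks emptiness and restores NOT_IN_GC on every exit path. A minimal standalone sketch of the same pairing (all names here are illustrative stand-ins, not V8's types):

    #include <cassert>

    enum class Mode { NOT_IN_GC, IN_GC };

    // Stand-in for V8's StoreBuffer.
    class StoreBufferLike {
     public:
      void MoveAllEntriesToRememberedSet() { size_ = 0; }
      void SetMode(Mode mode) { mode_ = mode; }
      bool Empty() const { return size_ == 0; }
     private:
      Mode mode_ = Mode::NOT_IN_GC;
      int size_ = 0;
    };

    // RAII guard mirroring SkipStoreBufferScope: flush on entry, check
    // emptiness and restore the mutator mode on every exit path.
    class SkipScope {
     public:
      explicit SkipScope(StoreBufferLike* store_buffer)
          : store_buffer_(store_buffer) {
        store_buffer_->MoveAllEntriesToRememberedSet();
        store_buffer_->SetMode(Mode::IN_GC);
      }
      ~SkipScope() {
        assert(store_buffer_->Empty());
        store_buffer_->SetMode(Mode::NOT_IN_GC);
      }
     private:
      StoreBufferLike* store_buffer_;
    };

    int main() {
      StoreBufferLike store_buffer;
      {
        SkipScope scope(&store_buffer);
        // ... collection work runs with the store buffer bypassed ...
      }  // mode restored here, even if the GC path returns early
    }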
@@ -943,6 +969,7 @@ bool Heap::CollectGarbage(GarbageCollector collector,
                           const v8::GCCallbackFlags gc_callback_flags) {
   // The VM is in the GC state until exiting this function.
   VMState<GC> state(isolate_);
+  RuntimeCallTimerScope(isolate(), &RuntimeCallStats::GC);
 
 #ifdef DEBUG
   // Reset the allocation timeout to the GC interval, but make sure to
@@ -1022,6 +1049,7 @@ bool Heap::CollectGarbage(GarbageCollector collector,
           (committed_memory_before > committed_memory_after + MB) ||
           HasHighFragmentation(used_memory_after, committed_memory_after) ||
           (detached_contexts()->length() > 0);
+      event.committed_memory = committed_memory_after;
       if (deserialization_complete_) {
         memory_reducer_->NotifyMarkCompact(event);
       }
@@ -1164,7 +1192,7 @@ bool Heap::ReserveSpace(Reservation* reservations, List<Address>* maps) {
           // deserializing.
           Address free_space_address = free_space->address();
           CreateFillerObjectAt(free_space_address, Map::kSize,
-                               ClearRecordedSlots::kNo, ClearBlackArea::kNo);
+                               ClearRecordedSlots::kNo);
           maps->Add(free_space_address);
         } else {
           perform_gc = true;
@@ -1195,7 +1223,7 @@ bool Heap::ReserveSpace(Reservation* reservations, List<Address>* maps) {
           // deserializing.
           Address free_space_address = free_space->address();
           CreateFillerObjectAt(free_space_address, size,
-                               ClearRecordedSlots::kNo, ClearBlackArea::kNo);
+                               ClearRecordedSlots::kNo);
           DCHECK(space < SerializerDeserializer::kNumberOfPreallocatedSpaces);
           chunk.start = free_space_address;
           chunk.end = free_space_address + size;
@@ -1313,6 +1341,7 @@ bool Heap::PerformGarbageCollection(
 
   {
     Heap::PretenuringScope pretenuring_scope(this);
+    Heap::SkipStoreBufferScope skip_store_buffer_scope(store_buffer_);
 
     switch (collector) {
       case MARK_COMPACTOR:
@@ -1394,6 +1423,7 @@ bool Heap::PerformGarbageCollection(
 
 
 void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
+  RuntimeCallTimerScope(isolate(), &RuntimeCallStats::GCPrologueCallback);
   for (int i = 0; i < gc_prologue_callbacks_.length(); ++i) {
     if (gc_type & gc_prologue_callbacks_[i].gc_type) {
       if (!gc_prologue_callbacks_[i].pass_isolate) {
@@ -1415,6 +1445,7 @@ void Heap::CallGCPrologueCallbacks(GCType gc_type, GCCallbackFlags flags) {
 
 void Heap::CallGCEpilogueCallbacks(GCType gc_type,
                                    GCCallbackFlags gc_callback_flags) {
+  RuntimeCallTimerScope(isolate(), &RuntimeCallStats::GCEpilogueCallback);
   for (int i = 0; i < gc_epilogue_callbacks_.length(); ++i) {
     if (gc_type & gc_epilogue_callbacks_[i].gc_type) {
       if (!gc_epilogue_callbacks_[i].pass_isolate) {
@@ -1433,7 +1464,8 @@ void Heap::CallGCEpilogueCallbacks(GCType gc_type,
 
 void Heap::MarkCompact() {
   PauseAllocationObserversScope pause_observers(this);
-  gc_state_ = MARK_COMPACT;
+  SetGCState(MARK_COMPACT);
+
   LOG(isolate_, ResourceEvent("markcompact", "begin"));
 
   uint64_t size_of_objects_before_gc = SizeOfObjects();
@@ -1459,7 +1491,7 @@ void Heap::MinorMarkCompact() { UNREACHABLE(); }
 
 void Heap::MarkCompactEpilogue() {
   TRACE_GC(tracer(), GCTracer::Scope::MC_EPILOGUE);
-  gc_state_ = NOT_IN_GC;
+  SetGCState(NOT_IN_GC);
 
   isolate_->counters()->objs_since_last_full()->Set(0);
 
@@ -1512,21 +1544,6 @@ static bool IsUnscavengedHeapObject(Heap* heap, Object** p) {
          !HeapObject::cast(*p)->map_word().IsForwardingAddress();
 }
 
-
-static bool IsUnmodifiedHeapObject(Object** p) {
-  Object* object = *p;
-  if (object->IsSmi()) return false;
-  HeapObject* heap_object = HeapObject::cast(object);
-  if (!object->IsJSObject()) return false;
-  JSObject* js_object = JSObject::cast(object);
-  if (!js_object->WasConstructedFromApiFunction()) return false;
-  JSFunction* constructor =
-      JSFunction::cast(js_object->map()->GetConstructor());
-
-  return constructor->initial_map() == heap_object->map();
-}
-
-
 void PromotionQueue::Initialize() {
   // The last to-space page may be used for promotion queue. On promotion
   // conflict, we use the emergency stack.
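Several call sites above gain a RuntimeCallTimerScope, so GC time is attributed to runtime-call statistics even when a function returns early. A compilable sketch of that RAII-timer idea (Counter and TimerScope are illustrative stand-ins, not V8's RuntimeCallStats API):

    #include <chrono>
    #include <cstdio>

    struct Counter {
      const char* name;
      double total_ms = 0;
    };

    // RAII timer: charges the elapsed time of its enclosing scope to a
    // counter in the destructor, covering every exit path.
    class TimerScope {
     public:
      explicit TimerScope(Counter* counter)
          : counter_(counter), start_(std::chrono::steady_clock::now()) {}
      ~TimerScope() {
        auto end = std::chrono::steady_clock::now();
        counter_->total_ms +=
            std::chrono::duration<double, std::milli>(end - start_).count();
      }
     private:
      Counter* counter_;
      std::chrono::steady_clock::time_point start_;
    };

    Counter gc_counter{"GC"};

    void CollectGarbageLike() {
      TimerScope scope(&gc_counter);  // accounted even on early return
      // ... collection work ...
    }

    int main() {
      CollectGarbageLike();
      std::printf("%s: %.3f ms\n", gc_counter.name, gc_counter.total_ms);
    }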
@@ -1605,7 +1622,7 @@ void Heap::Scavenge() {
 
   mark_compact_collector()->sweeper().EnsureNewSpaceCompleted();
 
-  gc_state_ = SCAVENGE;
+  SetGCState(SCAVENGE);
 
   // Implements Cheney's copying algorithm
   LOG(isolate_, ResourceEvent("scavenge", "begin"));
@@ -1615,13 +1632,6 @@ void Heap::Scavenge() {
 
   scavenge_collector_->SelectScavengingVisitorsTable();
 
-  if (UsingEmbedderHeapTracer()) {
-    // Register found wrappers with embedder so it can add them to its marking
-    // deque and correctly manage the case when v8 scavenger collects the
-    // wrappers by either keeping wrappables alive, or cleaning marking deque.
-    RegisterWrappersWithEmbedderHeapTracer();
-  }
-
   // Flip the semispaces. After flipping, to space is empty, from space has
   // live objects.
   new_space_->Flip();
@@ -1701,8 +1711,10 @@ void Heap::Scavenge() {
   isolate()->global_handles()->MarkNewSpaceWeakUnmodifiedObjectsPending(
       &IsUnscavengedHeapObject);
 
-  isolate()->global_handles()->IterateNewSpaceWeakUnmodifiedRoots(
-      &scavenge_visitor);
+  isolate()
+      ->global_handles()
+      ->IterateNewSpaceWeakUnmodifiedRoots<
+          GlobalHandles::HANDLE_PHANTOM_NODES_VISIT_OTHERS>(&scavenge_visitor);
   new_space_front = DoScavenge(&scavenge_visitor, new_space_front);
 
   UpdateNewSpaceReferencesInExternalStringTable(
@@ -1727,9 +1739,13 @@ void Heap::Scavenge() {
   IncrementYoungSurvivorsCounter(PromotedSpaceSizeOfObjects() +
                                  new_space_->Size() - survived_watermark);
 
+  // Scavenger may find new wrappers by iterating objects promoted onto a black
+  // page.
+  local_embedder_heap_tracer()->RegisterWrappersWithRemoteTracer();
+
   LOG(isolate_, ResourceEvent("scavenge", "end"));
 
-  gc_state_ = NOT_IN_GC;
+  SetGCState(NOT_IN_GC);
 }
 
@@ -1882,7 +1898,7 @@ void Heap::VisitExternalResources(v8::ExternalResourceVisitor* visitor) {
     v8::ExternalResourceVisitor* visitor_;
   } external_string_table_visitor(visitor);
 
-  external_string_table_.Iterate(&external_string_table_visitor);
+  external_string_table_.IterateAll(&external_string_table_visitor);
 }
 
 Address Heap::DoScavenge(ObjectVisitor* scavenge_visitor,
@@ -2008,7 +2024,6 @@ void Heap::UnregisterArrayBuffer(JSArrayBuffer* buffer) {
   ArrayBufferTracker::Unregister(this, buffer);
 }
 
-
 void Heap::ConfigureInitialOldGenerationSize() {
   if (!old_generation_size_configured_ && tracer()->SurvivalEventsRecorded()) {
     old_generation_allocation_limit_ =
@@ -2019,7 +2034,6 @@ void Heap::ConfigureInitialOldGenerationSize() {
   }
 }
 
-
 AllocationResult Heap::AllocatePartialMap(InstanceType instance_type,
                                           int instance_size) {
   Object* result = nullptr;
@@ -2107,8 +2121,7 @@ AllocationResult Heap::AllocateFillerObject(int size, bool double_align,
   MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
   DCHECK(chunk->owner()->identity() == space);
 #endif
-  CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo,
-                       ClearBlackArea::kNo);
+  CreateFillerObjectAt(obj->address(), size, ClearRecordedSlots::kNo);
   return obj;
 }
 
@@ -2256,6 +2269,7 @@ bool Heap::CreateInitialMaps() {
 
     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, scope_info)
     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_info)
+    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, type_feedback_vector)
     ALLOCATE_PRIMITIVE_MAP(HEAP_NUMBER_TYPE, HeapNumber::kSize, heap_number,
                            Context::NUMBER_FUNCTION_INDEX)
     ALLOCATE_MAP(MUTABLE_HEAP_NUMBER_TYPE, HeapNumber::kSize,
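With this patch the scavenger no longer talks to the embedder's tracer directly; wrappers are handed to a heap-local proxy that batches them and forwards the batch, as RegisterWrappersWithRemoteTracer() above does. A standalone sketch of that buffering-proxy shape (simplified; the real EmbedderHeapTracer interface has more entry points):

    #include <cstdio>
    #include <utility>
    #include <vector>

    // Modeled on EmbedderHeapTracer::RegisterV8References; simplified here.
    struct RemoteTracer {
      virtual ~RemoteTracer() = default;
      virtual void RegisterV8References(
          const std::vector<std::pair<void*, void*>>& pairs) = 0;
    };

    // Buffers wrapper pairs found during marking/scavenging and flushes them
    // to the remote tracer in one batch, like
    // LocalEmbedderHeapTracer::RegisterWrappersWithRemoteTracer().
    class LocalTracerProxy {
     public:
      void SetRemoteTracer(RemoteTracer* remote) { remote_ = remote; }
      void AddWrapperToTrace(std::pair<void*, void*> wrapper) {
        cached_.push_back(wrapper);
      }
      void RegisterWrappersWithRemoteTracer() {
        if (remote_ == nullptr || cached_.empty()) return;
        remote_->RegisterV8References(cached_);
        cached_.clear();
      }
     private:
      RemoteTracer* remote_ = nullptr;
      std::vector<std::pair<void*, void*>> cached_;
    };

    struct PrintingTracer final : RemoteTracer {
      void RegisterV8References(
          const std::vector<std::pair<void*, void*>>& pairs) override {
        std::printf("received %zu wrapper(s)\n", pairs.size());
      }
    };

    int main() {
      PrintingTracer remote;
      LocalTracerProxy local;
      local.SetRemoteTracer(&remote);
      int field0 = 0, field1 = 0;  // stand-ins for the two internal fields
      local.AddWrapperToTrace({&field0, &field1});
      local.RegisterWrappersWithRemoteTracer();  // one batched hand-over
    }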
@@ -2279,6 +2293,9 @@ bool Heap::CreateInitialMaps() {
     ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, optimized_out);
     ALLOCATE_MAP(ODDBALL_TYPE, Oddball::kSize, stale_register);
 
+    ALLOCATE_MAP(JS_PROMISE_CAPABILITY_TYPE, JSPromiseCapability::kSize,
+                 js_promise_capability);
+
     for (unsigned i = 0; i < arraysize(string_type_table); i++) {
       const StringTypeTable& entry = string_type_table[i];
       {
@@ -2344,6 +2361,7 @@ bool Heap::CreateInitialMaps() {
     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, debug_evaluate_context)
     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, block_context)
     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, module_context)
+    ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, eval_context)
     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, script_context)
     ALLOCATE_VARSIZE_MAP(FIXED_ARRAY_TYPE, script_context_table)
 
@@ -2523,10 +2541,18 @@ AllocationResult Heap::AllocateTransitionArray(int capacity) {
   return array;
 }
 
-
-void Heap::CreateApiObjects() {
+bool Heap::CreateApiObjects() {
   HandleScope scope(isolate());
   set_message_listeners(*TemplateList::New(isolate(), 2));
+  HeapObject* obj = nullptr;
+  {
+    AllocationResult allocation = AllocateStruct(INTERCEPTOR_INFO_TYPE);
+    if (!allocation.To(&obj)) return false;
+  }
+  InterceptorInfo* info = InterceptorInfo::cast(obj);
+  info->set_flags(0);
+  set_noop_interceptor_info(info);
+  return true;
 }
 
@@ -2697,10 +2723,14 @@ void Heap::CreateInitialObjects() {
   }
 
   Handle<NameDictionary> empty_properties_dictionary =
-      NameDictionary::New(isolate(), 0, TENURED);
+      NameDictionary::NewEmpty(isolate(), TENURED);
   empty_properties_dictionary->SetRequiresCopyOnCapacityChange();
   set_empty_properties_dictionary(*empty_properties_dictionary);
 
+  set_public_symbol_table(*empty_properties_dictionary);
+  set_api_symbol_table(*empty_properties_dictionary);
+  set_api_private_symbol_table(*empty_properties_dictionary);
+
   set_number_string_cache(
       *factory->NewFixedArray(kInitialNumberStringCacheSize * 2, TENURED));
 
@@ -2729,9 +2759,6 @@ void Heap::CreateInitialObjects() {
 
   set_undefined_cell(*factory->NewCell(factory->undefined_value()));
 
-  // The symbol registry is initialized lazily.
-  set_symbol_registry(Smi::kZero);
-
   // Microtask queue uses the empty fixed array as a sentinel for "empty".
   // Number of queued microtasks stored in Isolate::pending_microtask_count().
   set_microtask_queue(empty_fixed_array());
@@ -2779,6 +2806,7 @@ void Heap::CreateInitialObjects() {
                                   empty_fixed_array());
   empty_type_feedback_vector->set(TypeFeedbackVector::kInvocationCountIndex,
                                   Smi::kZero);
+  empty_type_feedback_vector->set_map(type_feedback_vector_map());
   set_empty_type_feedback_vector(*empty_type_feedback_vector);
 
   // We use a canonical empty LiteralsArray for all functions that neither
@@ -2817,7 +2845,7 @@ void Heap::CreateInitialObjects() {
   set_script_list(Smi::kZero);
 
   Handle<SeededNumberDictionary> slow_element_dictionary =
-      SeededNumberDictionary::New(isolate(), 0, TENURED);
+      SeededNumberDictionary::NewEmpty(isolate(), TENURED);
   slow_element_dictionary->set_requires_slow_elements();
   set_empty_slow_element_dictionary(*slow_element_dictionary);
 
@@ -2864,7 +2892,12 @@ void Heap::CreateInitialObjects() {
          handle(Smi::FromInt(Isolate::kProtectorValid), isolate()));
   set_array_iterator_protector(*array_iterator_cell);
 
+  cell = factory->NewPropertyCell();
+  cell->set_value(Smi::FromInt(Isolate::kProtectorValid));
+  set_array_buffer_neutering_protector(*cell);
+
   set_serialized_templates(empty_fixed_array());
+  set_serialized_global_proxy_sizes(empty_fixed_array());
 
   set_weak_stack_trace_list(Smi::kZero);
 
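CreateApiObjects() now reports allocation failure instead of assuming success, so the caller can garbage-collect and retry rather than crash during setup. A sketch of the AllocationResult::To pattern it relies on (all types here are stand-ins):

    // Stand-ins for V8's Object and AllocationResult.
    struct Object {};

    class AllocationResult {
     public:
      static AllocationResult Success(Object* o) { return AllocationResult(o); }
      static AllocationResult Failure() { return AllocationResult(nullptr); }
      // Mirrors AllocationResult::To: yields the object only on success.
      bool To(Object** out) const {
        if (object_ == nullptr) return false;
        *out = object_;
        return true;
      }
     private:
      explicit AllocationResult(Object* o) : object_(o) {}
      Object* object_;
    };

    AllocationResult AllocateStructLike(bool out_of_memory) {
      static Object storage;
      return out_of_memory ? AllocationResult::Failure()
                           : AllocationResult::Success(&storage);
    }

    // Shaped like the new bool-returning CreateApiObjects(): failure
    // propagates so the caller can collect garbage and retry.
    bool CreateApiObjectsLike() {
      Object* obj = nullptr;
      if (!AllocateStructLike(/*out_of_memory=*/false).To(&obj)) return false;
      // ... initialize obj and store it as a heap root ...
      return true;
    }

    int main() { return CreateApiObjectsLike() ? 0 : 1; }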
@@ -2878,6 +2911,42 @@ void Heap::CreateInitialObjects() {
 
   // Initialize compilation cache.
   isolate_->compilation_cache()->Clear();
+
+  // Finish creating JSPromiseCapabilityMap
+  {
+    // TODO(caitp): This initialization can be removed once PromiseCapability
+    // object is no longer used by builtins implemented in javascript.
+    Handle<Map> map = factory->js_promise_capability_map();
+    map->set_inobject_properties_or_constructor_function_index(3);
+
+    Map::EnsureDescriptorSlack(map, 3);
+
+    PropertyAttributes attrs =
+        static_cast<PropertyAttributes>(READ_ONLY | DONT_DELETE);
+    {  // promise
+      Descriptor d = Descriptor::DataField(factory->promise_string(),
+                                           JSPromiseCapability::kPromiseIndex,
+                                           attrs, Representation::Tagged());
+      map->AppendDescriptor(&d);
+    }
+
+    {  // resolve
+      Descriptor d = Descriptor::DataField(factory->resolve_string(),
+                                           JSPromiseCapability::kResolveIndex,
+                                           attrs, Representation::Tagged());
+      map->AppendDescriptor(&d);
+    }
+
+    {  // reject
+      Descriptor d = Descriptor::DataField(factory->reject_string(),
+                                           JSPromiseCapability::kRejectIndex,
+                                           attrs, Representation::Tagged());
+      map->AppendDescriptor(&d);
+    }
+
+    map->set_is_extensible(false);
+    set_js_promise_capability_map(*map);
+  }
 }
 
 bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
@@ -2888,7 +2957,6 @@ bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
     case kInstanceofCacheAnswerRootIndex:
     case kCodeStubsRootIndex:
     case kEmptyScriptRootIndex:
-    case kSymbolRegistryRootIndex:
    case kScriptListRootIndex:
     case kMaterializedObjectsRootIndex:
     case kMicrotaskQueueRootIndex:
@@ -2899,6 +2967,10 @@ bool Heap::RootCanBeWrittenAfterInitialization(Heap::RootListIndex root_index) {
     case kNoScriptSharedFunctionInfosRootIndex:
     case kWeakStackTraceListRootIndex:
     case kSerializedTemplatesRootIndex:
+    case kSerializedGlobalProxySizesRootIndex:
+    case kPublicSymbolTableRootIndex:
+    case kApiSymbolTableRootIndex:
+    case kApiPrivateSymbolTableRootIndex:
 // Smi values
 #define SMI_ENTRY(type, name, Name) case k##Name##RootIndex:
       SMI_ROOT_LIST(SMI_ENTRY)
@@ -2918,6 +2990,18 @@ bool Heap::RootCanBeTreatedAsConstant(RootListIndex root_index) {
          !InNewSpace(root(root_index));
 }
 
+bool Heap::IsUnmodifiedHeapObject(Object** p) {
+  Object* object = *p;
+  if (object->IsSmi()) return false;
+  HeapObject* heap_object = HeapObject::cast(object);
+  if (!object->IsJSObject()) return false;
+  JSObject* js_object = JSObject::cast(object);
+  if (!js_object->WasConstructedFromApiFunction()) return false;
+  JSFunction* constructor =
+      JSFunction::cast(js_object->map()->GetConstructor());
+
+  return constructor->initial_map() == heap_object->map();
+}
 
 int Heap::FullSizeNumberStringCacheLength() {
   // Compute the size of the number string cache based on the max newspace size.
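IsUnmodifiedHeapObject, moved onto Heap above (it was a file-local static removed earlier in this diff), treats an API-constructed object as unmodified only while its map is still the constructor's initial map; adding properties or changing the prototype transitions the map. A standalone sketch of the predicate (struct names are illustrative):

    // Illustrative stand-ins for map, constructor, and object.
    struct MapLike { int id; };
    struct ConstructorLike { const MapLike* initial_map; };
    struct JSObjectLike {
      const MapLike* map;
      const ConstructorLike* constructor;
      bool constructed_from_api_function;
    };

    bool IsUnmodifiedLike(const JSObjectLike& o) {
      // A map equal to the constructor's initial map means "never modified".
      if (!o.constructed_from_api_function) return false;
      return o.constructor->initial_map == o.map;
    }

    int main() {
      MapLike initial{1}, transitioned{2};
      ConstructorLike ctor{&initial};
      JSObjectLike fresh{&initial, &ctor, true};
      JSObjectLike touched{&transitioned, &ctor, true};
      return IsUnmodifiedLike(fresh) && !IsUnmodifiedLike(touched) ? 0 : 1;
    }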
@@ -3042,6 +3126,7 @@ AllocationResult Heap::AllocateBytecodeArray(int length,
   instance->set_parameter_count(parameter_count);
   instance->set_interrupt_budget(interpreter::Interpreter::InterruptBudget());
   instance->set_osr_loop_nesting_level(0);
+  instance->set_bytecode_age(BytecodeArray::kNoAgeBytecodeAge);
   instance->set_constant_pool(constant_pool);
   instance->set_handler_table(empty_fixed_array());
   instance->set_source_position_table(empty_byte_array());
@@ -3050,9 +3135,9 @@ AllocationResult Heap::AllocateBytecodeArray(int length,
   return result;
 }
 
-void Heap::CreateFillerObjectAt(Address addr, int size, ClearRecordedSlots mode,
-                                ClearBlackArea black_area_mode) {
-  if (size == 0) return;
+HeapObject* Heap::CreateFillerObjectAt(Address addr, int size,
+                                       ClearRecordedSlots mode) {
+  if (size == 0) return nullptr;
   HeapObject* filler = HeapObject::FromAddress(addr);
   if (size == kPointerSize) {
     filler->set_map_no_write_barrier(
@@ -3070,20 +3155,11 @@ void Heap::CreateFillerObjectAt(Address addr, int size, ClearRecordedSlots mode,
     ClearRecordedSlotRange(addr, addr + size);
   }
 
-  // If the location where the filler is created is within a black area we have
-  // to clear the mark bits of the filler space.
-  if (black_area_mode == ClearBlackArea::kYes &&
-      incremental_marking()->black_allocation() &&
-      Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(addr))) {
-    Page* page = Page::FromAddress(addr);
-    page->markbits()->ClearRange(page->AddressToMarkbitIndex(addr),
-                                 page->AddressToMarkbitIndex(addr + size));
-  }
-
   // At this point, we may be deserializing the heap from a snapshot, and
   // none of the maps have been created yet and are NULL.
   DCHECK((filler->map() == NULL && !deserialization_complete_) ||
          filler->map()->IsMap());
+  return filler;
 }
 
@@ -3101,8 +3177,12 @@ bool Heap::CanMoveObjectStart(HeapObject* object) {
   return Page::FromAddress(address)->SweepingDone();
 }
 
+bool Heap::IsImmovable(HeapObject* object) {
+  MemoryChunk* chunk = MemoryChunk::FromAddress(object->address());
+  return chunk->NeverEvacuate() || chunk->owner()->identity() == LO_SPACE;
+}
 
-void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
+void Heap::AdjustLiveBytes(HeapObject* object, int by) {
   // As long as the inspected object is black and we are currently not iterating
   // the heap using HeapIterator, we can update the live byte count. We cannot
   // update while using HeapIterator because the iterator is temporarily
@@ -3111,12 +3191,9 @@ void Heap::AdjustLiveBytes(HeapObject* object, int by, InvocationMode mode) {
     lo_space()->AdjustLiveBytes(by);
   } else if (!in_heap_iterator() &&
              !mark_compact_collector()->sweeping_in_progress() &&
-             Marking::IsBlack(ObjectMarking::MarkBitFrom(object->address()))) {
-    if (mode == SEQUENTIAL_TO_SWEEPER) {
-      MemoryChunk::IncrementLiveBytesFromGC(object, by);
-    } else {
-      MemoryChunk::IncrementLiveBytesFromMutator(object, by);
-    }
+             Marking::IsBlack(ObjectMarking::MarkBitFrom(object))) {
+    DCHECK(MemoryChunk::FromAddress(object->address())->SweepingDone());
+    MemoryChunk::IncrementLiveBytes(object, by);
   }
 }
 
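CreateFillerObjectAt now returns the filler it wrote, or nullptr for an empty range, so callers can post-process it (the right-trim path below uses the returned filler to clear mark bits). A small sketch of that contract (Filler is a stand-in type):

    #include <cassert>
    #include <cstddef>
    #include <new>

    // Stand-in for a filler object header written over dead space.
    struct Filler { std::size_t size; };

    // Returns the filler it created, or nullptr for an empty range, matching
    // the new CreateFillerObjectAt contract.
    Filler* CreateFillerAt(void* addr, std::size_t size) {
      if (size == 0) return nullptr;
      return new (addr) Filler{size};
    }

    int main() {
      alignas(Filler) unsigned char block[64];
      Filler* filler = CreateFillerAt(block, sizeof(block));
      assert(filler != nullptr && filler->size == 64);
      assert(CreateFillerAt(block, 0) == nullptr);  // size 0 writes nothing
    }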
@@ -3150,14 +3227,27 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
 
   // Transfer the mark bits to their new location if the object is not within
   // a black area.
   if (!incremental_marking()->black_allocation() ||
-      !Marking::IsBlack(ObjectMarking::MarkBitFrom(new_start))) {
-    IncrementalMarking::TransferMark(this, old_start, new_start);
+      !Marking::IsBlack(
+          ObjectMarking::MarkBitFrom(HeapObject::FromAddress(new_start)))) {
+    IncrementalMarking::TransferMark(this, object,
+                                     HeapObject::FromAddress(new_start));
   }
 
   // Technically in new space this write might be omitted (except for
   // debug mode which iterates through the heap), but to play safer
   // we still do it.
   CreateFillerObjectAt(old_start, bytes_to_trim, ClearRecordedSlots::kYes);
+
+  // Clear the mark bits of the black area that belongs now to the filler.
+  // This is an optimization. The sweeper will release black fillers anyway.
+  if (incremental_marking()->black_allocation() &&
+      Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(object))) {
+    Page* page = Page::FromAddress(old_start);
+    page->markbits()->ClearRange(
+        page->AddressToMarkbitIndex(old_start),
+        page->AddressToMarkbitIndex(old_start + bytes_to_trim));
+  }
+
   // Initialize header of the trimmed array. Since left trimming is only
   // performed on pages which are not concurrently swept creating a filler
   // object does not require synchronization.
@@ -3171,7 +3261,7 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
       FixedArrayBase::cast(HeapObject::FromAddress(new_start));
 
   // Maintain consistency of live bytes during incremental marking
-  AdjustLiveBytes(new_object, -bytes_to_trim, Heap::CONCURRENT_TO_SWEEPER);
+  AdjustLiveBytes(new_object, -bytes_to_trim);
 
   // Remove recorded slots for the new map and length offset.
   ClearRecordedSlot(new_object, HeapObject::RawField(new_object, 0));
@@ -3183,15 +3273,6 @@ FixedArrayBase* Heap::LeftTrimFixedArray(FixedArrayBase* object,
   return new_object;
 }
 
-
-// Force instantiation of templatized method.
-template void Heap::RightTrimFixedArray<Heap::SEQUENTIAL_TO_SWEEPER>(
-    FixedArrayBase*, int);
-template void Heap::RightTrimFixedArray<Heap::CONCURRENT_TO_SWEEPER>(
-    FixedArrayBase*, int);
-
-
-template<Heap::InvocationMode mode>
 void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
   const int len = object->length();
   DCHECK_LE(elements_to_trim, len);
@@ -3235,7 +3316,18 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
   // TODO(hpayer): We should shrink the large object page if the size
   // of the object changed significantly.
   if (!lo_space()->Contains(object)) {
-    CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes);
+    HeapObject* filler =
+        CreateFillerObjectAt(new_end, bytes_to_trim, ClearRecordedSlots::kYes);
+    DCHECK_NOT_NULL(filler);
+    // Clear the mark bits of the black area that belongs now to the filler.
+    // This is an optimization. The sweeper will release black fillers anyway.
+    if (incremental_marking()->black_allocation() &&
+        Marking::IsBlackOrGrey(ObjectMarking::MarkBitFrom(filler))) {
+      Page* page = Page::FromAddress(new_end);
+      page->markbits()->ClearRange(
+          page->AddressToMarkbitIndex(new_end),
+          page->AddressToMarkbitIndex(new_end + bytes_to_trim));
+    }
   }
 
   // Initialize header of the trimmed array. We are storing the new length
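Both trim paths now clear the mark bits for the range handed to the filler when it was carved out of a black area, so the sweeper can reclaim the filler early. A sketch using one mark bit per word (V8's real page bitmaps are denser, and kWordsPerPage is an assumed size):

    #include <bitset>
    #include <cstddef>

    // One mark bit per word; an assumed page size for illustration.
    constexpr std::size_t kWordsPerPage = 1024;

    struct PageBitmapLike {
      std::bitset<kWordsPerPage> bits;
      // Analogue of page->markbits()->ClearRange(from, to).
      void ClearRange(std::size_t from, std::size_t to) {
        for (std::size_t i = from; i < to; ++i) bits.reset(i);
      }
    };

    int main() {
      PageBitmapLike page;
      page.bits.set();            // black allocation: the whole page is marked
      page.ClearRange(100, 164);  // range now owned by a filler after trimming
      return page.bits.test(99) && !page.bits.test(100) ? 0 : 1;
    }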
@@ -3244,7 +3336,7 @@ void Heap::RightTrimFixedArray(FixedArrayBase* object, int elements_to_trim) {
   object->synchronized_set_length(len - elements_to_trim);
 
   // Maintain consistency of live bytes during incremental marking
-  AdjustLiveBytes(object, -bytes_to_trim, mode);
+  AdjustLiveBytes(object, -bytes_to_trim);
 
   // Notify the heap profiler of change in object layout. The array may not be
   // moved during GC, and size has to be adjusted nevertheless.
@@ -3331,18 +3423,24 @@ AllocationResult Heap::AllocateCode(int object_size, bool immovable) {
   if (!allocation.To(&result)) return allocation;
 
   if (immovable) {
     Address address = result->address();
+    MemoryChunk* chunk = MemoryChunk::FromAddress(address);
     // Code objects which should stay at a fixed address are allocated either
     // in the first page of code space (objects on the first page of each space
-    // are never moved) or in large object space.
-    if (!code_space_->FirstPage()->Contains(address) &&
-        MemoryChunk::FromAddress(address)->owner()->identity() != LO_SPACE) {
-      // Discard the first code allocation, which was on a page where it could
-      // be moved.
-      CreateFillerObjectAt(result->address(), object_size,
-                           ClearRecordedSlots::kNo);
-      allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
-      if (!allocation.To(&result)) return allocation;
-      OnAllocationEvent(result, object_size);
+    // are never moved), in large object space, or (during snapshot creation)
+    // the containing page is marked as immovable.
+    if (!Heap::IsImmovable(result) &&
+        !code_space_->FirstPage()->Contains(address)) {
+      if (isolate()->serializer_enabled()) {
+        chunk->MarkNeverEvacuate();
+      } else {
+        // Discard the first code allocation, which was on a page where it
+        // could be moved.
+        CreateFillerObjectAt(result->address(), object_size,
+                             ClearRecordedSlots::kNo);
+        allocation = lo_space_->AllocateRaw(object_size, EXECUTABLE);
+        if (!allocation.To(&result)) return allocation;
+        OnAllocationEvent(result, object_size);
+      }
     }
   }
 
@@ -3405,6 +3503,7 @@ AllocationResult Heap::CopyBytecodeArray(BytecodeArray* bytecode_array) {
   copy->set_source_position_table(bytecode_array->source_position_table());
   copy->set_interrupt_budget(bytecode_array->interrupt_budget());
   copy->set_osr_loop_nesting_level(bytecode_array->osr_loop_nesting_level());
+  copy->set_bytecode_age(bytecode_array->bytecode_age());
   bytecode_array->CopyBytecodesTo(copy);
   return copy;
 }
@@ -4045,9 +4144,7 @@ void Heap::MakeHeapIterable() {
     CollectAllGarbage(kMakeHeapIterableMask,
                       GarbageCollectionReason::kMakeHeapIterable);
   }
-  if (mark_compact_collector()->sweeping_in_progress()) {
-    mark_compact_collector()->EnsureSweepingCompleted();
-  }
+  mark_compact_collector()->EnsureSweepingCompleted();
   DCHECK(IsHeapIterable());
 }
 
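AllocateCode's immovable path now distinguishes snapshot creation, where the containing page is pinned via MarkNeverEvacuate(), from normal execution, where the first allocation is discarded and retried in large object space. The decision, reduced to a standalone sketch (the enum and flags are illustrative; V8 expresses this inline):

    enum class Placement {
      kKeepAsAllocated,              // already immovable or on first code page
      kPinContainingPage,            // snapshot creation: MarkNeverEvacuate()
      kReallocateInLargeObjectSpace  // discard and retry in LO space
    };

    Placement DecideImmovablePlacement(bool already_immovable,
                                       bool on_first_code_page,
                                       bool serializer_enabled) {
      if (already_immovable || on_first_code_page) {
        return Placement::kKeepAsAllocated;
      }
      return serializer_enabled ? Placement::kPinContainingPage
                                : Placement::kReallocateInLargeObjectSpace;
    }

    int main() {
      return DecideImmovablePlacement(false, false, true) ==
                     Placement::kPinContainingPage
                 ? 0
                 : 1;
    }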
@@ -4169,21 +4266,18 @@ void Heap::ReduceNewSpaceSize() {
   }
 }
 
-bool Heap::MarkingDequesAreEmpty() {
-  return mark_compact_collector()->marking_deque()->IsEmpty() &&
-         (!UsingEmbedderHeapTracer() ||
-          (wrappers_to_trace() == 0 &&
-           embedder_heap_tracer()->NumberOfWrappersToTrace() == 0));
-}
-
 void Heap::FinalizeIncrementalMarkingIfComplete(
     GarbageCollectionReason gc_reason) {
   if (incremental_marking()->IsMarking() &&
       (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
        (!incremental_marking()->finalize_marking_completed() &&
-        MarkingDequesAreEmpty()))) {
+        mark_compact_collector()->marking_deque()->IsEmpty() &&
+        local_embedder_heap_tracer()->ShouldFinalizeIncrementalMarking()))) {
     FinalizeIncrementalMarking(gc_reason);
-  } else if (incremental_marking()->IsComplete() || MarkingDequesAreEmpty()) {
+  } else if (incremental_marking()->IsComplete() ||
+             (mark_compact_collector()->marking_deque()->IsEmpty() &&
+              local_embedder_heap_tracer()
+                  ->ShouldFinalizeIncrementalMarking())) {
     CollectAllGarbage(current_gc_flags_, gc_reason);
   }
 }
@@ -4195,13 +4289,16 @@ bool Heap::TryFinalizeIdleIncrementalMarking(
       tracer()->FinalIncrementalMarkCompactSpeedInBytesPerMillisecond();
   if (incremental_marking()->IsReadyToOverApproximateWeakClosure() ||
       (!incremental_marking()->finalize_marking_completed() &&
-       MarkingDequesAreEmpty() &&
+       mark_compact_collector()->marking_deque()->IsEmpty() &&
+       local_embedder_heap_tracer()->ShouldFinalizeIncrementalMarking() &&
        gc_idle_time_handler_->ShouldDoOverApproximateWeakClosure(
            idle_time_in_ms))) {
     FinalizeIncrementalMarking(gc_reason);
     return true;
   } else if (incremental_marking()->IsComplete() ||
-             (MarkingDequesAreEmpty() &&
+             (mark_compact_collector()->marking_deque()->IsEmpty() &&
+              local_embedder_heap_tracer()
+                  ->ShouldFinalizeIncrementalMarking() &&
              gc_idle_time_handler_->ShouldDoFinalIncrementalMarkCompact(
                   idle_time_in_ms, size_of_objects,
                   final_incremental_mark_compact_speed_in_bytes_per_ms))) {
@@ -4484,6 +4581,18 @@ void Heap::MemoryPressureNotification(MemoryPressureLevel level,
   }
 }
 
+void Heap::SetOutOfMemoryCallback(v8::debug::OutOfMemoryCallback callback,
+                                  void* data) {
+  out_of_memory_callback_ = callback;
+  out_of_memory_callback_data_ = data;
+}
+
+void Heap::InvokeOutOfMemoryCallback() {
+  if (out_of_memory_callback_) {
+    out_of_memory_callback_(out_of_memory_callback_data_);
+  }
+}
+
 void Heap::CollectCodeStatistics() {
   CodeStatistics::ResetCodeAndMetadataStatistics(isolate());
   // We do not look for code in new space, or map space. If code
@@ -4698,10 +4807,8 @@ void Heap::Verify() {
   CHECK(HasBeenSetUp());
   HandleScope scope(isolate());
 
-  if (mark_compact_collector()->sweeping_in_progress()) {
-    // We have to wait here for the sweeper threads to have an iterable heap.
-    mark_compact_collector()->EnsureSweepingCompleted();
-  }
+  // We have to wait here for the sweeper threads to have an iterable heap.
+  mark_compact_collector()->EnsureSweepingCompleted();
 
   VerifyPointersVisitor visitor;
   IterateRoots(&visitor, VISIT_ONLY_STRONG);
@@ -4729,8 +4836,8 @@ void Heap::Verify() {
 
 void Heap::ZapFromSpace() {
   if (!new_space_->IsFromSpaceCommitted()) return;
-  for (Page* page : NewSpacePageRange(new_space_->FromSpaceStart(),
-                                      new_space_->FromSpaceEnd())) {
+  for (Page* page :
+       PageRange(new_space_->FromSpaceStart(), new_space_->FromSpaceEnd())) {
     for (Address cursor = page->area_start(), limit = page->area_end();
          cursor < limit; cursor += kPointerSize) {
       Memory::Address_at(cursor) = kFromSpaceZapValue;
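SetOutOfMemoryCallback and InvokeOutOfMemoryCallback above let an embedder observe a last-resort GC before V8 gives up. A standalone sketch of the same hook mechanics (HeapLike is a stand-in; the real entry point takes a v8::debug::OutOfMemoryCallback):

    #include <cstdio>

    using OutOfMemoryCallback = void (*)(void* data);

    class HeapLike {
     public:
      void SetOutOfMemoryCallback(OutOfMemoryCallback callback, void* data) {
        out_of_memory_callback_ = callback;
        out_of_memory_callback_data_ = data;
      }
      void CollectAllAvailableGarbage(bool last_resort) {
        // Mirrors the new check in Heap::CollectAllAvailableGarbage: the hook
        // fires before the last-resort collection runs.
        if (last_resort && out_of_memory_callback_ != nullptr) {
          out_of_memory_callback_(out_of_memory_callback_data_);
        }
        // ... repeated major collections would follow here ...
      }
     private:
      OutOfMemoryCallback out_of_memory_callback_ = nullptr;
      void* out_of_memory_callback_data_ = nullptr;
    };

    int main() {
      static char context_name[] = "embedder context";  // illustrative payload
      HeapLike heap;
      heap.SetOutOfMemoryCallback(
          [](void* data) {
            std::fprintf(stderr, "near OOM: %s\n", static_cast<char*>(data));
          },
          context_name);
      heap.CollectAllAvailableGarbage(/*last_resort=*/true);
    }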
@@ -4838,7 +4945,7 @@ void Heap::IterateWeakRoots(ObjectVisitor* v, VisitMode mode) {
   v->Synchronize(VisitorSynchronization::kStringTable);
   if (mode != VISIT_ALL_IN_SCAVENGE && mode != VISIT_ALL_IN_SWEEP_NEWSPACE) {
     // Scavenge collections have special processing for this.
-    external_string_table_.Iterate(v);
+    external_string_table_.IterateAll(v);
   }
   v->Synchronize(VisitorSynchronization::kExternalStringsTable);
 }
@@ -4937,8 +5044,9 @@ void Heap::IterateStrongRoots(ObjectVisitor* v, VisitMode mode) {
     case VISIT_ONLY_STRONG_ROOT_LIST:
       UNREACHABLE();
       break;
-    case VISIT_ONLY_STRONG:
     case VISIT_ONLY_STRONG_FOR_SERIALIZATION:
+      break;
+    case VISIT_ONLY_STRONG:
       isolate_->global_handles()->IterateStrongRoots(v);
       break;
     case VISIT_ALL_IN_SCAVENGE:
@@ -5052,7 +5160,7 @@ bool Heap::ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
 
   // The old generation is paged and needs at least one page for each space.
   int paged_space_count = LAST_PAGED_SPACE - FIRST_PAGED_SPACE + 1;
-  max_old_generation_size_ =
+  initial_max_old_generation_size_ = max_old_generation_size_ =
       Max(static_cast<size_t>(paged_space_count * Page::kPageSize),
           max_old_generation_size_);
 
@@ -5307,6 +5415,13 @@ void Heap::DampenOldGenerationAllocationLimit(size_t old_gen_size,
   }
 }
 
+bool Heap::ShouldOptimizeForLoadTime() {
+  return isolate()->rail_mode() == PERFORMANCE_LOAD &&
+         !AllocationLimitOvershotByLargeMargin() &&
+         MonotonicallyIncreasingTimeInMs() <
+             isolate()->LoadStartTimeMs() + kMaxLoadTimeMs;
+}
+
 // This predicate is called when an old generation space cannot allocated from
 // the free list and is about to add a new page. Returning false will cause a
 // major GC. It happens when the old generation allocation limit is reached and
@@ -5318,6 +5433,8 @@ bool Heap::ShouldExpandOldGenerationOnSlowAllocation() {
 
   if (ShouldOptimizeForMemoryUsage()) return false;
 
+  if (ShouldOptimizeForLoadTime()) return true;
+
   if (incremental_marking()->NeedsFinalization()) {
     return !AllocationLimitOvershotByLargeMargin();
   }
@@ -5352,9 +5469,13 @@ Heap::IncrementalMarkingLimit Heap::IncrementalMarkingLimitReached() {
   if (old_generation_space_available > new_space_->Capacity()) {
     return IncrementalMarkingLimit::kNoLimit;
   }
-  // We are close to the allocation limit.
-  // Choose between the hard and the soft limits.
-  if (old_generation_space_available == 0 || ShouldOptimizeForMemoryUsage()) {
+  if (ShouldOptimizeForMemoryUsage()) {
+    return IncrementalMarkingLimit::kHardLimit;
+  }
+  if (ShouldOptimizeForLoadTime()) {
+    return IncrementalMarkingLimit::kNoLimit;
+  }
+  if (old_generation_space_available == 0) {
    return IncrementalMarkingLimit::kHardLimit;
   }
   return IncrementalMarkingLimit::kSoftLimit;
@@ -5477,6 +5598,7 @@ bool Heap::SetUp() {
     dead_object_stats_ = new ObjectStats(this);
   }
   scavenge_job_ = new ScavengeJob();
+  local_embedder_heap_tracer_ = new LocalEmbedderHeapTracer();
 
   LOG(isolate_, IntPtrTEvent("heap-capacity", Capacity()));
   LOG(isolate_, IntPtrTEvent("heap-available", Available()));
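ShouldOptimizeForLoadTime defers GC work while the embedder reports the RAIL "load" phase, but only within a bounded window after load start and only if the allocation limit has not been overshot by a large margin. A sketch of the predicate (kMaxLoadTimeMsSketch is an assumed value; the patch's kMaxLoadTimeMs constant is defined elsewhere):

    enum class RailMode { kResponse, kAnimation, kIdle, kLoad };

    // Assumed window; the real kMaxLoadTimeMs may differ.
    constexpr double kMaxLoadTimeMsSketch = 3000;

    // Mirrors the three conditions in Heap::ShouldOptimizeForLoadTime().
    bool ShouldOptimizeForLoadTimeLike(RailMode mode, double now_ms,
                                       double load_start_ms,
                                       bool overshot_by_large_margin) {
      return mode == RailMode::kLoad && !overshot_by_large_margin &&
             now_ms < load_start_ms + kMaxLoadTimeMsSketch;
    }

    int main() {
      // During load, shortly after load start, GC is deferred.
      return ShouldOptimizeForLoadTimeLike(RailMode::kLoad, 1000.0, 0.0, false)
                 ? 0
                 : 1;
    }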
@@ -5496,7 +5618,7 @@ bool Heap::SetUp() {
 
 bool Heap::CreateHeapObjects() {
   // Create initial maps.
   if (!CreateInitialMaps()) return false;
-  CreateApiObjects();
+  if (!CreateApiObjects()) return false;
 
   // Create initial objects
   CreateInitialObjects();
@@ -5552,16 +5674,7 @@ void Heap::NotifyDeserializationComplete() {
 
 void Heap::SetEmbedderHeapTracer(EmbedderHeapTracer* tracer) {
   DCHECK_EQ(gc_state_, HeapState::NOT_IN_GC);
-  embedder_heap_tracer_ = tracer;
-}
-
-void Heap::RegisterWrappersWithEmbedderHeapTracer() {
-  DCHECK(UsingEmbedderHeapTracer());
-  if (wrappers_to_trace_.empty()) {
-    return;
-  }
-  embedder_heap_tracer()->RegisterV8References(wrappers_to_trace_);
-  wrappers_to_trace_.clear();
+  local_embedder_heap_tracer()->SetRemoteTracer(tracer);
 }
 
 void Heap::TracePossibleWrapper(JSObject* js_object) {
@@ -5571,17 +5684,12 @@ void Heap::TracePossibleWrapper(JSObject* js_object) {
       js_object->GetInternalField(0) != undefined_value() &&
       js_object->GetInternalField(1) != undefined_value()) {
     DCHECK(reinterpret_cast<intptr_t>(js_object->GetInternalField(0)) % 2 == 0);
-    wrappers_to_trace_.push_back(std::pair<void*, void*>(
+    local_embedder_heap_tracer()->AddWrapperToTrace(std::pair<void*, void*>(
        reinterpret_cast<void*>(js_object->GetInternalField(0)),
        reinterpret_cast<void*>(js_object->GetInternalField(1))));
   }
 }
 
-bool Heap::RequiresImmediateWrapperProcessing() {
-  const size_t kTooManyWrappers = 16000;
-  return wrappers_to_trace_.size() > kTooManyWrappers;
-}
-
 void Heap::RegisterExternallyReferencedObject(Object** object) {
   HeapObject* heap_object = HeapObject::cast(*object);
   DCHECK(Contains(heap_object));
@@ -5658,6 +5766,9 @@ void Heap::TearDown() {
     dead_object_stats_ = nullptr;
   }
 
+  delete local_embedder_heap_tracer_;
+  local_embedder_heap_tracer_ = nullptr;
+
   delete scavenge_job_;
   scavenge_job_ = nullptr;
 
@@ -5803,8 +5914,6 @@ void Heap::CompactWeakFixedArrays() {
       WeakFixedArray* array = WeakFixedArray::cast(prototype_users);
       array->Compact<JSObject::PrototypeRegistryCompactionCallback>();
       }
-    } else if (o->IsScript()) {
-      CompactWeakFixedArray(Script::cast(o)->shared_function_infos());
     }
   }
   CompactWeakFixedArray(noscript_shared_function_infos());
@@ -5909,6 +6018,18 @@ void Heap::ClearRecordedSlot(HeapObject* object, Object** slot) {
   }
 }
 
+bool Heap::HasRecordedSlot(HeapObject* object, Object** slot) {
+  if (InNewSpace(object)) {
+    return false;
+  }
+  Address slot_addr = reinterpret_cast<Address>(slot);
+  Page* page = Page::FromAddress(slot_addr);
+  DCHECK_EQ(page->owner()->identity(), OLD_SPACE);
+  store_buffer()->MoveAllEntriesToRememberedSet();
+  return RememberedSet<OLD_TO_NEW>::Contains(page, slot_addr) ||
+         RememberedSet<OLD_TO_OLD>::Contains(page, slot_addr);
+}
+
 void Heap::ClearRecordedSlotRange(Address start, Address end) {
   Page* page = Page::FromAddress(start);
   if (!page->InNewSpace()) {
@@ -6330,7 +6451,7 @@ void Heap::UpdateTotalGCTime(double duration) {
   }
 }
 
-void Heap::ExternalStringTable::CleanUp() {
+void Heap::ExternalStringTable::CleanUpNewSpaceStrings() {
   int last = 0;
   Isolate* isolate = heap_->isolate();
   for (int i = 0; i < new_space_strings_.length(); ++i) {
@@ -6346,8 +6467,12 @@ void Heap::ExternalStringTable::CleanUp() {
   }
   new_space_strings_.Rewind(last);
   new_space_strings_.Trim();
+}
 
-  last = 0;
+void Heap::ExternalStringTable::CleanUpAll() {
+  CleanUpNewSpaceStrings();
+  int last = 0;
+  Isolate* isolate = heap_->isolate();
   for (int i = 0; i < old_space_strings_.length(); ++i) {
     if (old_space_strings_[i]->IsTheHole(isolate)) {
       continue;
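HasRecordedSlot answers, after flushing the store buffer into the remembered sets, whether a slot address is present in either remembered set of its page. A sketch of that membership check, with std::set standing in for V8's per-page slot sets:

    #include <cstdint>
    #include <set>

    // std::set stands in for V8's per-page OLD_TO_NEW / OLD_TO_OLD slot sets.
    struct PageSlotSets {
      std::set<std::uintptr_t> old_to_new;
      std::set<std::uintptr_t> old_to_old;
    };

    // Analogue of Heap::HasRecordedSlot once the store buffer is flushed.
    bool HasRecordedSlotLike(const PageSlotSets& page, std::uintptr_t slot) {
      return page.old_to_new.count(slot) > 0 || page.old_to_old.count(slot) > 0;
    }

    int main() {
      PageSlotSets page;
      page.old_to_new.insert(0x1000);
      return HasRecordedSlotLike(page, 0x1000) &&
                     !HasRecordedSlotLike(page, 0x2000)
                 ? 0
                 : 1;
    }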