Diffstat (limited to 'deps/v8/src/heap/heap.h')
-rw-r--r--  deps/v8/src/heap/heap.h  206
1 file changed, 111 insertions(+), 95 deletions(-)
diff --git a/deps/v8/src/heap/heap.h b/deps/v8/src/heap/heap.h
index ba89686672..013cd9a8fe 100644
--- a/deps/v8/src/heap/heap.h
+++ b/deps/v8/src/heap/heap.h
@@ -48,8 +48,6 @@ using v8::MemoryPressureLevel;
V(Map, one_byte_string_map, OneByteStringMap) \
V(Map, one_byte_internalized_string_map, OneByteInternalizedStringMap) \
V(Map, scope_info_map, ScopeInfoMap) \
- V(Map, module_info_entry_map, ModuleInfoEntryMap) \
- V(Map, module_info_map, ModuleInfoMap) \
V(Map, shared_function_info_map, SharedFunctionInfoMap) \
V(Map, code_map, CodeMap) \
V(Map, function_context_map, FunctionContextMap) \
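
The V(type, name, CamelName) lines above are entries in an X-macro list: the header defines the list once, and each client supplies its own V to expand it into enum indices, accessors, and so on. Because clients expand the list positionally, reordering entries (as several hunks below do) changes every generated index. A minimal sketch of the pattern, using a shortened hypothetical list:

    // Sketch of the X-macro pattern behind the root list above.
    // EXAMPLE_ROOT_LIST is a shortened, hypothetical stand-in.
    #define EXAMPLE_ROOT_LIST(V)           \
      V(Map, scope_info_map, ScopeInfoMap) \
      V(Map, code_map, CodeMap)

    // One client: generate an enum constant per root, plus the list length.
    #define DECLARE_ROOT_INDEX(type, name, CamelName) k##CamelName##RootIndex,
    enum RootListIndex { EXAMPLE_ROOT_LIST(DECLARE_ROOT_INDEX) kRootListLength };
    #undef DECLARE_ROOT_INDEX
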
@@ -62,13 +60,13 @@ using v8::MemoryPressureLevel;
V(FixedArray, empty_literals_array, EmptyLiteralsArray) \
V(FixedArray, empty_type_feedback_vector, EmptyTypeFeedbackVector) \
V(FixedArray, empty_fixed_array, EmptyFixedArray) \
- V(ScopeInfo, empty_scope_info, EmptyScopeInfo) \
- V(FixedArray, cleared_optimized_code_map, ClearedOptimizedCodeMap) \
V(DescriptorArray, empty_descriptor_array, EmptyDescriptorArray) \
/* Entries beyond the first 32 */ \
/* The roots above this line should be boring from a GC point of view. */ \
/* This means they are never in new space and never on a page that is */ \
/* being compacted. */ \
+ /* Empty scope info */ \
+ V(ScopeInfo, empty_scope_info, EmptyScopeInfo) \
/* Oddballs */ \
V(Oddball, no_interceptor_result_sentinel, NoInterceptorResultSentinel) \
V(Oddball, arguments_marker, ArgumentsMarker) \
@@ -92,9 +90,9 @@ using v8::MemoryPressureLevel;
V(Map, unseeded_number_dictionary_map, UnseededNumberDictionaryMap) \
V(Map, sloppy_arguments_elements_map, SloppyArgumentsElementsMap) \
V(Map, message_object_map, JSMessageObjectMap) \
- V(Map, neander_map, NeanderMap) \
V(Map, external_map, ExternalMap) \
V(Map, bytecode_array_map, BytecodeArrayMap) \
+ V(Map, module_info_map, ModuleInfoMap) \
/* String maps */ \
V(Map, native_source_string_map, NativeSourceStringMap) \
V(Map, string_map, StringMap) \
@@ -168,6 +166,8 @@ using v8::MemoryPressureLevel;
V(PropertyCell, has_instance_protector, HasInstanceProtector) \
V(Cell, species_protector, SpeciesProtector) \
V(PropertyCell, string_length_protector, StringLengthProtector) \
+ V(Cell, fast_array_iteration_protector, FastArrayIterationProtector) \
+ V(Cell, array_iterator_protector, ArrayIteratorProtector) \
/* Special numbers */ \
V(HeapNumber, nan_value, NanValue) \
V(HeapNumber, hole_nan_value, HoleNanValue) \
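
The two roots added here are protector cells: a cell holds a known sentinel while some global invariant (here, that array iteration may take the fast path) still holds, and is overwritten to invalidate every fast path guarded by it. A rough sketch of a check against such a cell, assuming a Smi sentinel of 1 (the sentinel constant is an assumption of this sketch, not taken from the patch):

    // Sketch of a protector-cell fast-path check. The accessor is generated
    // from the root list entry above; kProtectorValidSentinel is assumed.
    constexpr int kProtectorValidSentinel = 1;

    bool CanUseFastArrayIteration(Heap* heap) {
      Cell* protector = heap->fast_array_iteration_protector();
      return protector->value() == Smi::FromInt(kProtectorValidSentinel);
    }
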
@@ -278,7 +278,6 @@ using v8::MemoryPressureLevel;
V(FixedArrayMap) \
V(CodeMap) \
V(ScopeInfoMap) \
- V(ModuleInfoEntryMap) \
V(ModuleInfoMap) \
V(FixedCOWArrayMap) \
V(FixedDoubleArrayMap) \
@@ -307,7 +306,6 @@ using v8::MemoryPressureLevel;
V(ArgumentsMarkerMap) \
V(JSMessageObjectMap) \
V(ForeignMap) \
- V(NeanderMap) \
V(NanValue) \
V(InfinityValue) \
V(MinusZeroValue) \
@@ -342,8 +340,6 @@ class WeakObjectRetainer;
typedef void (*ObjectSlotCallback)(HeapObject** from, HeapObject* to);
-enum PromotionMode { PROMOTE_MARKED, DEFAULT_PROMOTION };
-
enum ArrayStorageAllocationMode {
DONT_INITIALIZE_ARRAY_ELEMENTS,
INITIALIZE_ARRAY_ELEMENTS_WITH_HOLE
@@ -626,8 +622,8 @@ class Heap {
static const int kTraceRingBufferSize = 512;
static const int kStacktraceBufferSize = 512;
- static const double kMinHeapGrowingFactor;
- static const double kMaxHeapGrowingFactor;
+ V8_EXPORT_PRIVATE static const double kMinHeapGrowingFactor;
+ V8_EXPORT_PRIVATE static const double kMaxHeapGrowingFactor;
static const double kMaxHeapGrowingFactorMemoryConstrained;
static const double kMaxHeapGrowingFactorIdle;
static const double kConservativeHeapGrowingFactor;
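
V8_EXPORT_PRIVATE marks symbols that must be linkable from outside the component (typically by unit tests) without being part of the public API. Roughly, and only as an illustration of the usual shape of such a macro rather than V8's exact definition:

    // Rough shape of an export macro like V8_EXPORT_PRIVATE. The real
    // definition also distinguishes building vs. consuming a shared library.
    #if defined(_WIN32)
    #define EXPORT_PRIVATE_SKETCH __declspec(dllexport)
    #else
    #define EXPORT_PRIVATE_SKETCH __attribute__((visibility("default")))
    #endif
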
@@ -690,7 +686,28 @@ class Heap {
#endif
}
- static double HeapGrowingFactor(double gc_speed, double mutator_speed);
+ static inline bool IsYoungGenerationCollector(GarbageCollector collector) {
+ return collector == SCAVENGER || collector == MINOR_MARK_COMPACTOR;
+ }
+
+ static inline GarbageCollector YoungGenerationCollector() {
+ return (FLAG_minor_mc) ? MINOR_MARK_COMPACTOR : SCAVENGER;
+ }
+
+ static inline const char* CollectorName(GarbageCollector collector) {
+ switch (collector) {
+ case SCAVENGER:
+ return "Scavenger";
+ case MARK_COMPACTOR:
+ return "Mark-Compact";
+ case MINOR_MARK_COMPACTOR:
+ return "Minor Mark-Compact";
+ }
+ return "Unknown collector";
+ }
+
+ V8_EXPORT_PRIVATE static double HeapGrowingFactor(double gc_speed,
+ double mutator_speed);
// Copy block of memory from src to dst. Size of block should be aligned
// by pointer size.
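
The three new static helpers centralize collector selection now that the young generation has two collectors: YoungGenerationCollector() returns the minor mark-compactor when the minor_mc flag is set and the Scavenger otherwise. A sketch of how a call site might use them (SelectCollector and the tracing function are illustrative, not part of this patch):

    // Illustrative call sites for the helpers above.
    GarbageCollector SelectCollector(bool collect_whole_heap) {
      if (collect_whole_heap) return MARK_COMPACTOR;
      return Heap::YoungGenerationCollector();  // SCAVENGER or MINOR_MARK_COMPACTOR
    }

    void TraceCollectorChoice(GarbageCollector collector) {
      const char* scope =
          Heap::IsYoungGenerationCollector(collector) ? "young" : "full";
      PrintF("%s GC using %s\n", scope, Heap::CollectorName(collector));
    }
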
@@ -835,11 +852,8 @@ class Heap {
// An object should be promoted if the object has survived a
// scavenge operation.
- template <PromotionMode promotion_mode>
inline bool ShouldBePromoted(Address old_address, int object_size);
- inline PromotionMode CurrentPromotionMode();
-
void ClearNormalizedMapCaches();
void IncrementDeferredCount(v8::Isolate::UseCounterFeature feature);
@@ -916,7 +930,7 @@ class Heap {
bool HasLowAllocationRate();
bool HasHighFragmentation();
- bool HasHighFragmentation(intptr_t used, intptr_t committed);
+ bool HasHighFragmentation(size_t used, size_t committed);
void ActivateMemoryReducerIfNeeded();
@@ -940,8 +954,8 @@ class Heap {
// Configure heap size in MB before setup. Return false if the heap has been
// set up already.
- bool ConfigureHeap(int max_semi_space_size, int max_old_space_size,
- int max_executable_size, size_t code_range_size);
+ bool ConfigureHeap(size_t max_semi_space_size, size_t max_old_space_size,
+ size_t max_executable_size, size_t code_range_size);
bool ConfigureHeapDefault();
// Prepares the heap, setting up memory areas that are needed in the isolate
@@ -952,6 +966,9 @@ class Heap {
// Returns whether it succeeded.
bool CreateHeapObjects();
+ // Create ObjectStats if live_object_stats_ or dead_object_stats_ is nullptr.
+ V8_INLINE void CreateObjectStats();
+
// Destroys all memory allocated by the heap.
void TearDown();
@@ -1023,6 +1040,14 @@ class Heap {
Handle<Object> root_handle(RootListIndex index) {
return Handle<Object>(&roots_[index]);
}
+ template <typename T>
+ bool IsRootHandle(Handle<T> handle, RootListIndex* index) const {
+ Object** const handle_location = bit_cast<Object**>(handle.address());
+ if (handle_location >= &roots_[kRootListLength]) return false;
+ if (handle_location < &roots_[0]) return false;
+ *index = static_cast<RootListIndex>(handle_location - &roots_[0]);
+ return true;
+ }
// Generated code can embed this address to get access to the roots.
Object** roots_array_start() { return roots_; }
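
IsRootHandle decides whether a handle points into the roots_ table with a plain address-range check and recovers the root index from the pointer offset. The same technique in a self-contained form (types simplified to void**):

    #include <cstddef>

    // A slot belongs to the table iff its address lies in
    // [table, table + length); the index is its distance from the start.
    bool SlotIndexInTable(void** slot, void** table, size_t length,
                          size_t* index) {
      if (slot < table || slot >= table + length) return false;
      *index = static_cast<size_t>(slot - table);
      return true;
    }
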
@@ -1127,13 +1152,8 @@ class Heap {
void IterateWeakRoots(ObjectVisitor* v, VisitMode mode);
// Iterate pointers of promoted objects.
- void IteratePromotedObject(HeapObject* target, int size,
- bool was_marked_black,
- ObjectSlotCallback callback);
-
- void IteratePromotedObjectPointers(HeapObject* object, Address start,
- Address end, bool record_slots,
- ObjectSlotCallback callback);
+ void IterateAndScavengePromotedObject(HeapObject* target, int size,
+ bool was_marked_black);
// ===========================================================================
// Store buffer API. =========================================================
@@ -1199,10 +1219,6 @@ class Heap {
EmbedderHeapTracer* embedder_heap_tracer() { return embedder_heap_tracer_; }
- EmbedderReachableReferenceReporter* embedder_reachable_reference_reporter() {
- return embedder_reference_reporter_;
- }
-
size_t wrappers_to_trace() { return wrappers_to_trace_.size(); }
// ===========================================================================
@@ -1274,20 +1290,20 @@ class Heap {
// ===========================================================================
// Returns the maximum amount of memory reserved for the heap.
- intptr_t MaxReserved() {
+ size_t MaxReserved() {
return 2 * max_semi_space_size_ + max_old_generation_size_;
}
- int MaxSemiSpaceSize() { return max_semi_space_size_; }
- int InitialSemiSpaceSize() { return initial_semispace_size_; }
- intptr_t MaxOldGenerationSize() { return max_old_generation_size_; }
- intptr_t MaxExecutableSize() { return max_executable_size_; }
+ size_t MaxSemiSpaceSize() { return max_semi_space_size_; }
+ size_t InitialSemiSpaceSize() { return initial_semispace_size_; }
+ size_t MaxOldGenerationSize() { return max_old_generation_size_; }
+ size_t MaxExecutableSize() { return max_executable_size_; }
// Returns the capacity of the heap in bytes w/o growing. Heap grows when
// more spaces are needed until it reaches the limit.
- intptr_t Capacity();
+ size_t Capacity();
// Returns the capacity of the old generation.
- intptr_t OldGenerationCapacity();
+ size_t OldGenerationCapacity();
// Returns the amount of memory currently committed for the heap.
size_t CommittedMemory();
@@ -1311,28 +1327,26 @@ class Heap {
// Returns the available bytes in space w/o growing.
// Heap doesn't guarantee that it can allocate an object that requires
// all available bytes. Check MaxHeapObjectSize() instead.
- intptr_t Available();
+ size_t Available();
// Returns of size of all objects residing in the heap.
- intptr_t SizeOfObjects();
+ size_t SizeOfObjects();
void UpdateSurvivalStatistics(int start_new_space_size);
- inline void IncrementPromotedObjectsSize(intptr_t object_size) {
- DCHECK_GE(object_size, 0);
+ inline void IncrementPromotedObjectsSize(size_t object_size) {
promoted_objects_size_ += object_size;
}
- inline intptr_t promoted_objects_size() { return promoted_objects_size_; }
+ inline size_t promoted_objects_size() { return promoted_objects_size_; }
- inline void IncrementSemiSpaceCopiedObjectSize(intptr_t object_size) {
- DCHECK_GE(object_size, 0);
+ inline void IncrementSemiSpaceCopiedObjectSize(size_t object_size) {
semi_space_copied_object_size_ += object_size;
}
- inline intptr_t semi_space_copied_object_size() {
+ inline size_t semi_space_copied_object_size() {
return semi_space_copied_object_size_;
}
- inline intptr_t SurvivedNewSpaceObjectSize() {
+ inline size_t SurvivedNewSpaceObjectSize() {
return promoted_objects_size_ + semi_space_copied_object_size_;
}
@@ -1342,20 +1356,13 @@ class Heap {
inline void IncrementNodesPromoted() { nodes_promoted_++; }
- inline void IncrementYoungSurvivorsCounter(intptr_t survived) {
- DCHECK_GE(survived, 0);
+ inline void IncrementYoungSurvivorsCounter(size_t survived) {
survived_last_scavenge_ = survived;
survived_since_last_expansion_ += survived;
}
- inline intptr_t PromotedTotalSize() {
- int64_t total = PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
- if (total > std::numeric_limits<intptr_t>::max()) {
- // TODO(erikcorry): Use uintptr_t everywhere we do heap size calculations.
- return std::numeric_limits<intptr_t>::max();
- }
- if (total < 0) return 0;
- return static_cast<intptr_t>(total);
+ inline uint64_t PromotedTotalSize() {
+ return PromotedSpaceSizeOfObjects() + PromotedExternalMemorySize();
}
inline void UpdateNewSpaceAllocationCounter();
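
PromotedTotalSize previously clamped its int64_t sum into intptr_t range; as a uint64_t sum of two non-negative sizes it needs no clamp, but callers that subtract from it must now order the operands themselves, because unsigned subtraction wraps rather than going negative. A minimal illustration of that guard (the same pattern OldGenerationSpaceAvailable adopts in a later hunk):

    #include <cstdint>

    // With unsigned sizes, limit - used wraps to a huge value when
    // used > limit, so available-space math checks the order first.
    uint64_t AvailableBytes(uint64_t limit, uint64_t used) {
      return used >= limit ? 0 : limit - used;
    }
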
@@ -1389,7 +1396,7 @@ class Heap {
int gc_count() const { return gc_count_; }
// Returns the size of objects residing in non new spaces.
- intptr_t PromotedSpaceSizeOfObjects();
+ size_t PromotedSpaceSizeOfObjects();
double total_regexp_code_generated() { return total_regexp_code_generated_; }
void IncreaseTotalRegexpCodeGenerated(int size) {
@@ -1439,6 +1446,10 @@ class Heap {
// ArrayBuffer tracking. =====================================================
// ===========================================================================
+ // TODO(gc): API usability: encapsulate mutation of JSArrayBuffer::is_external
+ // in the registration/unregistration APIs. Consider dropping the "New" from
+ // "RegisterNewArrayBuffer" because one can re-register a previously
+ // unregistered buffer, too, and the name is confusing.
void RegisterNewArrayBuffer(JSArrayBuffer* buffer);
void UnregisterArrayBuffer(JSArrayBuffer* buffer);
@@ -1770,6 +1781,8 @@ class Heap {
// Performs a major collection in the whole heap.
void MarkCompact();
+ // Performs a minor collection of just the young generation.
+ void MinorMarkCompact();
// Code to be run before and after mark-compact.
void MarkCompactPrologue();
@@ -1778,8 +1791,7 @@ class Heap {
// Performs a minor collection in new generation.
void Scavenge();
- Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front,
- PromotionMode promotion_mode);
+ Address DoScavenge(ObjectVisitor* scavenge_visitor, Address new_space_front);
void UpdateNewSpaceReferencesInExternalStringTable(
ExternalStringTableUpdaterCallback updater_func);
@@ -1797,8 +1809,27 @@ class Heap {
// GC statistics. ============================================================
// ===========================================================================
- inline intptr_t OldGenerationSpaceAvailable() {
- return old_generation_allocation_limit_ - PromotedTotalSize();
+ inline size_t OldGenerationSpaceAvailable() {
+ if (old_generation_allocation_limit_ <= PromotedTotalSize()) return 0;
+ return old_generation_allocation_limit_ -
+ static_cast<size_t>(PromotedTotalSize());
+ }
+
+ // We allow incremental marking to overshoot the allocation limit for
+ // performance reasons. If the overshoot is too large then we are more
+ // eager to finalize incremental marking.
+ inline bool AllocationLimitOvershotByLargeMargin() {
+ // This guards against too eager finalization in small heaps.
+ // The number is chosen based on v8.browsing_mobile on Nexus 7v2.
+ size_t kMarginForSmallHeaps = 32u * MB;
+ if (old_generation_allocation_limit_ >= PromotedTotalSize()) return false;
+ uint64_t overshoot = PromotedTotalSize() - old_generation_allocation_limit_;
+ // Overshoot margin is 50% of allocation limit or half-way to the max heap
+ // with special handling of small heaps.
+ uint64_t margin =
+ Min(Max(old_generation_allocation_limit_ / 2, kMarginForSmallHeaps),
+ (max_old_generation_size_ - old_generation_allocation_limit_) / 2);
+ return overshoot >= margin;
}
void UpdateTotalGCTime(double duration);
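
The margin rule above is easiest to see with concrete numbers. A standalone restatement of the predicate, with an illustrative data point (values in MB, chosen for the example, not taken from the patch):

    #include <algorithm>
    #include <cstdint>
    #include <cstdio>

    // Restatement of AllocationLimitOvershotByLargeMargin with the MB unit
    // factored out, for illustration only.
    bool OvershotByLargeMargin(uint64_t promoted, uint64_t limit,
                               uint64_t max_old_gen) {
      const uint64_t kMarginForSmallHeaps = 32;  // MB, as in the patch
      if (limit >= promoted) return false;
      const uint64_t overshoot = promoted - limit;
      const uint64_t margin =
          std::min(std::max(limit / 2, kMarginForSmallHeaps),
                   (max_old_gen - limit) / 2);
      return overshoot >= margin;
    }

    int main() {
      // limit = 128, max = 512, promoted = 200:
      // overshoot = 72; margin = min(max(64, 32), 192) = 64; 72 >= 64 -> true.
      std::printf("%d\n", OvershotByLargeMargin(200, 128, 512));
      return 0;
    }
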
@@ -1811,23 +1842,21 @@ class Heap {
// Decrease the allocation limit if the new limit based on the given
// parameters is lower than the current limit.
- void DampenOldGenerationAllocationLimit(intptr_t old_gen_size,
- double gc_speed,
+ void DampenOldGenerationAllocationLimit(size_t old_gen_size, double gc_speed,
double mutator_speed);
-
// Calculates the allocation limit based on a given growing factor and a
// given old generation size.
- intptr_t CalculateOldGenerationAllocationLimit(double factor,
- intptr_t old_gen_size);
+ size_t CalculateOldGenerationAllocationLimit(double factor,
+ size_t old_gen_size);
// Sets the allocation limit to trigger the next full garbage collection.
- void SetOldGenerationAllocationLimit(intptr_t old_gen_size, double gc_speed,
+ void SetOldGenerationAllocationLimit(size_t old_gen_size, double gc_speed,
double mutator_speed);
- intptr_t MinimumAllocationLimitGrowingStep();
+ size_t MinimumAllocationLimitGrowingStep();
- intptr_t old_generation_allocation_limit() const {
+ size_t old_generation_allocation_limit() const {
return old_generation_allocation_limit_;
}
@@ -1842,7 +1871,7 @@ class Heap {
return OldGenerationCapacity() + slack >= MaxOldGenerationSize();
}
- bool ShouldExpandOldGenerationOnAllocationFailure();
+ bool ShouldExpandOldGenerationOnSlowAllocation();
enum class IncrementalMarkingLimit { kNoLimit, kSoftLimit, kHardLimit };
IncrementalMarkingLimit IncrementalMarkingLimitReached();
@@ -2096,20 +2125,20 @@ class Heap {
Object* roots_[kRootListLength];
size_t code_range_size_;
- int max_semi_space_size_;
- int initial_semispace_size_;
- intptr_t max_old_generation_size_;
- intptr_t initial_old_generation_size_;
+ size_t max_semi_space_size_;
+ size_t initial_semispace_size_;
+ size_t max_old_generation_size_;
+ size_t initial_old_generation_size_;
bool old_generation_size_configured_;
- intptr_t max_executable_size_;
+ size_t max_executable_size_;
size_t maximum_committed_;
// For keeping track of how much data has survived
// scavenge since last new space expansion.
- intptr_t survived_since_last_expansion_;
+ size_t survived_since_last_expansion_;
// ... and since the last scavenge.
- intptr_t survived_last_scavenge_;
+ size_t survived_last_scavenge_;
// This is not the depth of nested AlwaysAllocateScope's but rather a single
// count, as scopes can be acquired from multiple tasks (read: threads).
@@ -2141,7 +2170,7 @@ class Heap {
Address new_space_top_after_last_gc_;
// Returns the amount of external memory registered since last global gc.
- int64_t PromotedExternalMemorySize();
+ uint64_t PromotedExternalMemorySize();
// How many "runtime allocations" happened.
uint32_t allocations_count_;
@@ -2170,7 +2199,7 @@ class Heap {
// is checked when we have already decided to do a GC to help determine
// which collector to invoke, before expanding a paged space in the old
// generation and on every allocation in large object space.
- intptr_t old_generation_allocation_limit_;
+ size_t old_generation_allocation_limit_;
// Indicates that inline bump-pointer allocation has been globally disabled
// for all spaces. This is used to disable allocations in generated code.
@@ -2200,11 +2229,11 @@ class Heap {
GCTracer* tracer_;
- intptr_t promoted_objects_size_;
+ size_t promoted_objects_size_;
double promotion_ratio_;
double promotion_rate_;
- intptr_t semi_space_copied_object_size_;
- intptr_t previous_semi_space_copied_object_size_;
+ size_t semi_space_copied_object_size_;
+ size_t previous_semi_space_copied_object_size_;
double semi_space_copied_rate_;
int nodes_died_in_new_space_;
int nodes_copied_in_new_space_;
@@ -2310,11 +2339,11 @@ class Heap {
int heap_iterator_depth_;
EmbedderHeapTracer* embedder_heap_tracer_;
- EmbedderReachableReferenceReporter* embedder_reference_reporter_;
std::vector<std::pair<void*, void*>> wrappers_to_trace_;
// Used for testing purposes.
bool force_oom_;
+ bool delay_sweeper_tasks_for_testing_;
// Classes in "heap" can be friends.
friend class AlwaysAllocateScope;
@@ -2324,7 +2353,6 @@ class Heap {
friend class IdleScavengeObserver;
friend class IncrementalMarking;
friend class IncrementalMarkingJob;
- friend class IteratePromotedObjectsVisitor;
friend class LargeObjectSpace;
friend class MarkCompactCollector;
friend class MarkCompactMarkingVisitor;
@@ -2633,18 +2661,6 @@ class AllocationObserver {
DISALLOW_COPY_AND_ASSIGN(AllocationObserver);
};
-class TracePossibleWrapperReporter : public EmbedderReachableReferenceReporter {
- public:
- explicit TracePossibleWrapperReporter(Heap* heap) : heap_(heap) {}
- void ReportExternalReference(Value* object) override {
- heap_->RegisterExternallyReferencedObject(
- reinterpret_cast<Object**>(object));
- }
-
- private:
- Heap* heap_;
-};
-
} // namespace internal
} // namespace v8