summaryrefslogtreecommitdiff
path: root/chromium/v8/src/heap/heap.h
diff options
context:
space:
mode:
author: Allan Sandfeld Jensen <allan.jensen@qt.io> 2020-10-12 14:27:29 +0200
committer: Allan Sandfeld Jensen <allan.jensen@qt.io> 2020-10-13 09:35:20 +0000
commitc30a6232df03e1efbd9f3b226777b07e087a1122 (patch)
treee992f45784689f373bcc38d1b79a239ebe17ee23 /chromium/v8/src/heap/heap.h
parent7b5b123ac58f58ffde0f4f6e488bcd09aa4decd3 (diff)
download: qtwebengine-chromium-85-based.tar.gz
BASELINE: Update Chromium to 85.0.4183.140 (tag: 85-based)
Change-Id: Iaa42f4680837c57725b1344f108c0196741f6057
Reviewed-by: Allan Sandfeld Jensen <allan.jensen@qt.io>
Diffstat (limited to 'chromium/v8/src/heap/heap.h')
-rw-r--r-- chromium/v8/src/heap/heap.h | 54
1 file changed, 37 insertions(+), 17 deletions(-)
diff --git a/chromium/v8/src/heap/heap.h b/chromium/v8/src/heap/heap.h
index 888d174c02f..91214f40398 100644
--- a/chromium/v8/src/heap/heap.h
+++ b/chromium/v8/src/heap/heap.h
@@ -63,6 +63,7 @@ using v8::MemoryPressureLevel;
class AllocationObserver;
class ArrayBufferCollector;
class ArrayBufferSweeper;
+class BasicMemoryChunk;
class CodeLargeObjectSpace;
class ConcurrentMarking;
class GCIdleTimeHandler;
@@ -88,6 +89,7 @@ class Page;
class PagedSpace;
class ReadOnlyHeap;
class RootVisitor;
+class SafepointScope;
class ScavengeJob;
class Scavenger;
class ScavengerCollector;
@@ -449,7 +451,7 @@ class Heap {
void NotifyBootstrapComplete();
- void NotifyOldGenerationExpansion();
+ void NotifyOldGenerationExpansion(AllocationSpace space, MemoryChunk* chunk);
inline Address* NewSpaceAllocationTopAddress();
inline Address* NewSpaceAllocationLimitAddress();
@@ -458,8 +460,9 @@ class Heap {
// Move len non-weak tagged elements from src_slot to dst_slot of dst_object.
// The source and destination memory ranges can overlap.
- void MoveRange(HeapObject dst_object, ObjectSlot dst_slot,
- ObjectSlot src_slot, int len, WriteBarrierMode mode);
+ V8_EXPORT_PRIVATE void MoveRange(HeapObject dst_object, ObjectSlot dst_slot,
+ ObjectSlot src_slot, int len,
+ WriteBarrierMode mode);
// Copy len non-weak tagged elements from src_slot to dst_slot of dst_object.
// The source and destination memory ranges must not overlap.
@@ -474,6 +477,9 @@ class Heap {
V8_EXPORT_PRIVATE HeapObject CreateFillerObjectAt(
Address addr, int size, ClearRecordedSlots clear_slots_mode);
+ void CreateFillerObjectAtBackground(Address addr, int size,
+ ClearFreedMemoryMode clear_memory_mode);
+
template <typename T>
void CreateFillerForArray(T object, int elements_to_trim, int bytes_to_trim);
@@ -663,6 +669,8 @@ class Heap {
void SetSerializedObjects(FixedArray objects);
void SetSerializedGlobalProxySizes(FixedArray sizes);
+ void SetBasicBlockProfilingData(Handle<ArrayList> list);
+
// For post mortem debugging.
void RememberUnmappedPage(Address page, bool compacted);
@@ -749,7 +757,7 @@ class Heap {
void TearDown();
// Returns whether SetUp has been called.
- bool HasBeenSetUp();
+ bool HasBeenSetUp() const;
// ===========================================================================
// Getters for spaces. =======================================================
@@ -769,9 +777,6 @@ class Heap {
inline PagedSpace* paged_space(int idx);
inline Space* space(int idx);
- // Returns name of the space.
- V8_EXPORT_PRIVATE static const char* GetSpaceName(AllocationSpace space);
-
// ===========================================================================
// Getters to other components. ==============================================
// ===========================================================================
@@ -779,6 +784,9 @@ class Heap {
GCTracer* tracer() { return tracer_.get(); }
MemoryAllocator* memory_allocator() { return memory_allocator_.get(); }
+ const MemoryAllocator* memory_allocator() const {
+ return memory_allocator_.get();
+ }
inline Isolate* isolate();
@@ -1056,7 +1064,7 @@ class Heap {
return local_embedder_heap_tracer_.get();
}
- void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
+ V8_EXPORT_PRIVATE void SetEmbedderHeapTracer(EmbedderHeapTracer* tracer);
EmbedderHeapTracer* GetEmbedderHeapTracer() const;
void RegisterExternallyReferencedObject(Address* location);
@@ -1107,15 +1115,15 @@ class Heap {
// Checks whether an address/object is in the non-read-only heap (including
// auxiliary area and unused area). Use IsValidHeapObject if checking both
// heaps is required.
- V8_EXPORT_PRIVATE bool Contains(HeapObject value);
+ V8_EXPORT_PRIVATE bool Contains(HeapObject value) const;
// Checks whether an address/object in a space.
// Currently used by tests, serialization and heap verification only.
- V8_EXPORT_PRIVATE bool InSpace(HeapObject value, AllocationSpace space);
+ V8_EXPORT_PRIVATE bool InSpace(HeapObject value, AllocationSpace space) const;
// Slow methods that can be used for verification as they can also be used
// with off-heap Addresses.
- V8_EXPORT_PRIVATE bool InSpaceSlow(Address addr, AllocationSpace space);
+ V8_EXPORT_PRIVATE bool InSpaceSlow(Address addr, AllocationSpace space) const;
static inline Heap* FromWritableHeapObject(HeapObject obj);
@@ -1539,12 +1547,15 @@ class Heap {
Heap* heap_;
base::Mutex mutex_;
base::ConditionVariable cond_;
- bool requested_;
+ bool gc_requested_;
+ bool shutdown_requested_;
public:
- explicit CollectionBarrier(Heap* heap) : heap_(heap), requested_(false) {}
+ explicit CollectionBarrier(Heap* heap)
+ : heap_(heap), gc_requested_(false), shutdown_requested_(false) {}
- void Increment();
+ void CollectionPerformed();
+ void ShutdownRequested();
void Wait();
};
@@ -1635,6 +1646,9 @@ class Heap {
// over all objects. May cause a GC.
void MakeHeapIterable();
+ // Ensure that LABs of local heaps are iterable.
+ void MakeLocalHeapLabsIterable();
+
// Performs garbage collection in a safepoint.
// Returns the number of freed global handles.
size_t PerformGarbageCollection(
@@ -1771,6 +1785,7 @@ class Heap {
// Code that should be run before and after each GC. Includes some
// reporting/verification activities when compiled with DEBUG set.
void GarbageCollectionPrologue();
+ void GarbageCollectionPrologueInSafepoint();
void GarbageCollectionEpilogue();
void GarbageCollectionEpilogueInSafepoint();
@@ -1851,11 +1866,14 @@ class Heap {
bool always_allocate() { return always_allocate_scope_count_ != 0; }
V8_EXPORT_PRIVATE bool CanExpandOldGeneration(size_t size);
+ V8_EXPORT_PRIVATE bool CanExpandOldGenerationBackground(size_t size);
bool ShouldExpandOldGenerationOnSlowAllocation(
LocalHeap* local_heap = nullptr);
bool IsRetryOfFailedAllocation(LocalHeap* local_heap);
+ void AlwaysAllocateAfterTearDownStarted();
+
HeapGrowingMode CurrentHeapGrowingMode();
double PercentToOldGenerationLimit();
@@ -1867,7 +1885,7 @@ class Heap {
return FLAG_global_gc_scheduling && local_embedder_heap_tracer();
}
- size_t GlobalMemoryAvailable();
+ base::Optional<size_t> GlobalMemoryAvailable();
void RecomputeLimits(GarbageCollector collector);
@@ -2269,6 +2287,7 @@ class Heap {
friend class IncrementalMarking;
friend class IncrementalMarkingJob;
friend class OffThreadHeap;
+ friend class OffThreadSpace;
friend class OldLargeObjectSpace;
template <typename ConcreteVisitor, typename MarkingState>
friend class MarkingVisitorBase;
@@ -2389,12 +2408,12 @@ class CodePageCollectionMemoryModificationScope {
// was registered to be executable. It can be used by concurrent threads.
class CodePageMemoryModificationScope {
public:
- explicit inline CodePageMemoryModificationScope(MemoryChunk* chunk);
+ explicit inline CodePageMemoryModificationScope(BasicMemoryChunk* chunk);
explicit inline CodePageMemoryModificationScope(Code object);
inline ~CodePageMemoryModificationScope();
private:
- MemoryChunk* chunk_;
+ BasicMemoryChunk* chunk_;
bool scope_active_;
// Disallow any GCs inside this scope, as a relocation of the underlying
@@ -2497,6 +2516,7 @@ class V8_EXPORT_PRIVATE HeapObjectIterator {
DISALLOW_HEAP_ALLOCATION(no_heap_allocation_)
Heap* heap_;
+ std::unique_ptr<SafepointScope> safepoint_scope_;
HeapObjectsFiltering filtering_;
HeapObjectsFilter* filter_;
// Space iterator for iterating all the spaces.