Diffstat (limited to 'deps/v8/src/heap/incremental-marking.cc')
-rw-r--r--  deps/v8/src/heap/incremental-marking.cc | 321
1 file changed, 181 insertions(+), 140 deletions(-)
diff --git a/deps/v8/src/heap/incremental-marking.cc b/deps/v8/src/heap/incremental-marking.cc
index 3d31d19d2e..58731d570b 100644
--- a/deps/v8/src/heap/incremental-marking.cc
+++ b/deps/v8/src/heap/incremental-marking.cc
@@ -17,16 +17,27 @@
#include "src/heap/objects-visiting.h"
#include "src/tracing/trace-event.h"
#include "src/v8.h"
+#include "src/visitors.h"
+#include "src/vm-state-inl.h"
namespace v8 {
namespace internal {
+void IncrementalMarking::Observer::Step(int bytes_allocated, Address, size_t) {
+ VMState<GC> state(incremental_marking_.heap()->isolate());
+ RuntimeCallTimerScope runtime_timer(
+ incremental_marking_.heap()->isolate(),
+ &RuntimeCallStats::GC_IncrementalMarkingObserver);
+ incremental_marking_.AdvanceIncrementalMarkingOnAllocation();
+}
+
IncrementalMarking::IncrementalMarking(Heap* heap)
: heap_(heap),
- state_(STOPPED),
+ marking_deque_(nullptr),
initial_old_generation_size_(0),
bytes_marked_ahead_of_schedule_(0),
unscanned_bytes_of_large_object_(0),
+ state_(STOPPED),
idle_marking_delay_counter_(0),
incremental_marking_finalization_rounds_(0),
is_compacting_(false),
@@ -41,15 +52,13 @@ IncrementalMarking::IncrementalMarking(Heap* heap)
bool IncrementalMarking::BaseRecordWrite(HeapObject* obj, Object* value) {
HeapObject* value_heap_obj = HeapObject::cast(value);
- DCHECK(!ObjectMarking::IsImpossible(value_heap_obj,
- MarkingState::Internal(value_heap_obj)));
- DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
+ DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(
+ value_heap_obj, marking_state(value_heap_obj)));
+ DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
const bool is_black =
- ObjectMarking::IsBlack(obj, MarkingState::Internal(obj));
+ ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj));
- if (is_black && ObjectMarking::IsWhite(
- value_heap_obj, MarkingState::Internal(value_heap_obj))) {
- WhiteToGreyAndPush(value_heap_obj);
+ if (is_black && WhiteToGreyAndPush(value_heap_obj)) {
RestartIfNotMarking();
}
return is_compacting_ && is_black;
@@ -120,9 +129,26 @@ void IncrementalMarking::RecordWriteIntoCodeSlow(Code* host, RelocInfo* rinfo,
}
}
-void IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
- ObjectMarking::WhiteToGrey(obj, MarkingState::Internal(obj));
- heap_->mark_compact_collector()->marking_deque()->Push(obj);
+bool IncrementalMarking::WhiteToGreyAndPush(HeapObject* obj) {
+ if (ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj))) {
+ marking_deque()->Push(obj);
+ return true;
+ }
+ return false;
+}
+
+void IncrementalMarking::MarkBlackAndPush(HeapObject* obj) {
+ // Color the object black and push it into the bailout deque.
+ ObjectMarking::WhiteToGrey<kAtomicity>(obj, marking_state(obj));
+ if (ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj))) {
+#ifdef V8_CONCURRENT_MARKING
+ marking_deque()->Push(obj, MarkingThread::kMain, TargetDeque::kBailout);
+#else
+ if (!marking_deque()->Push(obj)) {
+ ObjectMarking::BlackToGrey<kAtomicity>(obj, marking_state(obj));
+ }
+#endif
+ }
}
void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
@@ -132,24 +158,42 @@ void IncrementalMarking::TransferMark(Heap* heap, HeapObject* from,
DCHECK(MemoryChunk::FromAddress(from->address()) ==
MemoryChunk::FromAddress(to->address()));
- if (!heap->incremental_marking()->IsMarking()) return;
+ if (!IsMarking()) return;
// If the mark doesn't move, we don't check the color of the object.
// It doesn't matter whether the object is black, since it hasn't changed
// size, so the adjustment to the live data count will be zero anyway.
if (from == to) return;
- MarkBit new_mark_bit =
- ObjectMarking::MarkBitFrom(to, MarkingState::Internal(to));
- MarkBit old_mark_bit =
- ObjectMarking::MarkBitFrom(from, MarkingState::Internal(from));
+ MarkBit new_mark_bit = ObjectMarking::MarkBitFrom(to, marking_state(to));
+ MarkBit old_mark_bit = ObjectMarking::MarkBitFrom(from, marking_state(from));
- if (Marking::IsBlack(old_mark_bit)) {
- Marking::MarkBlack(new_mark_bit);
- } else if (Marking::IsGrey(old_mark_bit)) {
- Marking::WhiteToGrey(new_mark_bit);
- heap->mark_compact_collector()->marking_deque()->Push(to);
- heap->incremental_marking()->RestartIfNotMarking();
+ if (Marking::IsBlack<kAtomicity>(old_mark_bit)) {
+ if (from->address() + kPointerSize == to->address()) {
+ // The old and the new markbits overlap. The |to| object has the
+ // grey color. To make it black, we need to set the second bit.
+ DCHECK(new_mark_bit.Get<kAtomicity>());
+ new_mark_bit.Next().Set<kAtomicity>();
+ } else {
+ bool success = Marking::WhiteToBlack<kAtomicity>(new_mark_bit);
+ DCHECK(success);
+ USE(success);
+ }
+ } else if (Marking::IsGrey<kAtomicity>(old_mark_bit)) {
+ if (from->address() + kPointerSize == to->address()) {
+ // The old and the new markbits overlap. The |to| object has the
+ // white color. To make it grey, we need to set the first bit.
+ // Note that Marking::WhiteToGrey does not work here because
+ // old_mark_bit.Next() can be set by the concurrent marker at any time.
+ new_mark_bit.Set();
+ DCHECK(!new_mark_bit.Next().Get());
+ } else {
+ bool success = Marking::WhiteToGrey<kAtomicity>(new_mark_bit);
+ DCHECK(success);
+ USE(success);
+ }
+ marking_deque()->Push(to);
+ RestartIfNotMarking();
}
}
@@ -186,16 +230,16 @@ class IncrementalMarkingMarkingVisitor
HeapObject::RawField(object, end_offset));
start_offset = end_offset;
end_offset = Min(object_size, end_offset + kProgressBarScanningChunk);
- scan_until_end =
- heap->mark_compact_collector()->marking_deque()->IsFull();
+ scan_until_end = heap->incremental_marking()->marking_deque()->IsFull();
} while (scan_until_end && start_offset < object_size);
chunk->set_progress_bar(start_offset);
if (start_offset < object_size) {
- if (ObjectMarking::IsGrey(object, MarkingState::Internal(object))) {
- heap->mark_compact_collector()->marking_deque()->Unshift(object);
+ if (ObjectMarking::IsGrey<IncrementalMarking::kAtomicity>(
+ object, heap->incremental_marking()->marking_state(object))) {
+ heap->incremental_marking()->marking_deque()->Unshift(object);
} else {
- DCHECK(
- ObjectMarking::IsBlack(object, MarkingState::Internal(object)));
+ DCHECK(ObjectMarking::IsBlack<IncrementalMarking::kAtomicity>(
+ object, heap->incremental_marking()->marking_state(object)));
heap->mark_compact_collector()->UnshiftBlack(object);
}
heap->incremental_marking()->NotifyIncompleteScanOfObject(
@@ -218,11 +262,11 @@ class IncrementalMarkingMarkingVisitor
HeapObject* heap_obj = HeapObject::cast(cache);
// Mark the object grey if it is white; do not enqueue it into the marking
// deque.
- if (ObjectMarking::IsWhite(heap_obj,
- MarkingState::Internal(heap_obj))) {
- ObjectMarking::WhiteToGrey(heap_obj,
- MarkingState::Internal(heap_obj));
- }
+ Heap* heap = map->GetHeap();
+ bool ignored =
+ ObjectMarking::WhiteToGrey<IncrementalMarking::kAtomicity>(
+ heap_obj, heap->incremental_marking()->marking_state(heap_obj));
+ USE(ignored);
}
}
VisitNativeContext(map, context);
@@ -249,46 +293,43 @@ class IncrementalMarkingMarkingVisitor
// Marks the object grey and pushes it on the marking stack.
INLINE(static void MarkObject(Heap* heap, Object* obj)) {
- IncrementalMarking::MarkGrey(heap, HeapObject::cast(obj));
+ heap->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
}
// Marks the object black without pushing it on the marking stack.
// Returns true if object needed marking and false otherwise.
INLINE(static bool MarkObjectWithoutPush(Heap* heap, Object* obj)) {
HeapObject* heap_object = HeapObject::cast(obj);
- if (ObjectMarking::IsWhite(heap_object,
- MarkingState::Internal(heap_object))) {
- ObjectMarking::WhiteToBlack(heap_object,
- MarkingState::Internal(heap_object));
- return true;
- }
- return false;
+ return ObjectMarking::WhiteToBlack<IncrementalMarking::kAtomicity>(
+ heap_object, heap->incremental_marking()->marking_state(heap_object));
}
};
void IncrementalMarking::IterateBlackObject(HeapObject* object) {
if (IsMarking() &&
- ObjectMarking::IsBlack(object, MarkingState::Internal(object))) {
+ ObjectMarking::IsBlack<kAtomicity>(object, marking_state(object))) {
Page* page = Page::FromAddress(object->address());
if ((page->owner() != nullptr) && (page->owner()->identity() == LO_SPACE)) {
// IterateBlackObject requires us to visit the whole object.
page->ResetProgressBar();
}
Map* map = object->map();
- MarkGrey(heap_, map);
+ WhiteToGreyAndPush(map);
IncrementalMarkingMarkingVisitor::IterateBody(map, object);
}
}
-class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor {
+class IncrementalMarkingRootMarkingVisitor : public RootVisitor {
public:
explicit IncrementalMarkingRootMarkingVisitor(
IncrementalMarking* incremental_marking)
: heap_(incremental_marking->heap()) {}
- void VisitPointer(Object** p) override { MarkObjectByPointer(p); }
+ void VisitRootPointer(Root root, Object** p) override {
+ MarkObjectByPointer(p);
+ }
- void VisitPointers(Object** start, Object** end) override {
+ void VisitRootPointers(Root root, Object** start, Object** end) override {
for (Object** p = start; p < end; p++) MarkObjectByPointer(p);
}
@@ -297,7 +338,7 @@ class IncrementalMarkingRootMarkingVisitor : public ObjectVisitor {
Object* obj = *p;
if (!obj->IsHeapObject()) return;
- IncrementalMarking::MarkGrey(heap_, HeapObject::cast(obj));
+ heap_->incremental_marking()->WhiteToGreyAndPush(HeapObject::cast(obj));
}
Heap* heap_;
@@ -528,7 +569,7 @@ void IncrementalMarking::StartMarking() {
PatchIncrementalMarkingRecordWriteStubs(heap_, mode);
- heap_->mark_compact_collector()->marking_deque()->StartUsing();
+ marking_deque()->StartUsing();
ActivateIncrementalWriteBarrier();
@@ -548,10 +589,6 @@ void IncrementalMarking::StartMarking() {
if (FLAG_concurrent_marking) {
ConcurrentMarking* concurrent_marking = heap_->concurrent_marking();
- heap_->mark_compact_collector()->marking_deque()->Iterate(
- [concurrent_marking](HeapObject* obj) {
- concurrent_marking->AddRoot(obj);
- });
concurrent_marking->StartTask();
}
@@ -574,6 +611,19 @@ void IncrementalMarking::StartBlackAllocation() {
}
}
+void IncrementalMarking::PauseBlackAllocation() {
+ DCHECK(FLAG_black_allocation);
+ DCHECK(IsMarking());
+ heap()->old_space()->UnmarkAllocationInfo();
+ heap()->map_space()->UnmarkAllocationInfo();
+ heap()->code_space()->UnmarkAllocationInfo();
+ if (FLAG_trace_incremental_marking) {
+ heap()->isolate()->PrintWithTimestamp(
+ "[IncrementalMarking] Black allocation paused\n");
+ }
+ black_allocation_ = false;
+}
+
void IncrementalMarking::FinishBlackAllocation() {
if (black_allocation_) {
black_allocation_ = false;
@@ -614,7 +664,7 @@ void IncrementalMarking::ProcessWeakCells() {
HeapObject* value = HeapObject::cast(weak_cell->value());
// Remove weak cells with live objects from the list; they do not need
// clearing.
- if (ObjectMarking::IsBlackOrGrey(value, MarkingState::Internal(value))) {
+ if (ObjectMarking::IsBlackOrGrey<kAtomicity>(value, marking_state(value))) {
// Record slot, if value is pointing to an evacuation candidate.
Object** slot = HeapObject::RawField(weak_cell, WeakCell::kValueOffset);
heap_->mark_compact_collector()->RecordSlot(weak_cell, slot, *slot);
@@ -643,10 +693,12 @@ bool ShouldRetainMap(Map* map, int age) {
return false;
}
Object* constructor = map->GetConstructor();
+ Heap* heap = map->GetHeap();
if (!constructor->IsHeapObject() ||
- ObjectMarking::IsWhite(
+ ObjectMarking::IsWhite<IncrementalMarking::kAtomicity>(
HeapObject::cast(constructor),
- MarkingState::Internal(HeapObject::cast(constructor)))) {
+ heap->incremental_marking()->marking_state(
+ HeapObject::cast(constructor)))) {
// The constructor is dead, no new objects with this map can
// be created. Do not retain this map.
return false;
@@ -676,15 +728,15 @@ void IncrementalMarking::RetainMaps() {
int new_age;
Map* map = Map::cast(cell->value());
if (i >= number_of_disposed_maps && !map_retaining_is_disabled &&
- ObjectMarking::IsWhite(map, MarkingState::Internal(map))) {
+ ObjectMarking::IsWhite<kAtomicity>(map, marking_state(map))) {
if (ShouldRetainMap(map, age)) {
- MarkGrey(heap(), map);
+ WhiteToGreyAndPush(map);
}
Object* prototype = map->prototype();
if (age > 0 && prototype->IsHeapObject() &&
- ObjectMarking::IsWhite(
+ ObjectMarking::IsWhite<kAtomicity>(
HeapObject::cast(prototype),
- MarkingState::Internal(HeapObject::cast(prototype)))) {
+ marking_state(HeapObject::cast(prototype)))) {
// The prototype is not marked, age the map.
new_age = age - 1;
} else {
@@ -709,9 +761,6 @@ void IncrementalMarking::FinalizeIncrementally() {
double start = heap_->MonotonicallyIncreasingTimeInMs();
- int old_marking_deque_top =
- heap_->mark_compact_collector()->marking_deque()->top();
-
// After finishing incremental marking, we try to discover all unmarked
// objects to reduce the marking load in the final pause.
// 1) We scan and mark the roots again to find all changes to the root set.
@@ -728,11 +777,9 @@ void IncrementalMarking::FinalizeIncrementally() {
ProcessWeakCells();
int marking_progress =
- abs(old_marking_deque_top -
- heap_->mark_compact_collector()->marking_deque()->top());
-
- marking_progress += static_cast<int>(
- heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
+ heap_->mark_compact_collector()->marking_deque()->Size() +
+ static_cast<int>(
+ heap_->local_embedder_heap_tracer()->NumberOfCachedWrappersToTrace());
double end = heap_->MonotonicallyIncreasingTimeInMs();
double delta = end - start;
@@ -764,97 +811,90 @@ void IncrementalMarking::FinalizeIncrementally() {
void IncrementalMarking::UpdateMarkingDequeAfterScavenge() {
if (!IsMarking()) return;
- MarkingDeque* marking_deque =
- heap_->mark_compact_collector()->marking_deque();
- int current = marking_deque->bottom();
- int mask = marking_deque->mask();
- int limit = marking_deque->top();
- HeapObject** array = marking_deque->array();
- int new_top = current;
-
Map* filler_map = heap_->one_pointer_filler_map();
- while (current != limit) {
- HeapObject* obj = array[current];
+ marking_deque()->Update([this, filler_map](HeapObject* obj) -> HeapObject* {
DCHECK(obj->IsHeapObject());
- current = ((current + 1) & mask);
// Only pointers to from space have to be updated.
if (heap_->InFromSpace(obj)) {
MapWord map_word = obj->map_word();
- // There may be objects on the marking deque that do not exist anymore,
- // e.g. left trimmed objects or objects from the root set (frames).
- // If these object are dead at scavenging time, their marking deque
- // entries will not point to forwarding addresses. Hence, we can discard
- // them.
- if (map_word.IsForwardingAddress()) {
- HeapObject* dest = map_word.ToForwardingAddress();
- if (ObjectMarking::IsBlack(dest, MarkingState::Internal(dest)))
- continue;
- array[new_top] = dest;
- new_top = ((new_top + 1) & mask);
- DCHECK(new_top != marking_deque->bottom());
- DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
- (obj->IsFiller() &&
- ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))));
+ if (!map_word.IsForwardingAddress()) {
+ // There may be objects on the marking deque that do not exist anymore,
+ // e.g. left trimmed objects or objects from the root set (frames).
+ // If these objects are dead at scavenging time, their marking deque
+ // entries will not point to forwarding addresses. Hence, we can discard
+ // them.
+ return nullptr;
+ }
+ HeapObject* dest = map_word.ToForwardingAddress();
+ DCHECK_IMPLIES(
+ ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj)),
+ obj->IsFiller());
+ return dest;
+ } else if (heap_->InToSpace(obj)) {
+ // The object may be on a page that was moved in new space.
+ DCHECK(
+ Page::FromAddress(obj->address())->IsFlagSet(Page::SWEEP_TO_ITERATE));
+ return ObjectMarking::IsBlack<kAtomicity>(obj,
+ MarkingState::External(obj))
+ ? obj
+ : nullptr;
+ } else {
+ // The object may be on a page that was moved from new to old space.
+ if (Page::FromAddress(obj->address())
+ ->IsFlagSet(Page::SWEEP_TO_ITERATE)) {
+ return ObjectMarking::IsBlack<kAtomicity>(obj,
+ MarkingState::External(obj))
+ ? obj
+ : nullptr;
}
- } else if (obj->map() != filler_map) {
+ DCHECK_IMPLIES(
+ ObjectMarking::IsWhite<kAtomicity>(obj, marking_state(obj)),
+ obj->IsFiller());
// Skip one-word filler objects that appear on the
// stack when we perform in-place array shift.
- array[new_top] = obj;
- new_top = ((new_top + 1) & mask);
- DCHECK(new_top != marking_deque->bottom());
- DCHECK(ObjectMarking::IsGrey(obj, MarkingState::Internal(obj)) ||
- (obj->IsFiller() &&
- ObjectMarking::IsWhite(obj, MarkingState::Internal(obj))) ||
- (MemoryChunk::FromAddress(obj->address())
- ->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
- ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))));
+ return (obj->map() == filler_map) ? nullptr : obj;
}
- }
- marking_deque->set_top(new_top);
+ });
}
-
-void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
- MarkGrey(heap_, map);
-
- IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
-
-#if ENABLE_SLOW_DCHECKS
- MarkBit mark_bit =
- ObjectMarking::MarkBitFrom(obj, MarkingState::Internal(obj));
+bool IncrementalMarking::IsFixedArrayWithProgressBar(HeapObject* obj) {
+ if (!obj->IsFixedArray()) return false;
MemoryChunk* chunk = MemoryChunk::FromAddress(obj->address());
- SLOW_DCHECK(Marking::IsGrey(mark_bit) ||
- (chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR) &&
- Marking::IsBlack(mark_bit)));
-#endif
- MarkBlack(obj, size);
+ return chunk->IsFlagSet(MemoryChunk::HAS_PROGRESS_BAR);
}
-void IncrementalMarking::MarkGrey(Heap* heap, HeapObject* object) {
- if (ObjectMarking::IsWhite(object, MarkingState::Internal(object))) {
- heap->incremental_marking()->WhiteToGreyAndPush(object);
- }
-}
-
-void IncrementalMarking::MarkBlack(HeapObject* obj, int size) {
- if (ObjectMarking::IsBlack(obj, MarkingState::Internal(obj))) return;
- ObjectMarking::GreyToBlack(obj, MarkingState::Internal(obj));
+void IncrementalMarking::VisitObject(Map* map, HeapObject* obj, int size) {
+ MarkBit mark_bit = ObjectMarking::MarkBitFrom(obj, marking_state(obj));
+ DCHECK(Marking::IsGrey<kAtomicity>(mark_bit) ||
+ Marking::IsBlack<kAtomicity>(mark_bit));
+ USE(mark_bit);
+ // The object can already be black in three cases:
+ // 1. The object is a fixed array with the progress bar.
+ // 2. The object is a JSObject that was colored black before
+ // unsafe layout change.
+ // 3. The object is a string that was colored black before
+ // unsafe layout change.
+ if (!ObjectMarking::GreyToBlack<kAtomicity>(obj, marking_state(obj))) {
+ DCHECK(IsFixedArrayWithProgressBar(obj) || obj->IsJSObject() ||
+ obj->IsString());
+ }
+ DCHECK(ObjectMarking::IsBlack<kAtomicity>(obj, marking_state(obj)));
+ WhiteToGreyAndPush(map);
+ IncrementalMarkingMarkingVisitor::IterateBody(map, obj);
}
intptr_t IncrementalMarking::ProcessMarkingDeque(
intptr_t bytes_to_process, ForceCompletionAction completion) {
intptr_t bytes_processed = 0;
- MarkingDeque* marking_deque =
- heap_->mark_compact_collector()->marking_deque();
- while (!marking_deque->IsEmpty() && (bytes_processed < bytes_to_process ||
- completion == FORCE_COMPLETION)) {
- HeapObject* obj = marking_deque->Pop();
+ while (!marking_deque()->IsEmpty() && (bytes_processed < bytes_to_process ||
+ completion == FORCE_COMPLETION)) {
+ HeapObject* obj = marking_deque()->Pop();
// Left trimming may result in white, grey, or black filler objects on the
// marking deque. Ignore these objects.
if (obj->IsFiller()) {
- DCHECK(!ObjectMarking::IsImpossible(obj, MarkingState::Internal(obj)));
+ DCHECK(!ObjectMarking::IsImpossible<kAtomicity>(obj, marking_state(obj)));
continue;
}
@@ -879,7 +919,7 @@ void IncrementalMarking::Hurry() {
// forced e.g. in tests. It should not happen when COMPLETE was set when
// incremental marking finished and a regular GC was triggered after that
// because should_hurry_ will force a full GC.
- if (!heap_->mark_compact_collector()->marking_deque()->IsEmpty()) {
+ if (!marking_deque()->IsEmpty()) {
double start = 0.0;
if (FLAG_trace_incremental_marking) {
start = heap_->MonotonicallyIncreasingTimeInMs();
@@ -909,9 +949,10 @@ void IncrementalMarking::Hurry() {
HeapObject* cache = HeapObject::cast(
Context::cast(context)->get(Context::NORMALIZED_MAP_CACHE_INDEX));
if (!cache->IsUndefined(heap_->isolate())) {
- if (ObjectMarking::IsGrey(cache, MarkingState::Internal(cache))) {
- ObjectMarking::GreyToBlack(cache, MarkingState::Internal(cache));
- }
+ // Mark the cache black if it is grey.
+ bool ignored =
+ ObjectMarking::GreyToBlack<kAtomicity>(cache, marking_state(cache));
+ USE(ignored);
}
context = Context::cast(context)->next_context_link();
}
@@ -1040,7 +1081,7 @@ double IncrementalMarking::AdvanceIncrementalMarking(
remaining_time_in_ms =
deadline_in_ms - heap()->MonotonicallyIncreasingTimeInMs();
} while (remaining_time_in_ms >= kStepSizeInMs && !IsComplete() &&
- !heap()->mark_compact_collector()->marking_deque()->IsEmpty());
+ !marking_deque()->IsEmpty());
return remaining_time_in_ms;
}
@@ -1142,7 +1183,7 @@ size_t IncrementalMarking::Step(size_t bytes_to_process,
bytes_marked_ahead_of_schedule_ += bytes_processed;
}
- if (heap_->mark_compact_collector()->marking_deque()->IsEmpty()) {
+ if (marking_deque()->IsEmpty()) {
if (heap_->local_embedder_heap_tracer()
->ShouldFinalizeIncrementalMarking()) {
if (completion == FORCE_COMPLETION ||