summary refs log tree commit diff
path: root/deps/v8/src/heap/cppgc/marker.cc
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/src/heap/cppgc/marker.cc')
-rw-r--r-- deps/v8/src/heap/cppgc/marker.cc | 77
1 files changed, 42 insertions, 35 deletions
diff --git a/deps/v8/src/heap/cppgc/marker.cc b/deps/v8/src/heap/cppgc/marker.cc
index e792c4c844..fa8732fde7 100644
--- a/deps/v8/src/heap/cppgc/marker.cc
+++ b/deps/v8/src/heap/cppgc/marker.cc
@@ -60,33 +60,11 @@ bool ExitIncrementalMarkingIfNeeded(Marker::MarkingConfig config,
return false;
}
-// Visit remembered set that was recorded in the generational barrier.
-void VisitRememberedSlots(HeapBase& heap,
- MutatorMarkingState& mutator_marking_state) {
-#if defined(CPPGC_YOUNG_GENERATION)
- StatsCollector::EnabledScope stats_scope(
- heap.stats_collector(), StatsCollector::kMarkVisitRememberedSets);
- for (void* slot : heap.remembered_slots()) {
- auto& slot_header = BasePage::FromInnerAddress(&heap, slot)
- ->ObjectHeaderFromInnerAddress(slot);
- if (slot_header.IsYoung()) continue;
- // The design of young generation requires collections to be executed at the
- // top level (with the guarantee that no objects are currently being in
- // construction). This can be ensured by running young GCs from safe points
- // or by reintroducing nested allocation scopes that avoid finalization.
- DCHECK(!slot_header.template IsInConstruction<AccessMode::kNonAtomic>());
-
- void* value = *reinterpret_cast<void**>(slot);
- mutator_marking_state.DynamicallyMarkAddress(static_cast<Address>(value));
- }
-#endif
-}
-
static constexpr size_t kDefaultDeadlineCheckInterval = 150u;
template <size_t kDeadlineCheckInterval = kDefaultDeadlineCheckInterval,
typename WorklistLocal, typename Callback>
-bool DrainWorklistWithBytesAndTimeDeadline(MarkingStateBase& marking_state,
+bool DrainWorklistWithBytesAndTimeDeadline(BasicMarkingState& marking_state,
size_t marked_bytes_deadline,
v8::base::TimeTicks time_deadline,
WorklistLocal& worklist_local,
@@ -153,7 +131,7 @@ void MarkerBase::IncrementalMarkingTask::Run() {
}
}
-MarkerBase::MarkerBase(Key, HeapBase& heap, cppgc::Platform* platform,
+MarkerBase::MarkerBase(HeapBase& heap, cppgc::Platform* platform,
MarkingConfig config)
: heap_(heap),
config_(config),
@@ -248,6 +226,13 @@ void MarkerBase::StartMarking() {
incremental_marking_allocation_observer_.get());
}
}
+void MarkerBase::HandleNotFullyConstructedObjects() {
+ if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
+ mutator_marking_state_.FlushNotFullyConstructedObjects();
+ } else {
+ MarkNotFullyConstructedObjects();
+ }
+}
void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
StatsCollector::EnabledScope top_stats_scope(heap().stats_collector(),
@@ -271,12 +256,7 @@ void MarkerBase::EnterAtomicPause(MarkingConfig::StackState stack_state) {
{
// VisitRoots also resets the LABs.
VisitRoots(config_.stack_state);
- if (config_.stack_state == MarkingConfig::StackState::kNoHeapPointers) {
- mutator_marking_state_.FlushNotFullyConstructedObjects();
- DCHECK(marking_worklists_.not_fully_constructed_worklist()->IsEmpty());
- } else {
- MarkNotFullyConstructedObjects();
- }
+ HandleNotFullyConstructedObjects();
}
if (heap().marking_support() ==
MarkingConfig::MarkingType::kIncrementalAndConcurrent) {
@@ -339,12 +319,32 @@ void MarkerBase::ProcessWeakness() {
heap().GetWeakCrossThreadPersistentRegion().Trace(&visitor());
// Call weak callbacks on objects that may now be pointing to dead objects.
- MarkingWorklists::WeakCallbackItem item;
LivenessBroker broker = LivenessBrokerFactory::Create();
+#if defined(CPPGC_YOUNG_GENERATION)
+ auto& remembered_set = heap().remembered_set();
+ if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
+ // Custom callbacks assume that untraced pointers point to not yet freed
+ // objects. They must make sure that upon callback completion no
+ // UntracedMember points to a freed object. This may not hold true if a
+ // custom callback for an old object operates with a reference to a young
+ // object that was freed on a minor collection cycle. To maintain the
+ // invariant that UntracedMembers always point to valid objects, execute
+ // custom callbacks for old objects on each minor collection cycle.
+ remembered_set.ExecuteCustomCallbacks(broker);
+ } else {
+ // For major GCs, just release all the remembered weak callbacks.
+ remembered_set.ReleaseCustomCallbacks();
+ }
+#endif // defined(CPPGC_YOUNG_GENERATION)
+
+ MarkingWorklists::WeakCallbackItem item;
MarkingWorklists::WeakCallbackWorklist::Local& local =
mutator_marking_state_.weak_callback_worklist();
while (local.Pop(&item)) {
item.callback(broker, item.parameter);
+#if defined(CPPGC_YOUNG_GENERATION)
+ heap().remembered_set().AddWeakCallback(item);
+#endif // defined(CPPGC_YOUNG_GENERATION)
}
// Weak callbacks should not add any new objects for marking.
@@ -372,9 +372,13 @@ void MarkerBase::VisitRoots(MarkingConfig::StackState stack_state) {
heap().stats_collector(), StatsCollector::kMarkVisitStack);
heap().stack()->IteratePointers(&stack_visitor());
}
+#if defined(CPPGC_YOUNG_GENERATION)
if (config_.collection_type == MarkingConfig::CollectionType::kMinor) {
- VisitRememberedSlots(heap(), mutator_marking_state_);
+ StatsCollector::EnabledScope stats_scope(
+ heap().stats_collector(), StatsCollector::kMarkVisitRememberedSets);
+ heap().remembered_set().Visit(visitor(), mutator_marking_state_);
}
+#endif // defined(CPPGC_YOUNG_GENERATION)
}
bool MarkerBase::VisitCrossThreadPersistentsIfNeeded() {
@@ -434,6 +438,10 @@ bool MarkerBase::CancelConcurrentMarkingIfNeeded() {
concurrent_marker_->Cancel();
concurrent_marking_active_ = false;
+ // Concurrent markers may have pushed some "leftover" in-construction objects
+ // after flushing in EnterAtomicPause.
+ HandleNotFullyConstructedObjects();
+ DCHECK(marking_worklists_.not_fully_constructed_worklist()->IsEmpty());
return true;
}
@@ -618,9 +626,8 @@ void MarkerBase::WaitForConcurrentMarkingForTesting() {
concurrent_marker_->JoinForTesting();
}
-Marker::Marker(Key key, HeapBase& heap, cppgc::Platform* platform,
- MarkingConfig config)
- : MarkerBase(key, heap, platform, config),
+Marker::Marker(HeapBase& heap, cppgc::Platform* platform, MarkingConfig config)
+ : MarkerBase(heap, platform, config),
marking_visitor_(heap, mutator_marking_state_),
conservative_marking_visitor_(heap, mutator_marking_state_,
marking_visitor_) {