Diffstat (limited to 'deps/v8/src/heap/spaces.cc')
-rw-r--r--  deps/v8/src/heap/spaces.cc  98
1 file changed, 52 insertions(+), 46 deletions(-)
diff --git a/deps/v8/src/heap/spaces.cc b/deps/v8/src/heap/spaces.cc
index a29cb88d5a..822a06ec8a 100644
--- a/deps/v8/src/heap/spaces.cc
+++ b/deps/v8/src/heap/spaces.cc
@@ -14,9 +14,7 @@
#include "src/base/sanitizer/msan.h"
#include "src/common/globals.h"
#include "src/heap/base/active-system-pages.h"
-#include "src/heap/combined-heap.h"
#include "src/heap/concurrent-marking.h"
-#include "src/heap/heap-controller.h"
#include "src/heap/heap.h"
#include "src/heap/incremental-marking-inl.h"
#include "src/heap/invalidated-slots-inl.h"
@@ -229,55 +227,40 @@ void Space::RemoveAllocationObserver(AllocationObserver* observer) {
allocation_counter_.RemoveAllocationObserver(observer);
}
-void Space::PauseAllocationObservers() { allocation_counter_.Pause(); }
-
-void Space::ResumeAllocationObservers() { allocation_counter_.Resume(); }
-
Address SpaceWithLinearArea::ComputeLimit(Address start, Address end,
size_t min_size) const {
DCHECK_GE(end - start, min_size);
- if (!allocation_info_.enabled()) {
+ // During GCs we always use the full LAB.
+ if (heap()->IsInGC()) return end;
+
+ if (!heap()->IsInlineAllocationEnabled()) {
// LABs are disabled, so we fit the requested area exactly.
return start + min_size;
}
- if (SupportsAllocationObserver() && allocation_counter_.IsActive()) {
+ // When LABs are enabled, pick the largest possible LAB size by default.
+ size_t step_size = end - start;
+
+ if (SupportsAllocationObserver() && heap()->IsAllocationObserverActive()) {
// Ensure there are no unaccounted allocations.
DCHECK_EQ(allocation_info_.start(), allocation_info_.top());
- // Generated code may allocate inline from the linear allocation area for.
- // To make sure we can observe these allocations, we use a lower limit.
size_t step = allocation_counter_.NextBytes();
DCHECK_NE(step, 0);
- size_t rounded_step =
- RoundSizeDownToObjectAlignment(static_cast<int>(step - 1));
- // Use uint64_t to avoid overflow on 32-bit
- uint64_t step_end =
- static_cast<uint64_t>(start) + std::max(min_size, rounded_step);
- uint64_t new_end = std::min(step_end, static_cast<uint64_t>(end));
- return static_cast<Address>(new_end);
+ // Generated code may allocate inline from the linear allocation area. To
+ // make sure we can observe these allocations, we use a lower limit.
+ size_t rounded_step = static_cast<size_t>(
+ RoundSizeDownToObjectAlignment(static_cast<int>(step - 1)));
+ step_size = std::min(step_size, rounded_step);
}
- // LABs are enabled and no observers attached. Return the whole node for the
- // LAB.
- return end;
-}
-
-void SpaceWithLinearArea::DisableInlineAllocation() {
- if (!allocation_info_.enabled()) return;
-
- allocation_info_.SetEnabled(false);
- FreeLinearAllocationArea();
- UpdateInlineAllocationLimit(0);
-}
-
-void SpaceWithLinearArea::EnableInlineAllocation() {
- if (allocation_info_.enabled()) return;
+ if (v8_flags.stress_marking) {
+ step_size = std::min(step_size, static_cast<size_t>(64));
+ }
- allocation_info_.SetEnabled(true);
- AdvanceAllocationObservers();
- UpdateInlineAllocationLimit(0);
+ DCHECK_LE(start + step_size, end);
+ return start + std::max(step_size, min_size);
}
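Note: the control flow of the rewritten ComputeLimit can be read as the standalone sketch below. This is a hedged approximation: ComputeLimitSketch and its flag parameters stand in for heap()->IsInGC(), heap()->IsInlineAllocationEnabled(), heap()->IsAllocationObserverActive(), allocation_counter_.NextBytes() and v8_flags.stress_marking, and the 8-byte alignment constant is an assumption, not V8's actual API.

#include <algorithm>
#include <cstddef>
#include <cstdint>

using Address = uintptr_t;
constexpr size_t kObjectAlignment = 8;  // assumed tagged-pointer alignment

size_t RoundDownToObjectAlignment(size_t size) {
  return size & ~(kObjectAlignment - 1);
}

// Caller guarantees end - start >= min_size (the DCHECK_GE above).
Address ComputeLimitSketch(Address start, Address end, size_t min_size,
                           bool in_gc, bool inline_allocation_enabled,
                           bool observers_active, size_t next_step_bytes,
                           bool stress_marking) {
  // During GCs, always hand out the full linear allocation buffer (LAB).
  if (in_gc) return end;
  // LABs disabled: fit the requested area exactly.
  if (!inline_allocation_enabled) return start + min_size;

  // Default to the largest possible LAB, then clamp it.
  size_t step_size = end - start;
  if (observers_active) {
    // Rounding (step - 1) down keeps the limit strictly below the next
    // observer step, so the allocation that crosses the step cannot be
    // served inline and must take the observable slow path.
    step_size = std::min(step_size,
                         RoundDownToObjectAlignment(next_step_bytes - 1));
  }
  // --stress-marking: cap the LAB at 64 bytes to force frequent steps.
  if (stress_marking) step_size = std::min(step_size, size_t{64});

  return start + std::max(step_size, min_size);
}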
void SpaceWithLinearArea::UpdateAllocationOrigins(AllocationOrigin origin) {
@@ -335,7 +318,7 @@ void SpaceWithLinearArea::AddAllocationObserver(AllocationObserver* observer) {
if (!allocation_counter_.IsStepInProgress()) {
AdvanceAllocationObservers();
Space::AddAllocationObserver(observer);
- UpdateInlineAllocationLimit(0);
+ UpdateInlineAllocationLimit();
} else {
Space::AddAllocationObserver(observer);
}
@@ -346,7 +329,7 @@ void SpaceWithLinearArea::RemoveAllocationObserver(
if (!allocation_counter_.IsStepInProgress()) {
AdvanceAllocationObservers();
Space::RemoveAllocationObserver(observer);
- UpdateInlineAllocationLimit(0);
+ UpdateInlineAllocationLimit();
} else {
Space::RemoveAllocationObserver(observer);
}
@@ -354,20 +337,20 @@ void SpaceWithLinearArea::RemoveAllocationObserver(
void SpaceWithLinearArea::PauseAllocationObservers() {
AdvanceAllocationObservers();
- Space::PauseAllocationObservers();
}
void SpaceWithLinearArea::ResumeAllocationObservers() {
- Space::ResumeAllocationObservers();
MarkLabStartInitialized();
- UpdateInlineAllocationLimit(0);
+ UpdateInlineAllocationLimit();
}
void SpaceWithLinearArea::AdvanceAllocationObservers() {
if (allocation_info_.top() &&
allocation_info_.start() != allocation_info_.top()) {
- allocation_counter_.AdvanceAllocationObservers(allocation_info_.top() -
- allocation_info_.start());
+ if (heap()->IsAllocationObserverActive()) {
+ allocation_counter_.AdvanceAllocationObservers(allocation_info_.top() -
+ allocation_info_.start());
+ }
MarkLabStartInitialized();
}
}
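Note: the guarded call above credits the bytes already allocated in the current LAB (top - start) against the pending observer step. A toy model of that bookkeeping, assuming a single observer and omitting V8's multi-observer and re-entrancy handling:

#include <cstddef>

// Toy single-observer counter; V8's AllocationCounter tracks several
// observers with individual step sizes, which this sketch omits.
class ToyAllocationCounter {
 public:
  explicit ToyAllocationCounter(size_t step) : next_bytes_(step) {}

  // Bytes remaining until the next observer step fires.
  size_t NextBytes() const { return next_bytes_; }

  // Credit `allocated` bytes (top - start of the current LAB) against the
  // pending step without firing observers, mirroring the call above.
  void AdvanceAllocationObservers(size_t allocated) {
    next_bytes_ = allocated < next_bytes_ ? next_bytes_ - allocated : 0;
  }

 private:
  size_t next_bytes_;
};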
@@ -399,7 +382,8 @@ void SpaceWithLinearArea::InvokeAllocationObservers(
DCHECK(size_in_bytes == aligned_size_in_bytes ||
aligned_size_in_bytes == allocation_size);
- if (!SupportsAllocationObserver() || !allocation_counter_.IsActive()) return;
+ if (!SupportsAllocationObserver() || !heap()->IsAllocationObserverActive())
+ return;
if (allocation_size >= allocation_counter_.NextBytes()) {
// Only the first object in a LAB should reach the next step.
@@ -434,9 +418,8 @@ void SpaceWithLinearArea::InvokeAllocationObservers(
DCHECK_EQ(saved_allocation_info.limit(), allocation_info_.limit());
}
- DCHECK_IMPLIES(allocation_counter_.IsActive(),
- (allocation_info_.limit() - allocation_info_.start()) <
- allocation_counter_.NextBytes());
+ DCHECK_LT(allocation_info_.limit() - allocation_info_.start(),
+ allocation_counter_.NextBytes());
}
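Note: the DCHECK_IMPLIES could be simplified to DCHECK_LT because the early return added at the top of InvokeAllocationObservers already filters out the inactive-observer case, so the old IsActive() premise always holds here. A self-contained illustration of the invariant (the step value and addresses are made up for the demo):

#include <cassert>
#include <cstddef>

int main() {
  const size_t next_bytes = 512;             // stand-in for NextBytes()
  const size_t lab_start = 0x1000;
  const size_t lab_limit = lab_start + 384;  // limit re-trimmed after a step

  // Invariant asserted above: with observers active, the LAB is always
  // trimmed so that limit - start stays strictly below the next step.
  assert(lab_limit - lab_start < next_bytes);
  return 0;
}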
#if DEBUG
@@ -458,5 +441,28 @@ int MemoryChunk::FreeListsLength() {
return length;
}
+SpaceIterator::SpaceIterator(Heap* heap)
+ : heap_(heap), current_space_(FIRST_MUTABLE_SPACE) {}
+
+SpaceIterator::~SpaceIterator() = default;
+
+bool SpaceIterator::HasNext() {
+ while (current_space_ <= LAST_MUTABLE_SPACE) {
+ Space* space = heap_->space(current_space_);
+ if (space) return true;
+ ++current_space_;
+ }
+
+ // No more spaces left.
+ return false;
+}
+
+Space* SpaceIterator::Next() {
+ DCHECK_LE(current_space_, LAST_MUTABLE_SPACE);
+ Space* space = heap_->space(current_space_++);
+ DCHECK_NOT_NULL(space);
+ return space;
+}
+
} // namespace internal
} // namespace v8
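Note: a hedged usage sketch for the SpaceIterator added in this diff. HasNext() skips unallocated space slots, which is why Next() can assert non-null; the visiting function below is hypothetical, and only the iterator's public API from the diff is assumed.

// Hypothetical caller, not part of this change.
void VisitMutableSpaces(Heap* heap) {
  SpaceIterator it(heap);
  while (it.HasNext()) {
    Space* space = it.Next();  // non-null: HasNext() already skipped gaps
    // ... inspect `space` ...
  }
}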