summaryrefslogtreecommitdiff
path: root/deps/v8/test/unittests/heap
diff options
context:
space:
mode:
Diffstat (limited to 'deps/v8/test/unittests/heap')
-rw-r--r--deps/v8/test/unittests/heap/base/run-all-unittests.cc17
-rw-r--r--deps/v8/test/unittests/heap/base/worklist-unittest.cc311
-rw-r--r--deps/v8/test/unittests/heap/cppgc/concurrent-marking-unittest.cc159
-rw-r--r--deps/v8/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc2
-rw-r--r--deps/v8/test/unittests/heap/cppgc/gc-invoker-unittest.cc43
-rw-r--r--deps/v8/test/unittests/heap/cppgc/heap-growing-unittest.cc39
-rw-r--r--deps/v8/test/unittests/heap/cppgc/incremental-marking-schedule-unittest.cc95
-rw-r--r--deps/v8/test/unittests/heap/cppgc/marker-unittest.cc173
-rw-r--r--deps/v8/test/unittests/heap/cppgc/marking-verifier-unittest.cc22
-rw-r--r--deps/v8/test/unittests/heap/cppgc/marking-visitor-unittest.cc4
-rw-r--r--deps/v8/test/unittests/heap/cppgc/member-unittest.cc35
-rw-r--r--deps/v8/test/unittests/heap/cppgc/sweeper-unittest.cc2
-rw-r--r--deps/v8/test/unittests/heap/cppgc/test-platform.cc85
-rw-r--r--deps/v8/test/unittests/heap/cppgc/test-platform.h66
-rw-r--r--deps/v8/test/unittests/heap/cppgc/worklist-unittest.cc346
-rw-r--r--deps/v8/test/unittests/heap/cppgc/write-barrier-unittest.cc54
-rw-r--r--deps/v8/test/unittests/heap/heap-unittest.cc2
-rw-r--r--deps/v8/test/unittests/heap/index-generator-unittest.cc50
-rw-r--r--deps/v8/test/unittests/heap/object-start-bitmap-unittest.cc174
-rw-r--r--deps/v8/test/unittests/heap/safepoint-unittest.cc14
-rw-r--r--deps/v8/test/unittests/heap/unmapper-unittest.cc1
21 files changed, 1207 insertions, 487 deletions
diff --git a/deps/v8/test/unittests/heap/base/run-all-unittests.cc b/deps/v8/test/unittests/heap/base/run-all-unittests.cc
new file mode 100644
index 0000000000..cdc862e309
--- /dev/null
+++ b/deps/v8/test/unittests/heap/base/run-all-unittests.cc
@@ -0,0 +1,17 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "testing/gmock/include/gmock/gmock.h"
+
+int main(int argc, char** argv) {
+ // Don't catch SEH exceptions and continue as the following tests might hang
+ // in a broken environment on Windows.
+ testing::GTEST_FLAG(catch_exceptions) = false;
+
+ // Most unit-tests are multi-threaded, so enable thread-safe death-tests.
+ testing::FLAGS_gtest_death_test_style = "threadsafe";
+
+ testing::InitGoogleMock(&argc, argv);
+ return RUN_ALL_TESTS();
+}
diff --git a/deps/v8/test/unittests/heap/base/worklist-unittest.cc b/deps/v8/test/unittests/heap/base/worklist-unittest.cc
new file mode 100644
index 0000000000..ae737a7aa3
--- /dev/null
+++ b/deps/v8/test/unittests/heap/base/worklist-unittest.cc
@@ -0,0 +1,311 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/heap/base/worklist.h"
+
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace heap {
+namespace base {
+
+class SomeObject {};
+
+using TestWorklist = Worklist<SomeObject*, 64>;
+
+TEST(CppgcWorkListTest, SegmentCreate) {
+ TestWorklist::Segment segment;
+ EXPECT_TRUE(segment.IsEmpty());
+ EXPECT_EQ(0u, segment.Size());
+ EXPECT_FALSE(segment.IsFull());
+}
+
+TEST(CppgcWorkListTest, SegmentPush) {
+ TestWorklist::Segment segment;
+ EXPECT_EQ(0u, segment.Size());
+ segment.Push(nullptr);
+ EXPECT_EQ(1u, segment.Size());
+}
+
+TEST(CppgcWorkListTest, SegmentPushPop) {
+ TestWorklist::Segment segment;
+ segment.Push(nullptr);
+ EXPECT_EQ(1u, segment.Size());
+ SomeObject dummy;
+ SomeObject* object = &dummy;
+ segment.Pop(&object);
+ EXPECT_EQ(0u, segment.Size());
+ EXPECT_EQ(nullptr, object);
+}
+
+TEST(CppgcWorkListTest, SegmentIsEmpty) {
+ TestWorklist::Segment segment;
+ EXPECT_TRUE(segment.IsEmpty());
+ segment.Push(nullptr);
+ EXPECT_FALSE(segment.IsEmpty());
+}
+
+TEST(CppgcWorkListTest, SegmentIsFull) {
+ TestWorklist::Segment segment;
+ EXPECT_FALSE(segment.IsFull());
+ for (size_t i = 0; i < TestWorklist::Segment::kSize; i++) {
+ segment.Push(nullptr);
+ }
+ EXPECT_TRUE(segment.IsFull());
+}
+
+TEST(CppgcWorkListTest, SegmentClear) {
+ TestWorklist::Segment segment;
+ segment.Push(nullptr);
+ EXPECT_FALSE(segment.IsEmpty());
+ segment.Clear();
+ EXPECT_TRUE(segment.IsEmpty());
+ for (size_t i = 0; i < TestWorklist::Segment::kSize; i++) {
+ segment.Push(nullptr);
+ }
+}
+
+TEST(CppgcWorkListTest, SegmentUpdateFalse) {
+ TestWorklist::Segment segment;
+ SomeObject* object;
+ object = reinterpret_cast<SomeObject*>(&object);
+ segment.Push(object);
+ segment.Update([](SomeObject* object, SomeObject** out) { return false; });
+ EXPECT_TRUE(segment.IsEmpty());
+}
+
+TEST(CppgcWorkListTest, SegmentUpdate) {
+ TestWorklist::Segment segment;
+ SomeObject* objectA;
+ objectA = reinterpret_cast<SomeObject*>(&objectA);
+ SomeObject* objectB;
+ objectB = reinterpret_cast<SomeObject*>(&objectB);
+ segment.Push(objectA);
+ segment.Update([objectB](SomeObject* object, SomeObject** out) {
+ *out = objectB;
+ return true;
+ });
+ SomeObject* object;
+ segment.Pop(&object);
+ EXPECT_EQ(object, objectB);
+}
+
+TEST(CppgcWorkListTest, CreateEmpty) {
+ TestWorklist worklist;
+ TestWorklist::Local worklist_local(&worklist);
+ EXPECT_TRUE(worklist_local.IsLocalEmpty());
+ EXPECT_TRUE(worklist.IsEmpty());
+}
+
+TEST(CppgcWorkListTest, LocalPushPop) {
+ TestWorklist worklist;
+ TestWorklist::Local worklist_local(&worklist);
+ SomeObject dummy;
+ SomeObject* retrieved = nullptr;
+ worklist_local.Push(&dummy);
+ EXPECT_FALSE(worklist_local.IsLocalEmpty());
+ EXPECT_TRUE(worklist_local.Pop(&retrieved));
+ EXPECT_EQ(&dummy, retrieved);
+}
+
+TEST(CppgcWorkListTest, LocalPushStaysPrivate) {
+ TestWorklist worklist;
+ TestWorklist::Local worklist_view1(&worklist);
+ TestWorklist::Local worklist_view2(&worklist);
+ SomeObject dummy;
+ SomeObject* retrieved = nullptr;
+ EXPECT_TRUE(worklist.IsEmpty());
+ EXPECT_EQ(0U, worklist.Size());
+ worklist_view1.Push(&dummy);
+ EXPECT_EQ(0U, worklist.Size());
+ EXPECT_FALSE(worklist_view2.Pop(&retrieved));
+ EXPECT_EQ(nullptr, retrieved);
+ EXPECT_TRUE(worklist_view1.Pop(&retrieved));
+ EXPECT_EQ(&dummy, retrieved);
+ EXPECT_EQ(0U, worklist.Size());
+}
+
+TEST(CppgcWorkListTest, GlobalUpdateNull) {
+ TestWorklist worklist;
+ TestWorklist::Local worklist_local(&worklist);
+ SomeObject* object;
+ object = reinterpret_cast<SomeObject*>(&object);
+ for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
+ worklist_local.Push(object);
+ }
+ worklist_local.Push(object);
+ worklist_local.Publish();
+ worklist.Update([](SomeObject* object, SomeObject** out) { return false; });
+ EXPECT_TRUE(worklist.IsEmpty());
+ EXPECT_EQ(0U, worklist.Size());
+}
+
+TEST(CppgcWorkListTest, GlobalUpdate) {
+ TestWorklist worklist;
+ TestWorklist::Local worklist_local(&worklist);
+ SomeObject* objectA = nullptr;
+ objectA = reinterpret_cast<SomeObject*>(&objectA);
+ SomeObject* objectB = nullptr;
+ objectB = reinterpret_cast<SomeObject*>(&objectB);
+ SomeObject* objectC = nullptr;
+ objectC = reinterpret_cast<SomeObject*>(&objectC);
+ for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
+ worklist_local.Push(objectA);
+ }
+ for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
+ worklist_local.Push(objectB);
+ }
+ worklist_local.Push(objectA);
+ worklist_local.Publish();
+ worklist.Update([objectA, objectC](SomeObject* object, SomeObject** out) {
+ if (object != objectA) {
+ *out = objectC;
+ return true;
+ }
+ return false;
+ });
+ for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
+ SomeObject* object;
+ EXPECT_TRUE(worklist_local.Pop(&object));
+ EXPECT_EQ(object, objectC);
+ }
+}
+
+TEST(CppgcWorkListTest, FlushToGlobalPushSegment) {
+ TestWorklist worklist;
+ TestWorklist::Local worklist_local0(&worklist);
+ TestWorklist::Local worklist_local1(&worklist);
+ SomeObject* object = nullptr;
+ SomeObject* objectA = nullptr;
+ objectA = reinterpret_cast<SomeObject*>(&objectA);
+ worklist_local0.Push(objectA);
+ worklist_local0.Publish();
+ EXPECT_EQ(1U, worklist.Size());
+ EXPECT_TRUE(worklist_local1.Pop(&object));
+}
+
+TEST(CppgcWorkListTest, FlushToGlobalPopSegment) {
+ TestWorklist worklist;
+ TestWorklist::Local worklist_local0(&worklist);
+ TestWorklist::Local worklist_local1(&worklist);
+ SomeObject* object = nullptr;
+ SomeObject* objectA = nullptr;
+ objectA = reinterpret_cast<SomeObject*>(&objectA);
+ worklist_local0.Push(objectA);
+ worklist_local0.Push(objectA);
+ worklist_local0.Pop(&object);
+ worklist_local0.Publish();
+ EXPECT_EQ(1U, worklist.Size());
+ EXPECT_TRUE(worklist_local1.Pop(&object));
+}
+
+TEST(CppgcWorkListTest, Clear) {
+ TestWorklist worklist;
+ TestWorklist::Local worklist_local(&worklist);
+ SomeObject* object;
+ object = reinterpret_cast<SomeObject*>(&object);
+ worklist_local.Push(object);
+ worklist_local.Publish();
+ EXPECT_EQ(1U, worklist.Size());
+ worklist.Clear();
+ EXPECT_TRUE(worklist.IsEmpty());
+ EXPECT_EQ(0U, worklist.Size());
+}
+
+TEST(CppgcWorkListTest, SingleSegmentSteal) {
+ TestWorklist worklist;
+ TestWorklist::Local worklist_local1(&worklist);
+ TestWorklist::Local worklist_local2(&worklist);
+ SomeObject dummy;
+ for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
+ worklist_local1.Push(&dummy);
+ }
+ SomeObject* retrieved = nullptr;
+ // One more push/pop to publish the full segment.
+ worklist_local1.Push(nullptr);
+ EXPECT_TRUE(worklist_local1.Pop(&retrieved));
+ EXPECT_EQ(nullptr, retrieved);
+ EXPECT_EQ(1U, worklist.Size());
+ // Stealing.
+ for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
+ EXPECT_TRUE(worklist_local2.Pop(&retrieved));
+ EXPECT_EQ(&dummy, retrieved);
+ EXPECT_FALSE(worklist_local1.Pop(&retrieved));
+ }
+ EXPECT_TRUE(worklist.IsEmpty());
+ EXPECT_EQ(0U, worklist.Size());
+}
+
+TEST(CppgcWorkListTest, MultipleSegmentsStolen) {
+ TestWorklist worklist;
+ TestWorklist::Local worklist_local1(&worklist);
+ TestWorklist::Local worklist_local2(&worklist);
+ TestWorklist::Local worklist_local3(&worklist);
+ SomeObject dummy1;
+ SomeObject dummy2;
+ for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
+ worklist_local1.Push(&dummy1);
+ }
+ for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
+ worklist_local1.Push(&dummy2);
+ }
+ SomeObject* retrieved = nullptr;
+ SomeObject dummy3;
+ // One more push/pop to publish the full segment.
+ worklist_local1.Push(&dummy3);
+ EXPECT_TRUE(worklist_local1.Pop(&retrieved));
+ EXPECT_EQ(&dummy3, retrieved);
+ EXPECT_EQ(2U, worklist.Size());
+ // Stealing.
+ EXPECT_TRUE(worklist_local2.Pop(&retrieved));
+ SomeObject* const expect_bag2 = retrieved;
+ EXPECT_TRUE(worklist_local3.Pop(&retrieved));
+ SomeObject* const expect_bag3 = retrieved;
+ EXPECT_EQ(0U, worklist.Size());
+ EXPECT_NE(expect_bag2, expect_bag3);
+ EXPECT_TRUE(expect_bag2 == &dummy1 || expect_bag2 == &dummy2);
+ EXPECT_TRUE(expect_bag3 == &dummy1 || expect_bag3 == &dummy2);
+ for (size_t i = 1; i < TestWorklist::kSegmentSize; i++) {
+ EXPECT_TRUE(worklist_local2.Pop(&retrieved));
+ EXPECT_EQ(expect_bag2, retrieved);
+ EXPECT_FALSE(worklist_local1.Pop(&retrieved));
+ }
+ for (size_t i = 1; i < TestWorklist::kSegmentSize; i++) {
+ EXPECT_TRUE(worklist_local3.Pop(&retrieved));
+ EXPECT_EQ(expect_bag3, retrieved);
+ EXPECT_FALSE(worklist_local1.Pop(&retrieved));
+ }
+ EXPECT_TRUE(worklist.IsEmpty());
+}
+
+TEST(CppgcWorkListTest, MergeGlobalPool) {
+ TestWorklist worklist1;
+ TestWorklist::Local worklist_local1(&worklist1);
+ SomeObject dummy;
+ for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
+ worklist_local1.Push(&dummy);
+ }
+ SomeObject* retrieved = nullptr;
+ // One more push/pop to publish the full segment.
+ worklist_local1.Push(nullptr);
+ EXPECT_TRUE(worklist_local1.Pop(&retrieved));
+ EXPECT_EQ(nullptr, retrieved);
+ EXPECT_EQ(1U, worklist1.Size());
+ // Merging global pool into a new Worklist.
+ TestWorklist worklist2;
+ TestWorklist::Local worklist_local2(&worklist2);
+ EXPECT_EQ(0U, worklist2.Size());
+ worklist2.Merge(&worklist1);
+ EXPECT_EQ(1U, worklist2.Size());
+ EXPECT_FALSE(worklist2.IsEmpty());
+ for (size_t i = 0; i < TestWorklist::kSegmentSize; i++) {
+ EXPECT_TRUE(worklist_local2.Pop(&retrieved));
+ EXPECT_EQ(&dummy, retrieved);
+ EXPECT_FALSE(worklist_local1.Pop(&retrieved));
+ }
+ EXPECT_TRUE(worklist1.IsEmpty());
+ EXPECT_TRUE(worklist2.IsEmpty());
+}
+
+} // namespace base
+} // namespace heap
diff --git a/deps/v8/test/unittests/heap/cppgc/concurrent-marking-unittest.cc b/deps/v8/test/unittests/heap/cppgc/concurrent-marking-unittest.cc
new file mode 100644
index 0000000000..b39a545b7b
--- /dev/null
+++ b/deps/v8/test/unittests/heap/cppgc/concurrent-marking-unittest.cc
@@ -0,0 +1,159 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "include/cppgc/allocation.h"
+#include "include/cppgc/default-platform.h"
+#include "include/cppgc/member.h"
+#include "include/cppgc/persistent.h"
+#include "src/heap/cppgc/globals.h"
+#include "src/heap/cppgc/marker.h"
+#include "src/heap/cppgc/marking-visitor.h"
+#include "src/heap/cppgc/stats-collector.h"
+#include "test/unittests/heap/cppgc/tests.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace cppgc {
+namespace internal {
+
+#if defined(THREAD_SANITIZER)
+
+namespace {
+
+class GCed : public GarbageCollected<GCed> {
+ public:
+ void Trace(cppgc::Visitor* visitor) const { visitor->Trace(child_); }
+
+ Member<GCed> child_;
+};
+
+class GCedWithCallback : public GarbageCollected<GCedWithCallback> {
+ public:
+ template <typename Callback>
+ explicit GCedWithCallback(Callback callback) {
+ callback(this);
+ }
+
+ void Trace(cppgc::Visitor* visitor) const { visitor->Trace(child_); }
+
+ Member<GCedWithCallback> child_;
+};
+
+class Mixin : public GarbageCollectedMixin {
+ public:
+ void Trace(cppgc::Visitor* visitor) const { visitor->Trace(child_); }
+
+ Member<Mixin> child_;
+};
+
+class GCedWithMixin : public GarbageCollected<GCedWithMixin>, public Mixin {
+ public:
+ void Trace(cppgc::Visitor* visitor) const { Mixin::Trace(visitor); }
+};
+
+template <typename T>
+class GCedHolder : public GarbageCollected<GCedHolder<T>> {
+ public:
+ void Trace(cppgc::Visitor* visitor) const { visitor->Trace(object_); }
+
+ Member<T> object_;
+};
+
+class ConcurrentMarkingTest : public testing::TestWithHeap {
+ public:
+ using Config = Heap::Config;
+ static constexpr Config ConcurrentPreciseConfig = {
+ Config::CollectionType::kMajor, Config::StackState::kNoHeapPointers,
+ Config::MarkingType::kIncrementalAndConcurrent,
+ Config::SweepingType::kIncrementalAndConcurrent};
+
+ void StartConcurrentGC() {
+ Heap* heap = Heap::From(GetHeap());
+ heap->DisableHeapGrowingForTesting();
+ heap->StartIncrementalGarbageCollection(ConcurrentPreciseConfig);
+ heap->marker()->DisableIncrementalMarkingForTesting();
+ }
+
+ bool SingleStep(Config::StackState stack_state) {
+ MarkerBase* marker = Heap::From(GetHeap())->marker();
+ DCHECK(marker);
+ return marker->IncrementalMarkingStepForTesting(stack_state);
+ }
+
+ void FinishSteps(Config::StackState stack_state) {
+ while (!SingleStep(stack_state)) {
+ }
+ }
+
+ void FinishGC() {
+ Heap::From(GetHeap())->FinalizeIncrementalGarbageCollectionIfRunning(
+ ConcurrentPreciseConfig);
+ }
+};
+
+// static
+constexpr ConcurrentMarkingTest::Config
+ ConcurrentMarkingTest::ConcurrentPreciseConfig;
+
+} // namespace
+
+// The following tests check for data races during concurrent marking.
+
+TEST_F(ConcurrentMarkingTest, MarkingObjects) {
+ static constexpr int kNumStep = 1000;
+ StartConcurrentGC();
+ Persistent<GCedHolder<GCed>> root =
+ MakeGarbageCollected<GCedHolder<GCed>>(GetAllocationHandle());
+ Member<GCed>* last_object = &root->object_;
+ for (int i = 0; i < kNumStep; ++i) {
+ for (int j = 0; j < kNumStep; ++j) {
+ *last_object = MakeGarbageCollected<GCed>(GetAllocationHandle());
+ last_object = &(*last_object)->child_;
+ }
+ // Use SingleStep to re-post concurrent jobs.
+ SingleStep(Config::StackState::kNoHeapPointers);
+ }
+ FinishGC();
+}
+
+TEST_F(ConcurrentMarkingTest, MarkingInConstructionObjects) {
+ static constexpr int kNumStep = 1000;
+ StartConcurrentGC();
+ Persistent<GCedHolder<GCedWithCallback>> root =
+ MakeGarbageCollected<GCedHolder<GCedWithCallback>>(GetAllocationHandle());
+ Member<GCedWithCallback>* last_object = &root->object_;
+ for (int i = 0; i < kNumStep; ++i) {
+ for (int j = 0; j < kNumStep; ++j) {
+ MakeGarbageCollected<GCedWithCallback>(
+ GetAllocationHandle(), [&last_object](GCedWithCallback* obj) {
+ *last_object = obj;
+ last_object = &(*last_object)->child_;
+ });
+ }
+ // Use SingleStep to re-post concurrent jobs.
+ SingleStep(Config::StackState::kNoHeapPointers);
+ }
+ FinishGC();
+}
+
+TEST_F(ConcurrentMarkingTest, MarkingMixinObjects) {
+ static constexpr int kNumStep = 1000;
+ StartConcurrentGC();
+ Persistent<GCedHolder<Mixin>> root =
+ MakeGarbageCollected<GCedHolder<Mixin>>(GetAllocationHandle());
+ Member<Mixin>* last_object = &root->object_;
+ for (int i = 0; i < kNumStep; ++i) {
+ for (int j = 0; j < kNumStep; ++j) {
+ *last_object = MakeGarbageCollected<GCedWithMixin>(GetAllocationHandle());
+ last_object = &(*last_object)->child_;
+ }
+ // Use SingleStep to re-post concurrent jobs.
+ SingleStep(Config::StackState::kNoHeapPointers);
+ }
+ FinishGC();
+}
+
+#endif // defined(THREAD_SANITIZER)
+
+} // namespace internal
+} // namespace cppgc
diff --git a/deps/v8/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc b/deps/v8/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc
index 32e65756fb..3794adce25 100644
--- a/deps/v8/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc
+++ b/deps/v8/test/unittests/heap/cppgc/concurrent-sweeper-unittest.cc
@@ -79,7 +79,7 @@ class ConcurrentSweeperTest : public testing::TestWithHeap {
void FinishSweeping() {
Heap* heap = Heap::From(GetHeap());
Sweeper& sweeper = heap->sweeper();
- sweeper.Finish();
+ sweeper.FinishIfRunning();
}
const RawHeap& GetRawHeap() const {
diff --git a/deps/v8/test/unittests/heap/cppgc/gc-invoker-unittest.cc b/deps/v8/test/unittests/heap/cppgc/gc-invoker-unittest.cc
index 3695afc6f4..9dc1b8d426 100644
--- a/deps/v8/test/unittests/heap/cppgc/gc-invoker-unittest.cc
+++ b/deps/v8/test/unittests/heap/cppgc/gc-invoker-unittest.cc
@@ -6,6 +6,7 @@
#include "include/cppgc/platform.h"
#include "src/heap/cppgc/heap.h"
+#include "test/unittests/heap/cppgc/test-platform.h"
#include "testing/gmock/include/gmock/gmock-matchers.h"
#include "testing/gmock/include/gmock/gmock.h"
#include "testing/gtest/include/gtest/gtest.h"
@@ -18,6 +19,8 @@ namespace {
class MockGarbageCollector : public GarbageCollector {
public:
MOCK_METHOD(void, CollectGarbage, (GarbageCollector::Config), (override));
+ MOCK_METHOD(void, StartIncrementalGarbageCollection,
+ (GarbageCollector::Config), (override));
MOCK_METHOD(size_t, epoch, (), (const, override));
};
@@ -81,7 +84,7 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedSynchronouslyWhenSupported) {
invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig());
}
-TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) {
+TEST(GCInvokerTest, ConservativeGCIsScheduledAsPreciseGCViaPlatform) {
std::shared_ptr<cppgc::TaskRunner> runner =
std::shared_ptr<cppgc::TaskRunner>(new MockTaskRunner());
MockPlatform platform(runner);
@@ -94,5 +97,43 @@ TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) {
invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig());
}
+TEST(GCInvokerTest, ConservativeGCIsInvokedAsPreciseGCViaPlatform) {
+ testing::TestPlatform platform;
+ MockGarbageCollector gc;
+ GCInvoker invoker(&gc, &platform,
+ cppgc::Heap::StackSupport::kNoConservativeStackScan);
+ EXPECT_CALL(gc, epoch).WillRepeatedly(::testing::Return(0));
+ EXPECT_CALL(gc, CollectGarbage);
+ invoker.CollectGarbage(GarbageCollector::Config::ConservativeAtomicConfig());
+ platform.WaitAllForegroundTasks();
+}
+
+TEST(GCInvokerTest, IncrementalGCIsStarted) {
+ // Since StartIncrementalGarbageCollection doesn't scan the stack, support for
+ // conservative stack scanning should not matter.
+ MockPlatform platform(nullptr);
+ MockGarbageCollector gc;
+ // Conservative stack scanning supported.
+ GCInvoker invoker_with_support(
+ &gc, &platform,
+ cppgc::Heap::StackSupport::kSupportsConservativeStackScan);
+ EXPECT_CALL(
+ gc, StartIncrementalGarbageCollection(::testing::Field(
+ &GarbageCollector::Config::stack_state,
+ GarbageCollector::Config::StackState::kMayContainHeapPointers)));
+ invoker_with_support.StartIncrementalGarbageCollection(
+ GarbageCollector::Config::ConservativeIncrementalConfig());
+ // Conservative stack scanning *not* supported.
+ GCInvoker invoker_without_support(
+ &gc, &platform, cppgc::Heap::StackSupport::kNoConservativeStackScan);
+ EXPECT_CALL(
+ gc, StartIncrementalGarbageCollection(::testing::Field(
+ &GarbageCollector::Config::stack_state,
+ GarbageCollector::Config::StackState::kMayContainHeapPointers)))
+ .Times(0);
+ invoker_without_support.StartIncrementalGarbageCollection(
+ GarbageCollector::Config::ConservativeIncrementalConfig());
+}
+
} // namespace internal
} // namespace cppgc
diff --git a/deps/v8/test/unittests/heap/cppgc/heap-growing-unittest.cc b/deps/v8/test/unittests/heap/cppgc/heap-growing-unittest.cc
index 42994fb0d5..f1ff2a5877 100644
--- a/deps/v8/test/unittests/heap/cppgc/heap-growing-unittest.cc
+++ b/deps/v8/test/unittests/heap/cppgc/heap-growing-unittest.cc
@@ -29,6 +29,11 @@ class FakeGarbageCollector : public GarbageCollector {
callcount_++;
}
+ void StartIncrementalGarbageCollection(
+ GarbageCollector::Config config) override {
+ UNREACHABLE();
+ }
+
size_t epoch() const override { return callcount_; }
private:
@@ -40,6 +45,8 @@ class FakeGarbageCollector : public GarbageCollector {
class MockGarbageCollector : public GarbageCollector {
public:
MOCK_METHOD(void, CollectGarbage, (GarbageCollector::Config), (override));
+ MOCK_METHOD(void, StartIncrementalGarbageCollection,
+ (GarbageCollector::Config), (override));
MOCK_METHOD(size_t, epoch, (), (const, override));
};
@@ -87,7 +94,7 @@ TEST(HeapGrowingTest, ConstantGrowingFactor) {
gc.SetLiveBytes(kObjectSize);
FakeAllocate(&stats_collector, kObjectSize + 1);
EXPECT_EQ(1u, gc.epoch());
- EXPECT_EQ(1.5 * kObjectSize, growing.limit());
+ EXPECT_EQ(1.5 * kObjectSize, growing.limit_for_atomic_gc());
}
TEST(HeapGrowingTest, SmallHeapGrowing) {
@@ -103,7 +110,35 @@ TEST(HeapGrowingTest, SmallHeapGrowing) {
gc.SetLiveBytes(1);
FakeAllocate(&stats_collector, kLargeAllocation);
EXPECT_EQ(1u, gc.epoch());
- EXPECT_EQ(1 + HeapGrowing::kMinLimitIncrease, growing.limit());
+ EXPECT_EQ(1 + HeapGrowing::kMinLimitIncrease, growing.limit_for_atomic_gc());
+}
+
+TEST(HeapGrowingTest, IncrementalGCStarted) {
+ StatsCollector stats_collector;
+ MockGarbageCollector gc;
+ cppgc::Heap::ResourceConstraints constraints;
+ HeapGrowing growing(&gc, &stats_collector, constraints);
+ EXPECT_CALL(gc, CollectGarbage(::testing::_)).Times(0);
+ EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
+ // Allocate 1 byte less than the limit for atomic gc to trigger incremental gc.
+ FakeAllocate(&stats_collector, growing.limit_for_atomic_gc() - 1);
+}
+
+TEST(HeapGrowingTest, IncrementalGCFinalized) {
+ StatsCollector stats_collector;
+ MockGarbageCollector gc;
+ cppgc::Heap::ResourceConstraints constraints;
+ HeapGrowing growing(&gc, &stats_collector, constraints);
+ EXPECT_CALL(gc, CollectGarbage(::testing::_)).Times(0);
+ EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_));
+ // Allocate 1 byte less than the limit for atomic gc to trigger incremental gc.
+ size_t bytes_for_incremental_gc = growing.limit_for_atomic_gc() - 1;
+ FakeAllocate(&stats_collector, bytes_for_incremental_gc);
+ ::testing::Mock::VerifyAndClearExpectations(&gc);
+ EXPECT_CALL(gc, CollectGarbage(::testing::_));
+ EXPECT_CALL(gc, StartIncrementalGarbageCollection(::testing::_)).Times(0);
+ // Allocate the rest needed to trigger atomic gc.
+ FakeAllocate(&stats_collector, StatsCollector::kAllocationThresholdBytes);
}
} // namespace internal
diff --git a/deps/v8/test/unittests/heap/cppgc/incremental-marking-schedule-unittest.cc b/deps/v8/test/unittests/heap/cppgc/incremental-marking-schedule-unittest.cc
new file mode 100644
index 0000000000..e91870be9d
--- /dev/null
+++ b/deps/v8/test/unittests/heap/cppgc/incremental-marking-schedule-unittest.cc
@@ -0,0 +1,95 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/heap/cppgc/incremental-marking-schedule.h"
+
+#include "src/base/platform/time.h"
+#include "src/heap/cppgc/globals.h"
+#include "src/heap/cppgc/stats-collector.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace cppgc {
+namespace internal {
+
+namespace {
+
+class IncrementalMarkingScheduleTest : public testing::Test {
+ public:
+ static const size_t kObjectSize;
+};
+
+const size_t IncrementalMarkingScheduleTest::kObjectSize =
+ 100 * IncrementalMarkingSchedule::kMinimumMarkedBytesPerIncrementalStep;
+
+} // namespace
+
+TEST_F(IncrementalMarkingScheduleTest, FirstStepReturnsDefaultDuration) {
+ IncrementalMarkingSchedule schedule;
+ schedule.NotifyIncrementalMarkingStart();
+ schedule.SetElapsedTimeForTesting(0);
+ EXPECT_EQ(IncrementalMarkingSchedule::kMinimumMarkedBytesPerIncrementalStep,
+ schedule.GetNextIncrementalStepDuration(kObjectSize));
+}
+
+// If marking is not behind schedule and very small time passed between steps
+// the oracle should return the minimum step duration.
+TEST_F(IncrementalMarkingScheduleTest, NoTimePassedReturnsMinimumDuration) {
+ IncrementalMarkingSchedule schedule;
+ schedule.NotifyIncrementalMarkingStart();
+ // Add incrementally marked bytes to tell the oracle this is not the first
+ // step.
+ schedule.UpdateIncrementalMarkedBytes(
+ IncrementalMarkingSchedule::kMinimumMarkedBytesPerIncrementalStep);
+ schedule.SetElapsedTimeForTesting(0);
+ EXPECT_EQ(IncrementalMarkingSchedule::kMinimumMarkedBytesPerIncrementalStep,
+ schedule.GetNextIncrementalStepDuration(kObjectSize));
+}
+
+TEST_F(IncrementalMarkingScheduleTest, OracleDoesntExccedMaximumStepDuration) {
+ IncrementalMarkingSchedule schedule;
+ schedule.NotifyIncrementalMarkingStart();
+ // Add incrementally marked bytes to tell the oracle this is not the first
+ // step.
+ static constexpr size_t kMarkedBytes = 1;
+ schedule.UpdateIncrementalMarkedBytes(kMarkedBytes);
+ schedule.SetElapsedTimeForTesting(
+ IncrementalMarkingSchedule::kEstimatedMarkingTimeMs);
+ EXPECT_EQ(kObjectSize - kMarkedBytes,
+ schedule.GetNextIncrementalStepDuration(kObjectSize));
+}
+
+TEST_F(IncrementalMarkingScheduleTest, AheadOfScheduleReturnsMinimumDuration) {
+ IncrementalMarkingSchedule schedule;
+ schedule.NotifyIncrementalMarkingStart();
+ // Add incrementally marked bytes to tell the oracle this is not the first
+ // step.
+ schedule.UpdateIncrementalMarkedBytes(
+ IncrementalMarkingSchedule::kMinimumMarkedBytesPerIncrementalStep);
+ schedule.AddConcurrentlyMarkedBytes(0.6 * kObjectSize);
+ schedule.SetElapsedTimeForTesting(
+ 0.5 * IncrementalMarkingSchedule::kEstimatedMarkingTimeMs);
+ EXPECT_EQ(IncrementalMarkingSchedule::kMinimumMarkedBytesPerIncrementalStep,
+ schedule.GetNextIncrementalStepDuration(kObjectSize));
+}
+
+TEST_F(IncrementalMarkingScheduleTest, BehindScheduleReturnsCorrectDuration) {
+ IncrementalMarkingSchedule schedule;
+ schedule.NotifyIncrementalMarkingStart();
+ schedule.UpdateIncrementalMarkedBytes(0.1 * kObjectSize);
+ schedule.AddConcurrentlyMarkedBytes(0.25 * kObjectSize);
+ schedule.SetElapsedTimeForTesting(
+ 0.5 * IncrementalMarkingSchedule::kEstimatedMarkingTimeMs);
+ EXPECT_EQ(0.15 * kObjectSize,
+ schedule.GetNextIncrementalStepDuration(kObjectSize));
+ schedule.AddConcurrentlyMarkedBytes(0.05 * kObjectSize);
+ schedule.SetElapsedTimeForTesting(
+ 0.5 * IncrementalMarkingSchedule::kEstimatedMarkingTimeMs);
+ EXPECT_EQ(0.1 * kObjectSize,
+ schedule.GetNextIncrementalStepDuration(kObjectSize));
+ schedule.AddConcurrentlyMarkedBytes(0.05 * kObjectSize);
+ schedule.SetElapsedTimeForTesting(
+ 0.5 * IncrementalMarkingSchedule::kEstimatedMarkingTimeMs);
+ EXPECT_EQ(0.05 * kObjectSize,
+ schedule.GetNextIncrementalStepDuration(kObjectSize));
+}
+
+} // namespace internal
+} // namespace cppgc
diff --git a/deps/v8/test/unittests/heap/cppgc/marker-unittest.cc b/deps/v8/test/unittests/heap/cppgc/marker-unittest.cc
index 8944372701..b879d9b989 100644
--- a/deps/v8/test/unittests/heap/cppgc/marker-unittest.cc
+++ b/deps/v8/test/unittests/heap/cppgc/marker-unittest.cc
@@ -18,7 +18,6 @@ namespace cppgc {
namespace internal {
namespace {
-
class MarkerTest : public testing::TestWithHeap {
public:
using MarkingConfig = Marker::MarkingConfig;
@@ -27,14 +26,24 @@ class MarkerTest : public testing::TestWithHeap {
const MarkingConfig config = {MarkingConfig::CollectionType::kMajor,
stack_state};
auto* heap = Heap::From(GetHeap());
- Marker marker(heap->AsBase());
- marker.StartMarking(config);
- marker.FinishMarking(config);
- marker.ProcessWeakness();
+ InitializeMarker(*heap, GetPlatformHandle().get(), config);
+ marker_->FinishMarking(stack_state);
+ marker_->ProcessWeakness();
// Pretend do finish sweeping as StatsCollector verifies that Notify*
// methods are called in the right order.
heap->stats_collector()->NotifySweepingCompleted();
}
+
+ void InitializeMarker(HeapBase& heap, cppgc::Platform* platform,
+ MarkingConfig config) {
+ marker_ =
+ MarkerFactory::CreateAndStartMarking<Marker>(heap, platform, config);
+ }
+
+ Marker* marker() const { return marker_.get(); }
+
+ private:
+ std::unique_ptr<Marker> marker_;
};
class GCed : public GarbageCollected<GCed> {
@@ -216,50 +225,166 @@ class GCedWithCallback : public GarbageCollected<GCedWithCallback> {
} // namespace
TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedEmptyStack) {
- Marker marker(Heap::From(GetHeap())->AsBase());
- marker.StartMarking({MarkingConfig::CollectionType::kMajor,
- MarkingConfig::StackState::kMayContainHeapPointers});
+ static const Marker::MarkingConfig config = {
+ MarkingConfig::CollectionType::kMajor,
+ MarkingConfig::StackState::kMayContainHeapPointers};
+ InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
GCedWithCallback* object = MakeGarbageCollected<GCedWithCallback>(
- GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
+ GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) {
Member<GCedWithCallback> member(obj);
- marker.VisitorForTesting().Trace(member);
+ marker->VisitorForTesting().Trace(member);
});
EXPECT_TRUE(HeapObjectHeader::FromPayload(object).IsMarked());
- marker.FinishMarking({MarkingConfig::CollectionType::kMajor,
- MarkingConfig::StackState::kMayContainHeapPointers});
+ marker()->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromPayload(object).IsMarked());
}
TEST_F(MarkerTest, InConstructionObjectIsEventuallyMarkedNonEmptyStack) {
- Marker marker(Heap::From(GetHeap())->AsBase());
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kMayContainHeapPointers};
- marker.StartMarking(config);
+ InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
MakeGarbageCollected<GCedWithCallback>(
- GetAllocationHandle(), [&marker](GCedWithCallback* obj) {
+ GetAllocationHandle(), [marker = marker()](GCedWithCallback* obj) {
Member<GCedWithCallback> member(obj);
- marker.VisitorForTesting().Trace(member);
+ marker->VisitorForTesting().Trace(member);
EXPECT_TRUE(HeapObjectHeader::FromPayload(obj).IsMarked());
- marker.FinishMarking(config);
+ marker->FinishMarking(
+ MarkingConfig::StackState::kMayContainHeapPointers);
EXPECT_TRUE(HeapObjectHeader::FromPayload(obj).IsMarked());
});
}
TEST_F(MarkerTest, SentinelNotClearedOnWeakPersistentHandling) {
- Marker marker(Heap::From(GetHeap())->AsBase());
- Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
- auto* tmp = MakeGarbageCollected<GCed>(GetAllocationHandle());
- root->SetWeakChild(tmp);
static const Marker::MarkingConfig config = {
MarkingConfig::CollectionType::kMajor,
MarkingConfig::StackState::kNoHeapPointers};
- marker.StartMarking(config);
- marker.FinishMarking(config);
+ InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(), config);
+ Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
+ auto* tmp = MakeGarbageCollected<GCed>(GetAllocationHandle());
+ root->SetWeakChild(tmp);
+ marker()->FinishMarking(MarkingConfig::StackState::kNoHeapPointers);
root->SetWeakChild(kSentinelPointer);
- marker.ProcessWeakness();
+ marker()->ProcessWeakness();
EXPECT_EQ(kSentinelPointer, root->weak_child());
}
+// Incremental Marking
+
+class IncrementalMarkingTest : public testing::TestWithHeap {
+ public:
+ using MarkingConfig = Marker::MarkingConfig;
+
+ static constexpr MarkingConfig IncrementalPreciseMarkingConfig = {
+ MarkingConfig::CollectionType::kMajor,
+ MarkingConfig::StackState::kNoHeapPointers,
+ MarkingConfig::MarkingType::kIncremental};
+ static constexpr MarkingConfig IncrementalConservativeMarkingConfig = {
+ MarkingConfig::CollectionType::kMajor,
+ MarkingConfig::StackState::kMayContainHeapPointers,
+ MarkingConfig::MarkingType::kIncremental};
+
+ void FinishSteps(MarkingConfig::StackState stack_state) {
+ while (!SingleStep(stack_state)) {}
+ }
+
+ void FinishMarking() {
+ marker_->FinishMarking(MarkingConfig::StackState::kMayContainHeapPointers);
+ marker_->ProcessWeakness();
+    // Pretend to finish sweeping as StatsCollector verifies that Notify*
+ // methods are called in the right order.
+ Heap::From(GetHeap())->stats_collector()->NotifySweepingCompleted();
+ }
+
+ void InitializeMarker(HeapBase& heap, cppgc::Platform* platform,
+ MarkingConfig config) {
+ marker_ =
+ MarkerFactory::CreateAndStartMarking<Marker>(heap, platform, config);
+ }
+
+ Marker* marker() const { return marker_.get(); }
+
+ private:
+ bool SingleStep(MarkingConfig::StackState stack_state) {
+ return marker_->IncrementalMarkingStepForTesting(stack_state);
+ }
+
+ std::unique_ptr<Marker> marker_;
+};
+
+constexpr IncrementalMarkingTest::MarkingConfig
+ IncrementalMarkingTest::IncrementalPreciseMarkingConfig;
+constexpr IncrementalMarkingTest::MarkingConfig
+ IncrementalMarkingTest::IncrementalConservativeMarkingConfig;
+
+TEST_F(IncrementalMarkingTest, RootIsMarkedAfterMarkingStarted) {
+ Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
+ EXPECT_FALSE(HeapObjectHeader::FromPayload(root).IsMarked());
+ InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
+ IncrementalPreciseMarkingConfig);
+ EXPECT_TRUE(HeapObjectHeader::FromPayload(root).IsMarked());
+ FinishMarking();
+}
+
+TEST_F(IncrementalMarkingTest, MemberIsMarkedAfterMarkingSteps) {
+ Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
+ root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
+ HeapObjectHeader& header = HeapObjectHeader::FromPayload(root->child());
+ EXPECT_FALSE(header.IsMarked());
+ InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
+ IncrementalPreciseMarkingConfig);
+ FinishSteps(MarkingConfig::StackState::kNoHeapPointers);
+ EXPECT_TRUE(header.IsMarked());
+ FinishMarking();
+}
+
+TEST_F(IncrementalMarkingTest,
+ MemberWithWriteBarrierIsMarkedAfterMarkingSteps) {
+ Persistent<GCed> root = MakeGarbageCollected<GCed>(GetAllocationHandle());
+ InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
+ IncrementalPreciseMarkingConfig);
+ root->SetChild(MakeGarbageCollected<GCed>(GetAllocationHandle()));
+ HeapObjectHeader& header = HeapObjectHeader::FromPayload(root->child());
+ EXPECT_FALSE(header.IsMarked());
+ FinishSteps(MarkingConfig::StackState::kNoHeapPointers);
+ EXPECT_TRUE(header.IsMarked());
+ FinishMarking();
+}
+
+namespace {
+class Holder : public GarbageCollected<Holder> {
+ public:
+ void Trace(Visitor* visitor) const { visitor->Trace(member_); }
+
+ Member<GCedWithCallback> member_;
+};
+} // namespace
+
+TEST_F(IncrementalMarkingTest, IncrementalStepDuringAllocation) {
+ Persistent<Holder> holder =
+ MakeGarbageCollected<Holder>(GetAllocationHandle());
+ InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
+ IncrementalPreciseMarkingConfig);
+ const HeapObjectHeader* header;
+ MakeGarbageCollected<GCedWithCallback>(
+ GetAllocationHandle(), [this, &holder, &header](GCedWithCallback* obj) {
+ header = &HeapObjectHeader::FromPayload(obj);
+ holder->member_ = obj;
+ EXPECT_FALSE(header->IsMarked());
+ FinishSteps(MarkingConfig::StackState::kMayContainHeapPointers);
+ EXPECT_TRUE(header->IsMarked());
+ });
+ FinishSteps(MarkingConfig::StackState::kNoHeapPointers);
+ EXPECT_TRUE(header->IsMarked());
+ FinishMarking();
+}
+
+TEST_F(IncrementalMarkingTest, MarkingRunsOutOfWorkEventually) {
+ InitializeMarker(*Heap::From(GetHeap()), GetPlatformHandle().get(),
+ IncrementalPreciseMarkingConfig);
+ FinishSteps(MarkingConfig::StackState::kNoHeapPointers);
+ FinishMarking();
+}
+
} // namespace internal
} // namespace cppgc
diff --git a/deps/v8/test/unittests/heap/cppgc/marking-verifier-unittest.cc b/deps/v8/test/unittests/heap/cppgc/marking-verifier-unittest.cc
index 47a6b26dee..fb5ba772da 100644
--- a/deps/v8/test/unittests/heap/cppgc/marking-verifier-unittest.cc
+++ b/deps/v8/test/unittests/heap/cppgc/marking-verifier-unittest.cc
@@ -76,6 +76,28 @@ TEST_F(MarkingVerifierTest, DoesntDieOnMarkedWeakMember) {
VerifyMarking(Heap::From(GetHeap())->AsBase(), StackState::kNoHeapPointers);
}
+namespace {
+
+class GCedWithCallback : public GarbageCollected<GCedWithCallback> {
+ public:
+ template <typename Callback>
+ explicit GCedWithCallback(Callback callback) {
+ callback(this);
+ }
+ void Trace(cppgc::Visitor* visitor) const {}
+};
+
+} // namespace
+
+TEST_F(MarkingVerifierTest, DoesntDieOnInConstructionOnObject) {
+ MakeGarbageCollected<GCedWithCallback>(
+ GetAllocationHandle(), [this](GCedWithCallback* obj) {
+ HeapObjectHeader::FromPayload(obj).TryMarkAtomic();
+ VerifyMarking(Heap::From(GetHeap())->AsBase(),
+ StackState::kMayContainHeapPointers);
+ });
+}
+
// Death tests.
namespace {
diff --git a/deps/v8/test/unittests/heap/cppgc/marking-visitor-unittest.cc b/deps/v8/test/unittests/heap/cppgc/marking-visitor-unittest.cc
index 799d44bea9..51387712c7 100644
--- a/deps/v8/test/unittests/heap/cppgc/marking-visitor-unittest.cc
+++ b/deps/v8/test/unittests/heap/cppgc/marking-visitor-unittest.cc
@@ -23,7 +23,8 @@ namespace {
class MarkingVisitorTest : public testing::TestWithHeap {
public:
MarkingVisitorTest()
- : marker_(std::make_unique<Marker>(Heap::From(GetHeap())->AsBase())) {}
+ : marker_(MarkerFactory::CreateAndStartMarking<Marker>(
+ *Heap::From(GetHeap()), GetPlatformHandle().get())) {}
~MarkingVisitorTest() override { marker_->ClearAllWorklistsForTesting(); }
Marker* GetMarker() { return marker_.get(); }
@@ -47,6 +48,7 @@ class TestMarkingVisitor : public MarkingVisitor {
public:
explicit TestMarkingVisitor(Marker* marker)
: MarkingVisitor(marker->heap(), marker->MarkingStateForTesting()) {}
+ ~TestMarkingVisitor() { marking_state_.Publish(); }
};
} // namespace
diff --git a/deps/v8/test/unittests/heap/cppgc/member-unittest.cc b/deps/v8/test/unittests/heap/cppgc/member-unittest.cc
index bddac760a5..64934d69cd 100644
--- a/deps/v8/test/unittests/heap/cppgc/member-unittest.cc
+++ b/deps/v8/test/unittests/heap/cppgc/member-unittest.cc
@@ -147,6 +147,41 @@ TEST_F(MemberTest, Swap) {
template <template <typename> class MemberType1,
template <typename> class MemberType2>
+void MoveTest(cppgc::Heap* heap) {
+ {
+ GCed* gced1 = MakeGarbageCollected<GCed>(heap->GetAllocationHandle());
+ MemberType1<GCed> member1 = gced1;
+ MemberType2<GCed> member2(std::move(member1));
+ // Move-from member must be in empty state.
+ EXPECT_FALSE(member1);
+ EXPECT_EQ(gced1, member2.Get());
+ }
+ {
+ GCed* gced1 = MakeGarbageCollected<GCed>(heap->GetAllocationHandle());
+ MemberType1<GCed> member1 = gced1;
+ MemberType2<GCed> member2;
+ member2 = std::move(member1);
+ // Move-from member must be in empty state.
+ EXPECT_FALSE(member1);
+ EXPECT_EQ(gced1, member2.Get());
+ }
+}
+
+TEST_F(MemberTest, Move) {
+ cppgc::Heap* heap = GetHeap();
+ MoveTest<Member, Member>(heap);
+ MoveTest<Member, WeakMember>(heap);
+ MoveTest<Member, UntracedMember>(heap);
+ MoveTest<WeakMember, Member>(heap);
+ MoveTest<WeakMember, WeakMember>(heap);
+ MoveTest<WeakMember, UntracedMember>(heap);
+ MoveTest<UntracedMember, Member>(heap);
+ MoveTest<UntracedMember, WeakMember>(heap);
+ MoveTest<UntracedMember, UntracedMember>(heap);
+}
+
+template <template <typename> class MemberType1,
+ template <typename> class MemberType2>
void HeterogeneousConversionTest(cppgc::Heap* heap) {
{
MemberType1<GCed> member1 =
diff --git a/deps/v8/test/unittests/heap/cppgc/sweeper-unittest.cc b/deps/v8/test/unittests/heap/cppgc/sweeper-unittest.cc
index 992bd67f6d..3591af29a4 100644
--- a/deps/v8/test/unittests/heap/cppgc/sweeper-unittest.cc
+++ b/deps/v8/test/unittests/heap/cppgc/sweeper-unittest.cc
@@ -49,7 +49,7 @@ class SweeperTest : public testing::TestWithHeap {
heap->stats_collector()->NotifyMarkingStarted();
heap->stats_collector()->NotifyMarkingCompleted(0);
sweeper.Start(Sweeper::Config::kAtomic);
- sweeper.Finish();
+ sweeper.FinishIfRunning();
}
void MarkObject(void* payload) {
diff --git a/deps/v8/test/unittests/heap/cppgc/test-platform.cc b/deps/v8/test/unittests/heap/cppgc/test-platform.cc
index 140e9c1589..c649b1e89d 100644
--- a/deps/v8/test/unittests/heap/cppgc/test-platform.cc
+++ b/deps/v8/test/unittests/heap/cppgc/test-platform.cc
@@ -6,29 +6,68 @@
#include "src/base/platform/platform.h"
#include "src/base/platform/time.h"
+#include "src/heap/cppgc/default-job.h"
namespace cppgc {
namespace internal {
namespace testing {
-void TestTaskRunner::PostTask(std::unique_ptr<v8::Task> task) {
+namespace {
+class TestJobThread final : public v8::base::Thread {
+ public:
+ using id = uint8_t;
+
+ explicit TestJobThread(TestJob* job) : Thread(Options("job")), job_(job) {}
+
+ void Run() final;
+
+ static size_t GetMaxSupportedConcurrency() { return 4u; }
+
+ private:
+ TestJob* const job_;
+};
+} // namespace
+
+// Job implementation for tests, backed by v8::base::Thread.
+class TestJob final : public DefaultJobImpl<TestJobThread> {
+ public:
+ explicit TestJob(Key key, std::unique_ptr<cppgc::JobTask> job_task)
+ : DefaultJobImpl(key, std::move(job_task)) {}
+
+ std::shared_ptr<TestJobThread> CreateThread(DefaultJobImpl* job) final {
+ std::shared_ptr<TestJobThread> thread =
+ std::make_shared<TestJobThread>(this);
+ const bool thread_started = thread->Start();
+ USE(thread_started);
+ DCHECK(thread_started);
+ return thread;
+ }
+};
+
+void TestJobThread::Run() {
+ DCHECK_NOT_NULL(job_);
+ job_->RunJobTask();
+}
+
+void TestTaskRunner::PostTask(std::unique_ptr<cppgc::Task> task) {
tasks_.push_back(std::move(task));
}
-void TestTaskRunner::PostNonNestableTask(std::unique_ptr<v8::Task> task) {
+void TestTaskRunner::PostNonNestableTask(std::unique_ptr<cppgc::Task> task) {
PostTask(std::move(task));
}
-void TestTaskRunner::PostDelayedTask(std::unique_ptr<v8::Task> task, double) {
+void TestTaskRunner::PostDelayedTask(std::unique_ptr<cppgc::Task> task,
+ double) {
PostTask(std::move(task));
}
-void TestTaskRunner::PostNonNestableDelayedTask(std::unique_ptr<v8::Task> task,
- double) {
+void TestTaskRunner::PostNonNestableDelayedTask(
+ std::unique_ptr<cppgc::Task> task, double) {
PostTask(std::move(task));
}
-void TestTaskRunner::PostIdleTask(std::unique_ptr<v8::IdleTask> task) {
+void TestTaskRunner::PostIdleTask(std::unique_ptr<cppgc::IdleTask> task) {
idle_tasks_.push_back(std::move(task));
}
@@ -62,35 +101,19 @@ void TestTaskRunner::RunUntilIdle() {
idle_tasks_.clear();
}
-class TestPlatform::TestJobHandle : public v8::JobHandle {
- public:
- explicit TestJobHandle(const std::shared_ptr<JobThread>& thread)
- : thread_(thread) {
- const bool success = thread_->Start();
- USE(success);
- }
-
- void NotifyConcurrencyIncrease() override {}
- void Join() override { thread_->Join(); }
- void Cancel() override { Join(); }
- bool IsRunning() override { return true; }
-
- private:
- std::shared_ptr<JobThread> thread_;
-};
-
TestPlatform::TestPlatform()
: foreground_task_runner_(std::make_unique<TestTaskRunner>()) {}
TestPlatform::~TestPlatform() V8_NOEXCEPT { WaitAllBackgroundTasks(); }
-std::unique_ptr<v8::JobHandle> TestPlatform::PostJob(
- v8::TaskPriority, std::unique_ptr<v8::JobTask> job_task) {
+std::unique_ptr<cppgc::JobHandle> TestPlatform::PostJob(
+ cppgc::TaskPriority, std::unique_ptr<cppgc::JobTask> job_task) {
if (AreBackgroundTasksDisabled()) return {};
- auto thread = std::make_shared<JobThread>(std::move(job_task));
- job_threads_.push_back(thread);
- return std::make_unique<TestJobHandle>(std::move(thread));
+ std::shared_ptr<TestJob> job =
+ DefaultJobFactory<TestJob>::Create(std::move(job_task));
+ jobs_.push_back(job);
+ return std::make_unique<TestJob::JobHandle>(std::move(job));
}
double TestPlatform::MonotonicallyIncreasingTime() {
@@ -103,10 +126,10 @@ void TestPlatform::WaitAllForegroundTasks() {
}
void TestPlatform::WaitAllBackgroundTasks() {
- for (auto& thread : job_threads_) {
- thread->Join();
+ for (auto& job : jobs_) {
+ job->Join();
}
- job_threads_.clear();
+ jobs_.clear();
}
TestPlatform::DisableBackgroundTasksScope::DisableBackgroundTasksScope(
diff --git a/deps/v8/test/unittests/heap/cppgc/test-platform.h b/deps/v8/test/unittests/heap/cppgc/test-platform.h
index 474afaed0f..1faa6efb40 100644
--- a/deps/v8/test/unittests/heap/cppgc/test-platform.h
+++ b/deps/v8/test/unittests/heap/cppgc/test-platform.h
@@ -16,16 +16,22 @@ namespace cppgc {
namespace internal {
namespace testing {
-class TestTaskRunner : public v8::TaskRunner {
+class TestJob;
+
+class TestTaskRunner : public cppgc::TaskRunner {
public:
- void PostTask(std::unique_ptr<v8::Task> task) override;
- void PostNonNestableTask(std::unique_ptr<v8::Task> task) override;
- void PostDelayedTask(std::unique_ptr<v8::Task> task, double) override;
- void PostNonNestableDelayedTask(std::unique_ptr<v8::Task> task,
+ void PostTask(std::unique_ptr<cppgc::Task> task) override;
+ void PostDelayedTask(std::unique_ptr<cppgc::Task> task, double) override;
+
+ bool NonNestableTasksEnabled() const override { return true; }
+ void PostNonNestableTask(std::unique_ptr<cppgc::Task> task) override;
+
+ bool NonNestableDelayedTasksEnabled() const override { return true; }
+ void PostNonNestableDelayedTask(std::unique_ptr<cppgc::Task> task,
double) override;
- void PostIdleTask(std::unique_ptr<v8::IdleTask> task) override;
bool IdleTasksEnabled() override { return true; }
+ void PostIdleTask(std::unique_ptr<cppgc::IdleTask> task) override;
bool RunSingleTask();
bool RunSingleIdleTask(double duration_in_seconds);
@@ -33,8 +39,8 @@ class TestTaskRunner : public v8::TaskRunner {
void RunUntilIdle();
private:
- std::vector<std::unique_ptr<v8::Task>> tasks_;
- std::vector<std::unique_ptr<v8::IdleTask>> idle_tasks_;
+ std::vector<std::unique_ptr<cppgc::Task>> tasks_;
+ std::vector<std::unique_ptr<cppgc::IdleTask>> idle_tasks_;
};
class TestPlatform : public Platform {
@@ -53,12 +59,14 @@ class TestPlatform : public Platform {
PageAllocator* GetPageAllocator() override { return &page_allocator_; }
- std::shared_ptr<v8::TaskRunner> GetForegroundTaskRunner() override {
+ std::shared_ptr<cppgc::TaskRunner> GetForegroundTaskRunner() override {
return foreground_task_runner_;
}
- std::unique_ptr<v8::JobHandle> PostJob(
- v8::TaskPriority, std::unique_ptr<v8::JobTask> job_task) override;
+  // TestPlatform does not support job priorities. All jobs are assigned
+  // the same priority regardless of the cppgc::TaskPriority parameter.
+ std::unique_ptr<cppgc::JobHandle> PostJob(
+ cppgc::TaskPriority, std::unique_ptr<cppgc::JobTask> job_task) override;
double MonotonicallyIncreasingTime() override;
@@ -66,47 +74,13 @@ class TestPlatform : public Platform {
void WaitAllBackgroundTasks();
private:
- class TestJobHandle;
-
- class WorkerThread : public v8::base::Thread {
- public:
- explicit WorkerThread(std::unique_ptr<v8::Task> task)
- : Thread(Options("worker")), task_(std::move(task)) {}
-
- void Run() override {
- if (task_) std::move(task_)->Run();
- }
-
- private:
- std::unique_ptr<v8::Task> task_;
- };
-
- class JobThread : public v8::base::Thread {
- public:
- explicit JobThread(std::unique_ptr<v8::JobTask> task)
- : Thread(Options("job")), task_(std::move(task)) {}
-
- void Run() override {
- class JobDelegate : public v8::JobDelegate {
- public:
- bool ShouldYield() override { return false; }
- void NotifyConcurrencyIncrease() override {}
- } delegate;
-
- if (task_) task_->Run(&delegate);
- }
-
- private:
- std::unique_ptr<v8::JobTask> task_;
- };
-
bool AreBackgroundTasksDisabled() const {
return disabled_background_tasks_ > 0;
}
v8::base::PageAllocator page_allocator_;
std::shared_ptr<TestTaskRunner> foreground_task_runner_;
- std::vector<std::shared_ptr<JobThread>> job_threads_;
+ std::vector<std::shared_ptr<TestJob>> jobs_;
size_t disabled_background_tasks_ = 0;
};
diff --git a/deps/v8/test/unittests/heap/cppgc/worklist-unittest.cc b/deps/v8/test/unittests/heap/cppgc/worklist-unittest.cc
deleted file mode 100644
index b9e8843666..0000000000
--- a/deps/v8/test/unittests/heap/cppgc/worklist-unittest.cc
+++ /dev/null
@@ -1,346 +0,0 @@
-// Copyright 2020 the V8 project authors. All rights reserved.
-// Use of this source code is governed by a BSD-style license that can be
-// found in the LICENSE file.
-
-#include "src/heap/cppgc/worklist.h"
-
-#include "test/unittests/heap/cppgc/tests.h"
-
-namespace cppgc {
-namespace internal {
-
-class SomeObject {};
-
-using TestWorklist = Worklist<SomeObject*, 64>;
-
-TEST(CppgcWorkListTest, SegmentCreate) {
- TestWorklist::Segment segment;
- EXPECT_TRUE(segment.IsEmpty());
- EXPECT_EQ(0u, segment.Size());
- EXPECT_FALSE(segment.IsFull());
-}
-
-TEST(CppgcWorkListTest, SegmentPush) {
- TestWorklist::Segment segment;
- EXPECT_EQ(0u, segment.Size());
- EXPECT_TRUE(segment.Push(nullptr));
- EXPECT_EQ(1u, segment.Size());
-}
-
-TEST(CppgcWorkListTest, SegmentPushPop) {
- TestWorklist::Segment segment;
- EXPECT_TRUE(segment.Push(nullptr));
- EXPECT_EQ(1u, segment.Size());
- SomeObject dummy;
- SomeObject* object = &dummy;
- EXPECT_TRUE(segment.Pop(&object));
- EXPECT_EQ(0u, segment.Size());
- EXPECT_EQ(nullptr, object);
-}
-
-TEST(CppgcWorkListTest, SegmentIsEmpty) {
- TestWorklist::Segment segment;
- EXPECT_TRUE(segment.IsEmpty());
- EXPECT_TRUE(segment.Push(nullptr));
- EXPECT_FALSE(segment.IsEmpty());
-}
-
-TEST(CppgcWorkListTest, SegmentIsFull) {
- TestWorklist::Segment segment;
- EXPECT_FALSE(segment.IsFull());
- for (size_t i = 0; i < TestWorklist::Segment::kCapacity; i++) {
- EXPECT_TRUE(segment.Push(nullptr));
- }
- EXPECT_TRUE(segment.IsFull());
-}
-
-TEST(CppgcWorkListTest, SegmentClear) {
- TestWorklist::Segment segment;
- EXPECT_TRUE(segment.Push(nullptr));
- EXPECT_FALSE(segment.IsEmpty());
- segment.Clear();
- EXPECT_TRUE(segment.IsEmpty());
- for (size_t i = 0; i < TestWorklist::Segment::kCapacity; i++) {
- EXPECT_TRUE(segment.Push(nullptr));
- }
-}
-
-TEST(CppgcWorkListTest, SegmentFullPushFails) {
- TestWorklist::Segment segment;
- EXPECT_FALSE(segment.IsFull());
- for (size_t i = 0; i < TestWorklist::Segment::kCapacity; i++) {
- EXPECT_TRUE(segment.Push(nullptr));
- }
- EXPECT_TRUE(segment.IsFull());
- EXPECT_FALSE(segment.Push(nullptr));
-}
-
-TEST(CppgcWorkListTest, SegmentEmptyPopFails) {
- TestWorklist::Segment segment;
- EXPECT_TRUE(segment.IsEmpty());
- SomeObject* object;
- EXPECT_FALSE(segment.Pop(&object));
-}
-
-TEST(CppgcWorkListTest, SegmentUpdateFalse) {
- TestWorklist::Segment segment;
- SomeObject* object;
- object = reinterpret_cast<SomeObject*>(&object);
- EXPECT_TRUE(segment.Push(object));
- segment.Update([](SomeObject* object, SomeObject** out) { return false; });
- EXPECT_TRUE(segment.IsEmpty());
-}
-
-TEST(CppgcWorkListTest, SegmentUpdate) {
- TestWorklist::Segment segment;
- SomeObject* objectA;
- objectA = reinterpret_cast<SomeObject*>(&objectA);
- SomeObject* objectB;
- objectB = reinterpret_cast<SomeObject*>(&objectB);
- EXPECT_TRUE(segment.Push(objectA));
- segment.Update([objectB](SomeObject* object, SomeObject** out) {
- *out = objectB;
- return true;
- });
- SomeObject* object;
- EXPECT_TRUE(segment.Pop(&object));
- EXPECT_EQ(object, objectB);
-}
-
-TEST(CppgcWorkListTest, CreateEmpty) {
- TestWorklist worklist;
- TestWorklist::View worklist_view(&worklist, 0);
- EXPECT_TRUE(worklist_view.IsLocalEmpty());
- EXPECT_TRUE(worklist.IsEmpty());
-}
-
-TEST(CppgcWorkListTest, LocalPushPop) {
- TestWorklist worklist;
- TestWorklist::View worklist_view(&worklist, 0);
- SomeObject dummy;
- SomeObject* retrieved = nullptr;
- EXPECT_TRUE(worklist_view.Push(&dummy));
- EXPECT_FALSE(worklist_view.IsLocalEmpty());
- EXPECT_TRUE(worklist_view.Pop(&retrieved));
- EXPECT_EQ(&dummy, retrieved);
-}
-
-TEST(CppgcWorkListTest, LocalIsBasedOnId) {
- TestWorklist worklist;
- // Use the same id.
- TestWorklist::View worklist_view1(&worklist, 0);
- TestWorklist::View worklist_view2(&worklist, 0);
- SomeObject dummy;
- SomeObject* retrieved = nullptr;
- EXPECT_TRUE(worklist_view1.Push(&dummy));
- EXPECT_FALSE(worklist_view1.IsLocalEmpty());
- EXPECT_FALSE(worklist_view2.IsLocalEmpty());
- EXPECT_TRUE(worklist_view2.Pop(&retrieved));
- EXPECT_EQ(&dummy, retrieved);
- EXPECT_TRUE(worklist_view1.IsLocalEmpty());
- EXPECT_TRUE(worklist_view2.IsLocalEmpty());
-}
-
-TEST(CppgcWorkListTest, LocalPushStaysPrivate) {
- TestWorklist worklist;
- TestWorklist::View worklist_view1(&worklist, 0);
- TestWorklist::View worklist_view2(&worklist, 1);
- SomeObject dummy;
- SomeObject* retrieved = nullptr;
- EXPECT_TRUE(worklist.IsEmpty());
- EXPECT_EQ(0U, worklist.GlobalPoolSize());
- EXPECT_TRUE(worklist_view1.Push(&dummy));
- EXPECT_FALSE(worklist.IsEmpty());
- EXPECT_EQ(0U, worklist.GlobalPoolSize());
- EXPECT_FALSE(worklist_view2.Pop(&retrieved));
- EXPECT_EQ(nullptr, retrieved);
- EXPECT_TRUE(worklist_view1.Pop(&retrieved));
- EXPECT_EQ(&dummy, retrieved);
- EXPECT_TRUE(worklist.IsEmpty());
- EXPECT_EQ(0U, worklist.GlobalPoolSize());
-}
-
-TEST(CppgcWorkListTest, GlobalUpdateNull) {
- TestWorklist worklist;
- TestWorklist::View worklist_view(&worklist, 0);
- SomeObject* object;
- object = reinterpret_cast<SomeObject*>(&object);
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view.Push(object));
- }
- EXPECT_TRUE(worklist_view.Push(object));
- worklist.Update([](SomeObject* object, SomeObject** out) { return false; });
- EXPECT_TRUE(worklist.IsEmpty());
- EXPECT_EQ(0U, worklist.GlobalPoolSize());
-}
-
-TEST(CppgcWorkListTest, GlobalUpdate) {
- TestWorklist worklist;
- TestWorklist::View worklist_view(&worklist, 0);
- SomeObject* objectA = nullptr;
- objectA = reinterpret_cast<SomeObject*>(&objectA);
- SomeObject* objectB = nullptr;
- objectB = reinterpret_cast<SomeObject*>(&objectB);
- SomeObject* objectC = nullptr;
- objectC = reinterpret_cast<SomeObject*>(&objectC);
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view.Push(objectA));
- }
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view.Push(objectB));
- }
- EXPECT_TRUE(worklist_view.Push(objectA));
- worklist.Update([objectA, objectC](SomeObject* object, SomeObject** out) {
- if (object != objectA) {
- *out = objectC;
- return true;
- }
- return false;
- });
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- SomeObject* object;
- EXPECT_TRUE(worklist_view.Pop(&object));
- EXPECT_EQ(object, objectC);
- }
-}
-
-TEST(CppgcWorkListTest, FlushToGlobalPushSegment) {
- TestWorklist worklist;
- TestWorklist::View worklist_view0(&worklist, 0);
- TestWorklist::View worklist_view1(&worklist, 1);
- SomeObject* object = nullptr;
- SomeObject* objectA = nullptr;
- objectA = reinterpret_cast<SomeObject*>(&objectA);
- EXPECT_TRUE(worklist_view0.Push(objectA));
- worklist.FlushToGlobal(0);
- EXPECT_EQ(1U, worklist.GlobalPoolSize());
- EXPECT_TRUE(worklist_view1.Pop(&object));
-}
-
-TEST(CppgcWorkListTest, FlushToGlobalPopSegment) {
- TestWorklist worklist;
- TestWorklist::View worklist_view0(&worklist, 0);
- TestWorklist::View worklist_view1(&worklist, 1);
- SomeObject* object = nullptr;
- SomeObject* objectA = nullptr;
- objectA = reinterpret_cast<SomeObject*>(&objectA);
- EXPECT_TRUE(worklist_view0.Push(objectA));
- EXPECT_TRUE(worklist_view0.Push(objectA));
- EXPECT_TRUE(worklist_view0.Pop(&object));
- worklist.FlushToGlobal(0);
- EXPECT_EQ(1U, worklist.GlobalPoolSize());
- EXPECT_TRUE(worklist_view1.Pop(&object));
-}
-
-TEST(CppgcWorkListTest, Clear) {
- TestWorklist worklist;
- TestWorklist::View worklist_view(&worklist, 0);
- SomeObject* object;
- object = reinterpret_cast<SomeObject*>(&object);
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view.Push(object));
- }
- EXPECT_TRUE(worklist_view.Push(object));
- EXPECT_EQ(1U, worklist.GlobalPoolSize());
- worklist.Clear();
- EXPECT_TRUE(worklist.IsEmpty());
- EXPECT_EQ(0U, worklist.GlobalPoolSize());
-}
-
-TEST(CppgcWorkListTest, SingleSegmentSteal) {
- TestWorklist worklist;
- TestWorklist::View worklist_view1(&worklist, 0);
- TestWorklist::View worklist_view2(&worklist, 1);
- SomeObject dummy;
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view1.Push(&dummy));
- }
- SomeObject* retrieved = nullptr;
- // One more push/pop to publish the full segment.
- EXPECT_TRUE(worklist_view1.Push(nullptr));
- EXPECT_TRUE(worklist_view1.Pop(&retrieved));
- EXPECT_EQ(nullptr, retrieved);
- EXPECT_EQ(1U, worklist.GlobalPoolSize());
- // Stealing.
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view2.Pop(&retrieved));
- EXPECT_EQ(&dummy, retrieved);
- EXPECT_FALSE(worklist_view1.Pop(&retrieved));
- }
- EXPECT_TRUE(worklist.IsEmpty());
- EXPECT_EQ(0U, worklist.GlobalPoolSize());
-}
-
-TEST(CppgcWorkListTest, MultipleSegmentsStolen) {
- TestWorklist worklist;
- TestWorklist::View worklist_view1(&worklist, 0);
- TestWorklist::View worklist_view2(&worklist, 1);
- TestWorklist::View worklist_view3(&worklist, 2);
- SomeObject dummy1;
- SomeObject dummy2;
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view1.Push(&dummy1));
- }
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view1.Push(&dummy2));
- }
- SomeObject* retrieved = nullptr;
- SomeObject dummy3;
- // One more push/pop to publish the full segment.
- EXPECT_TRUE(worklist_view1.Push(&dummy3));
- EXPECT_TRUE(worklist_view1.Pop(&retrieved));
- EXPECT_EQ(&dummy3, retrieved);
- EXPECT_EQ(2U, worklist.GlobalPoolSize());
- // Stealing.
- EXPECT_TRUE(worklist_view2.Pop(&retrieved));
- SomeObject* const expect_bag2 = retrieved;
- EXPECT_TRUE(worklist_view3.Pop(&retrieved));
- SomeObject* const expect_bag3 = retrieved;
- EXPECT_EQ(0U, worklist.GlobalPoolSize());
- EXPECT_NE(expect_bag2, expect_bag3);
- EXPECT_TRUE(expect_bag2 == &dummy1 || expect_bag2 == &dummy2);
- EXPECT_TRUE(expect_bag3 == &dummy1 || expect_bag3 == &dummy2);
- for (size_t i = 1; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view2.Pop(&retrieved));
- EXPECT_EQ(expect_bag2, retrieved);
- EXPECT_FALSE(worklist_view1.Pop(&retrieved));
- }
- for (size_t i = 1; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view3.Pop(&retrieved));
- EXPECT_EQ(expect_bag3, retrieved);
- EXPECT_FALSE(worklist_view1.Pop(&retrieved));
- }
- EXPECT_TRUE(worklist.IsEmpty());
-}
-
-TEST(CppgcWorkListTest, MergeGlobalPool) {
- TestWorklist worklist1;
- TestWorklist::View worklist_view1(&worklist1, 0);
- SomeObject dummy;
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view1.Push(&dummy));
- }
- SomeObject* retrieved = nullptr;
- // One more push/pop to publish the full segment.
- EXPECT_TRUE(worklist_view1.Push(nullptr));
- EXPECT_TRUE(worklist_view1.Pop(&retrieved));
- EXPECT_EQ(nullptr, retrieved);
- EXPECT_EQ(1U, worklist1.GlobalPoolSize());
- // Merging global pool into a new Worklist.
- TestWorklist worklist2;
- TestWorklist::View worklist_view2(&worklist2, 0);
- EXPECT_EQ(0U, worklist2.GlobalPoolSize());
- worklist2.MergeGlobalPool(&worklist1);
- EXPECT_EQ(1U, worklist2.GlobalPoolSize());
- EXPECT_FALSE(worklist2.IsEmpty());
- for (size_t i = 0; i < TestWorklist::kSegmentCapacity; i++) {
- EXPECT_TRUE(worklist_view2.Pop(&retrieved));
- EXPECT_EQ(&dummy, retrieved);
- EXPECT_FALSE(worklist_view1.Pop(&retrieved));
- }
- EXPECT_TRUE(worklist1.IsEmpty());
- EXPECT_TRUE(worklist2.IsEmpty());
-}
-
-} // namespace internal
-} // namespace cppgc
diff --git a/deps/v8/test/unittests/heap/cppgc/write-barrier-unittest.cc b/deps/v8/test/unittests/heap/cppgc/write-barrier-unittest.cc
index b8ca42db53..b06083d1ef 100644
--- a/deps/v8/test/unittests/heap/cppgc/write-barrier-unittest.cc
+++ b/deps/v8/test/unittests/heap/cppgc/write-barrier-unittest.cc
@@ -21,20 +21,18 @@ namespace {
class IncrementalMarkingScope {
public:
- explicit IncrementalMarkingScope(MarkerBase* marker) : marker_(marker) {
- marker_->StartMarking(kIncrementalConfig);
- }
+ explicit IncrementalMarkingScope(MarkerBase* marker) : marker_(marker) {}
~IncrementalMarkingScope() V8_NOEXCEPT {
- marker_->FinishMarking(kIncrementalConfig);
+ marker_->FinishMarking(kIncrementalConfig.stack_state);
}
- private:
static constexpr Marker::MarkingConfig kIncrementalConfig{
Marker::MarkingConfig::CollectionType::kMajor,
Marker::MarkingConfig::StackState::kNoHeapPointers,
Marker::MarkingConfig::MarkingType::kIncremental};
+ private:
MarkerBase* marker_;
};
@@ -45,15 +43,12 @@ class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
ExpectWriteBarrierFires(MarkerBase* marker,
std::initializer_list<void*> objects)
: IncrementalMarkingScope(marker),
- marking_worklist_(
- marker->MarkingWorklistsForTesting().marking_worklist(),
- MarkingWorklists::kMutatorThreadId),
+ marking_worklist_(marker->MarkingStateForTesting().marking_worklist()),
write_barrier_worklist_(
- marker->MarkingWorklistsForTesting().write_barrier_worklist(),
- MarkingWorklists::kMutatorThreadId),
+ marker->MarkingStateForTesting().write_barrier_worklist()),
objects_(objects) {
- EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
- EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
+ EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
+ EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
for (void* object : objects) {
headers_.push_back(&HeapObjectHeader::FromPayload(object));
EXPECT_FALSE(headers_.back()->IsMarked());
@@ -81,13 +76,13 @@ class ExpectWriteBarrierFires final : private IncrementalMarkingScope {
EXPECT_TRUE(header->IsMarked());
header->Unmark();
}
- EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
- EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
+ EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
+ EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
}
private:
- MarkingWorklists::MarkingWorklist::View marking_worklist_;
- MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist_;
+ MarkingWorklists::MarkingWorklist::Local& marking_worklist_;
+ MarkingWorklists::WriteBarrierWorklist::Local& write_barrier_worklist_;
std::vector<void*> objects_;
std::vector<HeapObjectHeader*> headers_;
};
@@ -97,14 +92,11 @@ class ExpectNoWriteBarrierFires final : private IncrementalMarkingScope {
ExpectNoWriteBarrierFires(MarkerBase* marker,
std::initializer_list<void*> objects)
: IncrementalMarkingScope(marker),
- marking_worklist_(
- marker->MarkingWorklistsForTesting().marking_worklist(),
- MarkingWorklists::kMutatorThreadId),
+ marking_worklist_(marker->MarkingStateForTesting().marking_worklist()),
write_barrier_worklist_(
- marker->MarkingWorklistsForTesting().write_barrier_worklist(),
- MarkingWorklists::kMutatorThreadId) {
- EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
- EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
+ marker->MarkingStateForTesting().write_barrier_worklist()) {
+ EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
+ EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
for (void* object : objects) {
auto* header = &HeapObjectHeader::FromPayload(object);
headers_.emplace_back(header, header->IsMarked());
@@ -112,16 +104,16 @@ class ExpectNoWriteBarrierFires final : private IncrementalMarkingScope {
}
~ExpectNoWriteBarrierFires() {
- EXPECT_TRUE(marking_worklist_.IsGlobalPoolEmpty());
- EXPECT_TRUE(write_barrier_worklist_.IsGlobalPoolEmpty());
+ EXPECT_TRUE(marking_worklist_.IsGlobalEmpty());
+ EXPECT_TRUE(write_barrier_worklist_.IsGlobalEmpty());
for (const auto& pair : headers_) {
EXPECT_EQ(pair.second, pair.first->IsMarked());
}
}
private:
- MarkingWorklists::MarkingWorklist::View marking_worklist_;
- MarkingWorklists::WriteBarrierWorklist::View write_barrier_worklist_;
+ MarkingWorklists::MarkingWorklist::Local& marking_worklist_;
+ MarkingWorklists::WriteBarrierWorklist::Local& write_barrier_worklist_;
std::vector<std::pair<HeapObjectHeader*, bool /* was marked */>> headers_;
};
@@ -149,7 +141,9 @@ class GCed : public GarbageCollected<GCed> {
class WriteBarrierTest : public testing::TestWithHeap {
public:
WriteBarrierTest() : internal_heap_(Heap::From(GetHeap())) {
- GetMarkerRef() = std::make_unique<Marker>(internal_heap_->AsBase());
+ GetMarkerRef() = MarkerFactory::CreateAndStartMarking<Marker>(
+ *internal_heap_, GetPlatformHandle().get(),
+ IncrementalMarkingScope::kIncrementalConfig);
marker_ = GetMarkerRef().get();
}
@@ -165,6 +159,8 @@ class WriteBarrierTest : public testing::TestWithHeap {
MarkerBase* marker_;
};
+class NoWriteBarrierTest : public testing::TestWithHeap {};
+
// =============================================================================
// Basic support. ==============================================================
// =============================================================================
@@ -187,7 +183,7 @@ TEST_F(WriteBarrierTest, TriggersWhenMarkingIsOn) {
}
}
-TEST_F(WriteBarrierTest, BailoutWhenMarkingIsOff) {
+TEST_F(NoWriteBarrierTest, BailoutWhenMarkingIsOff) {
auto* object1 = MakeGarbageCollected<GCed>(GetAllocationHandle());
auto* object2 = MakeGarbageCollected<GCed>(GetAllocationHandle());
EXPECT_FALSE(object1->IsMarked());
diff --git a/deps/v8/test/unittests/heap/heap-unittest.cc b/deps/v8/test/unittests/heap/heap-unittest.cc
index 9181992b38..34774b3a35 100644
--- a/deps/v8/test/unittests/heap/heap-unittest.cc
+++ b/deps/v8/test/unittests/heap/heap-unittest.cc
@@ -10,6 +10,7 @@
#include "src/handles/handles-inl.h"
#include "src/heap/memory-chunk.h"
+#include "src/heap/safepoint.h"
#include "src/heap/spaces-inl.h"
#include "src/objects/objects-inl.h"
#include "test/unittests/test-utils.h"
@@ -151,6 +152,7 @@ TEST_F(HeapWithPointerCompressionTest, HeapLayout) {
// Check that all memory chunks belong this region.
base::AddressRegion heap_reservation(isolate_root, size_t{4} * GB);
+ SafepointScope scope(i_isolate()->heap());
OldGenerationMemoryChunkIterator iter(i_isolate()->heap());
for (;;) {
MemoryChunk* chunk = iter.next();
diff --git a/deps/v8/test/unittests/heap/index-generator-unittest.cc b/deps/v8/test/unittests/heap/index-generator-unittest.cc
new file mode 100644
index 0000000000..11627741b8
--- /dev/null
+++ b/deps/v8/test/unittests/heap/index-generator-unittest.cc
@@ -0,0 +1,50 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/heap/index-generator.h"
+
+#include "test/unittests/test-utils.h"
+
+namespace v8 {
+namespace internal {
+
+TEST(IndexGeneratorTest, Empty) {
+ IndexGenerator gen(0);
+
+ EXPECT_EQ(base::nullopt, gen.GetNext());
+}
+
+TEST(IndexGeneratorTest, GetNext) {
+ IndexGenerator gen(11);
+
+ EXPECT_EQ(0U, gen.GetNext());
+ EXPECT_EQ(5U, gen.GetNext());
+ EXPECT_EQ(2U, gen.GetNext());
+ EXPECT_EQ(8U, gen.GetNext());
+ EXPECT_EQ(1U, gen.GetNext());
+ EXPECT_EQ(3U, gen.GetNext());
+ EXPECT_EQ(6U, gen.GetNext());
+ EXPECT_EQ(9U, gen.GetNext());
+ EXPECT_EQ(4U, gen.GetNext());
+ EXPECT_EQ(7U, gen.GetNext());
+ EXPECT_EQ(10U, gen.GetNext());
+ EXPECT_EQ(base::nullopt, gen.GetNext());
+}
+
+TEST(IndexGeneratorTest, GiveBack) {
+ IndexGenerator gen(4);
+
+ EXPECT_EQ(0U, gen.GetNext());
+ EXPECT_EQ(2U, gen.GetNext());
+ EXPECT_EQ(1U, gen.GetNext());
+ gen.GiveBack(2);
+ gen.GiveBack(0);
+ EXPECT_EQ(0U, gen.GetNext());
+ EXPECT_EQ(2U, gen.GetNext());
+ EXPECT_EQ(3U, gen.GetNext());
+ EXPECT_EQ(base::nullopt, gen.GetNext());
+}
+
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/heap/object-start-bitmap-unittest.cc b/deps/v8/test/unittests/heap/object-start-bitmap-unittest.cc
new file mode 100644
index 0000000000..8e57d86d95
--- /dev/null
+++ b/deps/v8/test/unittests/heap/object-start-bitmap-unittest.cc
@@ -0,0 +1,174 @@
+// Copyright 2020 the V8 project authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+
+#include "src/heap/object-start-bitmap.h"
+
+#include "src/base/macros.h"
+#include "testing/gtest/include/gtest/gtest.h"
+
+namespace v8 {
+namespace internal {
+
+class ObjectStartBitmap;
+
+namespace {
+
+bool IsEmpty(const ObjectStartBitmap& bitmap) {
+ size_t count = 0;
+ bitmap.Iterate([&count](Address) { count++; });
+ return count == 0;
+}
+
+// Abstraction for objects that hides ObjectStartBitmap::kGranularity and
+// the base address as getting either of it wrong will result in failed DCHECKs.
+class TestObject {
+ public:
+ static Address kBaseOffset;
+
+ explicit TestObject(size_t number) : number_(number) {
+ const size_t max_entries = ObjectStartBitmap::MaxEntries();
+ EXPECT_GE(max_entries, number_);
+ }
+
+ Address base_ptr() const {
+ return kBaseOffset + ObjectStartBitmap::Granularity() * number_;
+ }
+
+ // Allow implicitly converting Object to Address.
+ operator Address() const { return base_ptr(); }
+
+ private:
+ const size_t number_;
+};
+
+Address TestObject::kBaseOffset = reinterpret_cast<Address>(0x4000ul);
+
+} // namespace
+
+TEST(V8ObjectStartBitmapTest, MoreThanZeroEntriesPossible) {
+ const size_t max_entries = ObjectStartBitmap::MaxEntries();
+ EXPECT_LT(0u, max_entries);
+}
+
+TEST(V8ObjectStartBitmapTest, InitialEmpty) {
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ EXPECT_TRUE(IsEmpty(bitmap));
+}
+
+TEST(V8ObjectStartBitmapTest, SetBitImpliesNonEmpty) {
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ bitmap.SetBit(TestObject(0));
+ EXPECT_FALSE(IsEmpty(bitmap));
+}
+
+TEST(V8ObjectStartBitmapTest, SetBitCheckBit) {
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ TestObject object(7);
+ bitmap.SetBit(object);
+ EXPECT_TRUE(bitmap.CheckBit(object));
+}
+
+TEST(V8ObjectStartBitmapTest, SetBitClearbitCheckBit) {
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ TestObject object(77);
+ bitmap.SetBit(object);
+ bitmap.ClearBit(object);
+ EXPECT_FALSE(bitmap.CheckBit(object));
+}
+
+TEST(V8ObjectStartBitmapTest, SetBitClearBitImpliesEmpty) {
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ TestObject object(123);
+ bitmap.SetBit(object);
+ bitmap.ClearBit(object);
+ EXPECT_TRUE(IsEmpty(bitmap));
+}
+
+TEST(V8ObjectStartBitmapTest, AdjacentObjectsAtBegin) {
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ TestObject object0(0);
+ TestObject object1(1);
+ bitmap.SetBit(object0);
+ bitmap.SetBit(object1);
+ EXPECT_FALSE(bitmap.CheckBit(TestObject(3)));
+ size_t count = 0;
+ bitmap.Iterate([&count, object0, object1](Address current) {
+ if (count == 0) {
+ EXPECT_EQ(object0.base_ptr(), current);
+ } else if (count == 1) {
+ EXPECT_EQ(object1.base_ptr(), current);
+ }
+ count++;
+ });
+ EXPECT_EQ(2u, count);
+}
+
+TEST(V8ObjectStartBitmapTest, AdjacentObjectsAtEnd) {
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ const size_t last_entry_index = ObjectStartBitmap::MaxEntries() - 1;
+ TestObject object0(last_entry_index - 1);
+ TestObject object1(last_entry_index);
+ bitmap.SetBit(object0);
+ bitmap.SetBit(object1);
+ EXPECT_FALSE(bitmap.CheckBit(TestObject(last_entry_index - 2)));
+ size_t count = 0;
+ bitmap.Iterate([&count, object0, object1](Address current) {
+ if (count == 0) {
+ EXPECT_EQ(object0.base_ptr(), current);
+ } else if (count == 1) {
+ EXPECT_EQ(object1.base_ptr(), current);
+ }
+ count++;
+ });
+ EXPECT_EQ(2u, count);
+}
+
+TEST(V8ObjectStartBitmapTest, FindBasePtrExact) {
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ TestObject object(654);
+ bitmap.SetBit(object);
+ EXPECT_EQ(object.base_ptr(), bitmap.FindBasePtr(object.base_ptr()));
+}
+
+TEST(V8ObjectStartBitmapTest, FindBasePtrApproximate) {
+ static const size_t kInternalDelta = 37;
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ TestObject object(654);
+ bitmap.SetBit(object);
+ EXPECT_EQ(object.base_ptr(),
+ bitmap.FindBasePtr(object.base_ptr() + kInternalDelta));
+}
+
+TEST(V8ObjectStartBitmapTest, FindBasePtrIteratingWholeBitmap) {
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ TestObject object_to_find(TestObject(0));
+ Address hint_index = TestObject(ObjectStartBitmap::MaxEntries() - 1);
+ bitmap.SetBit(object_to_find);
+ EXPECT_EQ(object_to_find.base_ptr(), bitmap.FindBasePtr(hint_index));
+}
+
+TEST(V8ObjectStartBitmapTest, FindBasePtrNextCell) {
+ // This white box test makes use of the fact that cells are of type uint32_t.
+ const size_t kCellSize = sizeof(uint32_t);
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ TestObject object_to_find(TestObject(kCellSize - 1));
+ Address hint = TestObject(kCellSize);
+ bitmap.SetBit(TestObject(0));
+ bitmap.SetBit(object_to_find);
+ EXPECT_EQ(object_to_find.base_ptr(), bitmap.FindBasePtr(hint));
+}
+
+TEST(V8ObjectStartBitmapTest, FindBasePtrSameCell) {
+ // This white box test makes use of the fact that cells are of type uint32_t.
+ const size_t kCellSize = sizeof(uint32_t);
+ ObjectStartBitmap bitmap(TestObject::kBaseOffset);
+ TestObject object_to_find(TestObject(kCellSize - 1));
+ bitmap.SetBit(TestObject(0));
+ bitmap.SetBit(object_to_find);
+ EXPECT_EQ(object_to_find.base_ptr(),
+ bitmap.FindBasePtr(object_to_find.base_ptr()));
+}
+
+} // namespace internal
+} // namespace v8
diff --git a/deps/v8/test/unittests/heap/safepoint-unittest.cc b/deps/v8/test/unittests/heap/safepoint-unittest.cc
index 264d3e936e..214d15277c 100644
--- a/deps/v8/test/unittests/heap/safepoint-unittest.cc
+++ b/deps/v8/test/unittests/heap/safepoint-unittest.cc
@@ -13,11 +13,17 @@
namespace v8 {
namespace internal {
+void EnsureFlagLocalHeapsEnabled() {
+ // Avoid data race in concurrent thread by only setting the flag to true if
+ // not already enabled.
+ if (!FLAG_local_heaps) FLAG_local_heaps = true;
+}
+
using SafepointTest = TestWithIsolate;
TEST_F(SafepointTest, ReachSafepointWithoutLocalHeaps) {
+ EnsureFlagLocalHeapsEnabled();
Heap* heap = i_isolate()->heap();
- FLAG_local_heaps = true;
bool run = false;
{
SafepointScope scope(heap);
@@ -47,8 +53,8 @@ class ParkedThread final : public v8::base::Thread {
};
TEST_F(SafepointTest, StopParkedThreads) {
+ EnsureFlagLocalHeapsEnabled();
Heap* heap = i_isolate()->heap();
- FLAG_local_heaps = true;
int safepoints = 0;
@@ -106,8 +112,8 @@ class RunningThread final : public v8::base::Thread {
};
TEST_F(SafepointTest, StopRunningThreads) {
+ EnsureFlagLocalHeapsEnabled();
Heap* heap = i_isolate()->heap();
- FLAG_local_heaps = true;
const int kThreads = 10;
const int kRuns = 5;
@@ -139,8 +145,8 @@ TEST_F(SafepointTest, StopRunningThreads) {
}
TEST_F(SafepointTest, SkipLocalHeapOfThisThread) {
+ EnsureFlagLocalHeapsEnabled();
Heap* heap = i_isolate()->heap();
- FLAG_local_heaps = true;
LocalHeap local_heap(heap);
{
SafepointScope scope(heap);
diff --git a/deps/v8/test/unittests/heap/unmapper-unittest.cc b/deps/v8/test/unittests/heap/unmapper-unittest.cc
index a919945d3f..bd476cd1ec 100644
--- a/deps/v8/test/unittests/heap/unmapper-unittest.cc
+++ b/deps/v8/test/unittests/heap/unmapper-unittest.cc
@@ -170,7 +170,6 @@ class TrackingPageAllocator : public ::v8::PageAllocator {
os << " page: [" << start << ", " << end << "), access: ";
switch (access) {
case PageAllocator::kNoAccess:
- case PageAllocator::kNoAccessWillJitLater:
os << "--";
break;
case PageAllocator::kRead: