path: root/deps/v8/test/cctest/heap/test-invalidated-slots.cc
author    Michaël Zasso <targos@protonmail.com>  2019-11-08 15:39:11 +0100
committer Michaël Zasso <targos@protonmail.com>  2019-11-08 15:46:25 +0100
commit    6ca81ad72a3c6fdf16c683335be748f22aaa9a0d (patch)
tree      33c8ee75f729aed76c2c0b89c63f9bf1b4dd66aa /deps/v8/test/cctest/heap/test-invalidated-slots.cc
parent    1eee0b8bf8bba39b600fb16a9223e545e3bac2bc (diff)
download  node-new-6ca81ad72a3c6fdf16c683335be748f22aaa9a0d.tar.gz
deps: update V8 to 7.9.317.20
PR-URL: https://github.com/nodejs/node/pull/30020
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Jiawen Geng <technicalcute@gmail.com>
Reviewed-By: Anna Henningsen <anna@addaleax.net>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
Diffstat (limited to 'deps/v8/test/cctest/heap/test-invalidated-slots.cc')
-rw-r--r--  deps/v8/test/cctest/heap/test-invalidated-slots.cc  27
1 file changed, 8 insertions, 19 deletions
diff --git a/deps/v8/test/cctest/heap/test-invalidated-slots.cc b/deps/v8/test/cctest/heap/test-invalidated-slots.cc
index af42503f86..861c48d69d 100644
--- a/deps/v8/test/cctest/heap/test-invalidated-slots.cc
+++ b/deps/v8/test/cctest/heap/test-invalidated-slots.cc
@@ -70,8 +70,7 @@ HEAP_TEST(InvalidatedSlotsSomeInvalidatedRanges) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register every second byte array as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i += 2) {
- page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
- byte_arrays[i].Size());
+ page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
}
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
@@ -95,8 +94,7 @@ HEAP_TEST(InvalidatedSlotsAllInvalidatedRanges) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
- page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
- byte_arrays[i].Size());
+ page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
}
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
for (size_t i = 0; i < byte_arrays.size(); i++) {
@@ -117,8 +115,7 @@ HEAP_TEST(InvalidatedSlotsAfterTrimming) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
- page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
- byte_arrays[i].Size());
+ page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
}
// Trim byte arrays and check that the slots outside the byte arrays are
// considered invalid if the old space page was swept.
@@ -145,8 +142,7 @@ HEAP_TEST(InvalidatedSlotsEvacuationCandidate) {
// This should be a no-op because the page is marked as an evacuation
// candidate.
for (size_t i = 0; i < byte_arrays.size(); i++) {
- page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
- byte_arrays[i].Size());
+ page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
}
// All slots must still be valid.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
@@ -169,8 +165,7 @@ HEAP_TEST(InvalidatedSlotsResetObjectRegression) {
heap->RightTrimFixedArray(byte_arrays[0], byte_arrays[0].length() - 8);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
- page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i],
- byte_arrays[i].Size());
+ page->RegisterObjectWithInvalidatedSlots<OLD_TO_OLD>(byte_arrays[i]);
}
// All slots must still be invalid.
InvalidatedSlotsFilter filter = InvalidatedSlotsFilter::OldToOld(page);
@@ -359,8 +354,7 @@ HEAP_TEST(InvalidatedSlotsCleanupFull) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
- page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i],
- byte_arrays[i].Size());
+ page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i]);
}
// Mark full page as free
@@ -379,8 +373,7 @@ HEAP_TEST(InvalidatedSlotsCleanupEachObject) {
Page* page = AllocateByteArraysOnPage(heap, &byte_arrays);
// Register all byte arrays as invalidated.
for (size_t i = 0; i < byte_arrays.size(); i++) {
- page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i],
- byte_arrays[i].Size());
+ page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(byte_arrays[i]);
}
// Mark each object as free on page
@@ -405,11 +398,9 @@ HEAP_TEST(InvalidatedSlotsCleanupRightTrim) {
CHECK_GT(byte_arrays.size(), 1);
ByteArray& invalidated = byte_arrays[1];
- int invalidated_size = invalidated.Size();
heap->RightTrimFixedArray(invalidated, invalidated.length() - 8);
- page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated,
- invalidated_size);
+ page->RegisterObjectWithInvalidatedSlots<OLD_TO_NEW>(invalidated);
// Free memory at end of invalidated object
InvalidatedSlotsCleanup cleanup = InvalidatedSlotsCleanup::OldToNew(page);
@@ -418,8 +409,6 @@ HEAP_TEST(InvalidatedSlotsCleanupRightTrim) {
// After cleanup the invalidated object should be smaller
InvalidatedSlots* invalidated_slots = page->invalidated_slots<OLD_TO_NEW>();
- CHECK_GE((*invalidated_slots)[HeapObject::FromAddress(invalidated.address())],
- invalidated.Size());
CHECK_EQ(invalidated_slots->size(), 1);
}
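
The hunks above all make the same mechanical change: RegisterObjectWithInvalidatedSlots is now called with only the invalidated object, without the explicit byte_arrays[i].Size() argument, and the CHECK_GE against a stored size in InvalidatedSlotsCleanupRightTrim is gone. Presumably the registration side can obtain the object's size on its own when slots are actually filtered. The following is a minimal, self-contained C++ sketch of that general idea; it is not V8 code, and every name in it (FakeHeapObject, InvalidatedSlotsFilterSketch, Register, IsValid) is hypothetical and invented for illustration.

    // Hypothetical sketch, not V8's implementation: register invalidated
    // objects without a size snapshot and read their current size only when
    // a slot address is checked.
    #include <cstddef>
    #include <cstdint>
    #include <iostream>
    #include <set>

    using Address = std::uintptr_t;

    // Stand-in for a heap object that knows its own current size.
    struct FakeHeapObject {
      Address start;
      std::size_t size;  // may shrink later, e.g. after a right-trim
      Address end() const { return start + size; }
    };

    class InvalidatedSlotsFilterSketch {
     public:
      // Registration records only the object, no explicit size argument.
      void Register(const FakeHeapObject* object) { invalidated_.insert(object); }

      // A slot is treated as valid if it lies outside every registered
      // object's current [start, end) range, evaluated at query time.
      bool IsValid(Address slot) const {
        for (const FakeHeapObject* object : invalidated_) {
          if (slot >= object->start && slot < object->end()) return false;
        }
        return true;
      }

     private:
      std::set<const FakeHeapObject*> invalidated_;
    };

    int main() {
      FakeHeapObject array{/*start=*/0x1000, /*size=*/0x80};
      InvalidatedSlotsFilterSketch filter;
      filter.Register(&array);  // no size passed at registration

      array.size = 0x40;  // the object shrinks afterwards, like a right-trim

      std::cout << std::boolalpha;
      std::cout << filter.IsValid(0x1020) << "\n";  // false: inside the shrunken object
      std::cout << filter.IsValid(0x1060) << "\n";  // true: beyond the new end
      return 0;
    }

Because the sketch stores a pointer to the object and reads its size at query time, a later shrink (as in the right-trim tests above) is reflected automatically, which is one way to see why passing a size snapshot at registration would be redundant.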