author     Michaël Zasso <targos@protonmail.com>   2017-02-14 11:27:26 +0100
committer  Michaël Zasso <targos@protonmail.com>   2017-02-22 15:55:42 +0100
commit     7a77daf24344db7942e34c962b0f1ee729ab7af5 (patch)
tree       e7cbe7bf4e2f4b802a8f5bc18336c546cd6a0d7f /deps/v8/test/cctest/heap
parent     5f08871ee93ea739148cc49e0f7679e33c70295a (diff)
deps: update V8 to 5.6.326.55
PR-URL: https://github.com/nodejs/node/pull/10992
Reviewed-By: Ben Noordhuis <info@bnoordhuis.nl>
Diffstat (limited to 'deps/v8/test/cctest/heap')
-rw-r--r--  deps/v8/test/cctest/heap/heap-tester.h                |   7
-rw-r--r--  deps/v8/test/cctest/heap/heap-utils.cc                |  22
-rw-r--r--  deps/v8/test/cctest/heap/heap-utils.h                 |   2
-rw-r--r--  deps/v8/test/cctest/heap/test-alloc.cc                |  17
-rw-r--r--  deps/v8/test/cctest/heap/test-array-buffer-tracker.cc |   2
-rw-r--r--  deps/v8/test/cctest/heap/test-heap.cc                 | 243
-rw-r--r--  deps/v8/test/cctest/heap/test-mark-compact.cc         |  16
-rw-r--r--  deps/v8/test/cctest/heap/test-page-promotion.cc       |  41
-rw-r--r--  deps/v8/test/cctest/heap/test-spaces.cc               |  21

9 files changed, 192 insertions(+), 179 deletions(-)
diff --git a/deps/v8/test/cctest/heap/heap-tester.h b/deps/v8/test/cctest/heap/heap-tester.h
index a01de69291..99d39ca7ab 100644
--- a/deps/v8/test/cctest/heap/heap-tester.h
+++ b/deps/v8/test/cctest/heap/heap-tester.h
@@ -32,6 +32,8 @@
   V(Regress587004)                          \
   V(Regress538257)                          \
   V(Regress589413)                          \
+  V(Regress658718)                          \
+  V(Regress670675)                          \
   V(WriteBarriersInCopyJSObject)
 
 #define HEAP_TEST(Name)                                                       \
@@ -39,6 +41,11 @@
               #Name, true, true);                                             \
   void v8::internal::HeapTester::Test##Name()
 
+#define UNINITIALIZED_HEAP_TEST(Name)                                         \
+  CcTest register_test_##Name(v8::internal::HeapTester::Test##Name, __FILE__, \
+                              #Name, true, false);                            \
+  void v8::internal::HeapTester::Test##Name()
+
 #define THREADED_HEAP_TEST(Name)                                              \
   RegisterThreadedTest register_##Name(v8::internal::HeapTester::Test##Name,  \
                                        #Name);                                \
diff --git a/deps/v8/test/cctest/heap/heap-utils.cc b/deps/v8/test/cctest/heap/heap-utils.cc
index 4f7d088a94..c44f82f1d9 100644
--- a/deps/v8/test/cctest/heap/heap-utils.cc
+++ b/deps/v8/test/cctest/heap/heap-utils.cc
@@ -38,8 +38,7 @@ std::vector<Handle<FixedArray>> FillOldSpacePageWithFixedArrays(Heap* heap,
   const int kArrayLen = heap::FixedArrayLenFromSize(kArraySize);
   CHECK_EQ(Page::kAllocatableMemory % kArraySize, 0);
   Handle<FixedArray> array;
-  for (size_t allocated = 0;
-       allocated != (Page::kAllocatableMemory - remainder);
+  for (int allocated = 0; allocated != (Page::kAllocatableMemory - remainder);
        allocated += array->Size()) {
     if (allocated == (Page::kAllocatableMemory - kArraySize)) {
       array = isolate->factory()->NewFixedArray(
@@ -170,6 +169,10 @@ void SimulateIncrementalMarking(i::Heap* heap, bool force_completion) {
 }
 
 void SimulateFullSpace(v8::internal::PagedSpace* space) {
+  i::MarkCompactCollector* collector = space->heap()->mark_compact_collector();
+  if (collector->sweeping_in_progress()) {
+    collector->EnsureSweepingCompleted();
+  }
   space->EmptyAllocationInfo();
   space->ResetFreeList();
   space->ClearStats();
@@ -189,6 +192,21 @@ void GcAndSweep(Heap* heap, AllocationSpace space) {
   }
 }
 
+void ForceEvacuationCandidate(Page* page) {
+  CHECK(FLAG_manual_evacuation_candidates_selection);
+  page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  PagedSpace* space = static_cast<PagedSpace*>(page->owner());
+  Address top = space->top();
+  Address limit = space->limit();
+  if (top < limit && Page::FromAllocationAreaAddress(top) == page) {
+    // Create filler object to keep page iterable if it was iterable.
+    int remaining = static_cast<int>(limit - top);
+    space->heap()->CreateFillerObjectAt(top, remaining,
+                                        ClearRecordedSlots::kNo);
+    space->SetTopAndLimit(nullptr, nullptr);
+  }
+}
+
 }  // namespace heap
 }  // namespace internal
 }  // namespace v8
diff --git a/deps/v8/test/cctest/heap/heap-utils.h b/deps/v8/test/cctest/heap/heap-utils.h
index 2f704cb422..a494f54210 100644
--- a/deps/v8/test/cctest/heap/heap-utils.h
+++ b/deps/v8/test/cctest/heap/heap-utils.h
@@ -50,6 +50,8 @@ void AbandonCurrentlyFreeMemory(PagedSpace* space);
 
 void GcAndSweep(Heap* heap, AllocationSpace space);
 
+void ForceEvacuationCandidate(Page* page);
+
 }  // namespace heap
 }  // namespace internal
 }  // namespace v8
diff --git a/deps/v8/test/cctest/heap/test-alloc.cc b/deps/v8/test/cctest/heap/test-alloc.cc
index c01827eee1..d7031e876d 100644
--- a/deps/v8/test/cctest/heap/test-alloc.cc
+++ b/deps/v8/test/cctest/heap/test-alloc.cc
@@ -60,10 +60,12 @@ AllocationResult v8::internal::HeapTester::AllocateAfterFailures() {
   heap->AllocateFixedArray(10000, TENURED).ToObjectChecked();
 
   // Large object space.
-  static const int kLargeObjectSpaceFillerLength = 3 * (Page::kPageSize / 10);
-  static const int kLargeObjectSpaceFillerSize = FixedArray::SizeFor(
-      kLargeObjectSpaceFillerLength);
-  CHECK(kLargeObjectSpaceFillerSize > heap->old_space()->AreaSize());
+  static const size_t kLargeObjectSpaceFillerLength =
+      3 * (Page::kPageSize / 10);
+  static const size_t kLargeObjectSpaceFillerSize =
+      FixedArray::SizeFor(kLargeObjectSpaceFillerLength);
+  CHECK_GT(kLargeObjectSpaceFillerSize,
+           static_cast<size_t>(heap->old_space()->AreaSize()));
   while (heap->OldGenerationSpaceAvailable() > kLargeObjectSpaceFillerSize) {
     heap->AllocateFixedArray(
         kLargeObjectSpaceFillerLength, TENURED).ToObjectChecked();
@@ -116,11 +118,8 @@ void TestGetter(
       v8::internal::HeapTester::TestAllocateAfterFailures()));
 }
 
-
-void TestSetter(
-    v8::Local<v8::Name> name,
-    v8::Local<v8::Value> value,
-    const v8::PropertyCallbackInfo<void>& info) {
+void TestSetter(v8::Local<v8::Name> name, v8::Local<v8::Value> value,
+                const v8::PropertyCallbackInfo<v8::Boolean>& info) {
   UNREACHABLE();
 }
diff --git a/deps/v8/test/cctest/heap/test-array-buffer-tracker.cc b/deps/v8/test/cctest/heap/test-array-buffer-tracker.cc
index 173d1fa85f..0f3663041f 100644
--- a/deps/v8/test/cctest/heap/test-array-buffer-tracker.cc
+++ b/deps/v8/test/cctest/heap/test-array-buffer-tracker.cc
@@ -127,7 +127,7 @@ TEST(ArrayBuffer_Compaction) {
   heap::GcAndSweep(heap, NEW_SPACE);
 
   Page* page_before_gc = Page::FromAddress(buf1->address());
-  page_before_gc->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(page_before_gc);
   CHECK(IsTracked(*buf1));
 
   CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
diff --git a/deps/v8/test/cctest/heap/test-heap.cc b/deps/v8/test/cctest/heap/test-heap.cc
index c69d391f90..19474c3e95 100644
--- a/deps/v8/test/cctest/heap/test-heap.cc
+++ b/deps/v8/test/cctest/heap/test-heap.cc
@@ -749,46 +749,6 @@ TEST(DeleteWeakGlobalHandle) {
   CHECK(WeakPointerCleared);
 }
 
-TEST(DoNotPromoteWhiteObjectsOnScavenge) {
-  CcTest::InitializeVM();
-  Isolate* isolate = CcTest::i_isolate();
-  Heap* heap = isolate->heap();
-  Factory* factory = isolate->factory();
-
-  HandleScope scope(isolate);
-  Handle<Object> white = factory->NewStringFromStaticChars("white");
-
-  CHECK(Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*white))));
-
-  CcTest::CollectGarbage(NEW_SPACE);
-
-  CHECK(heap->InNewSpace(*white));
-}
-
-TEST(PromoteGreyOrBlackObjectsOnScavenge) {
-  CcTest::InitializeVM();
-  Isolate* isolate = CcTest::i_isolate();
-  Heap* heap = isolate->heap();
-  Factory* factory = isolate->factory();
-
-  HandleScope scope(isolate);
-  Handle<Object> marked = factory->NewStringFromStaticChars("marked");
-
-  IncrementalMarking* marking = heap->incremental_marking();
-  marking->Stop();
-  heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
-                                i::GarbageCollectionReason::kTesting);
-  while (
-      Marking::IsWhite(ObjectMarking::MarkBitFrom(HeapObject::cast(*marked)))) {
-    marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD,
-                  IncrementalMarking::DO_NOT_FORCE_COMPLETION, StepOrigin::kV8);
-  }
-
-  CcTest::CollectGarbage(NEW_SPACE);
-
-  CHECK(!heap->InNewSpace(*marked));
-}
-
 TEST(BytecodeArray) {
   static const uint8_t kRawBytes[] = {0xc3, 0x7e, 0xa5, 0x5a};
   static const int kRawBytesSize = sizeof(kRawBytes);
@@ -831,7 +791,7 @@ TEST(BytecodeArray) {
 
   // Perform a full garbage collection and force the constant pool to be on an
   // evacuation candidate.
   Page* evac_page = Page::FromAddress(constant_pool->address());
-  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(evac_page);
   CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
 
   // BytecodeArray should survive.
@@ -1078,7 +1038,7 @@ TEST(JSArray) {
 
   // Set array length to 0.
   JSArray::SetLength(array, 0);
-  CHECK_EQ(Smi::FromInt(0), array->length());
+  CHECK_EQ(Smi::kZero, array->length());
   // Must be in fast mode.
   CHECK(array->HasFastSmiOrObjectElements());
 
@@ -1319,8 +1279,10 @@ UNINITIALIZED_TEST(TestCodeFlushing) {
   }
 
   // foo should no longer be in the compilation cache
-  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
-  CHECK(!function->is_compiled() || function->IsOptimized());
+  CHECK(!function->shared()->is_compiled() || function->IsOptimized() ||
+        function->IsInterpreted());
+  CHECK(!function->is_compiled() || function->IsOptimized() ||
+        function->IsInterpreted());
 
   // Call foo to get it recompiled.
   CompileRun("foo()");
   CHECK(function->shared()->is_compiled());
@@ -1367,7 +1329,8 @@ TEST(TestCodeFlushingPreAged) {
   // The code was only run once, so it should be pre-aged and collected on the
   // next GC.
   CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
-  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
+  CHECK(!function->shared()->is_compiled() || function->IsOptimized() ||
+        function->IsInterpreted());
 
   // Execute the function again twice, and ensure it is reset to the young age.
   {
     v8::HandleScope scope(CcTest::isolate());
@@ -1387,8 +1350,10 @@ TEST(TestCodeFlushingPreAged) {
   }
 
   // foo should no longer be in the compilation cache
-  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
-  CHECK(!function->is_compiled() || function->IsOptimized());
+  CHECK(!function->shared()->is_compiled() || function->IsOptimized() ||
+        function->IsInterpreted());
+  CHECK(!function->is_compiled() || function->IsOptimized() ||
+        function->IsInterpreted());
 
   // Call foo to get it recompiled.
   CompileRun("foo()");
   CHECK(function->shared()->is_compiled());
@@ -1436,8 +1401,10 @@ TEST(TestCodeFlushingIncremental) {
     heap::SimulateIncrementalMarking(CcTest::heap());
     CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
   }
-  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
-  CHECK(!function->is_compiled() || function->IsOptimized());
+  CHECK(!function->shared()->is_compiled() || function->IsOptimized() ||
+        function->IsInterpreted());
+  CHECK(!function->is_compiled() || function->IsOptimized() ||
+        function->IsInterpreted());
 
   // This compile will compile the function again.
   {
     v8::HandleScope scope(CcTest::isolate());
@@ -1530,8 +1497,10 @@ TEST(TestCodeFlushingIncrementalScavenge) {
 
   // Simulate one final GC to make sure the candidate queue is sane.
   CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
-  CHECK(!function->shared()->is_compiled() || function->IsOptimized());
-  CHECK(!function->is_compiled() || function->IsOptimized());
+  CHECK(!function->shared()->is_compiled() || function->IsOptimized() ||
+        function->IsInterpreted());
+  CHECK(!function->is_compiled() || function->IsOptimized() ||
+        function->IsInterpreted());
 }
 
 
@@ -1584,7 +1553,7 @@ TEST(TestCodeFlushingIncrementalAbort) {
   // is running so that incremental marking aborts and code flushing is
   // disabled.
   int position = function->shared()->start_position();
-  Handle<Object> breakpoint_object(Smi::FromInt(0), isolate);
+  Handle<Object> breakpoint_object(Smi::kZero, isolate);
   EnableDebugger(CcTest::isolate());
   isolate->debug()->SetBreakPoint(function, breakpoint_object, &position);
   isolate->debug()->ClearBreakPoint(breakpoint_object);
@@ -1646,6 +1615,7 @@ TEST(TestUseOfIncrementalBarrierOnCompileLazy) {
 TEST(CompilationCacheCachingBehavior) {
   // If we do not flush code, or have the compilation cache turned off, this
   // test is invalid.
+  i::FLAG_allow_natives_syntax = true;
   if (!FLAG_flush_code || !FLAG_compilation_cache) {
     return;
   }
@@ -1662,7 +1632,7 @@ TEST(CompilationCacheCachingBehavior) {
       "  var y = 42;"
       "  var z = x + y;"
      "};"
-      "foo()";
+      "foo();";
   Handle<String> source = factory->InternalizeUtf8String(raw_source);
   Handle<Context> native_context = isolate->native_context();
@@ -2369,7 +2339,7 @@ TEST(GrowAndShrinkNewSpace) {
   }
 
   // Explicitly growing should double the space capacity.
-  intptr_t old_capacity, new_capacity;
+  size_t old_capacity, new_capacity;
   old_capacity = new_space->TotalCapacity();
   new_space->Grow();
   new_capacity = new_space->TotalCapacity();
@@ -2417,7 +2387,7 @@ TEST(CollectingAllAvailableGarbageShrinksNewSpace) {
   v8::HandleScope scope(CcTest::isolate());
   NewSpace* new_space = heap->new_space();
 
-  intptr_t old_capacity, new_capacity;
+  size_t old_capacity, new_capacity;
   old_capacity = new_space->TotalCapacity();
   new_space->Grow();
   new_capacity = new_space->TotalCapacity();
@@ -2697,8 +2667,8 @@ TEST(InstanceOfStubWriteBarrier) {
 namespace {
 
 int GetProfilerTicks(SharedFunctionInfo* shared) {
-  return FLAG_ignition ? shared->profiler_ticks()
-                       : shared->code()->profiler_ticks();
+  return FLAG_ignition || FLAG_turbo ? shared->profiler_ticks()
+                                     : shared->code()->profiler_ticks();
 }
 
 }  // namespace
@@ -4118,6 +4088,7 @@ TEST(Regress165495) {
 
 
 TEST(Regress169209) {
+  i::FLAG_always_opt = false;
   i::FLAG_stress_compaction = false;
   i::FLAG_allow_natives_syntax = true;
@@ -4135,11 +4106,15 @@ TEST(Regress169209) {
   {
     HandleScope inner_scope(isolate);
     LocalContext env;
-    CompileRun("function f() { return 'foobar'; }"
-               "function g(x) { if (x) f(); }"
-               "f();"
-               "g(false);"
-               "g(false);");
+    CompileRun(
+        "function f() { return 'foobar'; }"
+        "function g(x) { if (x) f(); }"
+        "f();"
+        "%BaselineFunctionOnNextCall(f);"
+        "f();"
+        "g(false);"
+        "%BaselineFunctionOnNextCall(g);"
+        "g(false);");
 
     Handle<JSFunction> f = Handle<JSFunction>::cast(
         v8::Utils::OpenHandle(*v8::Local<v8::Function>::Cast(
@@ -4159,8 +4134,11 @@ TEST(Regress169209) {
   {
     HandleScope inner_scope(isolate);
     LocalContext env;
-    CompileRun("function flushMe() { return 0; }"
-               "flushMe(1);");
+    CompileRun(
+        "function flushMe() { return 0; }"
+        "flushMe(1);"
+        "%BaselineFunctionOnNextCall(flushMe);"
+        "flushMe(1);");
 
     Handle<JSFunction> f = Handle<JSFunction>::cast(v8::Utils::OpenHandle(
         *v8::Local<v8::Function>::Cast(CcTest::global()
@@ -4398,7 +4376,7 @@ TEST(Regress514122) {
   // Heap is ready, force {lit_page} to become an evacuation candidate and
   // simulate incremental marking to enqueue optimized code map.
   FLAG_manual_evacuation_candidates_selection = true;
-  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(evac_page);
   heap::SimulateIncrementalMarking(heap);
 
   // No matter whether reachable or not, {boomer} is doomed.
@@ -4597,7 +4575,7 @@ TEST(LargeObjectSlotRecording) {
   heap::SimulateFullSpace(heap->old_space());
   Handle<FixedArray> lit = isolate->factory()->NewFixedArray(4, TENURED);
   Page* evac_page = Page::FromAddress(lit->address());
-  evac_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(evac_page);
   FixedArray* old_location = *lit;
 
   // Allocate a large object.
@@ -5603,8 +5581,7 @@ HEAP_TEST(Regress538257) {
            heap->CanExpandOldGeneration(old_space->AreaSize());
        i++) {
     objects[i] = i_isolate->factory()->NewFixedArray(kFixedArrayLen, TENURED);
-    Page::FromAddress(objects[i]->address())
-        ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+    heap::ForceEvacuationCandidate(Page::FromAddress(objects[i]->address()));
   }
   heap::SimulateFullSpace(old_space);
   heap->CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask,
@@ -5701,7 +5678,8 @@ UNINITIALIZED_TEST(PromotionQueue) {
 
     CHECK(new_space->IsAtMaximumCapacity());
-    CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
+    CHECK_EQ(static_cast<size_t>(i::FLAG_min_semi_space_size * MB),
+             new_space->TotalCapacity());
 
     // Call the scavenger two times to get an empty new space
     heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
@@ -5717,7 +5695,8 @@ UNINITIALIZED_TEST(PromotionQueue) {
     }
     heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
-    CHECK(i::FLAG_min_semi_space_size * MB == new_space->TotalCapacity());
+    CHECK_EQ(static_cast<size_t>(i::FLAG_min_semi_space_size * MB),
+             new_space->TotalCapacity());
 
     // Fill-up the first semi-space page.
     heap::FillUpOnePage(new_space);
@@ -5755,13 +5734,13 @@ TEST(Regress388880) {
                           Representation::Tagged(), OMIT_TRANSITION)
           .ToHandleChecked();
 
-  int desired_offset = Page::kPageSize - map1->instance_size();
+  size_t desired_offset = Page::kPageSize - map1->instance_size();
 
   // Allocate padding objects in old pointer space so, that object allocated
   // afterwards would end at the end of the page.
   heap::SimulateFullSpace(heap->old_space());
-  int padding_size = desired_offset - Page::kObjectStartOffset;
-  heap::CreatePadding(heap, padding_size, TENURED);
+  size_t padding_size = desired_offset - Page::kObjectStartOffset;
+  heap::CreatePadding(heap, static_cast<int>(padding_size), TENURED);
 
   Handle<JSObject> o = factory->NewJSObjectFromMap(map1, TENURED);
   o->set_properties(*factory->empty_fixed_array());
@@ -6449,30 +6428,6 @@ TEST(Regress519319) {
 }
 
 
-HEAP_TEST(TestMemoryReducerSampleJsCalls) {
-  CcTest::InitializeVM();
-  v8::HandleScope scope(CcTest::isolate());
-  Heap* heap = CcTest::heap();
-  Isolate* isolate = CcTest::i_isolate();
-  MemoryReducer* memory_reducer = heap->memory_reducer_;
-  memory_reducer->SampleAndGetJsCallsPerMs(0);
-  isolate->IncrementJsCallsFromApiCounter();
-  isolate->IncrementJsCallsFromApiCounter();
-  isolate->IncrementJsCallsFromApiCounter();
-  double calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(1);
-  CheckDoubleEquals(3, calls_per_ms);
-
-  calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(2);
-  CheckDoubleEquals(0, calls_per_ms);
-
-  isolate->IncrementJsCallsFromApiCounter();
-  isolate->IncrementJsCallsFromApiCounter();
-  isolate->IncrementJsCallsFromApiCounter();
-  isolate->IncrementJsCallsFromApiCounter();
-  calls_per_ms = memory_reducer->SampleAndGetJsCallsPerMs(4);
-  CheckDoubleEquals(2, calls_per_ms);
-}
-
 HEAP_TEST(Regress587004) {
   FLAG_concurrent_sweeping = false;
 #ifdef VERIFY_HEAP
@@ -6566,7 +6521,7 @@ HEAP_TEST(Regress589413) {
       AlwaysAllocateScope always_allocate(isolate);
       Handle<HeapObject> ec_obj = factory->NewFixedArray(5000, TENURED);
       Page* ec_page = Page::FromAddress(ec_obj->address());
-      ec_page->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+      heap::ForceEvacuationCandidate(ec_page);
       // Make all arrays point to evacuation candidate so that
       // slots are recorded for them.
       for (size_t j = 0; j < arrays.size(); j++) {
@@ -6773,8 +6728,7 @@ TEST(Regress631969) {
   heap::SimulateFullSpace(heap->old_space());
   Handle<String> s1 = factory->NewStringFromStaticChars("123456789", TENURED);
   Handle<String> s2 = factory->NewStringFromStaticChars("01234", TENURED);
-  Page::FromAddress(s1->address())
-      ->SetFlag(MemoryChunk::FORCE_EVACUATION_CANDIDATE_FOR_TESTING);
+  heap::ForceEvacuationCandidate(Page::FromAddress(s1->address()));
 
   heap::SimulateIncrementalMarking(heap, false);
 
@@ -6963,49 +6917,6 @@ TEST(ContinuousRightTrimFixedArrayInBlackArea) {
   heap::GcAndSweep(heap, OLD_SPACE);
 }
 
-TEST(SlotFilteringAfterBlackAreas) {
-  FLAG_black_allocation = true;
-  CcTest::InitializeVM();
-  v8::HandleScope scope(CcTest::isolate());
-  Heap* heap = CcTest::heap();
-  Isolate* isolate = heap->isolate();
-  MarkCompactCollector* mark_compact_collector = heap->mark_compact_collector();
-  CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
-
-  i::MarkCompactCollector* collector = heap->mark_compact_collector();
-  i::IncrementalMarking* marking = heap->incremental_marking();
-  if (collector->sweeping_in_progress()) {
-    collector->EnsureSweepingCompleted();
-  }
-  CHECK(marking->IsMarking() || marking->IsStopped());
-  if (marking->IsStopped()) {
-    heap->StartIncrementalMarking(i::Heap::kNoGCFlags,
-                                  i::GarbageCollectionReason::kTesting);
-  }
-  CHECK(marking->IsMarking());
-  marking->StartBlackAllocationForTesting();
-
-  // Ensure that we allocate a new page, set up a bump pointer area, and
-  // perform the allocation in a black area.
-  heap::SimulateFullSpace(heap->old_space());
-  Handle<FixedArray> array = isolate->factory()->NewFixedArray(10, TENURED);
-  Page* page = Page::FromAddress(array->address());
-
-  // After allocation we empty the allocation info to limit the black area
-  // only on the allocated array.
-  heap->old_space()->EmptyAllocationInfo();
-
-  // Slots in the black area are part of the black object.
-  CHECK(mark_compact_collector->IsSlotInBlackObject(page, array->address()));
-  CHECK(mark_compact_collector->IsSlotInBlackObject(
-      page, array->address() + array->Size() - kPointerSize));
-
-  // Slots after the black area are not part of the black object and have to
-  // be filtered out.
-  CHECK(!mark_compact_collector->IsSlotInBlackObject(
-      page, array->address() + array->Size()));
-}
-
 TEST(Regress618958) {
   CcTest::InitializeVM();
   v8::HandleScope scope(CcTest::isolate());
@@ -7077,7 +6988,8 @@ TEST(RememberedSetRemoveRange) {
     return KEEP_SLOT;
   });
 
-  RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, start + kPointerSize);
+  RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start, start + kPointerSize,
+                                         SlotSet::FREE_EMPTY_BUCKETS);
   slots[start] = false;
   RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) {
     CHECK(slots[addr]);
@@ -7085,7 +6997,8 @@ TEST(RememberedSetRemoveRange) {
   });
 
   RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start + kPointerSize,
-                                         start + Page::kPageSize);
+                                         start + Page::kPageSize,
+                                         SlotSet::FREE_EMPTY_BUCKETS);
   slots[start + kPointerSize] = false;
   slots[start + Page::kPageSize - kPointerSize] = false;
   RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) {
@@ -7093,8 +7006,9 @@ TEST(RememberedSetRemoveRange) {
     return KEEP_SLOT;
   });
 
-  RememberedSet<OLD_TO_NEW>::RemoveRange(
-      chunk, start, start + Page::kPageSize + kPointerSize);
+  RememberedSet<OLD_TO_NEW>::RemoveRange(chunk, start,
+                                         start + Page::kPageSize + kPointerSize,
+                                         SlotSet::FREE_EMPTY_BUCKETS);
   slots[start + Page::kPageSize] = false;
   RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) {
     CHECK(slots[addr]);
@@ -7102,7 +7016,8 @@ TEST(RememberedSetRemoveRange) {
   });
 
   RememberedSet<OLD_TO_NEW>::RemoveRange(
-      chunk, chunk->area_end() - kPointerSize, chunk->area_end());
+      chunk, chunk->area_end() - kPointerSize, chunk->area_end(),
+      SlotSet::FREE_EMPTY_BUCKETS);
   slots[chunk->area_end() - kPointerSize] = false;
   RememberedSet<OLD_TO_NEW>::Iterate(chunk, [&slots](Address addr) {
     CHECK(slots[addr]);
@@ -7110,5 +7025,37 @@ TEST(RememberedSetRemoveRange) {
   });
 }
 
+HEAP_TEST(Regress670675) {
+  if (!FLAG_incremental_marking) return;
+  CcTest::InitializeVM();
+  v8::HandleScope scope(CcTest::isolate());
+  Heap* heap = CcTest::heap();
+  Isolate* isolate = heap->isolate();
+  i::MarkCompactCollector* collector = heap->mark_compact_collector();
+  CcTest::CollectAllGarbage(i::Heap::kFinalizeIncrementalMarkingMask);
+
+  if (collector->sweeping_in_progress()) {
+    collector->EnsureSweepingCompleted();
+  }
+  i::IncrementalMarking* marking = CcTest::heap()->incremental_marking();
+  if (marking->IsStopped()) {
+    marking->Start(i::GarbageCollectionReason::kTesting);
+  }
+  size_t array_length = Page::kPageSize / kPointerSize + 100;
+  size_t n = heap->OldGenerationSpaceAvailable() / array_length;
+  for (size_t i = 0; i < n + 40; i++) {
+    {
+      HandleScope inner_scope(isolate);
+      isolate->factory()->NewFixedArray(static_cast<int>(array_length));
+    }
+    if (marking->IsStopped()) break;
+    double deadline = heap->MonotonicallyIncreasingTimeInMs() + 1;
+    marking->AdvanceIncrementalMarking(
+        deadline, IncrementalMarking::GC_VIA_STACK_GUARD,
+        IncrementalMarking::FORCE_COMPLETION, StepOrigin::kV8);
+  }
+  DCHECK(marking->IsStopped());
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/deps/v8/test/cctest/heap/test-mark-compact.cc b/deps/v8/test/cctest/heap/test-mark-compact.cc
index d0f7f82741..0788e26108 100644
--- a/deps/v8/test/cctest/heap/test-mark-compact.cc
+++ b/deps/v8/test/cctest/heap/test-mark-compact.cc
@@ -51,13 +51,9 @@ using v8::Just;
 
 
 TEST(MarkingDeque) {
   CcTest::InitializeVM();
-  int mem_size = 20 * kPointerSize;
-  byte* mem = NewArray<byte>(20*kPointerSize);
-  Address low = reinterpret_cast<Address>(mem);
-  Address high = low + mem_size;
-  MarkingDeque s;
-  s.Initialize(low, high);
-
+  MarkingDeque s(CcTest::i_isolate()->heap());
+  s.SetUp();
+  s.StartUsing();
   Address original_address = reinterpret_cast<Address>(&s);
   Address current_address = original_address;
   while (!s.IsFull()) {
@@ -72,7 +68,9 @@ TEST(MarkingDeque) {
   }
 
   CHECK_EQ(original_address, current_address);
-  DeleteArray(mem);
+  s.StopUsing();
+  CcTest::i_isolate()->cancelable_task_manager()->CancelAndWait();
+  s.TearDown();
 }
 
 TEST(Promotion) {
@@ -415,7 +413,7 @@ static intptr_t MemoryInUse() {
   int fd = open("/proc/self/maps", O_RDONLY);
   if (fd < 0) return -1;
 
-  const int kBufSize = 10000;
+  const int kBufSize = 20000;
   char buffer[kBufSize];
   ssize_t length = read(fd, buffer, kBufSize);
   intptr_t line_start = 0;
diff --git a/deps/v8/test/cctest/heap/test-page-promotion.cc b/deps/v8/test/cctest/heap/test-page-promotion.cc
index b3ac4960a5..4673f2edcf 100644
--- a/deps/v8/test/cctest/heap/test-page-promotion.cc
+++ b/deps/v8/test/cctest/heap/test-page-promotion.cc
@@ -14,20 +14,22 @@
 // src/type-feedback-vector-inl.h
 #include "src/type-feedback-vector-inl.h"
 #include "test/cctest/cctest.h"
+#include "test/cctest/heap/heap-tester.h"
 #include "test/cctest/heap/heap-utils.h"
 
 namespace {
 
-v8::Isolate* NewIsolateForPagePromotion() {
+v8::Isolate* NewIsolateForPagePromotion(int min_semi_space_size = 8,
+                                        int max_semi_space_size = 8) {
   i::FLAG_page_promotion = true;
   i::FLAG_page_promotion_threshold = 0;  // %
-  i::FLAG_min_semi_space_size = 8;
+  i::FLAG_min_semi_space_size = min_semi_space_size;
   // We cannot optimize for size as we require a new space with more than one
   // page.
   i::FLAG_optimize_for_size = false;
   // Set max_semi_space_size because it could've been initialized by an
   // implication of optimize_for_size.
-  i::FLAG_max_semi_space_size = i::FLAG_min_semi_space_size;
+  i::FLAG_max_semi_space_size = max_semi_space_size;
   v8::Isolate::CreateParams create_params;
   create_params.array_buffer_allocator = CcTest::array_buffer_allocator();
   v8::Isolate* isolate = v8::Isolate::New(create_params);
@@ -134,5 +136,38 @@ UNINITIALIZED_TEST(PagePromotion_NewToNewJSArrayBuffer) {
   }
 }
 
+UNINITIALIZED_HEAP_TEST(Regress658718) {
+  v8::Isolate* isolate = NewIsolateForPagePromotion(4, 8);
+  Isolate* i_isolate = reinterpret_cast<Isolate*>(isolate);
+  {
+    v8::Isolate::Scope isolate_scope(isolate);
+    v8::HandleScope handle_scope(isolate);
+    v8::Context::New(isolate)->Enter();
+    Heap* heap = i_isolate->heap();
+    heap->delay_sweeper_tasks_for_testing_ = true;
+    heap->new_space()->Grow();
+    {
+      v8::HandleScope inner_handle_scope(isolate);
+      std::vector<Handle<FixedArray>> handles;
+      heap::SimulateFullSpace(heap->new_space(), &handles);
+      CHECK_GT(handles.size(), 0u);
+      // Last object in handles should definitely be on the last page which
+      // does not contain the age mark.
+      Handle<FixedArray> last_object = handles.back();
+      Page* to_be_promoted_page = Page::FromAddress(last_object->address());
+      CHECK(to_be_promoted_page->Contains(last_object->address()));
+      CHECK(heap->new_space()->ToSpaceContainsSlow(last_object->address()));
+      heap->CollectGarbage(OLD_SPACE, i::GarbageCollectionReason::kTesting);
+      CHECK(heap->new_space()->ToSpaceContainsSlow(last_object->address()));
+      CHECK(to_be_promoted_page->Contains(last_object->address()));
+    }
+    heap->CollectGarbage(NEW_SPACE, i::GarbageCollectionReason::kTesting);
+    heap->new_space()->Shrink();
+    heap->memory_allocator()->unmapper()->WaitUntilCompleted();
+    heap->mark_compact_collector()->sweeper().StartSweeperTasks();
+    heap->mark_compact_collector()->EnsureSweepingCompleted();
+  }
+}
+
 }  // namespace internal
 }  // namespace v8
diff --git a/deps/v8/test/cctest/heap/test-spaces.cc b/deps/v8/test/cctest/heap/test-spaces.cc
index 262d0c5d58..fc692e331c 100644
--- a/deps/v8/test/cctest/heap/test-spaces.cc
+++ b/deps/v8/test/cctest/heap/test-spaces.cc
@@ -471,7 +471,7 @@ TEST(LargeObjectSpace) {
   CHECK(lo->Contains(ho));
 
   while (true) {
-    intptr_t available = lo->Available();
+    size_t available = lo->Available();
     { AllocationResult allocation = lo->AllocateRaw(lo_size, NOT_EXECUTABLE);
       if (allocation.IsRetry()) break;
     }
@@ -503,9 +503,15 @@ TEST(SizeOfInitialHeap) {
   // Initial size of LO_SPACE
   size_t initial_lo_space = isolate->heap()->lo_space()->Size();
 
-  // The limit for each space for an empty isolate containing just the
-  // snapshot.
+// The limit for each space for an empty isolate containing just the
+// snapshot.
+// In PPC the page size is 64K, causing more internal fragmentation
+// hence requiring a larger limit.
+#if V8_OS_LINUX && V8_HOST_ARCH_PPC
+  const size_t kMaxInitialSizePerSpace = 3 * MB;
+#else
   const size_t kMaxInitialSizePerSpace = 2 * MB;
+#endif
 
   // Freshly initialized VM gets by with the snapshot size (which is below
   // kMaxInitialSizePerSpace per space).
@@ -530,7 +536,8 @@ TEST(SizeOfInitialHeap) {
   }
 
   // No large objects required to perform the above steps.
-  CHECK_EQ(initial_lo_space, isolate->heap()->lo_space()->Size());
+  CHECK_EQ(initial_lo_space,
+           static_cast<size_t>(isolate->heap()->lo_space()->Size()));
 }
 
 static HeapObject* AllocateUnaligned(NewSpace* space, int size) {
@@ -741,7 +748,7 @@ TEST(ShrinkPageToHighWaterMarkNoFiller) {
   CcTest::heap()->old_space()->EmptyAllocationInfo();
 
   const size_t shrinked = page->ShrinkToHighWaterMark();
-  CHECK_EQ(0, shrinked);
+  CHECK_EQ(0u, shrinked);
 }
 
 TEST(ShrinkPageToHighWaterMarkOneWordFiller) {
@@ -767,7 +774,7 @@ TEST(ShrinkPageToHighWaterMarkOneWordFiller) {
   CHECK_EQ(filler->map(), CcTest::heap()->one_pointer_filler_map());
 
   const size_t shrinked = page->ShrinkToHighWaterMark();
-  CHECK_EQ(0, shrinked);
+  CHECK_EQ(0u, shrinked);
 }
 
 TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
@@ -793,7 +800,7 @@ TEST(ShrinkPageToHighWaterMarkTwoWordFiller) {
   CHECK_EQ(filler->map(), CcTest::heap()->two_pointer_filler_map());
 
   const size_t shrinked = page->ShrinkToHighWaterMark();
-  CHECK_EQ(0, shrinked);
+  CHECK_EQ(0u, shrinked);
 }
 
 }  // namespace internal
 }  // namespace v8