Diffstat (limited to 'deps/v8/test/cctest/test-heap.cc')
-rw-r--r--  deps/v8/test/cctest/test-heap.cc | 493
1 file changed, 52 insertions(+), 441 deletions(-)
diff --git a/deps/v8/test/cctest/test-heap.cc b/deps/v8/test/cctest/test-heap.cc
index 533a1c3ef1..6927c43b9a 100644
--- a/deps/v8/test/cctest/test-heap.cc
+++ b/deps/v8/test/cctest/test-heap.cc
@@ -23,21 +23,6 @@ static void InitializeVM() {
}
-// Go through all incremental marking steps in one swoop.
-static void SimulateIncrementalMarking() {
- IncrementalMarking* marking = HEAP->incremental_marking();
- CHECK(marking->IsMarking() || marking->IsStopped());
- if (marking->IsStopped()) {
- marking->Start();
- }
- CHECK(marking->IsMarking());
- while (!marking->IsComplete()) {
- marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
- }
- CHECK(marking->IsComplete());
-}
-
-
static void CheckMap(Map* map, int type, int instance_size) {
CHECK(map->IsHeapObject());
#ifdef DEBUG
@@ -415,10 +400,9 @@ TEST(WeakGlobalHandlesMark) {
h2 = global_handles->Create(*u);
}
- // Make sure the objects are promoted.
HEAP->CollectGarbage(OLD_POINTER_SPACE);
HEAP->CollectGarbage(NEW_SPACE);
- CHECK(!HEAP->InNewSpace(*h1) && !HEAP->InNewSpace(*h2));
+ // Make sure the object is promoted.
global_handles->MakeWeak(h2.location(),
reinterpret_cast<void*>(1234),
@@ -426,8 +410,7 @@ TEST(WeakGlobalHandlesMark) {
CHECK(!GlobalHandles::IsNearDeath(h1.location()));
CHECK(!GlobalHandles::IsNearDeath(h2.location()));
- // Incremental marking potentially marked handles before they turned weak.
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectGarbage(OLD_POINTER_SPACE);
CHECK((*h1)->IsString());
@@ -959,9 +942,9 @@ TEST(Regression39128) {
TEST(TestCodeFlushing) {
+ i::FLAG_allow_natives_syntax = true;
// If we do not flush code this test is invalid.
if (!FLAG_flush_code) return;
- i::FLAG_allow_natives_syntax = true;
InitializeVM();
v8::HandleScope scope;
const char* source = "function foo() {"
@@ -984,217 +967,26 @@ TEST(TestCodeFlushing) {
Handle<JSFunction> function(JSFunction::cast(func_value));
CHECK(function->shared()->is_compiled());
- // The code will survive at least two GCs.
+ // TODO(1609) Currently incremental marker does not support code flushing.
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- CHECK(function->shared()->is_compiled());
-
- // Simulate several GCs that use full marking.
- const int kAgingThreshold = 6;
- for (int i = 0; i < kAgingThreshold; i++) {
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- }
- // foo should no longer be in the compilation cache
- CHECK(!function->shared()->is_compiled() || function->IsOptimized());
- CHECK(!function->is_compiled() || function->IsOptimized());
- // Call foo to get it recompiled.
- CompileRun("foo()");
CHECK(function->shared()->is_compiled());
- CHECK(function->is_compiled());
-}
-
-TEST(TestCodeFlushingIncremental) {
- // If we do not flush code this test is invalid.
- if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
- i::FLAG_allow_natives_syntax = true;
- InitializeVM();
- v8::HandleScope scope;
- const char* source = "function foo() {"
- " var x = 42;"
- " var y = 42;"
- " var z = x + y;"
- "};"
- "foo()";
- Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
-
- // This compile will add the code to the compilation cache.
- { v8::HandleScope scope;
- CompileRun(source);
- }
-
- // Check function is compiled.
- Object* func_value = Isolate::Current()->context()->global_object()->
- GetProperty(*foo_name)->ToObjectChecked();
- CHECK(func_value->IsJSFunction());
- Handle<JSFunction> function(JSFunction::cast(func_value));
- CHECK(function->shared()->is_compiled());
-
- // The code will survive at least two GCs.
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- CHECK(function->shared()->is_compiled());
-
- // Simulate several GCs that use incremental marking.
- const int kAgingThreshold = 6;
- for (int i = 0; i < kAgingThreshold; i++) {
- SimulateIncrementalMarking();
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- }
- CHECK(!function->shared()->is_compiled() || function->IsOptimized());
- CHECK(!function->is_compiled() || function->IsOptimized());
-
- // This compile will compile the function again.
- { v8::HandleScope scope;
- CompileRun("foo();");
- }
-
- // Simulate several GCs that use incremental marking but make sure
- // the loop breaks once the function is enqueued as a candidate.
- for (int i = 0; i < kAgingThreshold; i++) {
- SimulateIncrementalMarking();
- if (!function->next_function_link()->IsUndefined()) break;
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- }
-
- // Force optimization while incremental marking is active and while
- // the function is enqueued as a candidate.
- { v8::HandleScope scope;
- CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
- }
-
- // Simulate one final GC to make sure the candidate queue is sane.
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK(function->shared()->is_compiled() || !function->IsOptimized());
- CHECK(function->is_compiled() || !function->IsOptimized());
-}
-
-
-TEST(TestCodeFlushingIncrementalScavenge) {
- // If we do not flush code this test is invalid.
- if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
- i::FLAG_allow_natives_syntax = true;
- InitializeVM();
- v8::HandleScope scope;
- const char* source = "var foo = function() {"
- " var x = 42;"
- " var y = 42;"
- " var z = x + y;"
- "};"
- "foo();"
- "var bar = function() {"
- " var x = 23;"
- "};"
- "bar();";
- Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
- Handle<String> bar_name = FACTORY->LookupAsciiSymbol("bar");
-
- // Perform one initial GC to enable code flushing.
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- // This compile will add the code to the compilation cache.
- { v8::HandleScope scope;
- CompileRun(source);
- }
-
- // Check functions are compiled.
- Object* func_value = Isolate::Current()->context()->global_object()->
- GetProperty(*foo_name)->ToObjectChecked();
- CHECK(func_value->IsJSFunction());
- Handle<JSFunction> function(JSFunction::cast(func_value));
- CHECK(function->shared()->is_compiled());
- Object* func_value2 = Isolate::Current()->context()->global_object()->
- GetProperty(*bar_name)->ToObjectChecked();
- CHECK(func_value2->IsJSFunction());
- Handle<JSFunction> function2(JSFunction::cast(func_value2));
- CHECK(function2->shared()->is_compiled());
-
- // Clear references to functions so that one of them can die.
- { v8::HandleScope scope;
- CompileRun("foo = 0; bar = 0;");
- }
-
- // Bump the code age so that flushing is triggered while the function
- // object is still located in new-space.
- const int kAgingThreshold = 6;
- for (int i = 0; i < kAgingThreshold; i++) {
- function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
- function2->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
- }
-
- // Simulate incremental marking so that the functions are enqueued as
- // code flushing candidates. Then kill one of the functions. Finally
- // perform a scavenge while incremental marking is still running.
- SimulateIncrementalMarking();
- *function2.location() = NULL;
- HEAP->CollectGarbage(NEW_SPACE, "test scavenge while marking");
-
- // Simulate one final GC to make sure the candidate queue is sane.
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
+ // foo should no longer be in the compilation cache
CHECK(!function->shared()->is_compiled() || function->IsOptimized());
CHECK(!function->is_compiled() || function->IsOptimized());
-}
-
-
-TEST(TestCodeFlushingIncrementalAbort) {
- // If we do not flush code this test is invalid.
- if (!FLAG_flush_code || !FLAG_flush_code_incrementally) return;
- i::FLAG_allow_natives_syntax = true;
- InitializeVM();
- v8::HandleScope scope;
- const char* source = "function foo() {"
- " var x = 42;"
- " var y = 42;"
- " var z = x + y;"
- "};"
- "foo()";
- Handle<String> foo_name = FACTORY->LookupAsciiSymbol("foo");
-
- // This compile will add the code to the compilation cache.
- { v8::HandleScope scope;
- CompileRun(source);
- }
-
- // Check function is compiled.
- Object* func_value = Isolate::Current()->context()->global_object()->
- GetProperty(*foo_name)->ToObjectChecked();
- CHECK(func_value->IsJSFunction());
- Handle<JSFunction> function(JSFunction::cast(func_value));
- CHECK(function->shared()->is_compiled());
-
- // The code will survive at least two GCs.
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
+ // Call foo to get it recompiled.
+ CompileRun("foo()");
CHECK(function->shared()->is_compiled());
-
- // Bump the code age so that flushing is triggered.
- const int kAgingThreshold = 6;
- for (int i = 0; i < kAgingThreshold; i++) {
- function->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
- }
-
- // Simulate incremental marking so that the function is enqueued as
- // code flushing candidate.
- SimulateIncrementalMarking();
-
- // Enable the debugger and add a breakpoint while incremental marking
- // is running so that incremental marking aborts and code flushing is
- // disabled.
- int position = 0;
- Handle<Object> breakpoint_object(Smi::FromInt(0));
- ISOLATE->debug()->SetBreakPoint(function, breakpoint_object, &position);
- ISOLATE->debug()->ClearAllBreakPoints();
-
- // Force optimization now that code flushing is disabled.
- { v8::HandleScope scope;
- CompileRun("%OptimizeFunctionOnNextCall(foo); foo();");
- }
-
- // Simulate one final GC to make sure the candidate queue is sane.
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK(function->shared()->is_compiled() || !function->IsOptimized());
- CHECK(function->is_compiled() || !function->IsOptimized());
+ CHECK(function->is_compiled());
}
@@ -1227,10 +1019,6 @@ static int CountOptimizedUserFunctions(v8::Handle<v8::Context> context) {
TEST(TestInternalWeakLists) {
v8::V8::Initialize();
- // Some flags turn Scavenge collections into Mark-sweep collections
- // and hence are incompatible with this test case.
- if (FLAG_gc_global || FLAG_stress_compaction) return;
-
static const int kNumTestContexts = 10;
v8::HandleScope scope;
@@ -1743,7 +1531,6 @@ TEST(InstanceOfStubWriteBarrier) {
InitializeVM();
if (!i::V8::UseCrankshaft()) return;
- if (i::FLAG_force_marking_deque_overflows) return;
v8::HandleScope outer_scope;
{
@@ -1829,11 +1616,10 @@ TEST(PrototypeTransitionClearing) {
// Make sure next prototype is placed on an old-space evacuation candidate.
Handle<JSObject> prototype;
PagedSpace* space = HEAP->old_pointer_space();
- {
- AlwaysAllocateScope always_allocate;
- SimulateFullSpace(space);
+ do {
prototype = FACTORY->NewJSArray(32 * KB, FAST_HOLEY_ELEMENTS, TENURED);
- }
+ } while (space->FirstPage() == space->LastPage() ||
+ !space->LastPage()->Contains(prototype->address()));
// Add a prototype on an evacuation candidate and verify that transition
// clearing correctly records slots in prototype transition array.
@@ -1952,10 +1738,9 @@ TEST(OptimizedAllocationAlwaysInNewSpace) {
i::FLAG_allow_natives_syntax = true;
InitializeVM();
if (!i::V8::UseCrankshaft() || i::FLAG_always_opt) return;
- if (i::FLAG_gc_global || i::FLAG_stress_compaction) return;
v8::HandleScope scope;
- SimulateFullSpace(HEAP->new_space());
+ FillUpNewSpace(HEAP->new_space());
AlwaysAllocateScope always_allocate;
v8::Local<v8::Value> res = CompileRun(
"function c(x) {"
@@ -1982,6 +1767,19 @@ static int CountMapTransitions(Map* map) {
}
+// Go through all incremental marking steps in one swoop.
+static void SimulateIncrementalMarking() {
+ IncrementalMarking* marking = HEAP->incremental_marking();
+ CHECK(marking->IsStopped());
+ marking->Start();
+ CHECK(marking->IsMarking());
+ while (!marking->IsComplete()) {
+ marking->Step(MB, IncrementalMarking::NO_GC_VIA_STACK_GUARD);
+ }
+ CHECK(marking->IsComplete());
+}
+
+
// Test that map transitions are cleared and maps are collected with
// incremental marking as well.
TEST(Regress1465) {
@@ -2106,6 +1904,10 @@ TEST(Regress2143b) {
}
+// Implemented in the test-alloc.cc test suite.
+void SimulateFullSpace(PagedSpace* space);
+
+
TEST(ReleaseOverReservedPages) {
i::FLAG_trace_gc = true;
// The optimizer can allocate stuff, messing up the test.
@@ -2128,7 +1930,7 @@ TEST(ReleaseOverReservedPages) {
// Triggering one GC will cause a lot of garbage to be discovered but
// evenly spread across all allocated pages.
HEAP->CollectAllGarbage(Heap::kNoGCFlags, "triggered for preparation");
- CHECK_GE(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
+ CHECK_EQ(number_of_test_pages + 1, old_pointer_space->CountTotalPages());
// Triggering subsequent GCs should cause at least half of the pages
// to be released to the OS after at most two cycles.
@@ -2159,22 +1961,27 @@ TEST(Regress2237) {
v8::HandleScope inner_scope;
const char* c = "This text is long enough to trigger sliced strings.";
Handle<String> s = FACTORY->NewStringFromAscii(CStrVector(c));
- CHECK(s->IsSeqOneByteString());
+ CHECK(s->IsSeqAsciiString());
CHECK(HEAP->InNewSpace(*s));
// Generate a sliced string that is based on the above parent and
// lives in old-space.
- SimulateFullSpace(HEAP->new_space());
+ FillUpNewSpace(HEAP->new_space());
AlwaysAllocateScope always_allocate;
- Handle<String> t = FACTORY->NewProperSubString(s, 5, 35);
+ Handle<String> t;
+ // TODO(mstarzinger): Unfortunately FillUpNewSpace() still leaves
+ // some slack, so we need to allocate a few sliced strings.
+ for (int i = 0; i < 16; i++) {
+ t = FACTORY->NewProperSubString(s, 5, 35);
+ }
CHECK(t->IsSlicedString());
CHECK(!HEAP->InNewSpace(*t));
*slice.location() = *t.location();
}
- CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
+ CHECK(SlicedString::cast(*slice)->parent()->IsSeqAsciiString());
HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK(SlicedString::cast(*slice)->parent()->IsSeqOneByteString());
+ CHECK(SlicedString::cast(*slice)->parent()->IsSeqAsciiString());
}
@@ -2414,13 +2221,19 @@ class SourceResource: public v8::String::ExternalAsciiStringResource {
};
-void ReleaseStackTraceDataTest(const char* source) {
+TEST(ReleaseStackTraceData) {
// Test that the data retained by the Error.stack accessor is released
// after the first time the accessor is fired. We use external string
// to check whether the data is being released since the external string
// resource's callback is fired when the external string is GC'ed.
InitializeVM();
v8::HandleScope scope;
+ static const char* source = "var error = 1; "
+ "try { "
+ " throw new Error(); "
+ "} catch (e) { "
+ " error = e; "
+ "} ";
SourceResource* resource = new SourceResource(i::StrDup(source));
{
v8::HandleScope scope;
@@ -2432,29 +2245,12 @@ void ReleaseStackTraceDataTest(const char* source) {
// External source is being retained by the stack trace.
CHECK(!resource->IsDisposed());
- CompileRun("error.stack;");
+ CompileRun("error.stack; error.stack;");
HEAP->CollectAllAvailableGarbage();
// External source has been released.
CHECK(resource->IsDisposed());
- delete resource;
-}
-
-TEST(ReleaseStackTraceData) {
- static const char* source1 = "var error = null; "
- /* Normal Error */ "try { "
- " throw new Error(); "
- "} catch (e) { "
- " error = e; "
- "} ";
- static const char* source2 = "var error = null; "
- /* Stack overflow */ "try { "
- " (function f() { f(); })(); "
- "} catch (e) { "
- " error = e; "
- "} ";
- ReleaseStackTraceDataTest(source1);
- ReleaseStackTraceDataTest(source2);
+ delete resource;
}
@@ -2515,188 +2311,3 @@ TEST(Regression144230) {
USE(global->SetProperty(*name, *call_function, NONE, kNonStrictMode));
CompileRun("call();");
}
-
-
-TEST(Regress159140) {
- i::FLAG_allow_natives_syntax = true;
- i::FLAG_flush_code_incrementally = true;
- InitializeVM();
- v8::HandleScope scope;
-
- // Perform one initial GC to enable code flushing.
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-
- // Prepare several closures that are all eligible for code flushing
- // because all reachable ones are not optimized. Make sure that the
- // optimized code object is directly reachable through a handle so
- // that it is marked black during incremental marking.
- Handle<Code> code;
- {
- HandleScope inner_scope;
- CompileRun("function h(x) {}"
- "function mkClosure() {"
- " return function(x) { return x + 1; };"
- "}"
- "var f = mkClosure();"
- "var g = mkClosure();"
- "f(1); f(2);"
- "g(1); g(2);"
- "h(1); h(2);"
- "%OptimizeFunctionOnNextCall(f); f(3);"
- "%OptimizeFunctionOnNextCall(h); h(3);");
-
- Handle<JSFunction> f =
- v8::Utils::OpenHandle(
- *v8::Handle<v8::Function>::Cast(
- v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
- CHECK(f->is_compiled());
- CompileRun("f = null;");
-
- Handle<JSFunction> g =
- v8::Utils::OpenHandle(
- *v8::Handle<v8::Function>::Cast(
- v8::Context::GetCurrent()->Global()->Get(v8_str("g"))));
- CHECK(g->is_compiled());
- const int kAgingThreshold = 6;
- for (int i = 0; i < kAgingThreshold; i++) {
- g->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
- }
-
- code = inner_scope.CloseAndEscape(Handle<Code>(f->code()));
- }
-
- // Simulate incremental marking so that the functions are enqueued as
- // code flushing candidates. Then optimize one function. Finally
- // finish the GC to complete code flushing.
- SimulateIncrementalMarking();
- CompileRun("%OptimizeFunctionOnNextCall(g); g(3);");
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
-
- // Unoptimized code is missing and the deoptimizer will go ballistic.
- CompileRun("g('bozo');");
-}
-
-
-TEST(Regress169209) {
- i::FLAG_allow_natives_syntax = true;
- i::FLAG_flush_code_incrementally = true;
- InitializeVM();
- v8::HandleScope scope;
-
- // Perform one initial GC to enable code flushing.
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-
- // Prepare a shared function info eligible for code flushing for which
- // the unoptimized code will be replaced during optimization.
- Handle<SharedFunctionInfo> shared1;
- {
- HandleScope inner_scope;
- CompileRun("function f() { return 'foobar'; }"
- "function g(x) { if (x) f(); }"
- "f();"
- "g(false);"
- "g(false);");
-
- Handle<JSFunction> f =
- v8::Utils::OpenHandle(
- *v8::Handle<v8::Function>::Cast(
- v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
- CHECK(f->is_compiled());
- const int kAgingThreshold = 6;
- for (int i = 0; i < kAgingThreshold; i++) {
- f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
- }
-
- shared1 = inner_scope.CloseAndEscape(handle(f->shared(), ISOLATE));
- }
-
- // Prepare a shared function info eligible for code flushing that will
- // represent the dangling tail of the candidate list.
- Handle<SharedFunctionInfo> shared2;
- {
- HandleScope inner_scope;
- CompileRun("function flushMe() { return 0; }"
- "flushMe(1);");
-
- Handle<JSFunction> f =
- v8::Utils::OpenHandle(
- *v8::Handle<v8::Function>::Cast(
- v8::Context::GetCurrent()->Global()->Get(v8_str("flushMe"))));
- CHECK(f->is_compiled());
- const int kAgingThreshold = 6;
- for (int i = 0; i < kAgingThreshold; i++) {
- f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
- }
-
- shared2 = inner_scope.CloseAndEscape(handle(f->shared(), ISOLATE));
- }
-
- // Simulate incremental marking and collect code flushing candidates.
- SimulateIncrementalMarking();
- CHECK(shared1->code()->gc_metadata() != NULL);
-
- // Optimize function and make sure the unoptimized code is replaced.
-#ifdef DEBUG
- FLAG_stop_at = "f";
-#endif
- CompileRun("%OptimizeFunctionOnNextCall(g);"
- "g(false);");
-
- // Finish garbage collection cycle.
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- CHECK(shared1->code()->gc_metadata() == NULL);
-}
-
-
-TEST(Regress168801) {
- i::FLAG_always_compact = true;
- i::FLAG_cache_optimized_code = false;
- i::FLAG_allow_natives_syntax = true;
- i::FLAG_flush_code_incrementally = true;
- InitializeVM();
- v8::HandleScope scope;
-
- // Perform one initial GC to enable code flushing.
- HEAP->CollectAllGarbage(Heap::kAbortIncrementalMarkingMask);
-
- // Ensure the code ends up on an evacuation candidate.
- SimulateFullSpace(HEAP->code_space());
-
- // Prepare an unoptimized function that is eligible for code flushing.
- Handle<JSFunction> function;
- {
- HandleScope inner_scope;
- CompileRun("function mkClosure() {"
- " return function(x) { return x + 1; };"
- "}"
- "var f = mkClosure();"
- "f(1); f(2);");
-
- Handle<JSFunction> f =
- v8::Utils::OpenHandle(
- *v8::Handle<v8::Function>::Cast(
- v8::Context::GetCurrent()->Global()->Get(v8_str("f"))));
- CHECK(f->is_compiled());
- const int kAgingThreshold = 6;
- for (int i = 0; i < kAgingThreshold; i++) {
- f->shared()->code()->MakeOlder(static_cast<MarkingParity>(i % 2));
- }
-
- function = inner_scope.CloseAndEscape(handle(*f, ISOLATE));
- }
-
- // Simulate incremental marking so that unoptimized function is enqueued as a
- // candidate for code flushing. The shared function info however will not be
- // explicitly enqueued.
- SimulateIncrementalMarking();
-
- // Now optimize the function so that it is taken off the candidate list.
- {
- HandleScope inner_scope;
- CompileRun("%OptimizeFunctionOnNextCall(f); f(3);");
- }
-
- // This cycle will bust the heap and subsequent cycles will go ballistic.
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
- HEAP->CollectAllGarbage(Heap::kNoGCFlags);
-}