author    Simon Marlow <marlowsd@gmail.com>    2011-07-20 15:29:54 +0100
committer Simon Marlow <marlowsd@gmail.com>    2011-07-20 16:23:29 +0100
commit    e903a09466c5a700baea8a34511cbdc2576b136e (patch)
tree      f3564334039591b62cc126960a2c1c6dd4c513f0 /rts
parent    2c2a434f411029977992207a19245a24f942ff78 (diff)
download  haskell-e903a09466c5a700baea8a34511cbdc2576b136e.tar.gz
Move the call to heapCensus() into GarbageCollect(), just before
calling resurrectThreads() (fixes #5314). This avoids a lot of problems, because resurrectThreads() may overwrite some closures in the heap, leaving slop behind. This class of bug has shown up in several instances before; this fix avoids them all in one go.
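
For readers following along, here is a minimal, self-contained sketch of the ordering this patch establishes inside GarbageCollect(). It is not the real RTS code: the functions below are stand-in stubs, and the point is only that the census now runs after the post-GC sanity check and before resurrectThreads() can overwrite closures and leave slop behind.

/* Hypothetical sketch only; these stubs stand in for the real RTS entry points. */
#include <stdbool.h>
#include <stdio.h>

static void checkSanity(void)      { printf("sanity check: heap contains no slop yet\n"); }
static void heapCensus(void)       { printf("heap census: must see a slop-free heap\n"); }
static void resurrectThreads(void) { printf("resurrect threads: may leave slop behind\n"); }

/* Simplified stand-in for GarbageCollect(force_major_gc, do_heap_census, ...). */
static void garbage_collect(bool do_heap_census)
{
    /* ... collection work ... */
    checkSanity();
    if (do_heap_census) {
        heapCensus();          /* moved here, before resurrectThreads() */
    }
    resurrectThreads();        /* after this point the heap may contain slop */
}

int main(void)
{
    garbage_collect(true);
    return 0;
}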
Diffstat (limited to 'rts')
-rw-r--r--  rts/Schedule.c   9
-rw-r--r--  rts/sm/GC.c     10
-rw-r--r--  rts/sm/GC.h      4
3 files changed, 17 insertions, 6 deletions
diff --git a/rts/Schedule.c b/rts/Schedule.c
index 2a2cc22a66..834e3eb420 100644
--- a/rts/Schedule.c
+++ b/rts/Schedule.c
@@ -1443,9 +1443,9 @@ delete_threads_and_gc:
// reset waiting_for_gc *before* GC, so that when the GC threads
// emerge they don't immediately re-enter the GC.
waiting_for_gc = 0;
- GarbageCollect(force_major || heap_census, gc_type, cap);
+ GarbageCollect(force_major || heap_census, heap_census, gc_type, cap);
#else
- GarbageCollect(force_major || heap_census, 0, cap);
+ GarbageCollect(force_major || heap_census, heap_census, 0, cap);
#endif
traceEventGcEnd(cap);
@@ -1473,10 +1473,9 @@ delete_threads_and_gc:
ASSERT(checkSparkCountInvariant());
#endif
+ // The heap census itself is done during GarbageCollect().
if (heap_census) {
- debugTrace(DEBUG_sched, "performing heap census");
- heapCensus();
- performHeapProfile = rtsFalse;
+ performHeapProfile = rtsFalse;
}
#if defined(THREADED_RTS)
diff --git a/rts/sm/GC.c b/rts/sm/GC.c
index 9f69a4c65a..396992ca01 100644
--- a/rts/sm/GC.c
+++ b/rts/sm/GC.c
@@ -171,6 +171,7 @@ StgPtr mark_sp; // pointer to the next unallocated mark stack entry
void
GarbageCollect (rtsBool force_major_gc,
+ rtsBool do_heap_census,
nat gc_type USED_IF_THREADS,
Capability *cap)
{
@@ -661,6 +662,15 @@ GarbageCollect (rtsBool force_major_gc,
// fill slop.
IF_DEBUG(sanity, checkSanity(rtsTrue /* after GC */, major_gc));
+ // If a heap census is due, we need to do it before
+ // resurrectThreads(), for the same reason as checkSanity above:
+ // resurrectThreads() will overwrite some closures and leave slop
+ // behind.
+ if (do_heap_census) {
+ debugTrace(DEBUG_sched, "performing heap census");
+ heapCensus();
+ }
+
// send exceptions to any threads which were about to die
RELEASE_SM_LOCK;
resurrectThreads(resurrected_threads);
diff --git a/rts/sm/GC.h b/rts/sm/GC.h
index 38fc87ca44..eb1802338b 100644
--- a/rts/sm/GC.h
+++ b/rts/sm/GC.h
@@ -16,7 +16,9 @@
#include "BeginPrivate.h"
-void GarbageCollect(rtsBool force_major_gc, nat gc_type, Capability *cap);
+void GarbageCollect (rtsBool force_major_gc,
+ rtsBool do_heap_census,
+ nat gc_type, Capability *cap);
typedef void (*evac_fn)(void *user, StgClosure **root);