author     Ben Gamari <ben@smart-cactus.org>           2019-11-30 21:55:58 -0500
committer  Marge Bot <ben+marge-bot@smart-cactus.org>  2019-12-05 16:07:48 -0500
commit     69001f54279a55bc4e5e5883c675e5ba6fe49a22 (patch)
tree       d9a5896071252e60ef1670cc87ce506677d51645
parent     8324f0b7357c428f505dccbc84bb7dde897b509c (diff)
nonmoving: Clear segment bitmaps during sweep
Previously, during the preparatory pause, we cleared the bitmaps of the segments that were about to be swept. However, this is unnecessary: the mark epoch ensures that the sweep correctly identifies non-reachable objects even if the bitmap is not cleared beforehand. We therefore defer clearing the bitmap to the sweep, which runs concurrently with mutation.
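To make the mark-epoch argument concrete, here is a minimal, self-contained C sketch of the idea (this is not the RTS code; the segment geometry and every name in it, e.g. mark_epoch, block_is_live, sweep_segment, are illustrative assumptions). Marking writes the current epoch into the bitmap rather than a plain 1, so a byte left over from an earlier cycle compares unequal to the current epoch and already reads as unmarked; the zeroing itself can then be deferred to sweep time:

/* Illustrative sketch only, not rts/sm code.  Assumption: the mark
 * bitmap holds one byte per block, and a block counts as live when
 * that byte equals the epoch of the current mark cycle. */
#include <stdbool.h>
#include <stdint.h>
#include <string.h>

#define BLOCKS_PER_SEGMENT 128          /* hypothetical segment geometry */

struct segment {
    uint8_t bitmap[BLOCKS_PER_SEGMENT]; /* one mark byte per block */
};

static uint8_t mark_epoch = 1;          /* current mark epoch */

/* Starting a new cycle flips the epoch instead of zeroing every bitmap
 * up front, which is what lets the preparatory pause skip the clear. */
static void new_mark_cycle(void)
{
    mark_epoch = (mark_epoch == 1) ? 2 : 1;
}

/* Marking records the current epoch, not just "1". */
static void mark_block(struct segment *seg, unsigned i)
{
    seg->bitmap[i] = mark_epoch;
}

/* Sweep treats a block as live only if its mark byte matches the
 * current epoch, so stale bytes from earlier cycles are harmless. */
static bool block_is_live(const struct segment *seg, unsigned i)
{
    return seg->bitmap[i] == mark_epoch;
}

/* The bitmap clear can therefore happen here, during the concurrent
 * sweep, rather than in the stop-the-world preparatory pause. */
static void sweep_segment(struct segment *seg)
{
    bool any_live = false;
    for (unsigned i = 0; i < BLOCKS_PER_SEGMENT; i++)
        any_live |= block_is_live(seg, i);
    if (!any_live)
        memset(seg->bitmap, 0, sizeof seg->bitmap);  /* deferred clear */
}

In the patch below, the deferred clear is the nonmovingClearBitmap(seg) call added to the fully-dead branch of nonmovingSweepSegment(), while the prepare-mark loop stops clearing (and prefetching) the bitmaps entirely.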
-rw-r--r--  rts/sm/NonMoving.c       9
-rw-r--r--  rts/sm/NonMoving.h       1
-rw-r--r--  rts/sm/NonMovingSweep.c  1
3 files changed, 4 insertions, 7 deletions
diff --git a/rts/sm/NonMoving.c b/rts/sm/NonMoving.c
index 5cb7c8b7e5..0bd96d1800 100644
--- a/rts/sm/NonMoving.c
+++ b/rts/sm/NonMoving.c
@@ -374,7 +374,6 @@ memcount nonmoving_live_words = 0;
#if defined(THREADED_RTS)
static void* nonmovingConcurrentMark(void *mark_queue);
#endif
-static void nonmovingClearBitmap(struct NonmovingSegment *seg);
static void nonmovingMark_(MarkQueue *mark_queue, StgWeak **dead_weaks, StgTSO **resurrected_threads);
static void nonmovingInitSegment(struct NonmovingSegment *seg, uint8_t log_block_size)
@@ -681,7 +680,7 @@ void nonmovingAddCapabilities(uint32_t new_n_caps)
nonmovingHeap.n_caps = new_n_caps;
}
-static inline void nonmovingClearBitmap(struct NonmovingSegment *seg)
+void nonmovingClearBitmap(struct NonmovingSegment *seg)
{
unsigned int n = nonmovingSegmentBlockCount(seg);
memset(seg->bitmap, 0, n);
@@ -715,13 +714,9 @@ static void nonmovingPrepareMark(void)
if (filled) {
struct NonmovingSegment *seg = filled;
while (true) {
- n_filled++;
- prefetchForRead(seg->link);
- // Clear bitmap
- prefetchForWrite(seg->link->bitmap);
- nonmovingClearBitmap(seg);
// Set snapshot
nonmovingSegmentInfo(seg)->next_free_snap = seg->next_free;
+ n_filled++;
if (seg->link)
seg = seg->link;
else
diff --git a/rts/sm/NonMoving.h b/rts/sm/NonMoving.h
index b3d4e14065..36ecd8b0af 100644
--- a/rts/sm/NonMoving.h
+++ b/rts/sm/NonMoving.h
@@ -130,6 +130,7 @@ void nonmovingCollect(StgWeak **dead_weaks,
void *nonmovingAllocate(Capability *cap, StgWord sz);
void nonmovingAddCapabilities(uint32_t new_n_caps);
void nonmovingPushFreeSegment(struct NonmovingSegment *seg);
+void nonmovingClearBitmap(struct NonmovingSegment *seg);
INLINE_HEADER struct NonmovingSegmentInfo *nonmovingSegmentInfo(struct NonmovingSegment *seg) {
diff --git a/rts/sm/NonMovingSweep.c b/rts/sm/NonMovingSweep.c
index b390959612..925d7c2068 100644
--- a/rts/sm/NonMovingSweep.c
+++ b/rts/sm/NonMovingSweep.c
@@ -65,6 +65,7 @@ nonmovingSweepSegment(struct NonmovingSegment *seg)
} else {
ASSERT(seg->next_free == 0);
ASSERT(nonmovingSegmentInfo(seg)->next_free_snap == 0);
+ nonmovingClearBitmap(seg);
return SEGMENT_FREE;
}
}