author     Ömer Sinan Ağacan <omer@well-typed.com>     2019-02-05 12:45:17 -0500
committer  Marge Bot <ben+marge-bot@smart-cactus.org>  2019-03-06 15:53:32 -0500
commit     5aab1d9ca927a058135ca9a08c10ea3474cbe251 (patch)
tree       d958e4ff355b1ca59b23ceee3dc76d2f5cf16d8b /rts
parent     2ff77b9894eecf51fa619ed2266ca196e296cd1e (diff)
rts: Unglobalize dead_weak_ptr_list and resurrected_threads
In the concurrent nonmoving collector we will need the ability to call `traverseWeakPtrList` concurrently with minor generation collections, and this global state stands in the way of that. Refactoring it away is straightforward, however, since these lists only persist for the duration of a single GC.
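The refactoring pattern is the same in every hunk below: a list that used to be a file-scope global is now allocated by the caller (`GarbageCollect`) and threaded down by pointer. The following standalone sketch is purely illustrative, not code from this patch; `Node`, `collect_dead`, and `run_collection` are simplified stand-ins for `StgWeak`/`StgTSO` and the real collector entry points.

/* Hypothetical sketch of unglobalizing per-GC state: the dead list lives
 * on the caller's stack and is passed down by pointer, so two collections
 * can run without racing on a shared global. */
#include <stdio.h>
#include <stddef.h>

typedef struct Node { int payload; struct Node *link; } Node;

/* Before: a file-scope "static Node *dead_list;" was pushed onto here.
 * After: the caller owns the list and hands us a pointer to it. */
static void collect_dead(Node *candidates, size_t n, Node **dead_list)
{
    for (size_t i = 0; i < n; i++) {
        if (candidates[i].payload < 0) {       /* treat negative as "dead" */
            candidates[i].link = *dead_list;   /* push onto caller's list  */
            *dead_list = &candidates[i];
        }
    }
}

static void run_collection(Node *candidates, size_t n)
{
    Node *dead_list = NULL;                    /* persists only for this GC */
    collect_dead(candidates, n, &dead_list);
    for (Node *w = dead_list; w != NULL; w = w->link)
        printf("dead: %d\n", w->payload);
}

int main(void)
{
    Node heap[4] = { { 1, NULL }, { -2, NULL }, { 3, NULL }, { -4, NULL } };
    run_collection(heap, 4);
    return 0;
}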
Diffstat (limited to 'rts')
 -rw-r--r--  rts/sm/Compact.c  |  2
 -rw-r--r--  rts/sm/Compact.h  |  4
 -rw-r--r--  rts/sm/GC.c       |  9
 -rw-r--r--  rts/sm/MarkWeak.c | 30
 -rw-r--r--  rts/sm/MarkWeak.h |  2
 5 files changed, 23 insertions, 24 deletions
diff --git a/rts/sm/Compact.c b/rts/sm/Compact.c
index 004e042069..bd45489da1 100644
--- a/rts/sm/Compact.c
+++ b/rts/sm/Compact.c
@@ -940,7 +940,7 @@ update_bkwd_compact( generation *gen )
}
void
-compact(StgClosure *static_objects)
+compact(StgClosure *static_objects, StgWeak *dead_weak_ptr_list, StgTSO *resurrected_threads)
{
W_ n, g, blocks;
generation *gen;
diff --git a/rts/sm/Compact.h b/rts/sm/Compact.h
index 63abfc7180..ba39c30acd 100644
--- a/rts/sm/Compact.h
+++ b/rts/sm/Compact.h
@@ -45,6 +45,8 @@ is_marked(StgPtr p, bdescr *bd)
return (*bitmap_word & bit_mask);
}
-void compact (StgClosure *static_objects);
+void compact (StgClosure *static_objects,
+ StgWeak *dead_weak_ptr_list,
+ StgTSO *resurrected_threads);
#include "EndPrivate.h"
diff --git a/rts/sm/GC.c b/rts/sm/GC.c
index 70d6d8efe5..b3cc4482aa 100644
--- a/rts/sm/GC.c
+++ b/rts/sm/GC.c
@@ -416,15 +416,20 @@ GarbageCollect (uint32_t collect_gen,
* Repeatedly scavenge all the areas we know about until there's no
* more scavenging to be done.
*/
+
+ StgWeak *dead_weak_ptr_list = NULL;
+ StgTSO *resurrected_threads = END_TSO_QUEUE;
+
for (;;)
{
scavenge_until_all_done();
+
// The other threads are now stopped. We might recurse back to
// here, but from now on this is the only thread.
// must be last... invariant is that everything is fully
// scavenged at this point.
- if (traverseWeakPtrList()) { // returns true if evaced something
+ if (traverseWeakPtrList(&dead_weak_ptr_list, &resurrected_threads)) { // returns true if evaced something
inc_running();
continue;
}
@@ -468,7 +473,7 @@ GarbageCollect (uint32_t collect_gen,
// Finally: compact or sweep the oldest generation.
if (major_gc && oldest_gen->mark) {
if (oldest_gen->compact)
- compact(gct->scavenged_static_objects);
+ compact(gct->scavenged_static_objects, dead_weak_ptr_list, resurrected_threads);
else
sweep(oldest_gen);
}
diff --git a/rts/sm/MarkWeak.c b/rts/sm/MarkWeak.c
index d7b8fe696f..3aa28878b7 100644
--- a/rts/sm/MarkWeak.c
+++ b/rts/sm/MarkWeak.c
@@ -77,15 +77,9 @@
typedef enum { WeakPtrs, WeakThreads, WeakDone } WeakStage;
static WeakStage weak_stage;
-// List of weak pointers whose key is dead
-StgWeak *dead_weak_ptr_list;
-
-// List of threads found to be unreachable
-StgTSO *resurrected_threads;
-
-static void collectDeadWeakPtrs (generation *gen);
+static void collectDeadWeakPtrs (generation *gen, StgWeak **dead_weak_ptr_list);
static bool tidyWeakList (generation *gen);
-static bool resurrectUnreachableThreads (generation *gen);
+static bool resurrectUnreachableThreads (generation *gen, StgTSO **resurrected_threads);
static void tidyThreadList (generation *gen);
void
@@ -100,12 +94,10 @@ initWeakForGC(void)
}
weak_stage = WeakThreads;
- dead_weak_ptr_list = NULL;
- resurrected_threads = END_TSO_QUEUE;
}
bool
-traverseWeakPtrList(void)
+traverseWeakPtrList(StgWeak **dead_weak_ptr_list, StgTSO **resurrected_threads)
{
bool flag = false;
@@ -140,7 +132,7 @@ traverseWeakPtrList(void)
// Resurrect any threads which were unreachable
for (g = 0; g <= N; g++) {
- if (resurrectUnreachableThreads(&generations[g])) {
+ if (resurrectUnreachableThreads(&generations[g], resurrected_threads)) {
flag = true;
}
}
@@ -175,7 +167,7 @@ traverseWeakPtrList(void)
*/
if (flag == false) {
for (g = 0; g <= N; g++) {
- collectDeadWeakPtrs(&generations[g]);
+ collectDeadWeakPtrs(&generations[g], dead_weak_ptr_list);
}
weak_stage = WeakDone; // *now* we're done,
@@ -190,7 +182,7 @@ traverseWeakPtrList(void)
}
}
-static void collectDeadWeakPtrs (generation *gen)
+static void collectDeadWeakPtrs (generation *gen, StgWeak **dead_weak_ptr_list)
{
StgWeak *w, *next_w;
for (w = gen->old_weak_ptr_list; w != NULL; w = next_w) {
@@ -201,12 +193,12 @@ static void collectDeadWeakPtrs (generation *gen)
}
evacuate(&w->finalizer);
next_w = w->link;
- w->link = dead_weak_ptr_list;
- dead_weak_ptr_list = w;
+ w->link = *dead_weak_ptr_list;
+ *dead_weak_ptr_list = w;
}
}
-static bool resurrectUnreachableThreads (generation *gen)
+static bool resurrectUnreachableThreads (generation *gen, StgTSO **resurrected_threads)
{
StgTSO *t, *tmp, *next;
bool flag = false;
@@ -225,8 +217,8 @@ static bool resurrectUnreachableThreads (generation *gen)
default:
tmp = t;
evacuate((StgClosure **)&tmp);
- tmp->global_link = resurrected_threads;
- resurrected_threads = tmp;
+ tmp->global_link = *resurrected_threads;
+ *resurrected_threads = tmp;
flag = true;
}
}
diff --git a/rts/sm/MarkWeak.h b/rts/sm/MarkWeak.h
index cd58ec99d5..2756cb06e2 100644
--- a/rts/sm/MarkWeak.h
+++ b/rts/sm/MarkWeak.h
@@ -19,7 +19,7 @@ extern StgTSO *resurrected_threads;
void collectFreshWeakPtrs ( void );
void initWeakForGC ( void );
-bool traverseWeakPtrList ( void );
+bool traverseWeakPtrList ( StgWeak **dead_weak_ptr_list, StgTSO **resurrected_threads );
void markWeakPtrList ( void );
void scavengeLiveWeak ( StgWeak * );