Diffstat (limited to 'rts/sm/Evac.c')
-rw-r--r-- | rts/sm/Evac.c | 38
1 file changed, 19 insertions, 19 deletions
diff --git a/rts/sm/Evac.c b/rts/sm/Evac.c
index 1323cbea6a..0581321205 100644
--- a/rts/sm/Evac.c
+++ b/rts/sm/Evac.c
@@ -45,7 +45,7 @@ StgWord64 whitehole_spin = 0;
  */
 #define MAX_THUNK_SELECTOR_DEPTH 16
 
-static void eval_thunk_selector (StgClosure **q, StgSelector * p, rtsBool);
+static void eval_thunk_selector (StgClosure **q, StgSelector * p, bool);
 STATIC_INLINE void evacuate_large(StgPtr p);
 
 /* -----------------------------------------------------------------------------
@@ -67,7 +67,7 @@ alloc_for_copy (uint32_t size, uint32_t gen_no)
         if (gct->eager_promotion) {
             gen_no = gct->evac_gen_no;
         } else {
-            gct->failed_to_evac = rtsTrue;
+            gct->failed_to_evac = true;
         }
     }
 
@@ -182,7 +182,7 @@ copy_tag_nolock(StgClosure **p, const StgInfoTable *info,
  * pointer of an object, but reserve some padding after it. This is
  * used to optimise evacuation of TSOs.
  */
-static rtsBool
+static bool
 copyPart(StgClosure **p, StgClosure *src, uint32_t size_to_reserve,
          uint32_t size_to_copy, uint32_t gen_no)
 {
@@ -202,7 +202,7 @@ spin:
         if (IS_FORWARDING_PTR(info)) {
             src->header.info = (const StgInfoTable *)info;
             evacuate(p); // does the failed_to_evac stuff
-            return rtsFalse;
+            return false;
         }
 #else
     info = (W_)src->header.info;
@@ -229,7 +229,7 @@ spin:
         LDV_FILL_SLOP(to + size_to_copy, (int)(size_to_reserve - size_to_copy));
 #endif
 
-    return rtsTrue;
+    return true;
 }
 
 
@@ -271,7 +271,7 @@ evacuate_large(StgPtr p)
          * the desired destination (see comments in evacuate()).
          */
        if (gen_no < gct->evac_gen_no) {
-           gct->failed_to_evac = rtsTrue;
+           gct->failed_to_evac = true;
            TICK_GC_FAILED_PROMOTION();
        }
        RELEASE_SPIN_LOCK(&gen->sync);
@@ -296,7 +296,7 @@ evacuate_large(StgPtr p)
         if (gct->eager_promotion) {
             new_gen_no = gct->evac_gen_no;
         } else {
-            gct->failed_to_evac = rtsTrue;
+            gct->failed_to_evac = true;
         }
     }
 
@@ -388,7 +388,7 @@ evacuate_compact (StgPtr p)
          * the desired destination (see comments in evacuate()).
          */
         if (gen_no < gct->evac_gen_no) {
-            gct->failed_to_evac = rtsTrue;
+            gct->failed_to_evac = true;
             TICK_GC_FAILED_PROMOTION();
         }
         return;
@@ -404,7 +404,7 @@ evacuate_compact (StgPtr p)
          * the desired destination (see comments in evacuate()).
          */
         if (gen_no < gct->evac_gen_no) {
-            gct->failed_to_evac = rtsTrue;
+            gct->failed_to_evac = true;
             TICK_GC_FAILED_PROMOTION();
         }
         RELEASE_SPIN_LOCK(&gen->sync);
@@ -429,7 +429,7 @@ evacuate_compact (StgPtr p)
         if (gct->eager_promotion) {
             new_gen_no = gct->evac_gen_no;
         } else {
-            gct->failed_to_evac = rtsTrue;
+            gct->failed_to_evac = true;
         }
     }
 
@@ -582,7 +582,7 @@ loop:
       // whether it is already in the target generation. (this is
       // the write barrier).
       if (bd->gen_no < gct->evac_gen_no) {
-          gct->failed_to_evac = rtsTrue;
+          gct->failed_to_evac = true;
           TICK_GC_FAILED_PROMOTION();
       }
       return;
@@ -639,7 +639,7 @@ loop:
           *p = TAG_CLOSURE(tag,e);
           if (gen_no < gct->evac_gen_no) { // optimisation
               if (Bdescr((P_)e)->gen_no < gct->evac_gen_no) {
-                  gct->failed_to_evac = rtsTrue;
+                  gct->failed_to_evac = true;
                   TICK_GC_FAILED_PROMOTION();
               }
           }
@@ -767,7 +767,7 @@ loop:
       return;
 
   case THUNK_SELECTOR:
-      eval_thunk_selector(p, (StgSelector *)q, rtsTrue);
+      eval_thunk_selector(p, (StgSelector *)q, true);
       return;
 
   case IND:
@@ -835,7 +835,7 @@ loop:
     {
       StgStack *new_stack;
       StgPtr r, s;
-      rtsBool mine;
+      bool mine;
 
       mine = copyPart(p,(StgClosure *)stack, stack_sizeW(stack),
                       sizeofW(StgStack), gen_no);
@@ -932,7 +932,7 @@ unchain_thunk_selectors(StgSelector *p, StgClosure *val)
 }
 
 static void
-eval_thunk_selector (StgClosure **q, StgSelector * p, rtsBool evac)
+eval_thunk_selector (StgClosure **q, StgSelector * p, bool evac)
                  // NB. for legacy reasons, p & q are swapped around :(
 {
     uint32_t field;
@@ -963,7 +963,7 @@ selector_chain:
           *q = (StgClosure *)p;
           // shortcut, behave as for: if (evac) evacuate(q);
           if (evac && bd->gen_no < gct->evac_gen_no) {
-              gct->failed_to_evac = rtsTrue;
+              gct->failed_to_evac = true;
               TICK_GC_FAILED_PROMOTION();
           }
           return;
@@ -975,7 +975,7 @@ selector_chain:
       // bit is very tricky to get right. If you make changes
      // around here, test by compiling stage 3 with +RTS -c -RTS.
       if (bd->flags & BF_MARKED) {
-          // must call evacuate() to mark this closure if evac==rtsTrue
+          // must call evacuate() to mark this closure if evac==true
           *q = (StgClosure *)p;
           if (evac) evacuate(q);
           unchain_thunk_selectors(prev_thunk_selector, (StgClosure *)p);
@@ -1164,10 +1164,10 @@ selector_loop:
               }
 
               gct->thunk_selector_depth++;
-              // rtsFalse says "don't evacuate the result". It will,
+              // false says "don't evacuate the result". It will,
               // however, update any THUNK_SELECTORs that are evaluated
               // along the way.
-              eval_thunk_selector(&val, (StgSelector*)selectee, rtsFalse);
+              eval_thunk_selector(&val, (StgSelector*)selectee, false);
               gct->thunk_selector_depth--;
 
               // did we actually manage to evaluate it?
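
Note: the change above is purely mechanical. Every use of the RTS's legacy rtsBool type and its rtsTrue/rtsFalse constants in Evac.c is replaced by C99's bool, true and false from <stdbool.h>; function signatures such as copyPart() and eval_thunk_selector() change accordingly. The sketch below only illustrates that substitution pattern; copy_part_sketch and the inline legacy typedef are hypothetical stand-ins for illustration, not code from the GHC runtime.

/* Minimal sketch of the rtsBool -> bool substitution, not GHC RTS code.
 * Assumption: the legacy rtsBool was an integer-style boolean with
 * rtsTrue/rtsFalse constants (its real definition lives in the RTS
 * headers, which are not part of this diff). */
#include <stdbool.h>
#include <stdio.h>

/* Before (hypothetical legacy style):
 *   typedef int rtsBool;
 *   #define rtsFalse 0
 *   #define rtsTrue  1
 *   static rtsBool copy_part_sketch(...) { ... return rtsTrue; }
 */

/* After: the same shape using C99 bool, mirroring how copyPart() now
 * returns bool and eval_thunk_selector() takes a bool parameter. */
static bool copy_part_sketch(unsigned size_to_reserve, unsigned size_to_copy)
{
    if (size_to_copy > size_to_reserve) {
        return false;   /* was: return rtsFalse; */
    }
    return true;        /* was: return rtsTrue; */
}

int main(void)
{
    printf("%d\n", copy_part_sketch(8, 4));  /* prints 1 */
    printf("%d\n", copy_part_sketch(4, 8));  /* prints 0 */
    return 0;
}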