/* -----------------------------------------------------------------------------
 *
 * (c) The GHC Team, 1998-2018
 *
 * Non-moving garbage collector and allocator: Accounting census
 *
 * This is a simple space accounting census useful for characterising
 * fragmentation in the nonmoving heap.
 *
 * ---------------------------------------------------------------------------*/

#include "Rts.h"
#include "NonMoving.h"
#include "Trace.h"
#include "NonMovingCensus.h"

// N.B. This may miss segments in the event of concurrent mutation (e.g. if a
// mutator retires its current segment to the filled list).
//
// collect_live_words determines whether we also count the size in words of each
// live closure. This is only safe when all mutators and minor GCs are stopped,
// since it requires that every block we examine holds a valid closure.
static struct NonmovingAllocCensus
nonmovingAllocatorCensus_(struct NonmovingAllocator *alloc, bool collect_live_words)
{
    struct NonmovingAllocCensus census = {0, 0, 0, 0};

    for (struct NonmovingSegment *seg = alloc->filled;
         seg != NULL;
         seg = seg->link)
    {
        unsigned int n = nonmovingSegmentBlockCount(seg);
        census.n_filled_segs++;
        census.n_live_blocks += n;
        if (collect_live_words) {
            for (unsigned int i=0; i < n; i++) {
                StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
                census.n_live_words += closure_sizeW(c);
            }
        }
    }

    for (struct NonmovingSegment *seg = alloc->active;
         seg != NULL;
         seg = seg->link)
    {
        census.n_active_segs++;
        unsigned int n = nonmovingSegmentBlockCount(seg);
        for (unsigned int i=0; i < n; i++) {
            if (nonmovingGetMark(seg, i)) {
                StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
                if (collect_live_words)
                    census.n_live_words += closure_sizeW(c);
                census.n_live_blocks++;
            }
        }
    }

    for (unsigned int cap=0; cap < getNumCapabilities(); cap++)
    {
        struct NonmovingSegment *seg = alloc->current[cap];
        unsigned int n = nonmovingSegmentBlockCount(seg);
        for (unsigned int i=0; i < n; i++) {
            if (nonmovingGetMark(seg, i)) {
                StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
                if (collect_live_words)
                    census.n_live_words += closure_sizeW(c);
                census.n_live_blocks++;
            }
        }
    }
    return census;
}

/* This must not be used when mutators are active since it assumes that
 * all blocks in the nonmoving heap are valid closures.
 */
struct NonmovingAllocCensus
nonmovingAllocatorCensusWithWords(struct NonmovingAllocator *alloc)
{
    return nonmovingAllocatorCensus_(alloc, true);
}

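/* Variant of the above which does not compute live word counts and therefore
 * does not need to interpret blocks as closures.
 */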
struct NonmovingAllocCensus
nonmovingAllocatorCensus(struct NonmovingAllocator *alloc)
{
    return nonmovingAllocatorCensus_(alloc, false);
}
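
/* The per-allocator censuses can be folded into a heap-wide occupancy figure,
 * which is one way to quantify fragmentation. The helper below is an
 * illustrative sketch only (it is hypothetical and not part of the RTS API);
 * since it uses the live-word census it is only valid while all mutators are
 * stopped.
 */
#if 0
static double nonmovingHeapOccupancy(void)
{
    W_ live_words = 0;
    W_ capacity_words = 0;
    for (int i = 0; i < NONMOVING_ALLOCA_CNT; i++) {
        struct NonmovingAllocCensus census =
            nonmovingAllocatorCensusWithWords(nonmovingHeap.allocators[i]);
        // Capacity of this allocator's live blocks, in words.
        uint32_t blk_size = 1 << (i + NONMOVING_ALLOCA0);
        capacity_words += (W_)census.n_live_blocks * blk_size / sizeof(W_);
        live_words     += census.n_live_words;
    }
    // 1.0 means no slop within live blocks; lower values indicate fragmentation.
    return capacity_words == 0 ? 1.0 : (double)live_words / capacity_words;
}
#endif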


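/* Print a per-allocator summary of the census via debugTrace
 * (DEBUG_nonmoving_gc). Does nothing unless the nonmoving collector is in use.
 */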
void nonmovingPrintAllocatorCensus(void)
{
    if (!RtsFlags.GcFlags.useNonmoving)
        return;

    for (int i=0; i < NONMOVING_ALLOCA_CNT; i++) {
        struct NonmovingAllocCensus census =
            nonmovingAllocatorCensus(nonmovingHeap.allocators[i]);

        uint32_t blk_size = 1 << (i + NONMOVING_ALLOCA0);
        // We define occupancy as the fraction of space that is used for useful
        // data (that is, live and not slop).
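        // For example, on a 64-bit build (sizeof(W_) == 8) an allocator with
        // 64-byte blocks holding 100 live blocks and 600 live words uses
        // 600*8 = 4800 of 100*64 = 6400 bytes, i.e. 75% occupancy
        // (illustrative figures only).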
        double occupancy = 100.0;
        if (census.n_live_blocks > 0) {
            occupancy = 100.0 * census.n_live_words * sizeof(W_)
                / (census.n_live_blocks * blk_size);
        }
        (void) occupancy; // silence warning if !DEBUG
        debugTrace(DEBUG_nonmoving_gc, "Allocator %d (%d bytes - %d bytes): "
                   "%d active segs, %d filled segs, %d live blocks, %d live words "
                   "(%2.1f%% occupancy)",
                   i, 1 << (i + NONMOVING_ALLOCA0 - 1), 1 << (i + NONMOVING_ALLOCA0),
                   census.n_active_segs, census.n_filled_segs, census.n_live_blocks, census.n_live_words,
                   occupancy);
    }
}

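/* Emit the census to the eventlog: one sample per allocator, keyed by the
 * log2 of its block size. A no-op unless the RTS is built with TRACING.
 */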
void nonmovingTraceAllocatorCensus(void)
{
#if defined(TRACING)
    if (!RtsFlags.GcFlags.useNonmoving || !TRACE_nonmoving_gc)
        return;

    for (int i=0; i < NONMOVING_ALLOCA_CNT; i++) {
        const struct NonmovingAllocCensus census =
            nonmovingAllocatorCensus(nonmovingHeap.allocators[i]);
        const uint32_t log_blk_size = i + NONMOVING_ALLOCA0;
        traceNonmovingHeapCensus(log_blk_size, &census);
    }
#endif
}