path: root/rts/sm/NonMovingCensus.c
/* -----------------------------------------------------------------------------
 *
 * (c) The GHC Team, 1998-2018
 *
 * Non-moving garbage collector and allocator: Accounting census
 *
 * This is a simple space accounting census useful for characterising
 * fragmentation in the nonmoving heap.
 *
 * ---------------------------------------------------------------------------*/

#include "Rts.h"
#include "NonMoving.h"
#include "Trace.h"
#include "NonMovingCensus.h"

// N.B. This may miss segments in the event of concurrent mutation (e.g. if a
// mutator retires its current segment to the filled list).
//
// collect_live_words may only be set when we can guarantee that all mutators
// and minor GCs are stopped. In that case it is safe to walk the blocks of
// active and current segments, so we can also collect statistics on live
// words.
static struct NonmovingAllocCensus
nonmovingAllocatorCensus_(uint32_t alloc_idx, bool collect_live_words)
{
    struct NonmovingAllocCensus census = {collect_live_words, 0, 0, 0, 0};
    struct NonmovingAllocator *alloc = &nonmovingHeap.allocators[alloc_idx];

    // filled segments
    for (struct NonmovingSegment *seg = alloc->filled;
         seg != NULL;
         seg = seg->link)
    {
        unsigned int n = nonmovingSegmentBlockCount(seg);
        census.n_filled_segs++;
        census.n_live_blocks += n;
        if (collect_live_words) {
            for (unsigned int i=0; i < n; i++) {
                StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
                census.n_live_words += closure_sizeW(c);
            }
        }
    }

    // active segments
    for (struct NonmovingSegment *seg = alloc->active;
         seg != NULL;
         seg = seg->link)
    {
        census.n_active_segs++;
        unsigned int n = nonmovingSegmentBlockCount(seg);
        for (unsigned int i=0; i < n; i++) {
            if (nonmovingGetMark(seg, i) == nonmovingMarkEpoch) {
                StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
                if (collect_live_words)
                    census.n_live_words += closure_sizeW(c);
                census.n_live_blocks++;
            }
        }
    }

    // current segments
    for (unsigned int cap_n=0; cap_n < getNumCapabilities(); cap_n++)
    {
        Capability *cap = getCapability(cap_n);
        struct NonmovingSegment *seg = cap->current_segments[alloc_idx];
        unsigned int n = nonmovingSegmentBlockCount(seg);
        for (unsigned int i=0; i < n; i++) {
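            // Note: any non-zero mark byte counts as live here, unlike the
            // active-segment loop above, which compares against the current
            // mark epoch.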
            if (nonmovingGetMark(seg, i)) {
                StgClosure *c = (StgClosure *) nonmovingSegmentGetBlock(seg, i);
                if (collect_live_words)
                    census.n_live_words += closure_sizeW(c);
                census.n_live_blocks++;
            }
        }
    }
    return census;
}

/* This must not be used when mutators are active since it assumes that
 * all blocks in the nonmoving heap are valid closures.
 */
struct NonmovingAllocCensus
nonmovingAllocatorCensusWithWords(uint32_t alloc_idx)
{
    return nonmovingAllocatorCensus_(alloc_idx, true);
}

struct NonmovingAllocCensus
nonmovingAllocatorCensus(uint32_t alloc_idx)
{
    return nonmovingAllocatorCensus_(alloc_idx, false);
}
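
// A minimal aggregation sketch, not part of the RTS API: the helper name is
// hypothetical, and STG_UNUSED merely keeps this otherwise-unreferenced
// example from tripping warnings. It sums the per-allocator censuses into
// whole-heap totals and, like nonmovingAllocatorCensusWithWords, is only
// sound while the world is stopped.
STG_UNUSED static struct NonmovingAllocCensus
nonmovingHeapCensusTotals(void)
{
    struct NonmovingAllocCensus total = {true, 0, 0, 0, 0};
    for (int i = 0; i < NONMOVING_ALLOCA_CNT; i++) {
        struct NonmovingAllocCensus c = nonmovingAllocatorCensusWithWords(i);
        total.n_active_segs += c.n_active_segs;
        total.n_filled_segs += c.n_filled_segs;
        total.n_live_blocks += c.n_live_blocks;
        total.n_live_words  += c.n_live_words;
    }
    return total;
}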


static void print_alloc_census(int i, struct NonmovingAllocCensus census)
{
    uint32_t blk_size = 1 << (i + NONMOVING_ALLOCA0);
    int sz_min = 1 << (i + NONMOVING_ALLOCA0 - 1);
    int sz_max = 1 << (i + NONMOVING_ALLOCA0);
    // sz_min/sz_max are only referenced by debugTrace, which is a no-op
    // unless the RTS is built with DEBUG; the void casts silence unused
    // warnings.
    (void) sz_min; (void) sz_max;

    if (census.collected_live_words) {
        // We define occupancy as the fraction of space that is used for useful
        // data (that is, live and not slop).
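        // For example (illustrative numbers): with 32-byte blocks on a
        // 64-bit machine (sizeof(W_) == 8), 1000 live blocks holding 3000
        // live words give 100 * 3000 * 8 / (1000 * 32) = 75% occupancy;
        // the remaining 25% is slop at the ends of the blocks.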
        // Guard the division: an allocator with no live blocks is trivially
        // fully occupied.
        double occupancy = 100;
        if (census.n_live_blocks > 0) {
            occupancy = 100.0 * census.n_live_words * sizeof(W_)
                / (census.n_live_blocks * blk_size);
        }
        (void) occupancy; // silence warning if !DEBUG
        debugTrace(DEBUG_nonmoving_gc,
                   "Allocator %d (%d bytes - %d bytes): "
                   "%"PRIu32" active segs, %"PRIu32" filled segs, %"PRIu32" live blocks, "
                   "%"PRIu32" live words (%2.1f%% occupancy)",
                   i, sz_min, sz_max,
                   census.n_active_segs,
                   census.n_filled_segs,
                   census.n_live_blocks,
                   census.n_live_words,
                   occupancy);
    } else {
        debugTrace(DEBUG_nonmoving_gc,
                   "Allocator %d (%d bytes - %d bytes): "
                   "%"PRIu32" active segs, %"PRIu32" filled segs, %"PRIu32" live blocks",
                   i, sz_min, sz_max,
                   census.n_active_segs,
                   census.n_filled_segs,
                   census.n_live_blocks);
    }
}

void nonmovingPrintAllocatorCensus(bool collect_live_words)
{
    if (!RtsFlags.GcFlags.useNonmoving)
        return;

    for (int i=0; i < NONMOVING_ALLOCA_CNT; i++) {
        struct NonmovingAllocCensus census =
            nonmovingAllocatorCensus_(i, collect_live_words);

        print_alloc_census(i, census);
    }
}
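
// Usage note: with an RTS built with DEBUG, the census lines printed above
// appear under the nonmoving GC debug class (e.g. +RTS -Dn).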

void nonmovingTraceAllocatorCensus(void)
{
#if defined(TRACING)
    // Nothing to do unless the nonmoving GC is in use *and* its trace class
    // is enabled; walking the allocators when the nonmoving heap was never
    // initialised would dereference invalid segments.
    if (!RtsFlags.GcFlags.useNonmoving || !TRACE_nonmoving_gc)
        return;

    for (int i=0; i < NONMOVING_ALLOCA_CNT; i++) {
        const struct NonmovingAllocCensus census = nonmovingAllocatorCensus(i);
        const uint32_t log_blk_size = i + NONMOVING_ALLOCA0;
        traceNonmovingHeapCensus(log_blk_size, &census);
    }
#endif
}
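
// Usage note: when the RTS is built with TRACING, the census records emitted
// above land in the eventlog; run the program with +RTS -l (with the
// nonmoving trace class enabled) to capture them.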