| author | Simon Marlow <marlowsd@gmail.com> | 2011-06-15 16:40:16 +0100 |
|---|---|---|
| committer | Simon Marlow <marlowsd@gmail.com> | 2011-07-01 09:01:48 +0100 |
| commit | d56cbac1a875ea9f52f586be8fb6f305c706ca1d (patch) | |
| tree | fa8656a34c2f79f38ffeaf1a64291008e9d1b967 /rts/ProfHeap.c | |
| parent | bd6a73b06aad1cf99ca8eca4dbd15b153ee147d4 (diff) | |
| download | haskell-d56cbac1a875ea9f52f586be8fb6f305c706ca1d.tar.gz | |
Better heap profiling for pinned objects (e.g. ByteStrings).
Previously we were completely ignoring these, due to the difficulty
of traversing pinned blocks (the objects are not necessarily laid out
end-to-end, and we can't tell how large the gaps between them are).
Now we just count the whole block as one big ARR_WORDS, so at least
we're accounting for the memory and it has the right type.
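
In essence, the census now synthesises a dummy ARR_WORDS closure for each pinned block and hands the whole block to the accounting code. The fragment below is lifted from the patch further down and is only a sketch of that path; it depends on RTS internals (bdescr, SET_HDR, CCS_SYSTEM, heapProfObject) and is not standalone code.

```c
/* Inside the block loop of heapCensusChain() (see the diff below):
 * a pinned block may contain alignment gaps, so instead of walking its
 * closures we account for the entire block as a single ARR_WORDS
 * attributed to CCS_SYSTEM. */
if (bd->flags & BF_PINNED) {
    StgClosure arr;                                  // dummy closure header on the C stack
    SET_HDR(&arr, &stg_ARR_WORDS_info, CCS_SYSTEM);  // give it ARR_WORDS identity, SYSTEM cost centre
    heapProfObject(census, &arr, bd->blocks * BLOCK_SIZE_W,
                   rtsTrue);                         // count the whole block, marked as primitive heap
    continue;                                        // skip the per-closure walk for this block
}
```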
Diffstat (limited to 'rts/ProfHeap.c')
| mode | file | lines changed |
|---|---|---|
| -rw-r--r-- | rts/ProfHeap.c | 181 |

1 file changed, 98 insertions, 83 deletions
diff --git a/rts/ProfHeap.c b/rts/ProfHeap.c
index c41f3615a2..e88d7042ed 100644
--- a/rts/ProfHeap.c
+++ b/rts/ProfHeap.c
@@ -827,6 +827,84 @@ dumpCensus( Census *census )
     printSample(rtsFalse, census->time);
 }
 
+
+static void heapProfObject(Census *census, StgClosure *p, nat size,
+                           rtsBool prim
+#ifndef PROFILING
+                           STG_UNUSED
+#endif
+                           )
+{
+    void *identity;
+    nat real_size;
+    counter *ctr;
+
+    identity = NULL;
+
+#ifdef PROFILING
+    // subtract the profiling overhead
+    real_size = size - sizeofW(StgProfHeader);
+#else
+    real_size = size;
+#endif
+
+    if (closureSatisfiesConstraints((StgClosure*)p)) {
+#ifdef PROFILING
+        if (RtsFlags.ProfFlags.doHeapProfile == HEAP_BY_LDV) {
+            if (prim)
+                census->prim += real_size;
+            else if ((LDVW(p) & LDV_STATE_MASK) == LDV_STATE_CREATE)
+                census->not_used += real_size;
+            else
+                census->used += real_size;
+        } else
+#endif
+        {
+            identity = closureIdentity((StgClosure *)p);
+
+            if (identity != NULL) {
+                ctr = lookupHashTable( census->hash, (StgWord)identity );
+                if (ctr != NULL) {
+#ifdef PROFILING
+                    if (RtsFlags.ProfFlags.bioSelector != NULL) {
+                        if (prim)
+                            ctr->c.ldv.prim += real_size;
+                        else if ((LDVW(p) & LDV_STATE_MASK) == LDV_STATE_CREATE)
+                            ctr->c.ldv.not_used += real_size;
+                        else
+                            ctr->c.ldv.used += real_size;
+                    } else
+#endif
+                    {
+                        ctr->c.resid += real_size;
+                    }
+                } else {
+                    ctr = arenaAlloc( census->arena, sizeof(counter) );
+                    initLDVCtr(ctr);
+                    insertHashTable( census->hash, (StgWord)identity, ctr );
+                    ctr->identity = identity;
+                    ctr->next = census->ctrs;
+                    census->ctrs = ctr;
+
+#ifdef PROFILING
+                    if (RtsFlags.ProfFlags.bioSelector != NULL) {
+                        if (prim)
+                            ctr->c.ldv.prim = real_size;
+                        else if ((LDVW(p) & LDV_STATE_MASK) == LDV_STATE_CREATE)
+                            ctr->c.ldv.not_used = real_size;
+                        else
+                            ctr->c.ldv.used = real_size;
+                    } else
+#endif
+                    {
+                        ctr->c.resid = real_size;
+                    }
+                }
+            }
+        }
+    }
+}
+
 /* -----------------------------------------------------------------------------
  * Code to perform a heap census.
  * -------------------------------------------------------------------------- */
@@ -835,26 +913,26 @@ heapCensusChain( Census *census, bdescr *bd )
 {
     StgPtr p;
     StgInfoTable *info;
-    void *identity;
     nat size;
-    counter *ctr;
-    nat real_size;
-    PROFILING_ONLY( rtsBool prim );
+    rtsBool prim;
 
     for (; bd != NULL; bd = bd->link) {
 
-        // HACK: ignore pinned blocks, because they contain gaps.
-        // It's not clear exactly what we'd like to do here, since we
-        // can't tell which objects in the block are actually alive.
-        // Perhaps the whole block should be counted as SYSTEM memory.
-        if (bd->flags & BF_PINNED) {
-            continue;
-        }
+        // HACK: pretend a pinned block is just one big ARR_WORDS
+        // owned by CCS_SYSTEM.  These blocks can be full of holes due
+        // to alignment constraints so we can't traverse the memory
+        // and do a proper census.
+        if (bd->flags & BF_PINNED) {
+            StgClosure arr;
+            SET_HDR(&arr, &stg_ARR_WORDS_info, CCS_SYSTEM);
+            heapProfObject(census, &arr, bd->blocks * BLOCK_SIZE_W, rtsTrue);
+            continue;
+        }
 
         p = bd->start;
         while (p < bd->free) {
             info = get_itbl((StgClosure *)p);
-            PROFILING_ONLY( prim = rtsFalse );
+            prim = rtsFalse;
 
             switch (info->type) {
 
@@ -904,7 +982,7 @@ heapCensusChain( Census *census, bdescr *bd )
                 break;
 
             case BCO:
-                PROFILING_ONLY ( prim = rtsTrue );
+                prim = rtsTrue;
                 size = bco_sizeW((StgBCO *)p);
                 break;
 
@@ -915,7 +993,7 @@ heapCensusChain( Census *census, bdescr *bd )
             case MUT_PRIM:
             case MUT_VAR_CLEAN:
             case MUT_VAR_DIRTY:
-                PROFILING_ONLY ( prim = rtsTrue );
+                prim = rtsTrue;
                 size = sizeW_fromITBL(info);
                 break;
 
@@ -932,7 +1010,7 @@ heapCensusChain( Census *census, bdescr *bd )
                 break;
 
             case ARR_WORDS:
-                PROFILING_ONLY ( prim = rtsTrue );
+                prim = rtsTrue;
                 size = arr_words_sizeW((StgArrWords*)p);
                 break;
 
@@ -940,12 +1018,12 @@ heapCensusChain( Census *census, bdescr *bd )
             case MUT_ARR_PTRS_DIRTY:
             case MUT_ARR_PTRS_FROZEN:
             case MUT_ARR_PTRS_FROZEN0:
-                PROFILING_ONLY ( prim = rtsTrue );
+                prim = rtsTrue;
                 size = mut_arr_ptrs_sizeW((StgMutArrPtrs *)p);
                 break;
 
             case TSO:
-                PROFILING_ONLY ( prim = rtsTrue );
+                prim = rtsTrue;
 #ifdef PROFILING
                 if (RtsFlags.ProfFlags.includeTSOs) {
                     size = sizeofW(StgTSO);
@@ -961,7 +1039,7 @@ heapCensusChain( Census *census, bdescr *bd )
 #endif
 
             case STACK:
-                PROFILING_ONLY ( prim = rtsTrue );
+                prim = rtsTrue;
 #ifdef PROFILING
                 if (RtsFlags.ProfFlags.includeTSOs) {
                     size = stack_sizeW((StgStack*)p);
@@ -977,7 +1055,7 @@ heapCensusChain( Census *census, bdescr *bd )
 #endif
 
             case TREC_CHUNK:
-                PROFILING_ONLY ( prim = rtsTrue );
+                prim = rtsTrue;
                 size = sizeofW(StgTRecChunk);
                 break;
 
@@ -985,70 +1063,7 @@ heapCensusChain( Census *census, bdescr *bd )
                 barf("heapCensus, unknown object: %d", info->type);
             }
 
-            identity = NULL;
-
-#ifdef PROFILING
-            // subtract the profiling overhead
-            real_size = size - sizeofW(StgProfHeader);
-#else
-            real_size = size;
-#endif
-
-            if (closureSatisfiesConstraints((StgClosure*)p)) {
-#ifdef PROFILING
-                if (RtsFlags.ProfFlags.doHeapProfile == HEAP_BY_LDV) {
-                    if (prim)
-                        census->prim += real_size;
-                    else if ((LDVW(p) & LDV_STATE_MASK) == LDV_STATE_CREATE)
-                        census->not_used += real_size;
-                    else
-                        census->used += real_size;
-                } else
-#endif
-                {
-                    identity = closureIdentity((StgClosure *)p);
-
-                    if (identity != NULL) {
-                        ctr = lookupHashTable( census->hash, (StgWord)identity );
-                        if (ctr != NULL) {
-#ifdef PROFILING
-                            if (RtsFlags.ProfFlags.bioSelector != NULL) {
-                                if (prim)
-                                    ctr->c.ldv.prim += real_size;
-                                else if ((LDVW(p) & LDV_STATE_MASK) == LDV_STATE_CREATE)
-                                    ctr->c.ldv.not_used += real_size;
-                                else
-                                    ctr->c.ldv.used += real_size;
-                            } else
-#endif
-                            {
-                                ctr->c.resid += real_size;
-                            }
-                        } else {
-                            ctr = arenaAlloc( census->arena, sizeof(counter) );
-                            initLDVCtr(ctr);
-                            insertHashTable( census->hash, (StgWord)identity, ctr );
-                            ctr->identity = identity;
-                            ctr->next = census->ctrs;
-                            census->ctrs = ctr;
-
-#ifdef PROFILING
-                            if (RtsFlags.ProfFlags.bioSelector != NULL) {
-                                if (prim)
-                                    ctr->c.ldv.prim = real_size;
-                                else if ((LDVW(p) & LDV_STATE_MASK) == LDV_STATE_CREATE)
-                                    ctr->c.ldv.not_used = real_size;
-                                else
-                                    ctr->c.ldv.used = real_size;
-                            } else
-#endif
-                            {
-                                ctr->c.resid = real_size;
-                            }
-                        }
-                    }
-                }
-            }
+            heapProfObject(census,(StgClosure*)p,size,prim);
 
             p += size;
         }
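
As a design note, the patch also factors the per-closure bookkeeping out of heapCensusChain() into the new heapProfObject() helper, so the synthetic per-block ARR_WORDS and ordinary closures share a single accounting path. Condensed from the diff above (a fragment for orientation, not standalone code; the real declaration also marks prim as STG_UNUSED in non-profiling builds):

```c
/* The helper takes the census, a closure (real or synthetic), its size in
 * words, and whether it counts as "primitive" heap. */
static void heapProfObject(Census *census, StgClosure *p, nat size, rtsBool prim);

/* Call site 1: one synthetic ARR_WORDS covering a whole pinned block. */
heapProfObject(census, &arr, bd->blocks * BLOCK_SIZE_W, rtsTrue);

/* Call site 2: every ordinary closure, after the switch has set size and prim. */
heapProfObject(census, (StgClosure *)p, size, prim);
```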