summaryrefslogtreecommitdiff
path: root/rts/Arena.c
diff options
context:
space:
mode:
author: Daniel Gröber <dxld@darkboxed.org> 2020-02-07 07:22:23 +0100
committer: Marge Bot <ben+marge-bot@smart-cactus.org> 2020-02-08 10:29:00 -0500
commit 1183ae94a256b30bfe12ddc5e1c29d6f46abd79d (patch)
tree 73d0b4badd102419deade3a89dec395cc266610a /rts/Arena.c
parent b3857b62cb74ef50cbe88ab898aae946ca933365 (diff)
download haskell-1183ae94a256b30bfe12ddc5e1c29d6f46abd79d.tar.gz
rts: Fix Arena blocks accounting for MBlock sized allocations
When requesting more than BLOCKS_PER_MBLOCK blocks, allocGroup can return a different number of blocks than requested. Here we use the number of requested blocks; however, arenaFree will subtract the actual number of blocks we got from arena_blocks, (possibly) resulting in a negative value and triggering ASSERT(arena_blocks >= 0).
Diffstat (limited to 'rts/Arena.c')
-rw-r--r-- rts/Arena.c | 2
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/rts/Arena.c b/rts/Arena.c
index a4ff11be7d..1d6dac623c 100644
--- a/rts/Arena.c
+++ b/rts/Arena.c
@@ -82,7 +82,7 @@ arenaAlloc( Arena *arena, size_t size )
// allocate a fresh block...
req_blocks = (W_)BLOCK_ROUND_UP(size) / BLOCK_SIZE;
bd = allocGroup_lock(req_blocks);
- arena_blocks += req_blocks;
+ arena_blocks += bd->blocks;
bd->gen_no = 0;
bd->gen = NULL;