diff options
author | Ivan Maidanski <ivmai@mail.ru> | 2023-03-24 21:41:26 +0300 |
---|---|---|
committer | Ivan Maidanski <ivmai@mail.ru> | 2023-03-24 22:46:52 +0300 |
commit | 3a6a58e899ccd5336bb2cb58417a45c14d4c7c0f (patch) | |
tree | b4dc5204aa753e4e52ef4c1e592d108384785a6c /malloc.c | |
parent | 4d83569d52df353ccfaa1575cda612035eb6336e (diff) | |
download | bdwgc-3a6a58e899ccd5336bb2cb58417a45c14d4c7c0f.tar.gz |
Rename ROUNDED_UP_GRANULES macro to ALLOC_REQUEST_GRANS
(refactoring)
* include/gc/gc_tiny_fl.h (GC_RAW_BYTES_FROM_INDEX): Update comment
(rename ROUNDED_UP_GRANULES to ALLOC_REQUEST_GRANS).
* include/private/gc_priv.h (ADD_SLOP): Rename to ADD_EXTRA_BYTES; move
the definition up (to be before ALLOC_REQUEST_GRANS).
* include/private/gc_priv.h (ROUNDED_UP_GRANULES): Rename to
ALLOC_REQUEST_GRANS; refine comment.
* include/private/gc_priv.h (SMALL_OBJ): Remove extra parentheses; add
comment.
* include/private/gc_priv.h (_GC_arrays._size_map): Refine comment.
* malloc.c (GC_extend_size_map, GC_generic_malloc_aligned): Rename
ROUNDED_UP_GRANULES to ALLOC_REQUEST_GRANS.
* misc.c (GC_init_size_map): Likewise.
* thread_local_alloc.c [THREAD_LOCAL_ALLOC] (GC_malloc_kind): Likewise.
* thread_local_alloc.c [THREAD_LOCAL_ALLOC && GC_GCJ_SUPPORT]
(GC_gcj_malloc): Likewise.
* malloc.c (GC_generic_malloc_inner): Rename ADD_SLOP to
ADD_EXTRA_BYTES.
* mallocx.c (GC_realloc): Likewise.
* mallocx.c (GC_malloc_many): Define lg local variable; use
ALLOC_REQUEST_GRANS() and GRANULES_TO_BYTES(); remove comment.
Diffstat (limited to 'malloc.c')
-rw-r--r-- | malloc.c | 8 |
1 file changed, 4 insertions, 4 deletions
@@ -102,7 +102,7 @@ STATIC ptr_t GC_alloc_large_and_clear(size_t lb, int k, unsigned flags)
                         /* has the length of at least n/4.   */
 STATIC void GC_extend_size_map(size_t i)
 {
-  size_t orig_granule_sz = ROUNDED_UP_GRANULES(i);
+  size_t orig_granule_sz = ALLOC_REQUEST_GRANS(i);
   size_t granule_sz;
   size_t byte_sz = GRANULES_TO_BYTES(orig_granule_sz);
                         /* The size we try to preserve.      */
@@ -124,7 +124,7 @@ STATIC void GC_extend_size_map(size_t i)
     while (GC_size_map[low_limit] != 0)
       low_limit++;
-    granule_sz = ROUNDED_UP_GRANULES(low_limit);
+    granule_sz = ALLOC_REQUEST_GRANS(low_limit);
     granule_sz += granule_sz >> 3;
     if (granule_sz < orig_granule_sz)
       granule_sz = orig_granule_sz;
@@ -197,7 +197,7 @@ GC_INNER void * GC_generic_malloc_inner(size_t lb, int k, unsigned flags)
       return GC_generic_malloc_inner_small(lb, k);
   }
-  return GC_alloc_large_and_clear(ADD_SLOP(lb), k, flags);
+  return GC_alloc_large_and_clear(ADD_EXTRA_BYTES(lb), k, flags);
 }

 #ifdef GC_COLLECT_AT_MALLOC
@@ -229,7 +229,7 @@ GC_INNER void * GC_generic_malloc_aligned(size_t lb, int k, unsigned flags,
   size_t lb_rounded;
   GC_bool init;

-  lg = ROUNDED_UP_GRANULES(lb);
+  lg = ALLOC_REQUEST_GRANS(lb);
   lb_rounded = GRANULES_TO_BYTES(lg);
   init = GC_obj_kinds[k].ok_init;
   if (EXPECT(align_m1 < GRANULE_BYTES, TRUE)) {