author     Ivan Maidanski <ivmai@mail.ru>   2023-03-24 08:35:46 +0300
committer  Ivan Maidanski <ivmai@mail.ru>   2023-03-24 12:53:15 +0300
commit     1d8ed2c5bb50d20c8903bb96f017b43a62b32ef2 (patch)
tree       88c9cab5eaa62d7572de72f2673a48b0e0725ad8 /mallocx.c
parent     d21d456b3abecb61ec8e5dfc6f06f48ba1eb4cc5 (diff)
Avoid code duplication in IGNORE_OFF_PAGE-specific malloc functions
(refactoring)
* alloc.c: Update comment regarding GC_generic_malloc_inner usage.
* alloc.c (GC_collect_or_expand): Replace ignore_off_page argument with flags (check IGNORE_OFF_PAGE bit only in it); update comment.
* alloc.c (GC_allocobj): Pass 0 as flags to GC_collect_or_expand.
* dbg_mlc.c (GC_debug_generic_malloc, GC_debug_generic_or_special_malloc): Rename knd argument to k.
* mallocx.c (GC_generic_or_special_malloc): Likewise.
* dbg_mlc.c (GC_debug_generic_malloc): Use GC_generic_malloc_aligned() instead of GC_generic_malloc().
* dbg_mlc.c (GC_debug_generic_malloc_inner): Add flags argument.
* gcj_mlc.c [GC_GCJ_SUPPORT] (GC_core_gcj_malloc): Likewise.
* include/private/gc_priv.h (GC_generic_malloc_aligned, GC_generic_malloc_inner): Likewise.
* include/private/gc_priv.h [THREAD_LOCAL_ALLOC && GC_GCJ_SUPPORT] (GC_core_gcj_malloc): Likewise.
* include/private/gc_priv.h [DBG_HDRS_ALL] (GC_debug_generic_malloc_inner): Likewise.
* malloc.c (GC_generic_malloc_inner, GC_generic_malloc_aligned): Likewise.
* dbg_mlc.c (GC_debug_generic_malloc_inner): Use GC_generic_malloc_inner() instead of GC_generic_malloc_inner_ignore_off_page().
* dbg_mlc.c [DBG_HDRS_ALL] (GC_debug_generic_malloc_inner_ignore_off_page): Remove GC_INNER function.
* include/private/gc_priv.h [DBG_HDRS_ALL || GC_GCJ_SUPPORT || !GC_NO_FINALIZATION] (GC_generic_malloc_inner_ignore_off_page): Likewise.
* include/private/gc_priv.h [DBG_HDRS_ALL] (GC_debug_generic_malloc_inner_ignore_off_page): Likewise.
* malloc.c [DBG_HDRS_ALL || GC_GCJ_SUPPORT || !GC_NO_FINALIZATION] (GC_generic_malloc_inner_ignore_off_page): Likewise.
* gcj_mlc.c [GC_GCJ_SUPPORT && !THREAD_LOCAL_ALLOC] (GC_gcj_malloc): Define as STATIC GC_core_gcj_malloc.
* gcj_mlc.c [GC_GCJ_SUPPORT] (GC_core_gcj_malloc): Reformat comment; pass flags to GC_generic_malloc_inner().
* gcj_mlc.c [GC_GCJ_SUPPORT && !THREAD_LOCAL_ALLOC] (GC_gcj_malloc): Redirect to GC_core_gcj_malloc() passing 0 as flags argument.
* thread_local_alloc.c [THREAD_LOCAL_ALLOC && GC_GCJ_SUPPORT] (GC_gcj_malloc): Likewise.
* gcj_mlc.c [GC_GCJ_SUPPORT] (GC_gcj_malloc_ignore_off_page): Redirect to GC_core_gcj_malloc() passing IGNORE_OFF_PAGE.
* gcj_mlc.c [GC_GCJ_SUPPORT] (GC_debug_gcj_malloc): Pass 0 as flags to GC_generic_malloc_inner().
* include/private/gc_priv.h (GC_generic_malloc_inner): Update comment.
* mallocx.c (GC_generic_malloc_many): Likewise.
* include/private/gc_priv.h (GC_collect_or_expand): Replace GC_bool ignore_off_page argument with unsigned flags.
* include/private/gc_priv.h (GC_INTERNAL_MALLOC, GC_INTERNAL_MALLOC_IGNORE_OFF_PAGE): Specify macro arguments.
* include/private/gc_priv.h (GC_INTERNAL_MALLOC): Pass 0 as flags argument to GC_[debug_]generic_malloc_inner().
* include/private/gc_priv.h (GC_INTERNAL_MALLOC_IGNORE_OFF_PAGE): Pass IGNORE_OFF_PAGE to GC_[debug_]generic_malloc_inner().
* malloc.c (GC_alloc_large): Pass flags (instead of flags!=0) to GC_collect_or_expand().
* malloc.c (GC_generic_malloc_inner_small): New STATIC function (move most of code from GC_generic_malloc_inner).
* malloc.c (GC_generic_malloc_inner): Move comment to gc_priv.h; call GC_generic_malloc_inner_small().
* malloc.c (GC_generic_malloc_aligned): Call GC_generic_malloc_inner_small() instead of GC_generic_malloc_inner(); pass flags (instead of 0) to GC_alloc_large(); do not cast result of GC_alloc_large() to ptr_t.
* malloc.c (GC_generic_malloc): Pass 0 as flags to GC_generic_malloc_aligned().
* malloc.c (GC_memalign): Likewise.
* malloc.c (GC_malloc_kind_global, GC_generic_malloc_uncollectable): Call GC_generic_malloc_aligned() instead of GC_generic_malloc().
* mallocx.c (GC_generic_malloc_many): Likewise.
* malloc.c (free_internal): Rename knd local variable to k.
* mallocx.c (GC_generic_malloc_ignore_off_page, GC_malloc_ignore_off_page, GC_malloc_atomic_ignore_off_page): Redirect to GC_generic_malloc_aligned() passing IGNORE_OFF_PAGE.
* typd_mlc.c (GC_generic_malloc_ignore_off_page): Likewise.
* tests/gctest.c (run_one_test): Call GC_generic_malloc_ignore_off_page() (with size larger than HBLKSIZE); increment collectable_count.
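For orientation, here is a minimal client-side sketch (not part of this commit) of the public ignore-off-page entry points whose internals the change above unifies. It assumes only the documented gc.h API; the 100000-byte size is arbitrary, chosen to span more than one heap block.

    #include <stdio.h>
    #include "gc.h"

    int main(void)
    {
        char *big, *buf;

        GC_INIT();
        /* The caller promises to keep a pointer to (or near) the start of  */
        /* the object, so the collector may ignore interior pointers that   */
        /* land past the first heap block (IGNORE_OFF_PAGE internally).     */
        big = (char *)GC_malloc_ignore_off_page(100000);
        /* Same contract for a pointer-free (atomic) object. */
        buf = (char *)GC_malloc_atomic_ignore_off_page(100000);
        if (big != NULL && buf != NULL)
            printf("allocated %p and %p\n", (void *)big, (void *)buf);
        return 0;
    }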
Diffstat (limited to 'mallocx.c')
-rw-r--r--  mallocx.c | 64
1 file changed, 13 insertions, 51 deletions
diff --git a/mallocx.c b/mallocx.c
index 8dee11b3..091ba14f 100644
--- a/mallocx.c
+++ b/mallocx.c
@@ -52,19 +52,19 @@ GC_API int GC_CALL GC_get_kind_and_size(const void * p, size_t * psize)
}
GC_API GC_ATTR_MALLOC void * GC_CALL GC_generic_or_special_malloc(size_t lb,
- int knd)
+ int k)
{
- switch(knd) {
+ switch (k) {
case PTRFREE:
case NORMAL:
- return GC_malloc_kind(lb, knd);
+ return GC_malloc_kind(lb, k);
case UNCOLLECTABLE:
# ifdef GC_ATOMIC_UNCOLLECTABLE
case AUNCOLLECTABLE:
# endif
- return GC_generic_malloc_uncollectable(lb, knd);
+ return GC_generic_malloc_uncollectable(lb, k);
default:
- return GC_generic_malloc(lb, knd);
+ return GC_generic_malloc_aligned(lb, k, 0 /* flags */, 0);
}
}
@@ -193,57 +193,18 @@ GC_API void * GC_CALL GC_realloc(void * p, size_t lb)
GC_API GC_ATTR_MALLOC void * GC_CALL
GC_generic_malloc_ignore_off_page(size_t lb, int k)
{
- void *result;
- size_t lg;
- word n_blocks;
- GC_bool init;
-
- if (SMALL_OBJ(lb))
- return GC_generic_malloc(lb, k);
- GC_ASSERT(k < MAXOBJKINDS);
- lg = ROUNDED_UP_GRANULES(lb);
- n_blocks = OBJ_SZ_TO_BLOCKS(GRANULES_TO_BYTES(lg));
- init = GC_obj_kinds[k].ok_init;
- if (EXPECT(get_have_errors(), FALSE))
- GC_print_all_errors();
- GC_INVOKE_FINALIZERS();
- GC_DBG_COLLECT_AT_MALLOC(lb);
- LOCK();
- result = (ptr_t)GC_alloc_large(ADD_SLOP(lb), k, IGNORE_OFF_PAGE, 0);
- if (EXPECT(NULL == result, FALSE)) {
- GC_oom_func oom_fn = GC_oom_fn;
- UNLOCK();
- return (*oom_fn)(lb);
- }
-
- if (GC_debugging_started) {
- BZERO(result, n_blocks * HBLKSIZE);
- } else {
-# ifdef THREADS
- /* Clear any memory that might be used for GC descriptors */
- /* before we release the lock. */
- ((word *)result)[0] = 0;
- ((word *)result)[1] = 0;
- ((word *)result)[GRANULES_TO_WORDS(lg)-1] = 0;
- ((word *)result)[GRANULES_TO_WORDS(lg)-2] = 0;
-# endif
- }
- UNLOCK();
- if (init && !GC_debugging_started) {
- BZERO(result, n_blocks * HBLKSIZE);
- }
- return result;
+ return GC_generic_malloc_aligned(lb, k, IGNORE_OFF_PAGE, 0 /* align_m1 */);
}
GC_API GC_ATTR_MALLOC void * GC_CALL GC_malloc_ignore_off_page(size_t lb)
{
- return GC_generic_malloc_ignore_off_page(lb, NORMAL);
+ return GC_generic_malloc_aligned(lb, NORMAL, IGNORE_OFF_PAGE, 0);
}
GC_API GC_ATTR_MALLOC void * GC_CALL
GC_malloc_atomic_ignore_off_page(size_t lb)
{
- return GC_generic_malloc_ignore_off_page(lb, PTRFREE);
+ return GC_generic_malloc_aligned(lb, PTRFREE, IGNORE_OFF_PAGE, 0);
}
/* Increment GC_bytes_allocd from code that doesn't have direct access */
@@ -307,7 +268,7 @@ GC_API void GC_CALL GC_generic_malloc_many(size_t lb, int k, void **result)
/* TODO: GC_dirty should be called for each linked object (but */
/* the last one) to support multiple objects allocation. */
if (!SMALL_OBJ(lb) || GC_manual_vdb) {
- op = GC_generic_malloc(lb, k);
+ op = GC_generic_malloc_aligned(lb, k, 0 /* flags */, 0 /* align_m1 */);
if (EXPECT(0 != op, TRUE))
obj_link(op) = 0;
*result = op;
@@ -465,8 +426,8 @@ GC_API void GC_CALL GC_generic_malloc_many(size_t lb, int k, void **result)
/* As a last attempt, try allocating a single object. Note that */
/* this may trigger a collection or expand the heap. */
- op = GC_generic_malloc_inner(lb, k);
- if (0 != op) obj_link(op) = 0;
+ op = GC_generic_malloc_inner(lb, k, 0 /* flags */);
+ if (op != NULL) obj_link(op) = NULL;
out:
*result = op;
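As a usage note, GC_generic_malloc_many is the engine behind the public GC_malloc_many. Below is a minimal sketch (not part of this diff, assuming only the documented gc.h API) of consuming the returned list, whose objects are linked through their first word via GC_NEXT:

    #include <stdio.h>
    #include "gc.h"

    int main(void)
    {
        void *p, *next;
        unsigned n = 0;

        GC_INIT();
        /* GC_malloc_many returns a chain of objects of the given size; */
        /* GC_NEXT() reads (and can clear) the link in the first word.  */
        for (p = GC_malloc_many(32); p != NULL; p = next) {
            next = GC_NEXT(p);
            GC_NEXT(p) = NULL;   /* detach before handing the object out */
            ++n;
        }
        printf("GC_malloc_many returned %u objects\n", n);
        return 0;
    }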
@@ -509,7 +470,8 @@ GC_API GC_ATTR_MALLOC void * GC_CALL GC_memalign(size_t align, size_t lb)
if (align <= GRANULE_BYTES) return GC_malloc(lb);
if (align >= HBLKSIZE/2 || lb >= HBLKSIZE/2) {
- return GC_clear_stack(GC_generic_malloc_aligned(lb, NORMAL, align_m1));
+ return GC_clear_stack(GC_generic_malloc_aligned(lb, NORMAL,
+ 0 /* flags */, align_m1));
}
/* We could also try to make sure that the real rounded-up object size */
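For completeness, a hedged usage sketch (not part of this diff) of the public GC_memalign entry point touched by the hunk above. It assumes only the documented gc.h API and a typical HBLKSIZE of 4096, so this request takes the large-alignment path that now passes 0 as flags to GC_generic_malloc_aligned():

    #include <stdio.h>
    #include "gc.h"

    int main(void)
    {
        void *p;

        GC_INIT();
        /* Request a 512-byte-aligned object large enough to go through */
        /* the GC_generic_malloc_aligned() path shown in the hunk above. */
        p = GC_memalign(512, 10000);
        if (p != NULL)
            printf("alignment ok: %d\n", (int)(((GC_word)p & 511) == 0));
        return 0;
    }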