path: root/include/jemalloc/internal/base.h
#ifndef JEMALLOC_INTERNAL_BASE_H
#define JEMALLOC_INTERNAL_BASE_H

#include "jemalloc/internal/edata.h"
#include "jemalloc/internal/ehooks.h"
#include "jemalloc/internal/mutex.h"

enum metadata_thp_mode_e {
	metadata_thp_disabled   = 0,
	/*
	 * Lazily enable hugepages for metadata.  To avoid high RSS caused by
	 * THP in a low-usage arena (i.e. the hugepage becoming a significant
	 * fraction of resident memory), the "auto" option only starts using
	 * THP once a base allocator has used up its first THP-sized region.
	 * From the second hugepage onward (within a single arena), "auto"
	 * behaves the same as "always", i.e. the mapping is madvised as
	 * hugepage right away.  See the sketch following this enum.
	 */
	metadata_thp_auto       = 1,
	metadata_thp_always     = 2,
	metadata_thp_mode_limit = 3
};
typedef enum metadata_thp_mode_e metadata_thp_mode_t;
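
/*
 * Illustrative sketch only (the real switching logic lives in base.c): a
 * hypothetical helper could gate hugepage advice on the mode roughly as
 * follows, where auto_thp_switched is the per-base flag declared further
 * below in this header.
 *
 *	static bool
 *	base_metadata_thp_wanted(const base_t *base) {
 *		switch (opt_metadata_thp) {
 *		case metadata_thp_always:
 *			return true;
 *		case metadata_thp_auto:
 *			// Off until the first THP region has been used up.
 *			return base->auto_thp_switched;
 *		default:
 *			return false;
 *		}
 *	}
 */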

#define METADATA_THP_DEFAULT metadata_thp_disabled
extern metadata_thp_mode_t opt_metadata_thp;
extern const char *metadata_thp_mode_names[];


/* Embedded at the beginning of every block of base-managed virtual memory. */
typedef struct base_block_s base_block_t;
struct base_block_s {
	/* Total size of block's virtual memory mapping. */
	size_t size;

	/* Next block in list of base's blocks. */
	base_block_t *next;

	/* Tracks unused trailing space. */
	edata_t edata;
};
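
/*
 * Sketch of how a block is consumed (assumed shape; the authoritative code is
 * in base.c): the base_block_t header occupies the start of the mapping,
 * allocations are bump-carved behind it, and `edata` always describes the
 * unused tail.  Carving `usize` bytes at `alignment` looks roughly like:
 *
 *	void *ret = (void *)ALIGNMENT_CEILING(
 *	    (uintptr_t)edata_addr_get(&block->edata), alignment);
 *	// The edata is then shrunk to cover only the bytes past ret + usize.
 *
 * Individual base allocations are never freed; the space is reclaimed only
 * when the whole base is deleted.
 */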

typedef struct base_s base_t;
struct base_s {
	/*
	 * User-configurable extent hook functions.
	 */
	ehooks_t ehooks;

	/*
	 * User-configurable extent hook functions for metadata allocations.
	 */
	ehooks_t ehooks_base;

	/* Protects base_alloc() and base_stats_get() operations. */
	malloc_mutex_t mtx;

	/* True once this base has switched to THP (metadata_thp auto mode). */
	bool auto_thp_switched;
	/*
	 * Most recent size class in the series of increasingly large base
	 * extents.  Logarithmic spacing between subsequent allocations ensures
	 * that the total number of distinct mappings remains small (see the
	 * sketch following this struct).
	 */
	pszind_t pind_last;

	/* Serial number generation state. */
	size_t extent_sn_next;

	/* Chain of all blocks associated with base. */
	base_block_t *blocks;

	/* Heap of extents that track unused trailing space within blocks. */
	edata_heap_t avail[SC_NSIZES];

	/* Stats, only maintained if config_stats. */
	size_t allocated;
	size_t resident;
	size_t mapped;
	/* Number of THP regions touched. */
	size_t n_thp;
};
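
/*
 * Sketch of the growth policy implied by pind_last (illustrative; the exact
 * policy lives in base.c): each newly mapped block is sized to at least the
 * size class one past the previous block's, so successive blocks grow
 * geometrically and the number of distinct mappings stays small.
 *
 *	// Hypothetical shape of the computation; names are not normative.
 *	pszind_t pind_next = base->pind_last + 1;
 *	size_t block_size = (min_block_size > sz_pind2sz(pind_next)) ?
 *	    min_block_size : sz_pind2sz(pind_next);
 */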

static inline unsigned
base_ind_get(const base_t *base) {
	return ehooks_ind_get(&base->ehooks);
}

static inline bool
metadata_thp_enabled(void) {
	return (opt_metadata_thp != metadata_thp_disabled);
}

base_t *b0get(void);
base_t *base_new(tsdn_t *tsdn, unsigned ind,
    const extent_hooks_t *extent_hooks, bool metadata_use_hooks);
void base_delete(tsdn_t *tsdn, base_t *base);
ehooks_t *base_ehooks_get(base_t *base);
ehooks_t *base_ehooks_get_for_metadata(base_t *base);
extent_hooks_t *base_extent_hooks_set(base_t *base,
    extent_hooks_t *extent_hooks);
void *base_alloc(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment);
edata_t *base_alloc_edata(tsdn_t *tsdn, base_t *base);
void base_stats_get(tsdn_t *tsdn, base_t *base, size_t *allocated,
    size_t *resident, size_t *mapped, size_t *n_thp);
void base_prefork(tsdn_t *tsdn, base_t *base);
void base_postfork_parent(tsdn_t *tsdn, base_t *base);
void base_postfork_child(tsdn_t *tsdn, base_t *base);
bool base_boot(tsdn_t *tsdn);
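
/*
 * Minimal usage sketch (illustrative only; arena creation in arena.c is the
 * main consumer of this API).  A base is created with caller-supplied extent
 * hooks, hands out internal metadata that is never individually freed, and
 * is torn down as a unit.  b0get() returns the global bootstrap base set up
 * by base_boot().
 *
 *	base_t *base = base_new(tsdn, ind, extent_hooks,
 *	    true);	// metadata_use_hooks
 *	if (base == NULL) {
 *		// Mapping failure.
 *	}
 *	void *meta = base_alloc(tsdn, base, sizeof(arena_t), CACHELINE);
 *	edata_t *edata = base_alloc_edata(tsdn, base);
 *	base_delete(tsdn, base);	// Unmaps all of base's blocks.
 */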

#endif /* JEMALLOC_INTERNAL_BASE_H */