Diffstat (limited to 'Zend/zend_alloc.c')
 -rw-r--r--  Zend/zend_alloc.c  304
 1 file changed, 190 insertions(+), 114 deletions(-)
diff --git a/Zend/zend_alloc.c b/Zend/zend_alloc.c
index 3a43027346..035e23f1db 100644
--- a/Zend/zend_alloc.c
+++ b/Zend/zend_alloc.c
@@ -12,14 +12,12 @@
| obtain it through the world-wide-web, please send a note to |
| license@zend.com so we can mail you a copy immediately. |
+----------------------------------------------------------------------+
- | Authors: Andi Gutmans <andi@zend.com> |
- | Zeev Suraski <zeev@zend.com> |
- | Dmitry Stogov <dmitry@zend.com> |
+ | Authors: Andi Gutmans <andi@php.net> |
+ | Zeev Suraski <zeev@php.net> |
+ | Dmitry Stogov <dmitry@php.net> |
+----------------------------------------------------------------------+
*/
-/* $Id$ */
-
/*
* zend_alloc is designed to be a modern CPU cache friendly memory manager
* for PHP. Most ideas are taken from jemalloc and tcmalloc implementations.
@@ -423,11 +421,15 @@ static void *zend_mm_mmap_fixed(void *addr, size_t size)
#ifdef _WIN32
return VirtualAlloc(addr, size, MEM_COMMIT | MEM_RESERVE, PAGE_READWRITE);
#else
+ int flags = MAP_PRIVATE | MAP_ANON;
+#if defined(MAP_EXCL)
+ flags |= MAP_FIXED | MAP_EXCL;
+#endif
/* MAP_FIXED leads to discarding of the old mapping, so it can't be used. */
- void *ptr = mmap(addr, size, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANON /*| MAP_POPULATE | MAP_HUGETLB*/, -1, 0);
+ void *ptr = mmap(addr, size, PROT_READ | PROT_WRITE, flags /*| MAP_POPULATE | MAP_HUGETLB*/, -1, 0);
if (ptr == MAP_FAILED) {
-#if ZEND_MM_ERROR
+#if ZEND_MM_ERROR && !defined(MAP_EXCL)
fprintf(stderr, "\nmmap() failed: [%d] %s\n", errno, strerror(errno));
#endif
return NULL;
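Where MAP_EXCL is available (FreeBSD, for instance), MAP_FIXED | MAP_EXCL asks the kernel to map exactly at addr without clobbering an existing mapping. Elsewhere the call stays hint-only, so the caller still has to verify that the kernel honoured the hint. A minimal sketch of that hint-and-verify pattern, assuming plain POSIX mmap()/munmap() only (mmap_at_hint is an illustrative name, not a function in this file, and the file's real fallback also reports errors under ZEND_MM_ERROR):

#include <stddef.h>
#include <sys/mman.h>

static void *mmap_at_hint(void *addr, size_t size)
{
	/* Without MAP_FIXED the address is only a hint; the kernel may ignore it. */
	void *ptr = mmap(addr, size, PROT_READ | PROT_WRITE,
	                 MAP_PRIVATE | MAP_ANON, -1, 0);

	if (ptr == MAP_FAILED) {
		return NULL;
	}
	if (ptr != addr) {
		/* Landed somewhere else: release the mapping and treat it as a miss. */
		munmap(ptr, size);
		return NULL;
	}
	return ptr;
}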
@@ -585,7 +587,7 @@ static zend_always_inline int zend_mm_bitset_find_zero_and_set(zend_mm_bitset *b
static zend_always_inline int zend_mm_bitset_is_set(zend_mm_bitset *bitset, int bit)
{
- return (bitset[bit / ZEND_MM_BITSET_LEN] & (Z_L(1) << (bit & (ZEND_MM_BITSET_LEN-1)))) != 0;
+ return ZEND_BIT_TEST(bitset, bit);
}
static zend_always_inline void zend_mm_bitset_set_bit(zend_mm_bitset *bitset, int bit)
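ZEND_BIT_TEST replaces the open-coded test that the removed line spelled out. The macro's real definition lives in the Zend headers; as a rough equivalent built from the same word-index/bit-index split (BIT_TEST is an illustrative name, not the real macro):

/* Illustrative re-spelling of the removed expression, not the real ZEND_BIT_TEST. */
#define BIT_TEST(bitset, bit) \
	(((bitset)[(bit) / ZEND_MM_BITSET_LEN] >> ((bit) & (ZEND_MM_BITSET_LEN - 1))) & 1)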
@@ -1056,7 +1058,7 @@ found:
return ZEND_MM_PAGE_ADDR(chunk, page_num);
}
-static zend_always_inline void *zend_mm_alloc_large(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
+static zend_always_inline void *zend_mm_alloc_large_ex(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
int pages_count = (int)ZEND_MM_SIZE_TO_NUM(size, ZEND_MM_PAGE_SIZE);
#if ZEND_DEBUG
@@ -1075,6 +1077,18 @@ static zend_always_inline void *zend_mm_alloc_large(zend_mm_heap *heap, size_t s
return ptr;
}
+#if ZEND_DEBUG
+static zend_never_inline void *zend_mm_alloc_large(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
+{
+ return zend_mm_alloc_large_ex(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+}
+#else
+static zend_never_inline void *zend_mm_alloc_large(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
+{
+ return zend_mm_alloc_large_ex(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+}
+#endif
+
static zend_always_inline void zend_mm_delete_chunk(zend_mm_heap *heap, zend_mm_chunk *chunk)
{
chunk->next->prev = chunk->prev;
@@ -1124,7 +1138,7 @@ static zend_always_inline void zend_mm_free_pages_ex(zend_mm_heap *heap, zend_mm
}
}
-static void zend_mm_free_pages(zend_mm_heap *heap, zend_mm_chunk *chunk, int page_num, int pages_count)
+static zend_never_inline void zend_mm_free_pages(zend_mm_heap *heap, zend_mm_chunk *chunk, int page_num, int pages_count)
{
zend_mm_free_pages_ex(heap, chunk, page_num, pages_count, 1);
}
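Both zend_mm_alloc_large() and zend_mm_free_pages() are now thin zend_never_inline entry points over zend_always_inline *_ex bodies: callers that need the inlined fast path use the _ex variant directly, while ordinary call sites stay compact. Reduced to a generic sketch (GCC/Clang attribute spellings assumed, do_alloc/do_alloc_ex are invented names):

/* Generic sketch of the inline-body / out-of-line-wrapper split. */
static inline __attribute__((always_inline)) void *do_alloc_ex(size_t size)
{
	/* full fast-path body; expanded into the few callers that want it inline */
	return NULL; /* placeholder body */
}

static __attribute__((noinline)) void *do_alloc(size_t size)
{
	/* compact out-of-line entry point for everyone else */
	return do_alloc_ex(size);
}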
@@ -1342,7 +1356,7 @@ static zend_always_inline void *zend_mm_alloc_heap(zend_mm_heap *heap, size_t si
return NULL;
}
#endif
- if (size <= ZEND_MM_MAX_SMALL_SIZE) {
+ if (EXPECTED(size <= ZEND_MM_MAX_SMALL_SIZE)) {
ptr = zend_mm_alloc_small(heap, size, ZEND_MM_SMALL_SIZE_TO_BIN(size) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
#if ZEND_DEBUG
dbg = zend_mm_get_debug_info(heap, ptr);
@@ -1353,7 +1367,7 @@ static zend_always_inline void *zend_mm_alloc_heap(zend_mm_heap *heap, size_t si
dbg->orig_lineno = __zend_orig_lineno;
#endif
return ptr;
- } else if (size <= ZEND_MM_MAX_LARGE_SIZE) {
+ } else if (EXPECTED(size <= ZEND_MM_MAX_LARGE_SIZE)) {
ptr = zend_mm_alloc_large(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
#if ZEND_DEBUG
dbg = zend_mm_get_debug_info(heap, ptr);
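Wrapping the size comparisons in EXPECTED() tells the compiler to lay out the small- and large-allocation branches as the likely paths. On GCC/Clang the Zend macros reduce to __builtin_expect; a minimal sketch of that reduction (the real definitions in the Zend portability headers carry extra fallbacks not shown here):

/* Sketch of the branch-prediction hints; see the Zend portability headers for the real macros. */
#define EXPECTED(condition)   __builtin_expect(!!(condition), 1)
#define UNEXPECTED(condition) __builtin_expect(!!(condition), 0)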
@@ -1425,97 +1439,130 @@ static size_t zend_mm_size(zend_mm_heap *heap, void *ptr ZEND_FILE_LINE_DC ZEND_
}
}
-static void *zend_mm_realloc_heap(zend_mm_heap *heap, void *ptr, size_t size, size_t copy_size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
+static zend_never_inline void *zend_mm_realloc_slow(zend_mm_heap *heap, void *ptr, size_t size, size_t copy_size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
+{
+ void *ret;
+
+#if ZEND_MM_STAT
+ do {
+ size_t orig_peak = heap->peak;
+ size_t orig_real_peak = heap->real_peak;
+#endif
+ ret = zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ memcpy(ret, ptr, copy_size);
+ zend_mm_free_heap(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+#if ZEND_MM_STAT
+ heap->peak = MAX(orig_peak, heap->size);
+ heap->real_peak = MAX(orig_real_peak, heap->real_size);
+ } while (0);
+#endif
+ return ret;
+}
+
+static zend_never_inline void *zend_mm_realloc_huge(zend_mm_heap *heap, void *ptr, size_t size, size_t copy_size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
- size_t page_offset;
size_t old_size;
size_t new_size;
- void *ret;
#if ZEND_DEBUG
size_t real_size;
- zend_mm_debug_info *dbg;
#endif
- page_offset = ZEND_MM_ALIGNED_OFFSET(ptr, ZEND_MM_CHUNK_SIZE);
- if (UNEXPECTED(page_offset == 0)) {
- if (UNEXPECTED(ptr == NULL)) {
- return zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
- }
- old_size = zend_mm_get_huge_block_size(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ old_size = zend_mm_get_huge_block_size(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
#if ZEND_DEBUG
- real_size = size;
- size = ZEND_MM_ALIGNED_SIZE(size) + ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info));
+ real_size = size;
+ size = ZEND_MM_ALIGNED_SIZE(size) + ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info));
#endif
- if (size > ZEND_MM_MAX_LARGE_SIZE) {
+ if (size > ZEND_MM_MAX_LARGE_SIZE) {
#if ZEND_DEBUG
- size = real_size;
+ size = real_size;
#endif
#ifdef ZEND_WIN32
- /* On Windows we don't have ability to extend huge blocks in-place.
- * We allocate them with 2MB size granularity, to avoid many
- * reallocations when they are extended by small pieces
- */
- new_size = ZEND_MM_ALIGNED_SIZE_EX(size, MAX(REAL_PAGE_SIZE, ZEND_MM_CHUNK_SIZE));
+ /* On Windows we don't have ability to extend huge blocks in-place.
+ * We allocate them with 2MB size granularity, to avoid many
+ * reallocations when they are extended by small pieces
+ */
+ new_size = ZEND_MM_ALIGNED_SIZE_EX(size, MAX(REAL_PAGE_SIZE, ZEND_MM_CHUNK_SIZE));
#else
- new_size = ZEND_MM_ALIGNED_SIZE_EX(size, REAL_PAGE_SIZE);
+ new_size = ZEND_MM_ALIGNED_SIZE_EX(size, REAL_PAGE_SIZE);
#endif
- if (new_size == old_size) {
+ if (new_size == old_size) {
#if ZEND_DEBUG
- zend_mm_change_huge_block_size(heap, ptr, new_size, real_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ zend_mm_change_huge_block_size(heap, ptr, new_size, real_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
#else
- zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
#endif
- return ptr;
- } else if (new_size < old_size) {
- /* unmup tail */
- if (zend_mm_chunk_truncate(heap, ptr, old_size, new_size)) {
+ return ptr;
+ } else if (new_size < old_size) {
+			/* unmap tail */
+ if (zend_mm_chunk_truncate(heap, ptr, old_size, new_size)) {
#if ZEND_MM_STAT || ZEND_MM_LIMIT
- heap->real_size -= old_size - new_size;
+ heap->real_size -= old_size - new_size;
#endif
#if ZEND_MM_STAT
- heap->size -= old_size - new_size;
+ heap->size -= old_size - new_size;
#endif
#if ZEND_DEBUG
- zend_mm_change_huge_block_size(heap, ptr, new_size, real_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ zend_mm_change_huge_block_size(heap, ptr, new_size, real_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
#else
- zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
#endif
- return ptr;
- }
- } else /* if (new_size > old_size) */ {
+ return ptr;
+ }
+ } else /* if (new_size > old_size) */ {
#if ZEND_MM_LIMIT
- if (UNEXPECTED(new_size - old_size > heap->limit - heap->real_size)) {
- if (zend_mm_gc(heap) && new_size - old_size <= heap->limit - heap->real_size) {
- /* pass */
- } else if (heap->overflow == 0) {
+ if (UNEXPECTED(new_size - old_size > heap->limit - heap->real_size)) {
+ if (zend_mm_gc(heap) && new_size - old_size <= heap->limit - heap->real_size) {
+ /* pass */
+ } else if (heap->overflow == 0) {
#if ZEND_DEBUG
- zend_mm_safe_error(heap, "Allowed memory size of %zu bytes exhausted at %s:%d (tried to allocate %zu bytes)", heap->limit, __zend_filename, __zend_lineno, size);
+ zend_mm_safe_error(heap, "Allowed memory size of %zu bytes exhausted at %s:%d (tried to allocate %zu bytes)", heap->limit, __zend_filename, __zend_lineno, size);
#else
- zend_mm_safe_error(heap, "Allowed memory size of %zu bytes exhausted (tried to allocate %zu bytes)", heap->limit, size);
+ zend_mm_safe_error(heap, "Allowed memory size of %zu bytes exhausted (tried to allocate %zu bytes)", heap->limit, size);
#endif
- return NULL;
- }
+ return NULL;
}
+ }
#endif
- /* try to map tail right after this block */
- if (zend_mm_chunk_extend(heap, ptr, old_size, new_size)) {
+ /* try to map tail right after this block */
+ if (zend_mm_chunk_extend(heap, ptr, old_size, new_size)) {
#if ZEND_MM_STAT || ZEND_MM_LIMIT
- heap->real_size += new_size - old_size;
+ heap->real_size += new_size - old_size;
#endif
#if ZEND_MM_STAT
- heap->real_peak = MAX(heap->real_peak, heap->real_size);
- heap->size += new_size - old_size;
- heap->peak = MAX(heap->peak, heap->size);
+ heap->real_peak = MAX(heap->real_peak, heap->real_size);
+ heap->size += new_size - old_size;
+ heap->peak = MAX(heap->peak, heap->size);
#endif
#if ZEND_DEBUG
- zend_mm_change_huge_block_size(heap, ptr, new_size, real_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ zend_mm_change_huge_block_size(heap, ptr, new_size, real_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
#else
- zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
#endif
- return ptr;
- }
+ return ptr;
}
}
+ }
+
+ return zend_mm_realloc_slow(heap, ptr, size, MIN(old_size, copy_size) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+}
+
+static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *ptr, size_t size, zend_bool use_copy_size, size_t copy_size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
+{
+ size_t page_offset;
+ size_t old_size;
+ size_t new_size;
+ void *ret;
+#if ZEND_DEBUG
+ zend_mm_debug_info *dbg;
+#endif
+
+ page_offset = ZEND_MM_ALIGNED_OFFSET(ptr, ZEND_MM_CHUNK_SIZE);
+ if (UNEXPECTED(page_offset == 0)) {
+ if (EXPECTED(ptr == NULL)) {
+ return _zend_mm_alloc(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ } else {
+ return zend_mm_realloc_huge(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ }
} else {
zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE);
int page_num = (int)(page_offset / ZEND_MM_PAGE_SIZE);
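The refactor splits the old monolithic zend_mm_realloc_heap() into a small always-inline dispatcher plus never-inline zend_mm_realloc_huge()/zend_mm_realloc_slow() helpers. The dispatch key is the pointer's offset inside its 2 MB chunk: runs always sit behind the chunk header, so only NULL and chunk-aligned huge blocks have a zero offset. A comment-form summary of the dispatch above:

/* Dispatch summary (mirrors the branch above, not verbatim code):               */
/*   page_offset = ZEND_MM_ALIGNED_OFFSET(ptr, ZEND_MM_CHUNK_SIZE)               */
/*   page_offset == 0 && ptr == NULL -> _zend_mm_alloc()        (plain malloc)   */
/*   page_offset == 0 && ptr != NULL -> zend_mm_realloc_huge()  (chunk-aligned   */
/*                                      pointers are always huge allocations)    */
/*   page_offset != 0                -> small (SRUN) / large (LRUN) run logic in */
/*                                      the else branch that follows             */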
@@ -1529,21 +1576,56 @@ static void *zend_mm_realloc_heap(zend_mm_heap *heap, void *ptr, size_t size, si
ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted");
if (info & ZEND_MM_IS_SRUN) {
int old_bin_num = ZEND_MM_SRUN_BIN_NUM(info);
- old_size = bin_data_size[old_bin_num];
- if (size <= ZEND_MM_MAX_SMALL_SIZE) {
- int bin_num = ZEND_MM_SMALL_SIZE_TO_BIN(size);
- if (old_bin_num == bin_num) {
-#if ZEND_DEBUG
- dbg = zend_mm_get_debug_info(heap, ptr);
- dbg->size = real_size;
- dbg->filename = __zend_filename;
- dbg->orig_filename = __zend_orig_filename;
- dbg->lineno = __zend_lineno;
- dbg->orig_lineno = __zend_orig_lineno;
+
+ do {
+ old_size = bin_data_size[old_bin_num];
+
+ /* Check if requested size fits into current bin */
+ if (size <= old_size) {
+ /* Check if truncation is necessary */
+ if (old_bin_num > 0 && size < bin_data_size[old_bin_num - 1]) {
+ /* truncation */
+ ret = zend_mm_alloc_small(heap, size, ZEND_MM_SMALL_SIZE_TO_BIN(size) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ copy_size = use_copy_size ? MIN(size, copy_size) : size;
+ memcpy(ret, ptr, copy_size);
+ zend_mm_free_small(heap, ptr, old_bin_num);
+ } else {
+ /* reallocation in-place */
+ ret = ptr;
+ }
+ } else if (size <= ZEND_MM_MAX_SMALL_SIZE) {
+ /* small extension */
+
+#if ZEND_MM_STAT
+ do {
+ size_t orig_peak = heap->peak;
+ size_t orig_real_peak = heap->real_peak;
+#endif
+ ret = zend_mm_alloc_small(heap, size, ZEND_MM_SMALL_SIZE_TO_BIN(size) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ copy_size = use_copy_size ? MIN(old_size, copy_size) : old_size;
+ memcpy(ret, ptr, copy_size);
+ zend_mm_free_small(heap, ptr, old_bin_num);
+#if ZEND_MM_STAT
+ heap->peak = MAX(orig_peak, heap->size);
+ heap->real_peak = MAX(orig_real_peak, heap->real_size);
+ } while (0);
#endif
- return ptr;
+ } else {
+ /* slow reallocation */
+ break;
}
- }
+
+#if ZEND_DEBUG
+ dbg = zend_mm_get_debug_info(heap, ret);
+ dbg->size = real_size;
+ dbg->filename = __zend_filename;
+ dbg->orig_filename = __zend_orig_filename;
+ dbg->lineno = __zend_lineno;
+ dbg->orig_lineno = __zend_orig_lineno;
+#endif
+ return ret;
+ } while (0);
+
} else /* if (info & ZEND_MM_IS_LARGE_RUN) */ {
ZEND_MM_CHECK(ZEND_MM_ALIGNED_OFFSET(page_offset, ZEND_MM_PAGE_SIZE) == 0, "zend_mm_heap corrupted");
old_size = ZEND_MM_LRUN_PAGES(info) * ZEND_MM_PAGE_SIZE;
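The small-run branch decides purely from the bin size table: shrinking stays in place unless the request would also fit the next smaller bin, and growth within the small range moves to a bigger bin. A worked example with bin sizes 8/16/24/32 bytes, treated here as illustrative values (the real table is generated by ZEND_MM_BINS_INFO):

/* Worked example, bin sizes assumed to be 8, 16, 24, 32, ...                    */
/* old block: the 24-byte bin, i.e. old_size == bin_data_size[old_bin_num] == 24 */
/* realloc to 20: 20 <= 24 and 20 >= bin_data_size[old_bin_num - 1] (16)         */
/*   -> "reallocation in-place": pointer returned unchanged                      */
/* realloc to 12: 12 <= 24 but 12 < 16                                           */
/*   -> "truncation": allocate from the 16-byte bin, copy 12 bytes               */
/*      (or MIN(12, copy_size) when use_copy_size is set), free the old block    */
/* realloc to 40: 40 > 24 but still <= ZEND_MM_MAX_SMALL_SIZE                    */
/*   -> "small extension": allocate the matching bin, copy the full 24 old bytes */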
@@ -1615,21 +1697,8 @@ static void *zend_mm_realloc_heap(zend_mm_heap *heap, void *ptr, size_t size, si
#endif
}
- /* Naive reallocation */
-#if ZEND_MM_STAT
- do {
- size_t orig_peak = heap->peak;
- size_t orig_real_peak = heap->real_peak;
-#endif
- ret = zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
- memcpy(ret, ptr, MIN(old_size, copy_size));
- zend_mm_free_heap(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
-#if ZEND_MM_STAT
- heap->peak = MAX(orig_peak, heap->size);
- heap->real_peak = MAX(orig_real_peak, heap->real_size);
- } while (0);
-#endif
- return ret;
+ copy_size = MIN(old_size, copy_size);
+ return zend_mm_realloc_slow(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
}
/*********************/
@@ -2279,12 +2348,12 @@ ZEND_API void ZEND_FASTCALL _zend_mm_free(zend_mm_heap *heap, void *ptr ZEND_FIL
void* ZEND_FASTCALL _zend_mm_realloc(zend_mm_heap *heap, void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
- return zend_mm_realloc_heap(heap, ptr, size, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ return zend_mm_realloc_heap(heap, ptr, size, 0, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
}
void* ZEND_FASTCALL _zend_mm_realloc2(zend_mm_heap *heap, void *ptr, size_t size, size_t copy_size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
- return zend_mm_realloc_heap(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ return zend_mm_realloc_heap(heap, ptr, size, 1, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
}
ZEND_API size_t ZEND_FASTCALL _zend_mm_block_size(zend_mm_heap *heap, void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
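The extra zend_bool parameter records whether the caller supplied its own copy length: _zend_mm_realloc() passes 0, so the allocator copies up to MIN(old block size, new size), while _zend_mm_realloc2() passes 1 and caps the copy at copy_size. A hypothetical usage sketch through the erealloc()/erealloc2() convenience wrappers (buf, str, new_capacity and used_len are invented names):

/* erealloc(): use_copy_size == 0, the allocator copies MIN(old size, new size). */
buf = erealloc(buf, new_capacity);

/* erealloc2(): use_copy_size == 1, only the first used_len bytes are copied,    */
/* useful when the caller tracks how much of the buffer is actually meaningful.  */
str = erealloc2(str, new_capacity, used_len);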
@@ -2355,14 +2424,12 @@ ZEND_MM_BINS_INFO(_ZEND_BIN_ALLOCATOR, x, y)
ZEND_API void* ZEND_FASTCALL _emalloc_large(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
-
ZEND_MM_CUSTOM_ALLOCATOR(size);
- return zend_mm_alloc_large(AG(mm_heap), size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+ return zend_mm_alloc_large_ex(AG(mm_heap), size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
}
ZEND_API void* ZEND_FASTCALL _emalloc_huge(size_t size)
{
-
ZEND_MM_CUSTOM_ALLOCATOR(size);
return zend_mm_alloc_huge(AG(mm_heap), size);
}
@@ -2397,7 +2464,6 @@ ZEND_MM_BINS_INFO(_ZEND_BIN_FREE, x, y)
ZEND_API void ZEND_FASTCALL _efree_large(void *ptr, size_t size)
{
-
ZEND_MM_CUSTOM_DEALLOCATOR(ptr);
{
size_t page_offset = ZEND_MM_ALIGNED_OFFSET(ptr, ZEND_MM_CHUNK_SIZE);
@@ -2422,7 +2488,6 @@ ZEND_API void ZEND_FASTCALL _efree_huge(void *ptr, size_t size)
ZEND_API void* ZEND_FASTCALL _emalloc(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
-
#if ZEND_MM_CUSTOM
if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) {
if (ZEND_DEBUG && AG(mm_heap)->use_custom_heap == ZEND_MM_CUSTOM_HEAP_DEBUG) {
@@ -2437,7 +2502,6 @@ ZEND_API void* ZEND_FASTCALL _emalloc(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LI
ZEND_API void ZEND_FASTCALL _efree(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
-
#if ZEND_MM_CUSTOM
if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) {
if (ZEND_DEBUG && AG(mm_heap)->use_custom_heap == ZEND_MM_CUSTOM_HEAP_DEBUG) {
@@ -2453,7 +2517,7 @@ ZEND_API void ZEND_FASTCALL _efree(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_OR
ZEND_API void* ZEND_FASTCALL _erealloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
-
+#if ZEND_MM_CUSTOM
if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) {
if (ZEND_DEBUG && AG(mm_heap)->use_custom_heap == ZEND_MM_CUSTOM_HEAP_DEBUG) {
return AG(mm_heap)->custom_heap.debug._realloc(ptr, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
@@ -2461,12 +2525,13 @@ ZEND_API void* ZEND_FASTCALL _erealloc(void *ptr, size_t size ZEND_FILE_LINE_DC
return AG(mm_heap)->custom_heap.std._realloc(ptr, size);
}
}
- return zend_mm_realloc_heap(AG(mm_heap), ptr, size, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+#endif
+ return zend_mm_realloc_heap(AG(mm_heap), ptr, size, 0, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
}
ZEND_API void* ZEND_FASTCALL _erealloc2(void *ptr, size_t size, size_t copy_size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
-
+#if ZEND_MM_CUSTOM
if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) {
if (ZEND_DEBUG && AG(mm_heap)->use_custom_heap == ZEND_MM_CUSTOM_HEAP_DEBUG) {
return AG(mm_heap)->custom_heap.debug._realloc(ptr, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
@@ -2474,14 +2539,17 @@ ZEND_API void* ZEND_FASTCALL _erealloc2(void *ptr, size_t size, size_t copy_size
return AG(mm_heap)->custom_heap.std._realloc(ptr, size);
}
}
- return zend_mm_realloc_heap(AG(mm_heap), ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
+#endif
+ return zend_mm_realloc_heap(AG(mm_heap), ptr, size, 1, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
}
ZEND_API size_t ZEND_FASTCALL _zend_mem_block_size(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
+#if ZEND_MM_CUSTOM
if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) {
return 0;
}
+#endif
return zend_mm_size(AG(mm_heap), ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
}
@@ -2505,13 +2573,13 @@ ZEND_API void* ZEND_FASTCALL _safe_realloc(void *ptr, size_t nmemb, size_t size,
return perealloc(ptr, zend_safe_address_guarded(nmemb, size, offset), 1);
}
-
ZEND_API void* ZEND_FASTCALL _ecalloc(size_t nmemb, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)
{
void *p;
- p = _safe_emalloc(nmemb, size, 0 ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC);
- memset(p, 0, size * nmemb);
+ size = zend_safe_address_guarded(nmemb, size, 0);
+ p = emalloc_rel(size);
+ memset(p, 0, size);
return p;
}
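With zend_safe_address_guarded() the nmemb * size product is overflow-checked once, and the memset() then uses the same guarded total instead of recomputing size * nmemb. Conceptually the guard looks like the sketch below (an assumption: safe_total is an invented name, and the real helper raises a fatal allocator error rather than calling abort()):

/* Conceptual sketch of an overflow-guarded nmemb * size + offset. */
#include <stdlib.h>

static size_t safe_total(size_t nmemb, size_t size, size_t offset)
{
	size_t total;

	/* GCC/Clang checked-arithmetic builtins assumed. */
	if (__builtin_mul_overflow(nmemb, size, &total) ||
	    __builtin_add_overflow(total, offset, &total)) {
		abort(); /* stand-in for the allocator's fatal overflow error */
	}
	return total;
}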
@@ -2603,9 +2671,9 @@ ZEND_API void shutdown_memory_manager(int silent, int full_shutdown)
static void alloc_globals_ctor(zend_alloc_globals *alloc_globals)
{
char *tmp;
+
#if ZEND_MM_CUSTOM
tmp = getenv("USE_ZEND_ALLOC");
-
if (tmp && !zend_atoi(tmp, 0)) {
alloc_globals->mm_heap = malloc(sizeof(zend_mm_heap));
memset(alloc_globals->mm_heap, 0, sizeof(zend_mm_heap));
@@ -2616,6 +2684,7 @@ static void alloc_globals_ctor(zend_alloc_globals *alloc_globals)
return;
}
#endif
+
tmp = getenv("USE_ZEND_ALLOC_HUGE_PAGES");
if (tmp && zend_atoi(tmp, 0)) {
zend_mm_use_huge_pages = 1;
@@ -2678,10 +2747,14 @@ ZEND_API void zend_mm_set_custom_handlers(zend_mm_heap *heap,
#if ZEND_MM_CUSTOM
zend_mm_heap *_heap = (zend_mm_heap*)heap;
- _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_STD;
- _heap->custom_heap.std._malloc = _malloc;
- _heap->custom_heap.std._free = _free;
- _heap->custom_heap.std._realloc = _realloc;
+ if (!_malloc && !_free && !_realloc) {
+ _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_NONE;
+ } else {
+ _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_STD;
+ _heap->custom_heap.std._malloc = _malloc;
+ _heap->custom_heap.std._free = _free;
+ _heap->custom_heap.std._realloc = _realloc;
+ }
#endif
}
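The added NULL check means zend_mm_set_custom_handlers() can now also uninstall a previously registered set of callbacks instead of leaving the heap pointing at NULL function pointers. Hypothetical usage (my_malloc/my_free/my_realloc are placeholder callbacks, not part of this change):

zend_mm_heap *heap = zend_mm_get_heap();

/* Install custom callbacks: emalloc()/efree()/erealloc() are routed through them. */
zend_mm_set_custom_handlers(heap, my_malloc, my_free, my_realloc);

/* New behaviour: passing three NULLs resets the heap to ZEND_MM_CUSTOM_HEAP_NONE, */
/* i.e. back to the built-in allocator.                                            */
zend_mm_set_custom_handlers(heap, NULL, NULL, NULL);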
@@ -2836,8 +2909,11 @@ ZEND_API void * __zend_malloc(size_t len)
ZEND_API void * __zend_calloc(size_t nmemb, size_t len)
{
- void *tmp = _safe_malloc(nmemb, len, 0);
- memset(tmp, 0, nmemb * len);
+ void *tmp;
+
+ len = zend_safe_address_guarded(nmemb, len, 0);
+ tmp = __zend_malloc(len);
+ memset(tmp, 0, len);
return tmp;
}