diff --git a/Zend/zend_alloc.c b/Zend/zend_alloc.c index 5a1c19e09d19d..e2d7e8a1556ce 100644 --- a/Zend/zend_alloc.c +++ b/Zend/zend_alloc.c @@ -83,9 +83,7 @@ typedef int pid_t; #include #include #include -#ifdef __SANITIZE_ADDRESS__ -# include -#endif + #ifndef _WIN32 # include @@ -173,6 +171,68 @@ static size_t _real_page_size = ZEND_MM_PAGE_SIZE; } while (0) #endif +#ifdef __SANITIZE_ADDRESS__ +# include + +#if 0 + +#define ZEND_MM_POISON_DEBUG(_type, _ptr, _size) do { \ + fprintf(stderr, "%s %p - %p in %d\n", (_type), (_ptr), (void*) (((size_t)_ptr)+((size_t)_size)), __LINE__); \ + fflush(stderr); \ +} while (0); + +#else + +#define ZEND_MM_POISON_DEBUG(_type, _ptr, _size) + +#endif + +#define ZEND_MM_POISON(_ptr, _size) do { \ + if (UNEXPECTED(((size_t) (_ptr)) & ((size_t)7))) { \ + zend_mm_panic("Wrong alignment"); \ + } \ + ZEND_MM_POISON_DEBUG("Poisoning", (_ptr), (_size)); \ + ASAN_POISON_MEMORY_REGION((_ptr), (_size));\ +} while (0); +#define ZEND_MM_UNPOISON(_ptr, _size) do { \ + if (UNEXPECTED(((size_t) (_ptr)) & ((size_t)7))) { \ + zend_mm_panic("Wrong alignment"); \ + } \ + ZEND_MM_POISON_DEBUG("Unpoisoning", (_ptr), (_size)); \ + ASAN_UNPOISON_MEMORY_REGION((_ptr), (_size));\ +} while (0); + +#define ZEND_MM_POISON_HEAP(_ptr) ZEND_MM_POISON((_ptr), sizeof(zend_mm_heap)); +#define ZEND_MM_UNPOISON_HEAP(_ptr) ZEND_MM_UNPOISON((_ptr), sizeof(zend_mm_heap)); + +#define ZEND_MM_POISON_DEBUGINFO(_ptr) ZEND_MM_POISON((_ptr), sizeof(zend_mm_debug_info)); +#define ZEND_MM_UNPOISON_DEBUGINFO(_ptr) ZEND_MM_UNPOISON((_ptr), sizeof(zend_mm_debug_info)); + +#define ZEND_MM_POISON_CHUNK_HDR(_ptr, _heap) do { \ + ZEND_MM_POISON((_ptr), sizeof(zend_mm_chunk)); \ + ZEND_MM_UNPOISON_HEAP((_heap)); \ +} while (0); + +#define ZEND_MM_UNPOISON_CHUNK_HDR(_ptr) ZEND_MM_UNPOISON((_ptr), sizeof(zend_mm_chunk)); + +#define ZEND_MM_POISON_CHUNK(_ptr, _heap) do { \ + ZEND_MM_POISON((_ptr), ZEND_MM_CHUNK_SIZE); \ + ZEND_MM_UNPOISON_HEAP((_heap)); \ +} while (0); + +#else + 
+#define ZEND_MM_POISON(_ptr, _size) +#define ZEND_MM_UNPOISON(_ptr, _size) +#define ZEND_MM_POISON_HEAP(_ptr) +#define ZEND_MM_UNPOISON_HEAP(_ptr) +#define ZEND_MM_POISON_CHUNK_HDR(_ptr, _heap) +#define ZEND_MM_UNPOISON_CHUNK_HDR(_ptr) +#define ZEND_MM_POISON_CHUNK(_ptr, _heap) +#define ZEND_MM_POISON_DEBUGINFO(_ptr) +#define ZEND_MM_UNPOISON_DEBUGINFO(_ptr) + +#endif typedef uint32_t zend_mm_page_info; /* 4-byte integer */ typedef zend_ulong zend_mm_bitset; /* 4-byte or 8-byte integer */ @@ -417,8 +477,8 @@ static ZEND_COLD ZEND_NORETURN void zend_mm_safe_error(zend_mm_heap *heap, #endif size_t size) { - heap->overflow = 1; + ZEND_MM_POISON_HEAP(heap); zend_try { zend_error_noreturn(E_ERROR, format, @@ -430,7 +490,9 @@ static ZEND_COLD ZEND_NORETURN void zend_mm_safe_error(zend_mm_heap *heap, size); } zend_catch { } zend_end_try(); + ZEND_MM_UNPOISON_HEAP(heap); heap->overflow = 0; + ZEND_MM_POISON_HEAP(heap); zend_bailout(); exit(1); } @@ -452,12 +514,78 @@ static void stderr_last_error(char *msg) } #endif +static void _zend_mm_set_custom_handlers_ex(zend_mm_heap *heap, + void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void* (*_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + size_t (*_gc)(void), + void (*_shutdown)(bool, bool)) +{ +#if ZEND_MM_CUSTOM + zend_mm_heap *_heap = (zend_mm_heap*)heap; + + if (!_malloc && !_free && !_realloc) { + _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_NONE; + } else { + _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_STD; + _heap->custom_heap._malloc = _malloc; + _heap->custom_heap._free = _free; + _heap->custom_heap._realloc = _realloc; + _heap->custom_heap._gc = _gc; + _heap->custom_heap._shutdown = _shutdown; + } +#endif +} + + +static void _zend_mm_get_custom_handlers_ex(zend_mm_heap *heap, + void* (**_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void (**_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + 
void* (**_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + size_t (**_gc)(void), + void (**_shutdown)(bool, bool)) +{ +#if ZEND_MM_CUSTOM + zend_mm_heap *_heap = (zend_mm_heap*)heap; + + if (heap->use_custom_heap) { + *_malloc = _heap->custom_heap._malloc; + *_free = _heap->custom_heap._free; + *_realloc = _heap->custom_heap._realloc; + if (_gc != NULL) { + *_gc = _heap->custom_heap._gc; + } + if (_shutdown != NULL) { + *_shutdown = _heap->custom_heap._shutdown; + } + } else { + *_malloc = NULL; + *_free = NULL; + *_realloc = NULL; + if (_gc != NULL) { + *_gc = NULL; + } + if (_shutdown != NULL) { + *_shutdown = NULL; + } + } +#else + *_malloc = NULL; + *_free = NULL; + *_realloc = NULL; + *_gc = NULL; + *_shutdown = NULL; +#endif +} + + /*****************/ /* OS Allocation */ /*****************/ static void zend_mm_munmap(void *addr, size_t size) { + ZEND_MM_UNPOISON(addr, size); #ifdef _WIN32 if (VirtualFree(addr, 0, MEM_RELEASE) == 0) { /** ERROR_INVALID_ADDRESS is expected when addr is not range start address */ @@ -764,9 +892,7 @@ static void *zend_mm_chunk_alloc_int(size_t size, size_t alignment) if (zend_mm_use_huge_pages) { zend_mm_hugepage(ptr, size); } -#ifdef __SANITIZE_ADDRESS__ - ASAN_UNPOISON_MEMORY_REGION(ptr, size); -#endif + ZEND_MM_POISON(ptr, size); return ptr; } else { size_t offset; @@ -806,9 +932,7 @@ static void *zend_mm_chunk_alloc_int(size_t size, size_t alignment) if (zend_mm_use_huge_pages) { zend_mm_hugepage(ptr, size); } -# ifdef __SANITIZE_ADDRESS__ - ASAN_UNPOISON_MEMORY_REGION(ptr, size); -# endif + ZEND_MM_POISON(ptr, size); #endif return ptr; } @@ -887,6 +1011,8 @@ static zend_always_inline void zend_mm_chunk_init(zend_mm_heap *heap, zend_mm_ch { chunk->heap = heap; chunk->next = heap->main_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(heap->main_chunk); + ZEND_MM_UNPOISON_CHUNK_HDR(heap->main_chunk->prev); chunk->prev = heap->main_chunk->prev; chunk->prev->next = chunk; chunk->next->prev = chunk; @@ -898,6 +1024,8 @@ 
static zend_always_inline void zend_mm_chunk_init(zend_mm_heap *heap, zend_mm_ch /* mark first pages as allocated */ chunk->free_map[0] = (1L << ZEND_MM_FIRST_PAGE) - 1; chunk->map[0] = ZEND_MM_LRUN(ZEND_MM_FIRST_PAGE); + ZEND_MM_POISON_CHUNK_HDR(heap->main_chunk->prev, heap); + ZEND_MM_POISON_CHUNK_HDR(heap->main_chunk, heap); } /***********************/ @@ -918,6 +1046,8 @@ static void zend_mm_change_huge_block_size(zend_mm_heap *heap, void *ptr, size_t /* Large Runs */ /**************/ +static size_t _zend_mm_gc(zend_mm_heap *heap); + #if ZEND_DEBUG static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) #else @@ -925,6 +1055,7 @@ static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count ZEND_F #endif { zend_mm_chunk *chunk = heap->main_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); uint32_t page_num, len; int steps = 0; @@ -1047,12 +1178,14 @@ static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count ZEND_F get_chunk: if (heap->cached_chunks) { heap->cached_chunks_count--; + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); chunk = heap->cached_chunks; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); heap->cached_chunks = chunk->next; } else { #if ZEND_MM_LIMIT if (UNEXPECTED(ZEND_MM_CHUNK_SIZE > heap->limit - heap->real_size)) { - if (zend_mm_gc(heap)) { + if (_zend_mm_gc(heap)) { goto get_chunk; } else if (heap->overflow == 0) { #if ZEND_DEBUG @@ -1060,14 +1193,16 @@ static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count ZEND_F #else zend_mm_safe_error(heap, "Allowed memory size of %zu bytes exhausted (tried to allocate %zu bytes)", heap->limit, ZEND_MM_PAGE_SIZE * pages_count); #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return NULL; } } #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); chunk = (zend_mm_chunk*)zend_mm_chunk_alloc(heap, ZEND_MM_CHUNK_SIZE, ZEND_MM_CHUNK_SIZE); if (UNEXPECTED(chunk == NULL)) { /* insufficient memory */ - if (zend_mm_gc(heap) && 
+ if (_zend_mm_gc(heap) && (chunk = (zend_mm_chunk*)zend_mm_chunk_alloc(heap, ZEND_MM_CHUNK_SIZE, ZEND_MM_CHUNK_SIZE)) != NULL) { /* pass */ } else { @@ -1097,28 +1232,44 @@ static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count ZEND_F if (heap->chunks_count > heap->peak_chunks_count) { heap->peak_chunks_count = heap->chunks_count; } + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); zend_mm_chunk_init(heap, chunk); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); page_num = ZEND_MM_FIRST_PAGE; len = ZEND_MM_PAGES - ZEND_MM_FIRST_PAGE; goto found; } else { - chunk = chunk->next; + zend_mm_chunk *next_chunk = chunk->next; + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + chunk = next_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); steps++; } } found: if (steps > 2 && pages_count < 8) { + ZEND_MM_UNPOISON_CHUNK_HDR(chunk->next); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk->prev); ZEND_MM_CHECK(chunk->next->prev == chunk, "zend_mm_heap corrupted"); ZEND_MM_CHECK(chunk->prev->next == chunk, "zend_mm_heap corrupted"); /* move chunk into the head of the linked-list */ chunk->prev->next = chunk->next; chunk->next->prev = chunk->prev; + + ZEND_MM_UNPOISON_CHUNK_HDR(heap->main_chunk); + ZEND_MM_UNPOISON_CHUNK_HDR(heap->main_chunk->next); chunk->next = heap->main_chunk->next; chunk->prev = heap->main_chunk; chunk->prev->next = chunk; chunk->next->prev = chunk; + ZEND_MM_POISON_CHUNK_HDR(heap->main_chunk->next, heap); + ZEND_MM_POISON_CHUNK_HDR(heap->main_chunk, heap); + + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); + ZEND_MM_POISON_CHUNK_HDR(chunk->next, heap); + ZEND_MM_POISON_CHUNK_HDR(chunk->prev, heap); } /* mark run as allocated */ chunk->free_pages -= pages_count; @@ -1127,6 +1278,7 @@ static void *zend_mm_alloc_pages(zend_mm_heap *heap, uint32_t pages_count ZEND_F if (page_num == chunk->free_tail) { chunk->free_tail = page_num + pages_count; } + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return ZEND_MM_PAGE_ADDR(chunk, page_num); } @@ -1156,11 +1308,19 @@ static zend_never_inline void 
*zend_mm_alloc_large(zend_mm_heap *heap, size_t si static zend_always_inline void zend_mm_delete_chunk(zend_mm_heap *heap, zend_mm_chunk *chunk) { + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk->next); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk->prev); + ZEND_MM_CHECK(chunk->next->prev == chunk, "zend_mm_heap corrupted"); ZEND_MM_CHECK(chunk->prev->next == chunk, "zend_mm_heap corrupted"); chunk->next->prev = chunk->prev; chunk->prev->next = chunk->next; + + ZEND_MM_POISON_CHUNK_HDR(chunk->next, heap); + ZEND_MM_POISON_CHUNK_HDR(chunk->prev, heap); + heap->chunks_count--; if (heap->chunks_count + heap->cached_chunks_count < heap->avg_chunks_count + 0.1 || (heap->chunks_count == heap->last_chunks_delete_boundary @@ -1169,6 +1329,7 @@ static zend_always_inline void zend_mm_delete_chunk(zend_mm_heap *heap, zend_mm_ heap->cached_chunks_count++; chunk->next = heap->cached_chunks; heap->cached_chunks = chunk; + ZEND_MM_POISON_CHUNK(chunk, heap); } else { #if ZEND_MM_STAT || ZEND_MM_LIMIT heap->real_size -= ZEND_MM_CHUNK_SIZE; @@ -1181,19 +1342,31 @@ static zend_always_inline void zend_mm_delete_chunk(zend_mm_heap *heap, zend_mm_ heap->last_chunks_delete_count++; } } + + if (heap->cached_chunks) { + ZEND_MM_UNPOISON_CHUNK_HDR(heap->cached_chunks); + } + if (!heap->cached_chunks || chunk->num > heap->cached_chunks->num) { + if (heap->cached_chunks) { + ZEND_MM_POISON_CHUNK_HDR(heap->cached_chunks, heap); + } zend_mm_chunk_free(heap, chunk, ZEND_MM_CHUNK_SIZE); } else { //TODO: select the best chunk to delete??? 
+ + ZEND_MM_UNPOISON_CHUNK_HDR(heap->cached_chunks); chunk->next = heap->cached_chunks->next; zend_mm_chunk_free(heap, heap->cached_chunks, ZEND_MM_CHUNK_SIZE); heap->cached_chunks = chunk; + ZEND_MM_POISON_CHUNK(chunk, heap); } } } static zend_always_inline void zend_mm_free_pages_ex(zend_mm_heap *heap, zend_mm_chunk *chunk, uint32_t page_num, uint32_t pages_count, int free_chunk) { + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); chunk->free_pages += pages_count; zend_mm_bitset_reset_range(chunk->free_map, page_num, pages_count); chunk->map[page_num] = 0; @@ -1201,8 +1374,13 @@ static zend_always_inline void zend_mm_free_pages_ex(zend_mm_heap *heap, zend_mm /* this setting may be not accurate */ chunk->free_tail = page_num; } - if (free_chunk && chunk != heap->main_chunk && chunk->free_pages == ZEND_MM_PAGES - ZEND_MM_FIRST_PAGE) { + bool should_free = free_chunk && chunk != heap->main_chunk && chunk->free_pages == ZEND_MM_PAGES - ZEND_MM_FIRST_PAGE; + ZEND_MM_POISON(ZEND_MM_PAGE_ADDR(chunk, page_num), pages_count * ZEND_MM_PAGE_SIZE); + + if (should_free) { zend_mm_delete_chunk(heap, chunk); + } else { + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); } } @@ -1323,19 +1501,28 @@ static zend_always_inline void zend_mm_set_next_free_slot(zend_mm_heap *heap, ui { ZEND_ASSERT(bin_data_size[bin_num] >= ZEND_MM_MIN_USEABLE_BIN_SIZE); + ZEND_MM_UNPOISON(slot, sizeof(zend_mm_free_slot*)); + ZEND_MM_UNPOISON(&ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num), sizeof(zend_mm_free_slot*)); + slot->next_free_slot = next; ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num) = zend_mm_encode_free_slot(heap, next); + + ZEND_MM_POISON(slot, sizeof(zend_mm_free_slot*)); + ZEND_MM_POISON(&ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num), sizeof(zend_mm_free_slot*)); } static zend_always_inline zend_mm_free_slot *zend_mm_get_next_free_slot(zend_mm_heap *heap, uint32_t bin_num, zend_mm_free_slot* slot) { + ZEND_MM_UNPOISON(slot, sizeof(zend_mm_free_slot*)); + ZEND_MM_UNPOISON(&ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num), 
sizeof(zend_mm_free_slot*)); + zend_mm_free_slot *next = slot->next_free_slot; - if (EXPECTED(next != NULL)) { - zend_mm_free_slot *shadow = ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num); - if (UNEXPECTED(next != zend_mm_decode_free_slot(heap, shadow))) { - zend_mm_panic("zend_mm_heap corrupted"); - } - } + zend_mm_free_slot *shadow = ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num); + ZEND_MM_CHECK(next == zend_mm_decode_free_slot(heap, shadow), "zend_mm_heap corrupted"); + + ZEND_MM_POISON(slot, sizeof(zend_mm_free_slot*)); + ZEND_MM_POISON(&ZEND_MM_FREE_SLOT_PTR_SHADOW(slot, bin_num), sizeof(zend_mm_free_slot*)); + return (zend_mm_free_slot*)next; } @@ -1362,8 +1549,12 @@ static zend_never_inline void *zend_mm_alloc_small_slow(zend_mm_heap *heap, uint /* insufficient memory */ return NULL; } + // bin is poisoned, do not unpoision inside this function + // zend_mm_set_next_free_slot automatically unpoisons and repoisons before return chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(bin, ZEND_MM_CHUNK_SIZE); + + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); page_num = ZEND_MM_ALIGNED_OFFSET(bin, ZEND_MM_CHUNK_SIZE) / ZEND_MM_PAGE_SIZE; chunk->map[page_num] = ZEND_MM_SRUN(bin_num); if (bin_pages[bin_num] > 1) { @@ -1374,6 +1565,7 @@ static zend_never_inline void *zend_mm_alloc_small_slow(zend_mm_heap *heap, uint i++; } while (i < bin_pages[bin_num]); } + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); /* create a linked list of elements from 1 to last */ end = (zend_mm_free_slot*)((char*)bin + (bin_data_size[bin_num] * (bin_elements[bin_num] - 1))); @@ -1383,18 +1575,23 @@ static zend_never_inline void *zend_mm_alloc_small_slow(zend_mm_heap *heap, uint #if ZEND_DEBUG do { zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)p + bin_data_size[bin_num] - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = 0; + ZEND_MM_POISON_DEBUGINFO(dbg); } while (0); #endif + p = (zend_mm_free_slot*)((char*)p + bin_data_size[bin_num]); } while (p != end); - /* 
terminate list using NULL */ - p->next_free_slot = NULL; + zend_mm_set_next_free_slot(heap, bin_num, p, NULL); + #if ZEND_DEBUG do { zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)p + bin_data_size[bin_num] - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = 0; + ZEND_MM_POISON_DEBUGINFO(dbg); } while (0); #endif @@ -1418,6 +1615,7 @@ static zend_always_inline void *zend_mm_alloc_small(zend_mm_heap *heap, int bin_ if (EXPECTED(heap->free_slot[bin_num] != NULL)) { zend_mm_free_slot *p = heap->free_slot[bin_num]; heap->free_slot[bin_num] = zend_mm_get_next_free_slot(heap, bin_num, p); + return p; } else { return zend_mm_alloc_small_slow(heap, bin_num ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); @@ -1437,13 +1635,17 @@ static zend_always_inline void zend_mm_free_small(zend_mm_heap *heap, void *ptr, #if ZEND_DEBUG do { zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)ptr + bin_data_size[bin_num] - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = 0; + ZEND_MM_POISON_DEBUGINFO(dbg); } while (0); #endif p = (zend_mm_free_slot*)ptr; zend_mm_set_next_free_slot(heap, bin_num, p, heap->free_slot[bin_num]); heap->free_slot[bin_num] = p; + + ZEND_MM_POISON(p, bin_data_size[bin_num]); } /********/ @@ -1461,8 +1663,10 @@ static zend_always_inline zend_mm_debug_info *zend_mm_get_debug_info(zend_mm_hea ZEND_MM_CHECK(page_offset != 0, "zend_mm_heap corrupted"); chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); page_num = (int)(page_offset / ZEND_MM_PAGE_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); info = chunk->map[page_num]; ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted"); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); if (EXPECTED(info & ZEND_MM_IS_SRUN)) { int bin_num = ZEND_MM_SRUN_BIN_NUM(info); return (zend_mm_debug_info*)((char*)ptr + bin_data_size[bin_num] - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); @@ -1498,22 +1702,26 
@@ static zend_always_inline void *zend_mm_alloc_heap(zend_mm_heap *heap, size_t si ptr = zend_mm_alloc_small(heap, ZEND_MM_SMALL_SIZE_TO_BIN(size) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ptr); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif return ptr; } else if (EXPECTED(size <= ZEND_MM_MAX_LARGE_SIZE)) { ptr = zend_mm_alloc_large(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ptr); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif return ptr; } else { @@ -1535,9 +1743,11 @@ static zend_always_inline void zend_mm_free_heap(zend_mm_heap *heap, void *ptr Z } else { zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); int page_num = (int)(page_offset / ZEND_MM_PAGE_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); zend_mm_page_info info = chunk->map[page_num]; ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted"); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); if (EXPECTED(info & ZEND_MM_IS_SRUN)) { zend_mm_free_small(heap, ptr, ZEND_MM_SRUN_BIN_NUM(info)); } else /* if (info & ZEND_MM_IS_LRUN) */ { @@ -1559,15 +1769,20 @@ static size_t zend_mm_size(zend_mm_heap *heap, void *ptr ZEND_FILE_LINE_DC ZEND_ zend_mm_chunk *chunk; #if 0 && ZEND_DEBUG zend_mm_debug_info *dbg = zend_mm_get_debug_info(heap, ptr); - return dbg->size; + ZEND_MM_UNPOISON_DEBUGINFO(dbg); + size_t ret = dbg->size; + ZEND_MM_POISON_DEBUGINFO(dbg); + return ret; #else int page_num; zend_mm_page_info info; chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, 
ZEND_MM_CHUNK_SIZE); page_num = (int)(page_offset / ZEND_MM_PAGE_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); info = chunk->map[page_num]; ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted"); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); if (EXPECTED(info & ZEND_MM_IS_SRUN)) { return bin_data_size[ZEND_MM_SRUN_BIN_NUM(info)]; } else /* if (info & ZEND_MM_IS_LARGE_RUN) */ { @@ -1586,6 +1801,8 @@ static zend_never_inline void *zend_mm_realloc_slow(zend_mm_heap *heap, void *pt size_t orig_peak = heap->peak; #endif ret = zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON(ret, size); + ZEND_MM_UNPOISON(ptr, copy_size); memcpy(ret, ptr, copy_size); zend_mm_free_heap(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #if ZEND_MM_STAT @@ -1627,6 +1844,7 @@ static zend_never_inline void *zend_mm_realloc_huge(zend_mm_heap *heap, void *pt #else zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #endif + ZEND_MM_POISON(ptr, new_size); return ptr; } else if (new_size < old_size) { /* unmup tail */ @@ -1642,12 +1860,13 @@ static zend_never_inline void *zend_mm_realloc_huge(zend_mm_heap *heap, void *pt #else zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #endif + ZEND_MM_POISON(ptr, old_size); return ptr; } } else /* if (new_size > old_size) */ { #if ZEND_MM_LIMIT if (UNEXPECTED(new_size - old_size > heap->limit - heap->real_size)) { - if (zend_mm_gc(heap) && new_size - old_size <= heap->limit - heap->real_size) { + if (_zend_mm_gc(heap) && new_size - old_size <= heap->limit - heap->real_size) { /* pass */ } else if (heap->overflow == 0) { #if ZEND_DEBUG @@ -1674,6 +1893,8 @@ static zend_never_inline void *zend_mm_realloc_huge(zend_mm_heap *heap, void *pt #else zend_mm_change_huge_block_size(heap, ptr, new_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); #endif + ZEND_MM_POISON(ptr, 
new_size); + return ptr; } } @@ -1695,12 +1916,16 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p page_offset = ZEND_MM_ALIGNED_OFFSET(ptr, ZEND_MM_CHUNK_SIZE); if (UNEXPECTED(page_offset == 0)) { if (EXPECTED(ptr == NULL)) { - return _zend_mm_alloc(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ret = _zend_mm_alloc(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); } else { - return zend_mm_realloc_huge(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ret = zend_mm_realloc_huge(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); } + ZEND_MM_UNPOISON(ret, size); + return ret; } else { zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); + int page_num = (int)(page_offset / ZEND_MM_PAGE_SIZE); zend_mm_page_info info = chunk->map[page_num]; #if ZEND_MM_HEAP_PROTECTION @@ -1728,11 +1953,14 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p /* truncation */ ret = zend_mm_alloc_small(heap, ZEND_MM_SMALL_SIZE_TO_BIN(size) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); copy_size = use_copy_size ? MIN(size, copy_size) : size; + ZEND_MM_UNPOISON(ret, size); + ZEND_MM_UNPOISON(ptr, copy_size); memcpy(ret, ptr, copy_size); zend_mm_free_small(heap, ptr, old_bin_num); } else { /* reallocation in-place */ ret = ptr; + ZEND_MM_UNPOISON(ret, size); } } else if (size <= ZEND_MM_MAX_SMALL_SIZE) { /* small extension */ @@ -1743,6 +1971,8 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p #endif ret = zend_mm_alloc_small(heap, ZEND_MM_SMALL_SIZE_TO_BIN(size) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); copy_size = use_copy_size ? 
MIN(old_size, copy_size) : old_size; + ZEND_MM_UNPOISON(ret, size); + ZEND_MM_UNPOISON(ptr, copy_size); memcpy(ret, ptr, copy_size); zend_mm_free_small(heap, ptr, old_bin_num); #if ZEND_MM_STAT @@ -1756,12 +1986,15 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ret); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return ret; } while (0); @@ -1773,12 +2006,16 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p if (new_size == old_size) { #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ptr); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + ZEND_MM_UNPOISON(ptr, size); return ptr; } else if (new_size < old_size) { /* free tail pages */ @@ -1791,14 +2028,20 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p chunk->map[page_num] = ZEND_MM_LRUN(new_pages_count); chunk->free_pages += rest_pages_count; zend_mm_bitset_reset_range(chunk->free_map, page_num + new_pages_count, rest_pages_count); + ZEND_MM_POISON(ZEND_MM_PAGE_ADDR(chunk, page_num + new_pages_count), rest_pages_count * ZEND_MM_PAGE_SIZE); #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ptr); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + ZEND_MM_POISON(ptr, 
old_size); + ZEND_MM_UNPOISON(ptr, size); return ptr; } else /* if (new_size > old_size) */ { int new_pages_count = (int)(new_size / ZEND_MM_PAGE_SIZE); @@ -1820,12 +2063,16 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p chunk->map[page_num] = ZEND_MM_LRUN(new_pages_count); #if ZEND_DEBUG dbg = zend_mm_get_debug_info(heap, ptr); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); dbg->size = real_size; dbg->filename = __zend_filename; dbg->orig_filename = __zend_orig_filename; dbg->lineno = __zend_lineno; dbg->orig_lineno = __zend_orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + ZEND_MM_UNPOISON(ptr, size); return ptr; } } @@ -1834,10 +2081,13 @@ static zend_always_inline void *zend_mm_realloc_heap(zend_mm_heap *heap, void *p #if ZEND_DEBUG size = real_size; #endif + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); } copy_size = MIN(old_size, copy_size); - return zend_mm_realloc_slow(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ret = zend_mm_realloc_slow(heap, ptr, size, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON(ret, size); + return ret; } /*********************/ @@ -1851,6 +2101,7 @@ static void zend_mm_add_huge_block(zend_mm_heap *heap, void *ptr, size_t size ZE #endif { zend_mm_huge_list *list = (zend_mm_huge_list*)zend_mm_alloc_heap(heap, sizeof(zend_mm_huge_list) ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); list->ptr = ptr; list->size = size; list->next = heap->huge_list; @@ -1861,6 +2112,7 @@ static void zend_mm_add_huge_block(zend_mm_heap *heap, void *ptr, size_t size ZE list->dbg.lineno = __zend_lineno; list->dbg.orig_lineno = __zend_orig_lineno; #endif + ZEND_MM_POISON(list, sizeof(zend_mm_huge_list)); heap->huge_list = list; } @@ -1869,11 +2121,14 @@ static size_t zend_mm_del_huge_block(zend_mm_heap *heap, void *ptr ZEND_FILE_LIN zend_mm_huge_list *prev = 
NULL; zend_mm_huge_list *list = heap->huge_list; while (list != NULL) { + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); if (list->ptr == ptr) { size_t size; if (prev) { + ZEND_MM_UNPOISON(prev, sizeof(zend_mm_huge_list)); prev->next = list->next; + ZEND_MM_POISON(prev, sizeof(zend_mm_huge_list)); } else { heap->huge_list = list->next; } @@ -1883,6 +2138,7 @@ static size_t zend_mm_del_huge_block(zend_mm_heap *heap, void *ptr ZEND_FILE_LIN } prev = list; list = list->next; + ZEND_MM_POISON(prev, sizeof(zend_mm_huge_list)); } ZEND_MM_CHECK(0, "zend_mm_heap corrupted"); return 0; @@ -1892,10 +2148,15 @@ static size_t zend_mm_get_huge_block_size(zend_mm_heap *heap, void *ptr ZEND_FIL { zend_mm_huge_list *list = heap->huge_list; while (list != NULL) { + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); if (list->ptr == ptr) { - return list->size; + size_t size = list->size; + ZEND_MM_POISON(list, sizeof(zend_mm_huge_list)); + return size; } - list = list->next; + zend_mm_huge_list *next = list->next; + ZEND_MM_POISON(list, sizeof(zend_mm_huge_list)); + list = next; } ZEND_MM_CHECK(0, "zend_mm_heap corrupted"); return 0; @@ -1909,6 +2170,7 @@ static void zend_mm_change_huge_block_size(zend_mm_heap *heap, void *ptr, size_t { zend_mm_huge_list *list = heap->huge_list; while (list != NULL) { + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); if (list->ptr == ptr) { list->size = size; #if ZEND_DEBUG @@ -1918,9 +2180,12 @@ static void zend_mm_change_huge_block_size(zend_mm_heap *heap, void *ptr, size_t list->dbg.lineno = __zend_lineno; list->dbg.orig_lineno = __zend_orig_lineno; #endif + ZEND_MM_POISON(list, sizeof(zend_mm_huge_list)); return; } - list = list->next; + zend_mm_huge_list *next = list->next; + ZEND_MM_POISON(list, sizeof(zend_mm_huge_list)); + list = next; } } @@ -1944,7 +2209,7 @@ static void *zend_mm_alloc_huge(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_D #if ZEND_MM_LIMIT if (UNEXPECTED(new_size > heap->limit - heap->real_size)) { - if 
(zend_mm_gc(heap) && new_size <= heap->limit - heap->real_size) { + if (_zend_mm_gc(heap) && new_size <= heap->limit - heap->real_size) { /* pass */ } else if (heap->overflow == 0) { #if ZEND_DEBUG @@ -1959,7 +2224,7 @@ static void *zend_mm_alloc_huge(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_D ptr = zend_mm_chunk_alloc(heap, new_size, ZEND_MM_CHUNK_SIZE); if (UNEXPECTED(ptr == NULL)) { /* insufficient memory */ - if (zend_mm_gc(heap) && + if (_zend_mm_gc(heap) && (ptr = zend_mm_chunk_alloc(heap, new_size, ZEND_MM_CHUNK_SIZE)) != NULL) { /* pass */ } else { @@ -1999,6 +2264,7 @@ static void *zend_mm_alloc_huge(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_D static void zend_mm_free_huge(zend_mm_heap *heap, void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + size_t size; ZEND_MM_CHECK(ZEND_MM_ALIGNED_OFFSET(ptr, ZEND_MM_CHUNK_SIZE) == 0, "zend_mm_heap corrupted"); @@ -2030,6 +2296,7 @@ static void zend_mm_init_key(zend_mm_heap *heap) static zend_mm_heap *zend_mm_init(void) { zend_mm_chunk *chunk = (zend_mm_chunk*)zend_mm_chunk_alloc_int(ZEND_MM_CHUNK_SIZE, ZEND_MM_CHUNK_SIZE); + zend_mm_heap *heap; if (UNEXPECTED(chunk == NULL)) { @@ -2038,6 +2305,7 @@ static zend_mm_heap *zend_mm_init(void) #endif return NULL; } + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); heap = &chunk->heap_slot; chunk->heap = heap; chunk->next = chunk; @@ -2076,10 +2344,11 @@ static zend_mm_heap *zend_mm_init(void) #endif heap->huge_list = NULL; heap->pid = getpid(); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return heap; } -ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) +static size_t _zend_mm_gc(zend_mm_heap *heap) { zend_mm_free_slot *p, *q; zend_mm_chunk *chunk; @@ -2094,7 +2363,9 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) if (heap->use_custom_heap) { size_t (*gc)(void) = heap->custom_heap._gc; if (gc) { - return gc(); + size_t ret = gc(); + ZEND_MM_UNPOISON_HEAP(heap); + return ret; } return 0; } @@ -2105,6 +2376,8 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) p = 
heap->free_slot[i]; while (p != NULL) { chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(p, ZEND_MM_CHUNK_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); + ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted"); page_offset = ZEND_MM_ALIGNED_OFFSET(p, ZEND_MM_CHUNK_SIZE); ZEND_ASSERT(page_offset != 0); @@ -2123,6 +2396,8 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) has_free_pages = true; } chunk->map[page_num] = ZEND_MM_SRUN_EX(i, free_counter); + + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); p = zend_mm_get_next_free_slot(heap, i, p); } @@ -2134,6 +2409,8 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) p = q->next_free_slot; while (p != NULL) { chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(p, ZEND_MM_CHUNK_SIZE); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); + ZEND_MM_CHECK(chunk->heap == heap, "zend_mm_heap corrupted"); page_offset = ZEND_MM_ALIGNED_OFFSET(p, ZEND_MM_CHUNK_SIZE); ZEND_ASSERT(page_offset != 0); @@ -2146,6 +2423,8 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) ZEND_ASSERT(info & ZEND_MM_IS_SRUN); ZEND_ASSERT(!(info & ZEND_MM_IS_LRUN)); } + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + ZEND_ASSERT(ZEND_MM_SRUN_BIN_NUM(info) == i); if (ZEND_MM_SRUN_FREE_COUNTER(info) == bin_elements[i]) { /* remove from cache */ @@ -2167,6 +2446,7 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) } chunk = heap->main_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); do { i = ZEND_MM_FIRST_PAGE; while (i < chunk->free_tail) { @@ -2179,6 +2459,7 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) if (ZEND_MM_SRUN_FREE_COUNTER(info) == bin_elements[bin_num]) { /* all elements are free */ zend_mm_free_pages_ex(heap, chunk, i, pages_count, 0); + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); collected += pages_count; } else { /* reset counter */ @@ -2197,14 +2478,27 @@ ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) zend_mm_delete_chunk(heap, chunk); chunk = next_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); } else { - chunk = chunk->next; + zend_mm_chunk *next_chunk = chunk->next; + 
ZEND_MM_POISON_CHUNK_HDR(chunk, heap); + chunk = next_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); } } while (chunk != heap->main_chunk); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return collected * ZEND_MM_PAGE_SIZE; } +ZEND_API size_t zend_mm_gc(zend_mm_heap *heap) +{ + ZEND_MM_UNPOISON_HEAP(heap); + size_t ret = _zend_mm_gc(heap); + ZEND_MM_POISON_HEAP(heap); + return ret; +} + #if ZEND_DEBUG /******************/ /* Leak detection */ @@ -2218,6 +2512,7 @@ static zend_long zend_mm_find_leaks_small(zend_mm_chunk *p, uint32_t i, uint32_t zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)p + ZEND_MM_PAGE_SIZE * i + bin_data_size[bin_num] * (j + 1) - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); while (j < bin_elements[bin_num]) { + ZEND_MM_UNPOISON_DEBUGINFO(dbg); if (dbg->size != 0) { if (dbg->filename == leak->filename && dbg->lineno == leak->lineno) { count++; @@ -2229,8 +2524,10 @@ static zend_long zend_mm_find_leaks_small(zend_mm_chunk *p, uint32_t i, uint32_t } } j++; + ZEND_MM_POISON_DEBUGINFO(dbg); dbg = (zend_mm_debug_info*)((char*)dbg + bin_data_size[bin_num]); } + if (empty) { zend_mm_bitset_reset_range(p->free_map, i, bin_pages[bin_num]); } @@ -2251,10 +2548,13 @@ static zend_long zend_mm_find_leaks(zend_mm_heap *heap, zend_mm_chunk *p, uint32 } else /* if (p->map[i] & ZEND_MM_IS_LRUN) */ { int pages_count = ZEND_MM_LRUN_PAGES(p->map[i]); zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)p + ZEND_MM_PAGE_SIZE * (i + pages_count) - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); - + + ZEND_MM_UNPOISON_DEBUGINFO(dbg); if (dbg->filename == leak->filename && dbg->lineno == leak->lineno) { count++; } + ZEND_MM_POISON_DEBUGINFO(dbg); + zend_mm_bitset_reset_range(p->free_map, i, pages_count); i += pages_count; } @@ -2262,9 +2562,14 @@ static zend_long zend_mm_find_leaks(zend_mm_heap *heap, zend_mm_chunk *p, uint32 i++; } } - p = p->next; + zend_mm_chunk *next = p->next; + ZEND_MM_POISON_CHUNK_HDR(p, heap); + p = next; + 
ZEND_MM_UNPOISON_CHUNK_HDR(p); + i = ZEND_MM_FIRST_PAGE; } while (p != heap->main_chunk); + ZEND_MM_POISON_CHUNK_HDR(p, heap); return count; } @@ -2302,6 +2607,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) list = heap->huge_list; while (list) { zend_mm_huge_list *q = list; + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); leak.addr = list->ptr; leak.size = list->dbg.size; @@ -2325,6 +2631,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) /* for each chunk */ p = heap->main_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(p); do { i = ZEND_MM_FIRST_PAGE; while (i < p->free_tail) { @@ -2335,6 +2642,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) j = 0; while (j < bin_elements[bin_num]) { + ZEND_MM_UNPOISON_DEBUGINFO(dbg); if (dbg->size != 0) { leak.addr = (zend_mm_debug_info*)((char*)p + ZEND_MM_PAGE_SIZE * i + bin_data_size[bin_num] * j); leak.size = dbg->size; @@ -2350,13 +2658,18 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) dbg->filename = NULL; dbg->lineno = 0; - repeated = zend_mm_find_leaks_small(p, i, j + 1, &leak) + - zend_mm_find_leaks(heap, p, i + bin_pages[bin_num], &leak); + repeated = zend_mm_find_leaks_small(p, i, j + 1, &leak); + ZEND_MM_UNPOISON_CHUNK_HDR(p); + + repeated += zend_mm_find_leaks(heap, p, i + bin_pages[bin_num], &leak); + ZEND_MM_UNPOISON_CHUNK_HDR(p); + total += 1 + repeated; if (repeated) { zend_message_dispatcher(ZMSG_MEMORY_LEAK_REPEATED, (void *)(uintptr_t)repeated); } } + ZEND_MM_POISON_DEBUGINFO(dbg); dbg = (zend_mm_debug_info*)((char*)dbg + bin_data_size[bin_num]); j++; } @@ -2364,6 +2677,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) } else /* if (p->map[i] & ZEND_MM_IS_LRUN) */ { int pages_count = ZEND_MM_LRUN_PAGES(p->map[i]); zend_mm_debug_info *dbg = (zend_mm_debug_info*)((char*)p + ZEND_MM_PAGE_SIZE * (i + pages_count) - ZEND_MM_ALIGNED_SIZE(sizeof(zend_mm_debug_info))); + ZEND_MM_UNPOISON_DEBUGINFO(dbg); leak.addr = (void*)((char*)p + ZEND_MM_PAGE_SIZE * i); leak.size = dbg->size; @@ 
-2371,6 +2685,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) leak.orig_filename = dbg->orig_filename; leak.lineno = dbg->lineno; leak.orig_lineno = dbg->orig_lineno; + ZEND_MM_POISON_DEBUGINFO(dbg); zend_message_dispatcher(ZMSG_LOG_SCRIPT_NAME, NULL); zend_message_dispatcher(ZMSG_MEMORY_LEAK_DETECTED, &leak); @@ -2378,6 +2693,7 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) zend_mm_bitset_reset_range(p->free_map, i, pages_count); repeated = zend_mm_find_leaks(heap, p, i + pages_count, &leak); + ZEND_MM_UNPOISON_CHUNK_HDR(p); total += 1 + repeated; if (repeated) { zend_message_dispatcher(ZMSG_MEMORY_LEAK_REPEATED, (void *)(uintptr_t)repeated); @@ -2388,7 +2704,10 @@ static void zend_mm_check_leaks(zend_mm_heap *heap) i++; } } - p = p->next; + zend_mm_chunk *next = p->next; + ZEND_MM_POISON_CHUNK_HDR(p, heap); + p = next; + ZEND_MM_UNPOISON_CHUNK_HDR(p); } while (p != heap->main_chunk); if (total) { zend_message_dispatcher(ZMSG_MEMORY_LEAKS_GRAND_TOTAL, &total); @@ -2416,6 +2735,7 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) { zend_mm_chunk *p; zend_mm_huge_list *list; + ZEND_MM_UNPOISON_HEAP(heap); #if ZEND_MM_CUSTOM if (heap->use_custom_heap) { @@ -2444,6 +2764,7 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) if (shutdown) { shutdown(full, silent); } + ZEND_MM_POISON_HEAP(heap); return; } @@ -2463,25 +2784,33 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) heap->huge_list = NULL; while (list) { zend_mm_huge_list *q = list; + ZEND_MM_UNPOISON(list, sizeof(zend_mm_huge_list)); list = list->next; zend_mm_chunk_free(heap, q->ptr, q->size); } /* move all chunks except of the first one into the cache */ + ZEND_MM_UNPOISON_CHUNK_HDR(heap->main_chunk); p = heap->main_chunk->next; while (p != heap->main_chunk) { + ZEND_MM_UNPOISON_CHUNK_HDR(p); zend_mm_chunk *q = p->next; p->next = heap->cached_chunks; heap->cached_chunks = p; + ZEND_MM_POISON_CHUNK(p, heap); p = 
q; + ZEND_MM_UNPOISON_CHUNK_HDR(p); heap->chunks_count--; heap->cached_chunks_count++; } + ZEND_MM_POISON_CHUNK_HDR(heap->main_chunk, heap); + if (full) { /* free all cached chunks */ while (heap->cached_chunks) { p = heap->cached_chunks; + ZEND_MM_UNPOISON_CHUNK_HDR(p); heap->cached_chunks = p->next; zend_mm_chunk_free(heap, p, ZEND_MM_CHUNK_SIZE); } @@ -2493,6 +2822,7 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) while ((double)heap->cached_chunks_count + 0.9 > heap->avg_chunks_count && heap->cached_chunks) { p = heap->cached_chunks; + ZEND_MM_UNPOISON_CHUNK_HDR(p); heap->cached_chunks = p->next; zend_mm_chunk_free(heap, p, ZEND_MM_CHUNK_SIZE); heap->cached_chunks_count--; @@ -2500,14 +2830,17 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) /* clear cached chunks */ p = heap->cached_chunks; while (p != NULL) { + ZEND_MM_UNPOISON_CHUNK_HDR(p); zend_mm_chunk *q = p->next; memset(p, 0, sizeof(zend_mm_chunk)); p->next = q; + ZEND_MM_POISON_CHUNK_HDR(p, heap); p = q; } /* reinitialize the first chunk and heap */ p = heap->main_chunk; + ZEND_MM_UNPOISON_CHUNK_HDR(p); p->heap = &p->heap_slot; p->next = p; p->prev = p; @@ -2541,7 +2874,9 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) } else { zend_mm_refresh_key(heap); } + ZEND_MM_POISON_CHUNK_HDR(p, heap); } + ZEND_MM_POISON_HEAP(heap); } /**************/ @@ -2550,12 +2885,20 @@ ZEND_API void zend_mm_shutdown(zend_mm_heap *heap, bool full, bool silent) ZEND_API void* ZEND_FASTCALL _zend_mm_alloc(zend_mm_heap *heap, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { - return zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON_HEAP(heap); + void *ptr = zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + if (EXPECTED(ptr)) { + ZEND_MM_UNPOISON(ptr, size); + } + ZEND_MM_POISON_HEAP(heap); + return ptr; } ZEND_API void ZEND_FASTCALL 
_zend_mm_free(zend_mm_heap *heap, void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(heap); zend_mm_free_heap(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(heap); } void* ZEND_FASTCALL _zend_mm_realloc(zend_mm_heap *heap, void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) @@ -2570,20 +2913,27 @@ void* ZEND_FASTCALL _zend_mm_realloc2(zend_mm_heap *heap, void *ptr, size_t size ZEND_API size_t ZEND_FASTCALL _zend_mm_block_size(zend_mm_heap *heap, void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(heap); #if ZEND_MM_CUSTOM if (UNEXPECTED(heap->use_custom_heap)) { if (heap->custom_heap._malloc == tracked_malloc) { zend_ulong h = ((uintptr_t) ptr) >> ZEND_MM_ALIGNMENT_LOG2; zval *size_zv = zend_hash_index_find(heap->tracked_allocs, h); if (size_zv) { + ZEND_MM_POISON_HEAP(heap); return Z_LVAL_P(size_zv); } } else if (heap->custom_heap._malloc != poison_malloc) { + ZEND_MM_POISON_HEAP(heap); return 0; } + ZEND_MM_POISON_HEAP(heap); + return 0; } #endif - return zend_mm_size(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + size_t ret = zend_mm_size(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(heap); + return ret; } /**********************/ @@ -2606,7 +2956,10 @@ static zend_alloc_globals alloc_globals; ZEND_API bool is_zend_mm(void) { #if ZEND_MM_CUSTOM - return !AG(mm_heap)->use_custom_heap; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + bool ret = !AG(mm_heap)->use_custom_heap; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; #else return 1; #endif @@ -2614,40 +2967,57 @@ ZEND_API bool is_zend_mm(void) ZEND_API bool is_zend_ptr(const void *ptr) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); #if ZEND_MM_CUSTOM if (AG(mm_heap)->use_custom_heap) { if (AG(mm_heap)->custom_heap._malloc == tracked_malloc) { zend_ulong h = ((uintptr_t) ptr) >> ZEND_MM_ALIGNMENT_LOG2; zval *size_zv = 
zend_hash_index_find(AG(mm_heap)->tracked_allocs, h); if (size_zv) { + ZEND_MM_POISON_HEAP(AG(mm_heap)); return 1; } } + ZEND_MM_POISON_HEAP(AG(mm_heap)); return 0; } #endif if (AG(mm_heap)->main_chunk) { zend_mm_chunk *chunk = AG(mm_heap)->main_chunk; + zend_mm_chunk *next; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); do { if (ptr >= (void*)chunk && ptr < (void*)((char*)chunk + ZEND_MM_CHUNK_SIZE)) { + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); + ZEND_MM_POISON_HEAP(AG(mm_heap)); return 1; } - chunk = chunk->next; + next = chunk->next; + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); + chunk = next; + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); } while (chunk != AG(mm_heap)->main_chunk); + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); } zend_mm_huge_list *block = AG(mm_heap)->huge_list; while (block) { + ZEND_MM_UNPOISON(block, sizeof(zend_mm_huge_list)); if (ptr >= block->ptr && ptr < (void*)((char*)block->ptr + block->size)) { + ZEND_MM_POISON(block, sizeof(zend_mm_huge_list)); + ZEND_MM_POISON_HEAP(AG(mm_heap)); return 1; } - block = block->next; + zend_mm_huge_list *next = block->next; + ZEND_MM_POISON(block, sizeof(zend_mm_huge_list)); + block = next; } + ZEND_MM_POISON_HEAP(AG(mm_heap)); return 0; } @@ -2657,12 +3027,15 @@ ZEND_API bool is_zend_ptr(const void *ptr) #if ZEND_MM_CUSTOM # define ZEND_MM_CUSTOM_ALLOCATOR(size) do { \ if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { \ - return AG(mm_heap)->custom_heap._malloc(size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + void *ret = AG(mm_heap)->custom_heap._malloc(size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ + return ret; \ } \ } while (0) # define ZEND_MM_CUSTOM_DEALLOCATOR(ptr) do { \ if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { \ AG(mm_heap)->custom_heap._free(ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ return; \ } \ } while (0) @@ -2673,57 +3046,81 @@ ZEND_API bool is_zend_ptr(const void *ptr) # 
define _ZEND_BIN_ALLOCATOR(_num, _size, _elements, _pages, _min_size, y) \ ZEND_API void* ZEND_FASTCALL _emalloc_ ## _size(void) { \ + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); \ ZEND_MM_CUSTOM_ALLOCATOR(_size); \ if (_size < _min_size) { \ - return _emalloc_ ## _min_size(); \ + void *ret = _emalloc_ ## _min_size(); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ + return ret; \ } \ - return zend_mm_alloc_small(AG(mm_heap), _num ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + void *ptr = zend_mm_alloc_small(AG(mm_heap), _num ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ + ZEND_MM_UNPOISON(ptr, _size); \ + return ptr; \ } ZEND_MM_BINS_INFO(_ZEND_BIN_ALLOCATOR, ZEND_MM_MIN_USEABLE_BIN_SIZE, y) ZEND_API void* ZEND_FASTCALL _emalloc_large(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); ZEND_MM_CUSTOM_ALLOCATOR(size); - return zend_mm_alloc_large_ex(AG(mm_heap), size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + void *ptr = zend_mm_alloc_large_ex(AG(mm_heap), size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + ZEND_MM_UNPOISON(ptr, size); + return ptr; } ZEND_API void* ZEND_FASTCALL _emalloc_huge(size_t size) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); ZEND_MM_CUSTOM_ALLOCATOR(size); - return zend_mm_alloc_huge(AG(mm_heap), size); + void *ptr = zend_mm_alloc_huge(AG(mm_heap), size); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + ZEND_MM_UNPOISON(ptr, size); + return ptr; } #if ZEND_DEBUG # define _ZEND_BIN_FREE(_num, _size, _elements, _pages, _min_size, y) \ ZEND_API void ZEND_FASTCALL _efree_ ## _size(void *ptr) { \ + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); \ ZEND_MM_CUSTOM_DEALLOCATOR(ptr); \ if (_size < _min_size) { \ _efree_ ## _min_size(ptr); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ return; \ } \ { \ size_t page_offset = ZEND_MM_ALIGNED_OFFSET(ptr, ZEND_MM_CHUNK_SIZE); \ zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, 
ZEND_MM_CHUNK_SIZE); \ int page_num = page_offset / ZEND_MM_PAGE_SIZE; \ + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); \ ZEND_MM_CHECK(chunk->heap == AG(mm_heap), "zend_mm_heap corrupted"); \ ZEND_ASSERT(chunk->map[page_num] & ZEND_MM_IS_SRUN); \ ZEND_ASSERT(ZEND_MM_SRUN_BIN_NUM(chunk->map[page_num]) == _num); \ + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); \ zend_mm_free_small(AG(mm_heap), ptr, _num); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ } \ } #else # define _ZEND_BIN_FREE(_num, _size, _elements, _pages, _min_size, y) \ ZEND_API void ZEND_FASTCALL _efree_ ## _size(void *ptr) { \ + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); \ ZEND_MM_CUSTOM_DEALLOCATOR(ptr); \ if (_size < _min_size) { \ _efree_ ## _min_size(ptr); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ return; \ } \ { \ zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); \ + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); \ ZEND_MM_CHECK(chunk->heap == AG(mm_heap), "zend_mm_heap corrupted"); \ + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); \ zend_mm_free_small(AG(mm_heap), ptr, _num); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); \ } \ } #endif @@ -2732,72 +3129,103 @@ ZEND_MM_BINS_INFO(_ZEND_BIN_FREE, ZEND_MM_MIN_USEABLE_BIN_SIZE, y) ZEND_API void ZEND_FASTCALL _efree_large(void *ptr, size_t size) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + ZEND_MM_CUSTOM_DEALLOCATOR(ptr); { size_t page_offset = ZEND_MM_ALIGNED_OFFSET(ptr, ZEND_MM_CHUNK_SIZE); zend_mm_chunk *chunk = (zend_mm_chunk*)ZEND_MM_ALIGNED_BASE(ptr, ZEND_MM_CHUNK_SIZE); int page_num = page_offset / ZEND_MM_PAGE_SIZE; uint32_t pages_count = ZEND_MM_ALIGNED_SIZE_EX(size, ZEND_MM_PAGE_SIZE) / ZEND_MM_PAGE_SIZE; - + + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); ZEND_MM_CHECK(chunk->heap == AG(mm_heap) && ZEND_MM_ALIGNED_OFFSET(page_offset, ZEND_MM_PAGE_SIZE) == 0, "zend_mm_heap corrupted"); ZEND_ASSERT(chunk->map[page_num] & ZEND_MM_IS_LRUN); ZEND_ASSERT(ZEND_MM_LRUN_PAGES(chunk->map[page_num]) == pages_count); + ZEND_MM_POISON_CHUNK_HDR(chunk, AG(mm_heap)); + 
zend_mm_free_large(AG(mm_heap), chunk, page_num, pages_count); + ZEND_MM_POISON_HEAP(AG(mm_heap)); } } ZEND_API void ZEND_FASTCALL _efree_huge(void *ptr, size_t size) { - + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); ZEND_MM_CUSTOM_DEALLOCATOR(ptr); zend_mm_free_huge(AG(mm_heap), ptr); + ZEND_MM_POISON_HEAP(AG(mm_heap)); } #endif ZEND_API void* ZEND_FASTCALL _emalloc(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { #if ZEND_MM_CUSTOM + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { - return AG(mm_heap)->custom_heap._malloc(size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + void *ptr = AG(mm_heap)->custom_heap._malloc(size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); \ + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } #endif - return zend_mm_alloc_heap(AG(mm_heap), size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + void *ptr = zend_mm_alloc_heap(AG(mm_heap), size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + if (EXPECTED(ptr)) { + ZEND_MM_UNPOISON(ptr, size); + } + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } ZEND_API void ZEND_FASTCALL _efree(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); #if ZEND_MM_CUSTOM if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { AG(mm_heap)->custom_heap._free(ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); return; } #endif zend_mm_free_heap(AG(mm_heap), ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); } ZEND_API void* ZEND_FASTCALL _erealloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { #if ZEND_MM_CUSTOM + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { - return AG(mm_heap)->custom_heap._realloc(ptr, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ptr = AG(mm_heap)->custom_heap._realloc(ptr, size ZEND_FILE_LINE_RELAY_CC 
ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } #endif - return zend_mm_realloc_heap(AG(mm_heap), ptr, size, 0, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ptr = zend_mm_realloc_heap(AG(mm_heap), ptr, size, 0, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } ZEND_API void* ZEND_FASTCALL _erealloc2(void *ptr, size_t size, size_t copy_size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { #if ZEND_MM_CUSTOM + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); if (UNEXPECTED(AG(mm_heap)->use_custom_heap)) { - return AG(mm_heap)->custom_heap._realloc(ptr, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ptr = AG(mm_heap)->custom_heap._realloc(ptr, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } #endif - return zend_mm_realloc_heap(AG(mm_heap), ptr, size, 1, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ptr = zend_mm_realloc_heap(AG(mm_heap), ptr, size, 1, copy_size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ptr; } ZEND_API size_t ZEND_FASTCALL _zend_mem_block_size(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { - return _zend_mm_block_size(AG(mm_heap), ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + size_t ret = _zend_mm_block_size(AG(mm_heap), ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; } ZEND_API void* ZEND_FASTCALL _safe_emalloc(size_t nmemb, size_t size, size_t offset ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) @@ -2880,6 +3308,7 @@ ZEND_API char* ZEND_FASTCALL zend_strndup(const char *s, size_t length) ZEND_API zend_result zend_set_memory_limit(size_t memory_limit) { #if ZEND_MM_LIMIT + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); zend_mm_heap *heap = AG(mm_heap); if (UNEXPECTED(memory_limit < 
heap->real_size)) { @@ -2887,16 +3316,20 @@ ZEND_API zend_result zend_set_memory_limit(size_t memory_limit) /* free some cached chunks to fit into new memory limit */ do { zend_mm_chunk *p = heap->cached_chunks; + ZEND_MM_UNPOISON_CHUNK_HDR(p); heap->cached_chunks = p->next; zend_mm_chunk_free(heap, p, ZEND_MM_CHUNK_SIZE); heap->cached_chunks_count--; heap->real_size -= ZEND_MM_CHUNK_SIZE; } while (memory_limit < heap->real_size); + ZEND_MM_POISON_HEAP(AG(mm_heap)); return SUCCESS; } + ZEND_MM_POISON_HEAP(AG(mm_heap)); return FAILURE; } AG(mm_heap)->limit = memory_limit; + ZEND_MM_POISON_HEAP(AG(mm_heap)); #endif return SUCCESS; } @@ -2904,7 +3337,10 @@ ZEND_API zend_result zend_set_memory_limit(size_t memory_limit) ZEND_API bool zend_alloc_in_memory_limit_error_reporting(void) { #if ZEND_MM_LIMIT - return AG(mm_heap)->overflow; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + bool ret = AG(mm_heap)->overflow; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; #else return false; #endif @@ -2913,11 +3349,17 @@ ZEND_API bool zend_alloc_in_memory_limit_error_reporting(void) ZEND_API size_t zend_memory_usage(bool real_usage) { #if ZEND_MM_STAT + size_t ret; if (real_usage) { - return AG(mm_heap)->real_size; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + ret = AG(mm_heap)->real_size; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; } else { - size_t usage = AG(mm_heap)->size; - return usage; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + ret = AG(mm_heap)->size; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; } #endif return 0; @@ -2926,10 +3368,17 @@ ZEND_API size_t zend_memory_usage(bool real_usage) ZEND_API size_t zend_memory_peak_usage(bool real_usage) { #if ZEND_MM_STAT + size_t ret; if (real_usage) { - return AG(mm_heap)->real_peak; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + ret = AG(mm_heap)->real_peak; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; } else { - return AG(mm_heap)->peak; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + ret = AG(mm_heap)->peak; + 
ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; } #endif return 0; @@ -2938,8 +3387,10 @@ ZEND_API size_t zend_memory_peak_usage(bool real_usage) ZEND_API void zend_memory_reset_peak_usage(void) { #if ZEND_MM_STAT + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); AG(mm_heap)->real_peak = AG(mm_heap)->real_size; AG(mm_heap)->peak = AG(mm_heap)->size; + ZEND_MM_POISON_HEAP(AG(mm_heap)); #endif } @@ -2989,6 +3440,7 @@ static zend_always_inline void tracked_check_limit(zend_mm_heap *heap, size_t ad static void *tracked_malloc(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); tracked_check_limit(heap, size); void *ptr = malloc(size); @@ -3000,6 +3452,7 @@ static void *tracked_malloc(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC #if ZEND_MM_STAT heap->size += size; #endif + ZEND_MM_POISON_HEAP(heap); return ptr; } @@ -3009,16 +3462,19 @@ static void tracked_free(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { } zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); zval *size_zv = tracked_get_size_zv(heap, ptr); #if ZEND_MM_STAT heap->size -= Z_LVAL_P(size_zv); #endif zend_hash_del_bucket(heap->tracked_allocs, (Bucket *) size_zv); + ZEND_MM_POISON_HEAP(heap); free(ptr); } static void *tracked_realloc(void *ptr, size_t new_size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); zval *old_size_zv = NULL; size_t old_size = 0; if (ptr) { @@ -3040,6 +3496,7 @@ static void *tracked_realloc(void *ptr, size_t new_size ZEND_FILE_LINE_DC ZEND_F #if ZEND_MM_STAT heap->size += new_size - old_size; #endif + ZEND_MM_POISON_HEAP(heap); return ptr; } @@ -3055,28 +3512,34 @@ static void tracked_free_all(zend_mm_heap *heap) { static void* poison_malloc(size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); if (SIZE_MAX - heap->debug.padding * 2 < size) { zend_mm_panic("Integer overflow 
in memory allocation"); } - size += heap->debug.padding * 2; + size_t sizePlusPadding = size + heap->debug.padding * 2; - void *ptr = zend_mm_alloc_heap(heap, size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + void *ptr = zend_mm_alloc_heap(heap, sizePlusPadding ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); if (EXPECTED(ptr)) { if (heap->debug.poison_alloc) { - memset(ptr, heap->debug.poison_alloc_value, size); + ZEND_MM_UNPOISON(ptr, sizePlusPadding); + memset(ptr, heap->debug.poison_alloc_value, sizePlusPadding); + ZEND_MM_POISON(ptr, sizePlusPadding); } ptr = (char*)ptr + heap->debug.padding; + ZEND_MM_UNPOISON(ptr, size); } + ZEND_MM_POISON_HEAP(heap); return ptr; } static void poison_free(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); if (EXPECTED(ptr)) { /* zend_mm_shutdown() will try to free the heap when custom handlers @@ -3090,11 +3553,14 @@ static void poison_free(void *ptr ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) size_t size = zend_mm_size(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); if (heap->debug.poison_free) { + ZEND_MM_UNPOISON(ptr, size); memset(ptr, heap->debug.poison_free_value, size); + ZEND_MM_POISON(ptr, size); } } zend_mm_free_heap(heap, ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_POISON_HEAP(heap); } static void* poison_realloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC) @@ -3103,6 +3569,7 @@ static void* poison_realloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_L void *new = poison_malloc(size ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); + ZEND_MM_UNPOISON_HEAP(heap); if (ptr) { /* Determine the size of the old allocation from the unpadded pointer. 
*/ size_t oldsize = zend_mm_size(heap, (char*)ptr - heap->debug.padding ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); @@ -3114,9 +3581,11 @@ static void* poison_realloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_L oldsize -= sizeof(zend_mm_debug_info); #endif + ZEND_MM_UNPOISON(ptr, MIN(oldsize, size)); memcpy(new, ptr, MIN(oldsize, size)); poison_free(ptr ZEND_FILE_LINE_RELAY_CC ZEND_FILE_LINE_ORIG_RELAY_CC); } + ZEND_MM_POISON_HEAP(heap); return new; } @@ -3124,6 +3593,7 @@ static void* poison_realloc(void *ptr, size_t size ZEND_FILE_LINE_DC ZEND_FILE_L static size_t poison_gc(void) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC); void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC); @@ -3131,19 +3601,21 @@ static size_t poison_gc(void) size_t (*_gc)(void); void (*_shutdown)(bool, bool); - zend_mm_get_custom_handlers_ex(heap, &_malloc, &_free, &_realloc, &_gc, &_shutdown); - zend_mm_set_custom_handlers_ex(heap, NULL, NULL, NULL, NULL, NULL); + _zend_mm_get_custom_handlers_ex(heap, &_malloc, &_free, &_realloc, &_gc, &_shutdown); + _zend_mm_set_custom_handlers_ex(heap, NULL, NULL, NULL, NULL, NULL); - size_t collected = zend_mm_gc(heap); - - zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); + size_t collected = _zend_mm_gc(heap); + _zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); + + ZEND_MM_POISON_HEAP(heap); return collected; } static void poison_shutdown(bool full, bool silent) { zend_mm_heap *heap = AG(mm_heap); + ZEND_MM_UNPOISON_HEAP(heap); void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC); void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC); @@ -3151,18 +3623,20 @@ static void poison_shutdown(bool full, bool silent) size_t (*_gc)(void); void (*_shutdown)(bool, bool); - zend_mm_get_custom_handlers_ex(heap, &_malloc, &_free, &_realloc, &_gc, &_shutdown); - 
zend_mm_set_custom_handlers_ex(heap, NULL, NULL, NULL, NULL, NULL); + _zend_mm_get_custom_handlers_ex(heap, &_malloc, &_free, &_realloc, &_gc, &_shutdown); + _zend_mm_set_custom_handlers_ex(heap, NULL, NULL, NULL, NULL, NULL); if (heap->debug.check_freelists_on_shutdown) { zend_mm_check_freelists(heap); } zend_mm_shutdown(heap, full, silent); + ZEND_MM_UNPOISON_HEAP(heap); if (!full) { - zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); + _zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); } + ZEND_MM_POISON_HEAP(heap); } static void poison_enable(zend_mm_heap *heap, char *parameters) @@ -3242,7 +3716,7 @@ static void poison_enable(zend_mm_heap *heap, char *parameters) tmp++; } - zend_mm_set_custom_handlers_ex(heap, poison_malloc, poison_free, + _zend_mm_set_custom_handlers_ex(heap, poison_malloc, poison_free, poison_realloc, poison_gc, poison_shutdown); } #endif @@ -3333,22 +3807,15 @@ ZEND_API zend_mm_heap *zend_mm_get_heap(void) ZEND_API bool zend_mm_is_custom_heap(zend_mm_heap *new_heap) { #if ZEND_MM_CUSTOM - return AG(mm_heap)->use_custom_heap; + ZEND_MM_UNPOISON_HEAP(AG(mm_heap)); + bool ret = AG(mm_heap)->use_custom_heap; + ZEND_MM_POISON_HEAP(AG(mm_heap)); + return ret; #else return 0; #endif } -ZEND_API void zend_mm_set_custom_handlers(zend_mm_heap *heap, - void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), - void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), - void* (*_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)) -{ -#if ZEND_MM_CUSTOM - zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, NULL, NULL); -#endif -} - ZEND_API void zend_mm_set_custom_handlers_ex(zend_mm_heap *heap, void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), @@ -3356,29 +3823,20 @@ ZEND_API void zend_mm_set_custom_handlers_ex(zend_mm_heap *heap, size_t (*_gc)(void), void (*_shutdown)(bool, 
bool)) { -#if ZEND_MM_CUSTOM - zend_mm_heap *_heap = (zend_mm_heap*)heap; - - if (!_malloc && !_free && !_realloc) { - _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_NONE; - } else { - _heap->use_custom_heap = ZEND_MM_CUSTOM_HEAP_STD; - _heap->custom_heap._malloc = _malloc; - _heap->custom_heap._free = _free; - _heap->custom_heap._realloc = _realloc; - _heap->custom_heap._gc = _gc; - _heap->custom_heap._shutdown = _shutdown; - } -#endif + ZEND_MM_UNPOISON_HEAP(heap); + _zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); + ZEND_MM_POISON_HEAP(heap); } -ZEND_API void zend_mm_get_custom_handlers(zend_mm_heap *heap, - void* (**_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), - void (**_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), - void* (**_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)) +ZEND_API void zend_mm_set_custom_handlers(zend_mm_heap *heap, + void* (*_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void (*_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void* (*_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)) { #if ZEND_MM_CUSTOM - zend_mm_get_custom_handlers_ex(heap, _malloc, _free, _realloc, NULL, NULL); + ZEND_MM_UNPOISON_HEAP(heap); + _zend_mm_set_custom_handlers_ex(heap, _malloc, _free, _realloc, NULL, NULL); + ZEND_MM_POISON_HEAP(heap); #endif } @@ -3389,43 +3847,30 @@ ZEND_API void zend_mm_get_custom_handlers_ex(zend_mm_heap *heap, size_t (**_gc)(void), void (**_shutdown)(bool, bool)) { -#if ZEND_MM_CUSTOM - zend_mm_heap *_heap = (zend_mm_heap*)heap; + ZEND_MM_UNPOISON_HEAP(heap); + _zend_mm_get_custom_handlers_ex(heap, _malloc, _free, _realloc, _gc, _shutdown); + ZEND_MM_POISON_HEAP(heap); +} - if (heap->use_custom_heap) { - *_malloc = _heap->custom_heap._malloc; - *_free = _heap->custom_heap._free; - *_realloc = _heap->custom_heap._realloc; - if (_gc != NULL) { - *_gc = _heap->custom_heap._gc; - } - if (_shutdown != NULL) { - *_shutdown =
_heap->custom_heap._shutdown; - } - } else { - *_malloc = NULL; - *_free = NULL; - *_realloc = NULL; - if (_gc != NULL) { - *_gc = NULL; - } - if (_shutdown != NULL) { - *_shutdown = NULL; - } - } -#else - *_malloc = NULL; - *_free = NULL; - *_realloc = NULL; - *_gc = NULL; - *_shutdown = NULL; +ZEND_API void zend_mm_get_custom_handlers(zend_mm_heap *heap, + void* (**_malloc)(size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void (**_free)(void* ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC), + void* (**_realloc)(void*, size_t ZEND_FILE_LINE_DC ZEND_FILE_LINE_ORIG_DC)) +{ +#if ZEND_MM_CUSTOM + ZEND_MM_UNPOISON_HEAP(heap); + _zend_mm_get_custom_handlers_ex(heap, _malloc, _free, _realloc, NULL, NULL); + ZEND_MM_POISON_HEAP(heap); #endif } ZEND_API zend_mm_storage *zend_mm_get_storage(zend_mm_heap *heap) { #if ZEND_MM_STORAGE - return heap->storage; + ZEND_MM_UNPOISON_HEAP(heap); + zend_mm_storage *ret = heap->storage; + ZEND_MM_POISON_HEAP(heap); + return ret; #else return NULL; #endif @@ -3452,6 +3897,7 @@ ZEND_API zend_mm_heap *zend_mm_startup_ex(const zend_mm_handlers *handlers, void #endif return NULL; } + ZEND_MM_UNPOISON_CHUNK_HDR(chunk); heap = &chunk->heap_slot; chunk->heap = heap; chunk->next = chunk; @@ -3503,6 +3949,7 @@ ZEND_API zend_mm_heap *zend_mm_startup_ex(const zend_mm_handlers *handlers, void } heap->storage = storage; heap->pid = getpid(); + ZEND_MM_POISON_CHUNK_HDR(chunk, heap); return heap; #else return NULL; diff --git a/sapi/fpm/tests/gh9754-daemonized-stderr-close.phpt b/sapi/fpm/tests/gh9754-daemonized-stderr-close.phpt index 37061ba967eae..4f3f41fde8240 100644 --- a/sapi/fpm/tests/gh9754-daemonized-stderr-close.phpt +++ b/sapi/fpm/tests/gh9754-daemonized-stderr-close.phpt @@ -10,6 +10,15 @@ FPM\Tester::skipIfRoot(); require_once "tester.inc"; +// TODO: fix leak on shutdown caused by alloc_globals_dtor not getting invoked +$opts = getenv("ASAN_OPTIONS") ?: ''; +if ($opts) { + $opts .= ",detect_leaks=0"; +} else { + $opts = "detect_leaks=0"; +} 
+putenv("ASAN_OPTIONS=$opts"); + $cfg = <<