diff --git a/src/alloc-aligned.c b/src/alloc-aligned.c
index 937cbdb8..6b8bc762 100644
--- a/src/alloc-aligned.c
+++ b/src/alloc-aligned.c
@@ -193,9 +193,6 @@ static void* mi_heap_malloc_zero_aligned_at(mi_heap_t* const heap, const size_t
     const bool is_aligned = (((uintptr_t)page->free + offset) & align_mask)==0;
     if mi_likely(is_aligned)
     {
-      #if MI_STAT>1
-      mi_heap_stat_increase(heap, malloc_requested, size);
-      #endif
       void* p = (zero ? _mi_page_malloc_zeroed(heap,page,padsize) : _mi_page_malloc(heap,page,padsize)); // call specific page malloc for better codegen
       mi_assert_internal(p != NULL);
       mi_assert_internal(((uintptr_t)p + offset) % alignment == 0);
diff --git a/src/alloc.c b/src/alloc.c
index 487253db..f1e4e9eb 100644
--- a/src/alloc.c
+++ b/src/alloc.c
@@ -661,9 +661,10 @@ mi_decl_restrict void* _mi_heap_malloc_guarded(mi_heap_t* heap, size_t size, boo
   if (p != NULL) {
     if (!mi_heap_is_initialized(heap)) { heap = mi_prim_get_default_heap(); }
     #if MI_STAT>1
-    mi_heap_stat_increase(heap, malloc, mi_usable_size(p));
+    mi_heap_stat_decrease(heap, malloc_requested, req_size);
+    mi_heap_stat_increase(heap, malloc_requested, size);
     #endif
-    mi_heap_stat_counter_increase(heap, guarded_alloc_count, 1);
+    mi_heap_stat_counter_increase(heap, malloc_guarded_count, 1);
   }
   #if MI_DEBUG>3
   if (p != NULL && zero) {
diff --git a/src/stats.c b/src/stats.c
index 807714b4..b1bff00d 100644
--- a/src/stats.c
+++ b/src/stats.c
@@ -645,7 +645,7 @@ char* mi_stats_get_json(size_t output_size, char* output_buf) mi_attr_noexcept {
   for (size_t i = 0; i <= MI_BIN_HUGE; i++) {
     mi_heap_buf_print_count_bin(&hbuf, "    ", &stats->page_bins[i], i, i!=MI_BIN_HUGE);
   }
-  mi_heap_buf_print(&hbuf, "  ]\n");
+  mi_heap_buf_print(&hbuf, "  ],\n");
   mi_heap_buf_print(&hbuf, "  \"chunk_bins\": [\n");
   for (size_t i = 0; i < MI_CBIN_COUNT; i++) {
     mi_heap_buf_print_count_cbin(&hbuf, "    ", &stats->chunk_bins[i], (mi_chunkbin_t)i, i!=MI_CBIN_COUNT-1);
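
Note on the src/stats.c hunk: without the added comma, the JSON produced by mi_stats_get_json is malformed whenever the "chunk_bins" array is emitted directly after "page_bins" (two adjacent members with no separator). A minimal sketch of the corrected output shape, with the per-bin entries elided, would be:

  "page_bins": [
    ...
  ],
  "chunk_bins": [
    ...
  ]

The src/alloc.c hunk similarly rebalances the MI_STAT>1 accounting so that a guarded allocation is recorded under malloc_requested at the caller's requested size rather than under malloc at the usable size, and renames the per-heap counter from guarded_alloc_count to malloc_guarded_count.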