respect large pages for arena allocation

daan 2019-10-31 10:59:40 -07:00
parent 28cb19148c
commit ed4f60fc7e


@@ -232,21 +232,23 @@ void* _mi_arena_alloc_aligned(size_t size, size_t alignment, bool* commit, bool*
   for (size_t i = 0; i < MI_MAX_ARENAS; i++) {
     mi_arena_t* arena = (mi_arena_t*)mi_atomic_read_ptr_relaxed(mi_atomic_cast(void*, &mi_arenas[i]));
     if (arena==NULL) break;
-    size_t block_index = SIZE_MAX;
-    void* p = mi_arena_alloc(arena, bcount, is_zero, &block_index);
-    if (p != NULL) {
-      mi_assert_internal(block_index != SIZE_MAX);
-      #if MI_DEBUG>=1
-      _Atomic(mi_block_info_t)* block = &arena->blocks[block_index];
-      mi_block_info_t binfo = mi_atomic_read(block);
-      mi_assert_internal(mi_block_is_in_use(binfo));
-      mi_assert_internal(mi_block_count(binfo)*MI_ARENA_BLOCK_SIZE >= size);
-      #endif
-      *memid = mi_memid_create(i, block_index);
-      *commit = true; // TODO: support commit on demand?
-      *large = arena->is_large;
-      mi_assert_internal((uintptr_t)p % alignment == 0);
-      return p;
+    if (*large || !arena->is_large) { // large OS pages allowed, or arena is not large OS pages
+      size_t block_index = SIZE_MAX;
+      void* p = mi_arena_alloc(arena, bcount, is_zero, &block_index);
+      if (p != NULL) {
+        mi_assert_internal(block_index != SIZE_MAX);
+        #if MI_DEBUG>=1
+        _Atomic(mi_block_info_t)* block = &arena->blocks[block_index];
+        mi_block_info_t binfo = mi_atomic_read(block);
+        mi_assert_internal(mi_block_is_in_use(binfo));
+        mi_assert_internal(mi_block_count(binfo)*MI_ARENA_BLOCK_SIZE >= size);
+        #endif
+        *memid = mi_memid_create(i, block_index);
+        *commit = true; // TODO: support commit on demand?
+        *large = arena->is_large;
+        mi_assert_internal((uintptr_t)p % alignment == 0);
+        return p;
+      }
     }
   }
 }
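
The added check only affects arena selection: an arena backed by large OS pages is skipped unless the caller opted into large pages via the in/out `large` flag, and on success the flag is updated to report what was actually used. Below is a minimal standalone C sketch of just that predicate; the arena_t struct and arena_is_suitable helper are illustrative stand-ins, not mimalloc's API.

#include <stdbool.h>
#include <stdio.h>

typedef struct { bool is_large; } arena_t;   // hypothetical stand-in for mi_arena_t

// Mirrors the added condition: if (*large || !arena->is_large)
static bool arena_is_suitable(const arena_t* arena, bool allow_large) {
  return allow_large || !arena->is_large;
}

int main(void) {
  arena_t regular = { false };
  arena_t huge    = { true  };
  printf("%d\n", arena_is_suitable(&regular, false)); // 1: regular arena is always eligible
  printf("%d\n", arena_is_suitable(&huge,    false)); // 0: large-page arena skipped when large pages are not allowed
  printf("%d\n", arena_is_suitable(&huge,    true));  // 1: eligible once the caller permits large pages
  return 0;
}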