/external/jemalloc_new/include/jemalloc/internal/ |
D | arena_inlines_b.h |
    12  arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {
    18      const extent_t *extent = iealloc(tsdn, ptr);
    20      return large_prof_tctx_get(tsdn, extent);
    24      return large_prof_tctx_get(tsdn, iealloc(tsdn, ptr));
    31  arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, UNUSED size_t usize,
    38      extent_t *extent = iealloc(tsdn, ptr);
    40      large_prof_tctx_set(tsdn, extent, tctx);
    44      large_prof_tctx_set(tsdn, iealloc(tsdn, ptr), tctx);
    50  arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, UNUSED prof_tctx_t *tctx) {
    54      extent_t *extent = iealloc(tsdn, ptr);
    [all …]
|
D | jemalloc_internal_inlines_c.h |
    27  iaalloc(tsdn_t *tsdn, const void *ptr) {
    30      return arena_aalloc(tsdn, ptr);
    34  isalloc(tsdn_t *tsdn, const void *ptr) {
    37      return arena_salloc(tsdn, ptr);
    41  iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,
    48      if (!tsdn_null(tsdn) && tsd_reentrancy_level_get(tsdn_tsd(tsdn)) == 0) {
    49          witness_assert_depth_to_rank(tsdn_witness_tsdp_get(tsdn),
    53      ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);
    55      arena_internal_add(iaalloc(tsdn, ret), isalloc(tsdn, ret));
    67  ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,
    [all …]
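
These inlines show the convention that runs through the whole listing: tsdn is a possibly-NULL handle to per-thread state, so code such as iallocztm checks tsdn_null(tsdn) before converting it with tsdn_tsd(tsdn). Below is a minimal, self-contained sketch of that nullable-handle pattern; the type and helper definitions are illustrative assumptions, not jemalloc's real ones.

    #include <stdbool.h>
    #include <stddef.h>

    /*
     * Illustrative stand-ins (not jemalloc's real definitions): a tsd_t carries
     * per-thread state, and a tsdn_t is a tsd_t handle that is allowed to be
     * NULL, e.g. during early bootstrap or on a thread with no initialized state.
     */
    typedef struct { int reentrancy_level; } tsd_t;
    typedef tsd_t tsdn_t;

    static bool tsdn_null(const tsdn_t *tsdn) { return tsdn == NULL; }
    static tsd_t *tsdn_tsd(tsdn_t *tsdn) { return tsdn; } /* caller checked non-NULL */

    /* Every dereference is guarded the same way iallocztm() guards its checks. */
    static void alloc_checks(tsdn_t *tsdn) {
        if (!tsdn_null(tsdn) && tsdn_tsd(tsdn)->reentrancy_level == 0) {
            /* Safe to consult per-thread state, run depth assertions, etc. */
        }
    }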
|
D | mutex_pool.h |
    27  mutex_pool_assert_not_held(tsdn_t *tsdn, mutex_pool_t *pool) {
    29      malloc_mutex_assert_not_owner(tsdn, &pool->mutexes[i]);
    41  mutex_pool_lock(tsdn_t *tsdn, mutex_pool_t *pool, uintptr_t key) {
    42      mutex_pool_assert_not_held(tsdn, pool);
    45      malloc_mutex_lock(tsdn, mutex);
    49  mutex_pool_unlock(tsdn_t *tsdn, mutex_pool_t *pool, uintptr_t key) {
    51      malloc_mutex_unlock(tsdn, mutex);
    53      mutex_pool_assert_not_held(tsdn, pool);
    57  mutex_pool_lock2(tsdn_t *tsdn, mutex_pool_t *pool, uintptr_t key1,
    59      mutex_pool_assert_not_held(tsdn, pool);
    [all …]
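
mutex_pool.h hashes an arbitrary uintptr_t key (in practice an extent address) onto a fixed set of mutexes, and mutex_pool_lock2 must take two keys at once without deadlocking. The pthread-based analogue below is a sketch of how such a pool is commonly built, hashing the key to a slot and, for two keys, always locking the lower-numbered slot first; the pool size, hash, and names are assumptions, not jemalloc's implementation.

    #include <pthread.h>
    #include <stdint.h>

    #define POOL_NMUTEXES 256  /* assumed pool size */

    typedef struct {
        pthread_mutex_t mutexes[POOL_NMUTEXES];
    } mutex_pool_t;

    static void mutex_pool_init(mutex_pool_t *pool) {
        for (size_t i = 0; i < POOL_NMUTEXES; i++) {
            pthread_mutex_init(&pool->mutexes[i], NULL);
        }
    }

    /* Map a key (e.g. an extent address) to a slot; a real pool would use a
     * stronger hash to spread contention. */
    static size_t mutex_pool_slot(uintptr_t key) {
        return (size_t)((key >> 4) % POOL_NMUTEXES);
    }

    static void mutex_pool_lock(mutex_pool_t *pool, uintptr_t key) {
        pthread_mutex_lock(&pool->mutexes[mutex_pool_slot(key)]);
    }

    static void mutex_pool_unlock(mutex_pool_t *pool, uintptr_t key) {
        pthread_mutex_unlock(&pool->mutexes[mutex_pool_slot(key)]);
    }

    /* Lock two keys without risking deadlock: acquire the lower-numbered slot
     * first, and take a slot only once if both keys hash to it. */
    static void mutex_pool_lock2(mutex_pool_t *pool, uintptr_t key1, uintptr_t key2) {
        size_t s1 = mutex_pool_slot(key1), s2 = mutex_pool_slot(key2);
        if (s1 == s2) {
            pthread_mutex_lock(&pool->mutexes[s1]);
        } else if (s1 < s2) {
            pthread_mutex_lock(&pool->mutexes[s1]);
            pthread_mutex_lock(&pool->mutexes[s2]);
        } else {
            pthread_mutex_lock(&pool->mutexes[s2]);
            pthread_mutex_lock(&pool->mutexes[s1]);
        }
    }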
|
D | arena_externs.h |
    19  void arena_basic_stats_merge(tsdn_t *tsdn, arena_t *arena,
    22  void arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
    26  void arena_extents_dirty_dalloc(tsdn_t *tsdn, arena_t *arena,
    31  extent_t *arena_extent_alloc_large(tsdn_t *tsdn, arena_t *arena,
    33  void arena_extent_dalloc_large_prep(tsdn_t *tsdn, arena_t *arena,
    35  void arena_extent_ralloc_large_shrink(tsdn_t *tsdn, arena_t *arena,
    37  void arena_extent_ralloc_large_expand(tsdn_t *tsdn, arena_t *arena,
    40  bool arena_dirty_decay_ms_set(tsdn_t *tsdn, arena_t *arena, ssize_t decay_ms);
    42  bool arena_muzzy_decay_ms_set(tsdn_t *tsdn, arena_t *arena, ssize_t decay_ms);
    43  void arena_decay(tsdn_t *tsdn, arena_t *arena, bool is_background_thread,
    [all …]
|
D | arena_stats.h |
    98   arena_stats_init(UNUSED tsdn_t *tsdn, arena_stats_t *arena_stats) {
    115  arena_stats_lock(tsdn_t *tsdn, arena_stats_t *arena_stats) {
    117      malloc_mutex_lock(tsdn, &arena_stats->mtx);
    122  arena_stats_unlock(tsdn_t *tsdn, arena_stats_t *arena_stats) {
    124      malloc_mutex_unlock(tsdn, &arena_stats->mtx);
    129  arena_stats_read_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,
    134      malloc_mutex_assert_owner(tsdn, &arena_stats->mtx);
    140  arena_stats_add_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,
    145      malloc_mutex_assert_owner(tsdn, &arena_stats->mtx);
    151  arena_stats_sub_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,
    [all …]
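
arena_stats.h pairs a lock/unlock API with read/add/sub helpers that only assert the caller already holds the stats mutex, so a merge can update and read several counters consistently under one critical section. A condensed sketch of that usage pattern, with field and function names invented for illustration:

    #include <pthread.h>
    #include <stdint.h>

    /* Counters live behind one mutex; the helpers assume the caller holds it,
     * mirroring the malloc_mutex_assert_owner() calls in the listing. */
    typedef struct {
        pthread_mutex_t mtx;
        uint64_t nmalloc;
        uint64_t ndalloc;
    } stats_sketch_t;

    static void stats_lock(stats_sketch_t *s)   { pthread_mutex_lock(&s->mtx); }
    static void stats_unlock(stats_sketch_t *s) { pthread_mutex_unlock(&s->mtx); }

    /* Must be called with s->mtx held. */
    static void stats_add_u64(uint64_t *p, uint64_t x)  { *p += x; }
    static uint64_t stats_read_u64(const uint64_t *p)   { return *p; }

    static uint64_t merge_example(stats_sketch_t *s) {
        stats_lock(s);
        stats_add_u64(&s->nmalloc, 1);
        uint64_t live = stats_read_u64(&s->nmalloc) - stats_read_u64(&s->ndalloc);
        stats_unlock(s);
        return live;
    }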
|
D | rtree.h |
    126  void rtree_delete(tsdn_t *tsdn, rtree_t *rtree);
    128  rtree_leaf_elm_t *rtree_leaf_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree,
    173  rtree_leaf_elm_bits_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,
    211  rtree_leaf_elm_extent_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
    214      uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
    224  rtree_leaf_elm_szind_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
    227      uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
    236  rtree_leaf_elm_slab_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
    239      uintptr_t bits = rtree_leaf_elm_bits_read(tsdn, rtree, elm, dependent);
    248  rtree_leaf_elm_extent_write(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,
    [all …]
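
All of the rtree leaf readers go through rtree_leaf_elm_bits_read and then extract an extent pointer, a size-class index (szind), and a slab flag from one machine word. The sketch below shows one way such packing can be laid out, assuming extents are at least 512-byte aligned so the low pointer bits are free; the exact field layout is an assumption for illustration, not jemalloc's actual encoding.

    #include <assert.h>
    #include <stdbool.h>
    #include <stdint.h>

    /* Illustrative layout: slab flag in bit 0, an 8-bit szind in bits 1..8,
     * and the (sufficiently aligned) extent pointer in the remaining bits. */
    #define SLAB_BIT    ((uintptr_t)1)
    #define SZIND_SHIFT 1
    #define SZIND_MASK  ((uintptr_t)0xff << SZIND_SHIFT)

    static uintptr_t leaf_bits_pack(void *extent, unsigned szind, bool slab) {
        uintptr_t p = (uintptr_t)extent;
        assert((p & 0x1ffu) == 0 && szind < 256); /* 512-byte alignment frees 9 bits */
        return p | ((uintptr_t)szind << SZIND_SHIFT) | (slab ? SLAB_BIT : 0);
    }

    static void *leaf_bits_extent(uintptr_t bits) {
        return (void *)(bits & ~(SZIND_MASK | SLAB_BIT));
    }

    static unsigned leaf_bits_szind(uintptr_t bits) {
        return (unsigned)((bits & SZIND_MASK) >> SZIND_SHIFT);
    }

    static bool leaf_bits_slab(uintptr_t bits) {
        return (bits & SLAB_BIT) != 0;
    }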
|
D | extent_externs.h |
    15  extent_t *extent_alloc(tsdn_t *tsdn, arena_t *arena);
    16  void extent_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent);
    30  bool extents_init(tsdn_t *tsdn, extents_t *extents, extent_state_t state,
    34  extent_t *extents_alloc(tsdn_t *tsdn, arena_t *arena,
    38  void extents_dalloc(tsdn_t *tsdn, arena_t *arena,
    40  extent_t *extents_evict(tsdn_t *tsdn, arena_t *arena,
    42  void extents_prefork(tsdn_t *tsdn, extents_t *extents);
    43  void extents_postfork_parent(tsdn_t *tsdn, extents_t *extents);
    44  void extents_postfork_child(tsdn_t *tsdn, extents_t *extents);
    45  extent_t *extent_alloc_wrapper(tsdn_t *tsdn, arena_t *arena,
    [all …]
|
D | mutex.h |
    131  void malloc_mutex_prefork(tsdn_t *tsdn, malloc_mutex_t *mutex);
    132  void malloc_mutex_postfork_parent(tsdn_t *tsdn, malloc_mutex_t *mutex);
    133  void malloc_mutex_postfork_child(tsdn_t *tsdn, malloc_mutex_t *mutex);
    135  void malloc_mutex_prof_data_reset(tsdn_t *tsdn, malloc_mutex_t *mutex);
    150  mutex_owner_stats_update(tsdn_t *tsdn, malloc_mutex_t *mutex) {
    154      if (data->prev_owner != tsdn) {
    155          data->prev_owner = tsdn;
    163  malloc_mutex_trylock(tsdn_t *tsdn, malloc_mutex_t *mutex) {
    164      witness_assert_not_owner(tsdn_witness_tsdp_get(tsdn), &mutex->witness);
    169      mutex_owner_stats_update(tsdn, mutex);
    [all …]
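
malloc_mutex_trylock first asserts, via the witness machinery, that the calling thread does not already own the lock, and on success calls mutex_owner_stats_update, which records a new prev_owner only when ownership actually changed hands. A small pthread analogue of that try-then-record pattern; the struct fields and the true-means-failure return convention are illustrative choices for the sketch:

    #include <pthread.h>
    #include <stdbool.h>
    #include <stdint.h>

    typedef struct {
        pthread_mutex_t lock;
        const void *prev_owner;    /* last thread seen holding the lock */
        uint64_t n_owner_switches; /* rough contention indicator */
    } prof_mutex_t;

    /* Record ownership changes; called only while the lock is held. */
    static void owner_stats_update(prof_mutex_t *m, const void *self) {
        if (m->prev_owner != self) {
            m->prev_owner = self;
            m->n_owner_switches++;
        }
    }

    /* Returns true when the lock could not be acquired. */
    static bool prof_mutex_trylock(prof_mutex_t *m, const void *self) {
        if (pthread_mutex_trylock(&m->lock) != 0) {
            return true; /* contended */
        }
        owner_stats_update(m, self);
        return false;
    }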
|
D | prof_externs.h |
    46  void prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
    65  bool prof_accum_init(tsdn_t *tsdn, prof_accum_t *prof_accum);
    66  void prof_idump(tsdn_t *tsdn);
    68  void prof_gdump(tsdn_t *tsdn);
    73  bool prof_active_get(tsdn_t *tsdn);
    74  bool prof_active_set(tsdn_t *tsdn, bool active);
    79  bool prof_thread_active_init_get(tsdn_t *tsdn);
    80  bool prof_thread_active_init_set(tsdn_t *tsdn, bool active_init);
    81  bool prof_gdump_get(tsdn_t *tsdn);
    82  bool prof_gdump_set(tsdn_t *tsdn, bool active);
    [all …]
|
D | large_externs.h |
    4   void *large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero);
    5   void *large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
    7   bool large_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, size_t usize_min,
    9   void *large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize,
    18  void large_dalloc_prep_junked_locked(tsdn_t *tsdn, extent_t *extent);
    19  void large_dalloc_finish(tsdn_t *tsdn, extent_t *extent);
    20  void large_dalloc(tsdn_t *tsdn, extent_t *extent);
    21  size_t large_salloc(tsdn_t *tsdn, const extent_t *extent);
    22  prof_tctx_t *large_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent);
    23  void large_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, prof_tctx_t *tctx);
    [all …]
|
D | base_externs.h |
    8   base_t *base_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks);
    9   void base_delete(tsdn_t *tsdn, base_t *base);
    13  void *base_alloc(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment);
    14  extent_t *base_alloc_extent(tsdn_t *tsdn, base_t *base);
    15  void base_stats_get(tsdn_t *tsdn, base_t *base, size_t *allocated,
    17  void base_prefork(tsdn_t *tsdn, base_t *base);
    18  void base_postfork_parent(tsdn_t *tsdn, base_t *base);
    19  void base_postfork_child(tsdn_t *tsdn, base_t *base);
    20  bool base_boot(tsdn_t *tsdn);
|
D | background_thread_externs.h |
    16  void background_thread_interval_check(tsdn_t *tsdn, arena_t *arena,
    18  void background_thread_prefork0(tsdn_t *tsdn);
    19  void background_thread_prefork1(tsdn_t *tsdn);
    20  void background_thread_postfork_parent(tsdn_t *tsdn);
    21  void background_thread_postfork_child(tsdn_t *tsdn);
    22  bool background_thread_stats_read(tsdn_t *tsdn,
    24  void background_thread_ctl_init(tsdn_t *tsdn);
    31  bool background_thread_boot1(tsdn_t *tsdn);
|
D | tcache_externs.h |
    30  size_t tcache_salloc(tsdn_t *tsdn, const void *ptr);
    32  void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
    38  void tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache,
    42  void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
    46  bool tcache_boot(tsdn_t *tsdn);
    47  void tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
    48  void tcache_prefork(tsdn_t *tsdn);
    49  void tcache_postfork_parent(tsdn_t *tsdn);
    50  void tcache_postfork_child(tsdn_t *tsdn);
|
D | bin.h |
    85   void bin_prefork(tsdn_t *tsdn, bin_t *bin);
    86   void bin_postfork_parent(tsdn_t *tsdn, bin_t *bin);
    87   void bin_postfork_child(tsdn_t *tsdn, bin_t *bin);
    91   bin_stats_merge(tsdn_t *tsdn, bin_stats_t *dst_bin_stats, bin_t *bin) {
    92       malloc_mutex_lock(tsdn, &bin->lock);
    93       malloc_mutex_prof_read(tsdn, &dst_bin_stats->mutex_data, &bin->lock);
    103      malloc_mutex_unlock(tsdn, &bin->lock);
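
bin_stats_merge takes the bin's lock, copies its counters (plus the lock's own profiling data via malloc_mutex_prof_read) into the caller's accumulator, and unlocks. A generic version of that snapshot-under-lock pattern, with invented field names and without the mutex-profiling part:

    #include <pthread.h>
    #include <stdint.h>

    typedef struct {
        uint64_t nmalloc;
        uint64_t ndalloc;
    } bin_stats_sketch_t;

    typedef struct {
        pthread_mutex_t lock;
        bin_stats_sketch_t stats;
    } bin_sketch_t;

    /* Snapshot one bin's counters into an accumulator while holding its lock,
     * so the pair of values is mutually consistent. */
    static void bin_stats_merge_sketch(bin_stats_sketch_t *dst, bin_sketch_t *bin) {
        pthread_mutex_lock(&bin->lock);
        dst->nmalloc += bin->stats.nmalloc;
        dst->ndalloc += bin->stats.ndalloc;
        pthread_mutex_unlock(&bin->lock);
    }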
|
D | background_thread_inlines.h |
    10  background_thread_enabled_set(tsdn_t *tsdn, bool state) {
    11      malloc_mutex_assert_owner(tsdn, &background_thread_lock);
    30  background_thread_wakeup_time_set(tsdn_t *tsdn, background_thread_info_t *info,
    32      malloc_mutex_assert_owner(tsdn, &info->mtx);
    44  arena_background_thread_inactivity_check(tsdn_t *tsdn, arena_t *arena,
    52      background_thread_interval_check(tsdn, arena,
|
/external/jemalloc_new/src/ |
D | extent.c |
    34   static bool extent_commit_impl(tsdn_t *tsdn, arena_t *arena,
    43   static bool extent_purge_lazy_impl(tsdn_t *tsdn, arena_t *arena,
    50   static bool extent_purge_forced_impl(tsdn_t *tsdn, arena_t *arena,
    58   static extent_t *extent_split_impl(tsdn_t *tsdn, arena_t *arena,
    67   static bool extent_merge_impl(tsdn_t *tsdn, arena_t *arena,
    108  static void extent_deregister(tsdn_t *tsdn, extent_t *extent);
    109  static extent_t *extent_recycle(tsdn_t *tsdn, arena_t *arena,
    113  static extent_t *extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
    116  static void extent_record(tsdn_t *tsdn, arena_t *arena,
    132  extent_rtree_leaf_elm_try_lock(tsdn_t *tsdn, rtree_leaf_elm_t *elm,
    [all …]
|
D | large.c |
    14  large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero) {
    17      return large_palloc(tsdn, arena, usize, CACHELINE, zero);
    21  large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
    28      assert(!tsdn_null(tsdn) || arena != NULL);
    44      if (likely(!tsdn_null(tsdn))) {
    50      arena = arena_get(tsdn, 0, false);
    52      arena = arena_choose(tsdn_tsd(tsdn), arena);
    55      if (unlikely(arena == NULL) || (extent = arena_extent_alloc_large(tsdn,
    63      malloc_mutex_lock(tsdn, &arena->large_mtx);
    65      malloc_mutex_unlock(tsdn, &arena->large_mtx);
    [all …]
|
D | arena.c |
    51  static void arena_decay_to_limit(tsdn_t *tsdn, arena_t *arena,
    54  static bool arena_decay_dirty(tsdn_t *tsdn, arena_t *arena,
    56  static void arena_dalloc_bin_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
    58  static void arena_bin_lower_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
    64  arena_basic_stats_merge(UNUSED tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
    77  arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
    83      arena_basic_stats_merge(tsdn, arena, nthreads, dss, dirty_decay_ms,
    87      base_stats_get(tsdn, arena->base, &base_allocated, &base_resident,
    90      arena_stats_lock(tsdn, &arena->stats);
    93          + arena_stats_read_zu(tsdn, &arena->stats, &arena->stats.mapped));
    [all …]
|
D | background_thread.c |
    59   void background_thread_interval_check(tsdn_t *tsdn, arena_t *arena,
    61   void background_thread_prefork0(tsdn_t *tsdn) NOT_REACHED
    62   void background_thread_prefork1(tsdn_t *tsdn) NOT_REACHED
    63   void background_thread_postfork_parent(tsdn_t *tsdn) NOT_REACHED
    64   void background_thread_postfork_child(tsdn_t *tsdn) NOT_REACHED
    65   bool background_thread_stats_read(tsdn_t *tsdn, NOT_REACHED
    67   void background_thread_ctl_init(tsdn_t *tsdn) NOT_REACHED
    74   background_thread_info_init(tsdn_t *tsdn, background_thread_info_t *info) {
    75       background_thread_wakeup_time_set(tsdn, info, 0);
    118  arena_decay_compute_purge_interval_impl(tsdn_t *tsdn, arena_decay_t *decay,
    [all …]
|
D | rtree.c |
    31  rtree_node_alloc_impl(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
    32      return (rtree_node_elm_t *)base_alloc(tsdn, b0get(), nelms *
    38  rtree_node_dalloc_impl(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *node) {
    46  rtree_leaf_alloc_impl(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
    47      return (rtree_leaf_elm_t *)base_alloc(tsdn, b0get(), nelms *
    53  rtree_leaf_dalloc_impl(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *leaf) {
    63  rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *subtree,
    72      rtree_delete_subtree(tsdn, rtree, node, level +
    82      rtree_leaf_dalloc(tsdn, rtree, leaf);
    88      rtree_node_dalloc(tsdn, rtree, subtree);
    [all …]
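
rtree.c allocates interior nodes and leaves from the base allocator and tears a tree down with rtree_delete_subtree, which recurses into child nodes, frees leaves, and finally frees the subtree root itself. Below is a self-contained sketch of that post-order teardown; the fanout, depth handling, and use of malloc()/free() are assumptions standing in for the real allocation paths:

    #include <stdlib.h>

    #define FANOUT 256  /* assumed fanout per level */

    typedef struct node_sketch_s node_sketch_t;
    struct node_sketch_s {
        node_sketch_t *children[FANOUT]; /* NULL where never populated */
    };

    /* depth == 0 means this node's children are leaf arrays. */
    static void delete_subtree_sketch(node_sketch_t *node, int depth) {
        for (int i = 0; i < FANOUT; i++) {
            if (node->children[i] == NULL) {
                continue;
            }
            if (depth > 0) {
                delete_subtree_sketch(node->children[i], depth - 1);
            } else {
                free(node->children[i]); /* leaf array, no further recursion */
            }
        }
        free(node); /* release this node only after its subtree is gone */
    }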
|
D | base.c |
    32   base_map(tsdn_t *tsdn, extent_hooks_t *extent_hooks, unsigned ind, size_t size) {
    44       tsd_t *tsd = tsdn_null(tsdn) ? tsd_fetch() : tsdn_tsd(tsdn);
    55   base_unmap(tsdn_t *tsdn, extent_hooks_t *extent_hooks, unsigned ind, void *addr,
    83       tsd_t *tsd = tsdn_null(tsdn) ? tsd_fetch() : tsdn_tsd(tsdn);
    144  base_auto_thp_switch(tsdn_t *tsdn, base_t *base) {
    146      malloc_mutex_assert_owner(tsdn, &base->mtx);
    248  base_block_alloc(tsdn_t *tsdn, base_t *base, extent_hooks_t *extent_hooks,
    270      base_block_t *block = (base_block_t *)base_map(tsdn, extent_hooks, ind,
    285      malloc_mutex_lock(tsdn, &base->mtx);
    286      base_auto_thp_switch(tsdn, base);
    [all …]
|
D | prof.c |
    140  static bool prof_tctx_should_destroy(tsdn_t *tsdn, prof_tctx_t *tctx);
    142  static bool prof_tdata_should_destroy(tsdn_t *tsdn, prof_tdata_t *tdata,
    146  static char *prof_thread_name_alloc(tsdn_t *tsdn, const char *thread_name);
    241  prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
    243      prof_tctx_set(tsdn, ptr, usize, NULL, tctx);
    245      malloc_mutex_lock(tsdn, tctx->tdata->lock);
    253      malloc_mutex_unlock(tsdn, tctx->tdata->lock);
    553  prof_gctx_create(tsdn_t *tsdn, prof_bt_t *bt) {
    558      prof_gctx_t *gctx = (prof_gctx_t *)iallocztm(tsdn, size,
    614  prof_tctx_should_destroy(tsdn_t *tsdn, prof_tctx_t *tctx) {
    [all …]
|
D | bin.c |
    38  bin_prefork(tsdn_t *tsdn, bin_t *bin) {
    39      malloc_mutex_prefork(tsdn, &bin->lock);
    43  bin_postfork_parent(tsdn_t *tsdn, bin_t *bin) {
    44      malloc_mutex_postfork_parent(tsdn, &bin->lock);
    48  bin_postfork_child(tsdn_t *tsdn, bin_t *bin) {
    49      malloc_mutex_postfork_child(tsdn, &bin->lock);
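
bin.c wires each bin's lock into the allocator-wide fork protocol: prefork acquires the lock so fork() never snapshots it held by a thread that will not exist in the child, and the two postfork hooks release or reinitialize it on each side. A minimal pthread_atfork sketch of that protocol for a single lock; the names and the reinit-in-the-child choice are illustrative:

    #include <pthread.h>

    static pthread_mutex_t bin_lock = PTHREAD_MUTEX_INITIALIZER;

    /* Parent acquires the lock before fork() so the child never inherits it in
     * a locked, ownerless state. */
    static void bin_prefork_sketch(void)         { pthread_mutex_lock(&bin_lock); }
    static void bin_postfork_parent_sketch(void) { pthread_mutex_unlock(&bin_lock); }

    static void bin_postfork_child_sketch(void) {
        /* The child is single-threaded here; re-creating the mutex is the
         * simplest way to return it to a known state. */
        pthread_mutex_init(&bin_lock, NULL);
    }

    /* Register the handlers once, e.g. from a library constructor. */
    static void install_fork_handlers(void) {
        pthread_atfork(bin_prefork_sketch, bin_postfork_parent_sketch,
            bin_postfork_child_sketch);
    }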
|
/external/jemalloc_new/test/unit/ |
D | rtree.c |
    14  rtree_node_alloc_intercept(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
    18      return rtree_node_alloc_orig(tsdn, rtree, nelms);
    21      malloc_mutex_unlock(tsdn, &rtree->init_lock);
    24      malloc_mutex_lock(tsdn, &rtree->init_lock);
    30  rtree_node_dalloc_intercept(tsdn_t *tsdn, rtree_t *rtree,
    33      rtree_node_dalloc_orig(tsdn, rtree, node);
    41  rtree_leaf_alloc_intercept(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {
    45      return rtree_leaf_alloc_orig(tsdn, rtree, nelms);
    48      malloc_mutex_unlock(tsdn, &rtree->init_lock);
    51      malloc_mutex_lock(tsdn, &rtree->init_lock);
    [all …]
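
The unit test replaces the rtree's node and leaf allocation routines with interceptors that forward to the saved originals (rtree_node_alloc_orig and friends), dropping and retaking rtree->init_lock around the extra behavior they inject. A generic sketch of that interception pattern; the function-pointer plumbing and all names below are invented for illustration, not the test's actual hooks:

    #include <stddef.h>
    #include <stdlib.h>

    typedef void *(*node_alloc_t)(size_t nelms);

    static node_alloc_t node_alloc_hook;  /* what production code calls */
    static node_alloc_t node_alloc_orig;  /* saved original implementation */
    static int fail_next_alloc;           /* test knob: force one allocation failure */

    static void *node_alloc_real(size_t nelms) {
        return calloc(nelms, sizeof(void *));
    }

    static void *node_alloc_intercept(size_t nelms) {
        if (fail_next_alloc) {
            fail_next_alloc = 0;
            return NULL; /* simulate OOM to exercise the error path */
        }
        return node_alloc_orig(nelms); /* otherwise forward to the original */
    }

    static void install_intercepts(void) {
        node_alloc_orig = node_alloc_real;       /* remember the original */
        node_alloc_hook = node_alloc_intercept;  /* callers now hit the wrapper */
    }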
|
D | base.c |
    33      tsdn_t *tsdn = tsd_tsdn(tsd_fetch());
    34      base = base_new(tsdn, 0, (extent_hooks_t *)&extent_hooks_default);
    37      base_stats_get(tsdn, base, &allocated0, &resident, &mapped,
    47      assert_ptr_not_null(base_alloc(tsdn, base, 42, 1),
    51      base_stats_get(tsdn, base, &allocated1, &resident, &mapped,
    57      base_delete(tsdn, base);
    75      tsdn_t *tsdn = tsd_tsdn(tsd_fetch());
    76      base = base_new(tsdn, 0, &hooks);
    80      base_stats_get(tsdn, base, &allocated0, &resident, &mapped,
    90      assert_ptr_not_null(base_alloc(tsdn, base, 42, 1),
    [all …]
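
The test's flow is: fetch a tsdn handle, create a standalone base allocator, read its stats, allocate from it, and tear it down. Condensed into one hedged sketch built only from the calls visible in the listing; the include path, the TEST_END pairing, and the harness wiring are assumptions, and the stats assertions are omitted because base_stats_get()'s argument list is truncated above.

    #include "test/jemalloc_test.h"  /* assumed test-harness header */

    TEST_BEGIN(test_base_sketch) {
    	/* Obtain a thread-state handle the same way the listing does. */
    	tsdn_t *tsdn = tsd_tsdn(tsd_fetch());

    	/* Create a base allocator with the default extent hooks, carve a small
    	 * allocation out of it, and delete it. */
    	base_t *base = base_new(tsdn, 0,
    	    (extent_hooks_t *)&extent_hooks_default);
    	assert_ptr_not_null(base, "Unexpected base_new() failure");
    	assert_ptr_not_null(base_alloc(tsdn, base, 42, 1),
    	    "Unexpected base_alloc() failure");
    	base_delete(tsdn, base);
    }
    TEST_END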
|