/external/jemalloc_new/include/jemalloc/internal/
D | arena_externs.h
    19  void arena_basic_stats_merge(tsdn_t *tsdn, arena_t *arena,
    22  void arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,
    26  void arena_extents_dirty_dalloc(tsdn_t *tsdn, arena_t *arena,
    31  extent_t *arena_extent_alloc_large(tsdn_t *tsdn, arena_t *arena,
    33  void arena_extent_dalloc_large_prep(tsdn_t *tsdn, arena_t *arena,
    35  void arena_extent_ralloc_large_shrink(tsdn_t *tsdn, arena_t *arena,
    37  void arena_extent_ralloc_large_expand(tsdn_t *tsdn, arena_t *arena,
    40  bool arena_dirty_decay_ms_set(tsdn_t *tsdn, arena_t *arena, ssize_t decay_ms);
    42  bool arena_muzzy_decay_ms_set(tsdn_t *tsdn, arena_t *arena, ssize_t decay_ms);
    43  void arena_decay(tsdn_t *tsdn, arena_t *arena, bool is_background_thread,
    [all …]

D | extent_externs.h
    15  extent_t *extent_alloc(tsdn_t *tsdn, arena_t *arena);
    16  void extent_dalloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent);
    30  bool extents_init(tsdn_t *tsdn, extents_t *extents, extent_state_t state,
    34  extent_t *extents_alloc(tsdn_t *tsdn, arena_t *arena,
    38  void extents_dalloc(tsdn_t *tsdn, arena_t *arena,
    40  extent_t *extents_evict(tsdn_t *tsdn, arena_t *arena,
    42  void extents_prefork(tsdn_t *tsdn, extents_t *extents);
    43  void extents_postfork_parent(tsdn_t *tsdn, extents_t *extents);
    44  void extents_postfork_child(tsdn_t *tsdn, extents_t *extents);
    45  extent_t *extent_alloc_wrapper(tsdn_t *tsdn, arena_t *arena,
    [all …]

D | prof_externs.h
    46  void prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
    59  typedef bool (prof_dump_header_t)(tsdn_t *, bool, const prof_cnt_t *);
    65  bool prof_accum_init(tsdn_t *tsdn, prof_accum_t *prof_accum);
    66  void prof_idump(tsdn_t *tsdn);
    68  void prof_gdump(tsdn_t *tsdn);
    73  bool prof_active_get(tsdn_t *tsdn);
    74  bool prof_active_set(tsdn_t *tsdn, bool active);
    79  bool prof_thread_active_init_get(tsdn_t *tsdn);
    80  bool prof_thread_active_init_set(tsdn_t *tsdn, bool active_init);
    81  bool prof_gdump_get(tsdn_t *tsdn);
    [all …]

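The prof declarations above are all keyed on a tsdn_t for the calling thread. A minimal sketch of how they fit together, assuming prof_active_set() returns the previous setting and that the caller already holds a tsdn_t (e.g. from tsdn_fetch()); illustrative only, not jemalloc source:

    static void
    prof_toggle_sketch(tsdn_t *tsdn) {
            bool was_active = prof_active_get(tsdn);
            prof_active_set(tsdn, true);    /* assumed to return the previous value */
            prof_idump(tsdn);               /* interval-triggered heap profile dump */
            prof_gdump(tsdn);               /* growth-triggered heap profile dump */
            prof_active_set(tsdn, was_active);
    }
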
D | large_externs.h
    4   void *large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero);
    5   void *large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,
    7   bool large_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, size_t usize_min,
    9   void *large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize,
    18  void large_dalloc_prep_junked_locked(tsdn_t *tsdn, extent_t *extent);
    19  void large_dalloc_finish(tsdn_t *tsdn, extent_t *extent);
    20  void large_dalloc(tsdn_t *tsdn, extent_t *extent);
    21  size_t large_salloc(tsdn_t *tsdn, const extent_t *extent);
    22  prof_tctx_t *large_prof_tctx_get(tsdn_t *tsdn, const extent_t *extent);
    23  void large_prof_tctx_set(tsdn_t *tsdn, extent_t *extent, prof_tctx_t *tctx);
    [all …]

D | base_externs.h
    8   base_t *base_new(tsdn_t *tsdn, unsigned ind, extent_hooks_t *extent_hooks);
    9   void base_delete(tsdn_t *tsdn, base_t *base);
    13  void *base_alloc(tsdn_t *tsdn, base_t *base, size_t size, size_t alignment);
    14  extent_t *base_alloc_extent(tsdn_t *tsdn, base_t *base);
    15  void base_stats_get(tsdn_t *tsdn, base_t *base, size_t *allocated,
    17  void base_prefork(tsdn_t *tsdn, base_t *base);
    18  void base_postfork_parent(tsdn_t *tsdn, base_t *base);
    19  void base_postfork_child(tsdn_t *tsdn, base_t *base);
    20  bool base_boot(tsdn_t *tsdn);

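A minimal sketch of the base (metadata) allocator lifecycle declared above, assuming the caller already has a tsdn_t and a set of extent hooks (both assumptions, not part of the listing); base allocations have no per-object free and are only released in bulk by base_delete():

    static void
    base_lifecycle_sketch(tsdn_t *tsdn, extent_hooks_t *hooks) {
            base_t *base = base_new(tsdn, /* ind */ 0, hooks);
            if (base == NULL) {
                    return;
            }
            void *meta = base_alloc(tsdn, base, 128, 64);   /* size, alignment */
            (void)meta;     /* no individual dalloc; freed by base_delete() */
            base_delete(tsdn, base);
    }
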
D | background_thread_externs.h
    16  void background_thread_interval_check(tsdn_t *tsdn, arena_t *arena,
    18  void background_thread_prefork0(tsdn_t *tsdn);
    19  void background_thread_prefork1(tsdn_t *tsdn);
    20  void background_thread_postfork_parent(tsdn_t *tsdn);
    21  void background_thread_postfork_child(tsdn_t *tsdn);
    22  bool background_thread_stats_read(tsdn_t *tsdn,
    24  void background_thread_ctl_init(tsdn_t *tsdn);
    31  bool background_thread_boot1(tsdn_t *tsdn);

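The prefork/postfork declarations above pair up around fork() in the usual jemalloc pattern. The sketch below only shows the pairing implied by the names; the real call sites and the ordering relative to other subsystems are assumptions:

    static void
    background_thread_atfork_sketch(tsdn_t *tsdn, bool in_parent) {
            background_thread_prefork0(tsdn);
            background_thread_prefork1(tsdn);
            /* fork() happens between the prefork and postfork hooks. */
            if (in_parent) {
                    background_thread_postfork_parent(tsdn);
            } else {
                    background_thread_postfork_child(tsdn);
            }
    }
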
D | tcache_externs.h
    30  size_t tcache_salloc(tsdn_t *tsdn, const void *ptr);
    32  void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
    38  void tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache,
    42  void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
    46  bool tcache_boot(tsdn_t *tsdn);
    47  void tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
    48  void tcache_prefork(tsdn_t *tsdn);
    49  void tcache_postfork_parent(tsdn_t *tsdn);
    50  void tcache_postfork_child(tsdn_t *tsdn);

D | rtree.h
    114  typedef rtree_node_elm_t *(rtree_node_alloc_t)(tsdn_t *, rtree_t *, size_t);
    117  typedef rtree_leaf_elm_t *(rtree_leaf_alloc_t)(tsdn_t *, rtree_t *, size_t);
    120  typedef void (rtree_node_dalloc_t)(tsdn_t *, rtree_t *, rtree_node_elm_t *);
    123  typedef void (rtree_leaf_dalloc_t)(tsdn_t *, rtree_t *, rtree_leaf_elm_t *);
    126  void rtree_delete(tsdn_t *tsdn, rtree_t *rtree);
    128  rtree_leaf_elm_t *rtree_leaf_elm_lookup_hard(tsdn_t *tsdn, rtree_t *rtree,
    173  rtree_leaf_elm_bits_read(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *elm,  in rtree_leaf_elm_bits_read()
    211  rtree_leaf_elm_extent_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,  in rtree_leaf_elm_extent_read()
    224  rtree_leaf_elm_szind_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,  in rtree_leaf_elm_szind_read()
    236  rtree_leaf_elm_slab_read(UNUSED tsdn_t *tsdn, UNUSED rtree_t *rtree,  in rtree_leaf_elm_slab_read()
    [all …]

D | arena_stats.h
    98   arena_stats_init(UNUSED tsdn_t *tsdn, arena_stats_t *arena_stats) {  in arena_stats_init()
    115  arena_stats_lock(tsdn_t *tsdn, arena_stats_t *arena_stats) {  in arena_stats_lock()
    122  arena_stats_unlock(tsdn_t *tsdn, arena_stats_t *arena_stats) {  in arena_stats_unlock()
    129  arena_stats_read_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,  in arena_stats_read_u64()
    140  arena_stats_add_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,  in arena_stats_add_u64()
    151  arena_stats_sub_u64(tsdn_t *tsdn, arena_stats_t *arena_stats,  in arena_stats_sub_u64()
    179  arena_stats_read_zu(tsdn_t *tsdn, arena_stats_t *arena_stats, atomic_zu_t *p) {  in arena_stats_read_zu()
    189  arena_stats_add_zu(tsdn_t *tsdn, arena_stats_t *arena_stats, atomic_zu_t *p,  in arena_stats_add_zu()
    201  arena_stats_sub_zu(tsdn_t *tsdn, arena_stats_t *arena_stats, atomic_zu_t *p,  in arena_stats_sub_zu()
    221  arena_stats_large_nrequests_add(tsdn_t *tsdn, arena_stats_t *arena_stats,  in arena_stats_large_nrequests_add()
    [all …]

D | mutex.h
    131  void malloc_mutex_prefork(tsdn_t *tsdn, malloc_mutex_t *mutex);
    132  void malloc_mutex_postfork_parent(tsdn_t *tsdn, malloc_mutex_t *mutex);
    133  void malloc_mutex_postfork_child(tsdn_t *tsdn, malloc_mutex_t *mutex);
    135  void malloc_mutex_prof_data_reset(tsdn_t *tsdn, malloc_mutex_t *mutex);
    150  mutex_owner_stats_update(tsdn_t *tsdn, malloc_mutex_t *mutex) {  in mutex_owner_stats_update()
    163  malloc_mutex_trylock(tsdn_t *tsdn, malloc_mutex_t *mutex) {  in malloc_mutex_trylock()
    201  malloc_mutex_lock(tsdn_t *tsdn, malloc_mutex_t *mutex) {  in malloc_mutex_lock()
    213  malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *mutex) {  in malloc_mutex_unlock()
    221  malloc_mutex_assert_owner(tsdn_t *tsdn, malloc_mutex_t *mutex) {  in malloc_mutex_assert_owner()
    226  malloc_mutex_assert_not_owner(tsdn_t *tsdn, malloc_mutex_t *mutex) {  in malloc_mutex_assert_not_owner()
    [all …]

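Every lock operation above carries the tsdn_t so ownership can be tracked (and, in debug builds, checked by the witness machinery). A hedged usage sketch, not jemalloc source; it assumes the usual jemalloc convention that malloc_mutex_trylock() returns true on failure:

    static void
    locked_update_sketch(tsdn_t *tsdn, malloc_mutex_t *mtx, unsigned *counter) {
            if (malloc_mutex_trylock(tsdn, mtx)) {  /* true: could not acquire */
                    malloc_mutex_lock(tsdn, mtx);   /* block until acquired */
            }
            malloc_mutex_assert_owner(tsdn, mtx);
            (*counter)++;
            malloc_mutex_unlock(tsdn, mtx);
    }
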
D | jemalloc_internal_inlines_c.h
    27   iaalloc(tsdn_t *tsdn, const void *ptr) {  in iaalloc()
    34   isalloc(tsdn_t *tsdn, const void *ptr) {  in isalloc()
    41   iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,  in iallocztm()
    67   ipallocztm(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,  in ipallocztm()
    87   ipalloct(tsdn_t *tsdn, size_t usize, size_t alignment, bool zero,  in ipalloct()
    99   ivsalloc(tsdn_t *tsdn, const void *ptr) {  in ivsalloc()
    104  idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx,  in idalloctm()
    127  isdalloct(tsdn_t *tsdn, void *ptr, size_t size, tcache_t *tcache,  in isdalloct()
    135  iralloct_realign(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size,  in iralloct_realign()
    173  iralloct(tsdn_t *tsdn, void *ptr, size_t oldsize, size_t size, size_t alignment,  in iralloct()
    [all …]

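A short sketch of the introspection inlines listed above: iaalloc() maps a live pointer back to its owning arena and isalloc() reports its usable size. Only the two signatures shown in full are relied on; everything else is illustrative:

    static size_t
    introspect_sketch(tsdn_t *tsdn, const void *ptr) {
            arena_t *arena = iaalloc(tsdn, ptr);    /* owning arena of ptr */
            (void)arena;
            return isalloc(tsdn, ptr);              /* usable size of ptr */
    }
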
D | arena_inlines_b.h
    12   arena_prof_tctx_get(tsdn_t *tsdn, const void *ptr, alloc_ctx_t *alloc_ctx) {  in arena_prof_tctx_get()
    31   arena_prof_tctx_set(tsdn_t *tsdn, const void *ptr, UNUSED size_t usize,  in arena_prof_tctx_set()
    50   arena_prof_tctx_reset(tsdn_t *tsdn, const void *ptr, UNUSED prof_tctx_t *tctx) {  in arena_prof_tctx_reset()
    61   arena_decay_ticks(tsdn_t *tsdn, arena_t *arena, unsigned nticks) {  in arena_decay_ticks()
    79   arena_decay_tick(tsdn_t *tsdn, arena_t *arena) {  in arena_decay_tick()
    87   arena_malloc(tsdn_t *tsdn, arena_t *arena, size_t size, szind_t ind, bool zero,  in arena_malloc()
    109  arena_aalloc(tsdn_t *tsdn, const void *ptr) {  in arena_aalloc()
    114  arena_salloc(tsdn_t *tsdn, const void *ptr) {  in arena_salloc()
    128  arena_vsalloc(tsdn_t *tsdn, const void *ptr) {  in arena_vsalloc()
    161  arena_dalloc_no_tcache(tsdn_t *tsdn, void *ptr) {  in arena_dalloc_no_tcache()
    [all …]

/external/jemalloc/include/jemalloc/internal/
D | prof.h
    284  void prof_malloc_sample_object(tsdn_t *tsdn, const void *ptr, size_t usize,
    296  typedef bool (prof_dump_header_t)(tsdn_t *, bool, const prof_cnt_t *);
    299  void prof_idump(tsdn_t *tsdn);
    301  void prof_gdump(tsdn_t *tsdn);
    306  bool prof_active_get(tsdn_t *tsdn);
    307  bool prof_active_set(tsdn_t *tsdn, bool active);
    312  bool prof_thread_active_init_get(tsdn_t *tsdn);
    313  bool prof_thread_active_init_set(tsdn_t *tsdn, bool active_init);
    314  bool prof_gdump_get(tsdn_t *tsdn);
    315  bool prof_gdump_set(tsdn_t *tsdn, bool active);
    [all …]

D | mutex.h
    67   void malloc_mutex_prefork(tsdn_t *tsdn, malloc_mutex_t *mutex);
    68   void malloc_mutex_postfork_parent(tsdn_t *tsdn, malloc_mutex_t *mutex);
    69   void malloc_mutex_postfork_child(tsdn_t *tsdn, malloc_mutex_t *mutex);
    77   void malloc_mutex_lock(tsdn_t *tsdn, malloc_mutex_t *mutex);
    78   void malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *mutex);
    79   void malloc_mutex_assert_owner(tsdn_t *tsdn, malloc_mutex_t *mutex);
    80   void malloc_mutex_assert_not_owner(tsdn_t *tsdn, malloc_mutex_t *mutex);
    85   malloc_mutex_lock(tsdn_t *tsdn, malloc_mutex_t *mutex)  in malloc_mutex_lock()
    108  malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *mutex)  in malloc_mutex_unlock()
    130  malloc_mutex_assert_owner(tsdn_t *tsdn, malloc_mutex_t *mutex)  in malloc_mutex_assert_owner()
    [all …]

D | huge.h
    12  void *huge_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero);
    13  void *huge_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize,
    15  bool huge_ralloc_no_move(tsdn_t *tsdn, void *ptr, size_t oldsize,
    23  void huge_dalloc(tsdn_t *tsdn, void *ptr);
    25  size_t huge_salloc(tsdn_t *tsdn, const void *ptr);
    26  prof_tctx_t *huge_prof_tctx_get(tsdn_t *tsdn, const void *ptr);
    27  void huge_prof_tctx_set(tsdn_t *tsdn, const void *ptr, prof_tctx_t *tctx);
    28  void huge_prof_tctx_reset(tsdn_t *tsdn, const void *ptr);

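In the older tree the huge API above is pointer-based, so a full round trip needs only the declarations shown. A minimal sketch (illustrative, not jemalloc source), assuming the caller supplies the tsdn_t and target arena:

    static void
    huge_roundtrip_sketch(tsdn_t *tsdn, arena_t *arena) {
            void *p = huge_malloc(tsdn, arena, 4 * 1024 * 1024, /* zero */ true);
            if (p == NULL) {
                    return;
            }
            size_t usize = huge_salloc(tsdn, p);    /* usable size, >= request */
            (void)usize;
            huge_dalloc(tsdn, p);
    }
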
D | arena.h
    547  extent_node_t *arena_node_alloc(tsdn_t *tsdn, arena_t *arena);
    548  void arena_node_dalloc(tsdn_t *tsdn, arena_t *arena, extent_node_t *node);
    549  void *arena_chunk_alloc_huge(tsdn_t *tsdn, arena_t *arena, size_t usize,
    551  void arena_chunk_dalloc_huge(tsdn_t *tsdn, arena_t *arena, void *chunk,
    553  void arena_chunk_ralloc_huge_similar(tsdn_t *tsdn, arena_t *arena,
    555  void arena_chunk_ralloc_huge_shrink(tsdn_t *tsdn, arena_t *arena,
    557  bool arena_chunk_ralloc_huge_expand(tsdn_t *tsdn, arena_t *arena,
    559  ssize_t arena_lg_dirty_mult_get(tsdn_t *tsdn, arena_t *arena);
    560  bool arena_lg_dirty_mult_set(tsdn_t *tsdn, arena_t *arena,
    562  ssize_t arena_decay_time_get(tsdn_t *tsdn, arena_t *arena);
    [all …]

D | chunk.h
    55  chunk_hooks_t chunk_hooks_get(tsdn_t *tsdn, arena_t *arena);
    56  chunk_hooks_t chunk_hooks_set(tsdn_t *tsdn, arena_t *arena,
    59  bool chunk_register(tsdn_t *tsdn, const void *chunk,
    63  void *chunk_alloc_cache(tsdn_t *tsdn, arena_t *arena,
    66  void *chunk_alloc_wrapper(tsdn_t *tsdn, arena_t *arena,
    69  void chunk_dalloc_cache(tsdn_t *tsdn, arena_t *arena,
    72  void chunk_dalloc_wrapper(tsdn_t *tsdn, arena_t *arena,
    75  bool chunk_purge_wrapper(tsdn_t *tsdn, arena_t *arena,

D | base.h
    12  void *base_alloc(tsdn_t *tsdn, size_t size);
    13  void base_stats_get(tsdn_t *tsdn, size_t *allocated, size_t *resident,
    16  void base_prefork(tsdn_t *tsdn);
    17  void base_postfork_parent(tsdn_t *tsdn);
    18  void base_postfork_child(tsdn_t *tsdn);

D | witness.h
    113  void witness_assert_owner(tsdn_t *tsdn, const witness_t *witness);
    114  void witness_assert_not_owner(tsdn_t *tsdn, const witness_t *witness);
    115  void witness_assert_lockless(tsdn_t *tsdn);
    116  void witness_lock(tsdn_t *tsdn, witness_t *witness);
    117  void witness_unlock(tsdn_t *tsdn, witness_t *witness);
    137  witness_assert_owner(tsdn_t *tsdn, const witness_t *witness)  in witness_assert_owner()
    156  witness_assert_not_owner(tsdn_t *tsdn, const witness_t *witness)  in witness_assert_not_owner()
    179  witness_assert_lockless(tsdn_t *tsdn)  in witness_assert_lockless()
    199  witness_lock(tsdn_t *tsdn, witness_t *witness)  in witness_lock()
    239  witness_unlock(tsdn_t *tsdn, witness_t *witness)  in witness_unlock()

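The witness declarations above are the debug-build lock-order checker: each acquisition is recorded against the tsdn_t so rank violations can be asserted. A hedged sketch of the intended call sequence (illustrative only; the lockless assertion assumes no other witnesses are held):

    static void
    witness_usage_sketch(tsdn_t *tsdn, witness_t *w) {
            witness_assert_not_owner(tsdn, w);      /* must not already hold it */
            witness_lock(tsdn, w);                  /* records ownership, checks rank order */
            witness_assert_owner(tsdn, w);
            witness_unlock(tsdn, w);
            witness_assert_lockless(tsdn);          /* no witnesses held at all */
    }
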
/external/jemalloc_new/src/
D | large.c
    14   large_malloc(tsdn_t *tsdn, arena_t *arena, size_t usize, bool zero) {  in large_malloc()
    21   large_palloc(tsdn_t *tsdn, arena_t *arena, size_t usize, size_t alignment,  in large_palloc()
    104  large_ralloc_no_move_shrink(tsdn_t *tsdn, extent_t *extent, size_t usize) {  in large_ralloc_no_move_shrink()
    139  large_ralloc_no_move_expand(tsdn_t *tsdn, extent_t *extent, size_t usize,  in large_ralloc_no_move_expand()
    227  large_ralloc_no_move(tsdn_t *tsdn, extent_t *extent, size_t usize_min,  in large_ralloc_no_move()
    272  large_ralloc_move_helper(tsdn_t *tsdn, arena_t *arena, size_t usize,  in large_ralloc_move_helper()
    281  large_ralloc(tsdn_t *tsdn, arena_t *arena, extent_t *extent, size_t usize,  in large_ralloc()
    317  large_dalloc_prep_impl(tsdn_t *tsdn, arena_t *arena, extent_t *extent,  in large_dalloc_prep_impl()
    338  large_dalloc_finish_impl(tsdn_t *tsdn, arena_t *arena, extent_t *extent) {  in large_dalloc_finish_impl()
    344  large_dalloc_prep_junked_locked(tsdn_t *tsdn, extent_t *extent) {  in large_dalloc_prep_junked_locked()
    [all …]

D | extent.c
    34   static bool extent_commit_impl(tsdn_t *tsdn, arena_t *arena,
    43   static bool extent_purge_lazy_impl(tsdn_t *tsdn, arena_t *arena,
    50   static bool extent_purge_forced_impl(tsdn_t *tsdn, arena_t *arena,
    58   static extent_t *extent_split_impl(tsdn_t *tsdn, arena_t *arena,
    67   static bool extent_merge_impl(tsdn_t *tsdn, arena_t *arena,
    108  static void extent_deregister(tsdn_t *tsdn, extent_t *extent);
    109  static extent_t *extent_recycle(tsdn_t *tsdn, arena_t *arena,
    113  static extent_t *extent_try_coalesce(tsdn_t *tsdn, arena_t *arena,
    116  static void extent_record(tsdn_t *tsdn, arena_t *arena,
    132  extent_rtree_leaf_elm_try_lock(tsdn_t *tsdn, rtree_leaf_elm_t *elm,  in extent_rtree_leaf_elm_try_lock()
    [all …]

D | arena.c
    51   static void arena_decay_to_limit(tsdn_t *tsdn, arena_t *arena,
    54   static bool arena_decay_dirty(tsdn_t *tsdn, arena_t *arena,
    56   static void arena_dalloc_bin_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
    58   static void arena_bin_lower_slab(tsdn_t *tsdn, arena_t *arena, extent_t *slab,
    64   arena_basic_stats_merge(UNUSED tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,  in arena_basic_stats_merge()
    77   arena_stats_merge(tsdn_t *tsdn, arena_t *arena, unsigned *nthreads,  in arena_stats_merge()
    209  arena_extents_dirty_dalloc(tsdn_t *tsdn, arena_t *arena,  in arena_extents_dirty_dalloc()
    289  arena_large_malloc_stats_update(tsdn_t *tsdn, arena_t *arena, size_t usize) {  in arena_large_malloc_stats_update()
    305  arena_large_dalloc_stats_update(tsdn_t *tsdn, arena_t *arena, size_t usize) {  in arena_large_dalloc_stats_update()
    321  arena_large_ralloc_stats_update(tsdn_t *tsdn, arena_t *arena, size_t oldusize,  in arena_large_ralloc_stats_update()
    [all …]

D | background_thread.c
    59   void background_thread_interval_check(tsdn_t *tsdn, arena_t *arena,  in background_thread_create()
    61   void background_thread_prefork0(tsdn_t *tsdn) NOT_REACHED  in background_thread_create()
    62   void background_thread_prefork1(tsdn_t *tsdn) NOT_REACHED  in background_thread_create()
    63   void background_thread_postfork_parent(tsdn_t *tsdn) NOT_REACHED  in background_thread_create()
    64   void background_thread_postfork_child(tsdn_t *tsdn) NOT_REACHED  in background_thread_create()
    65   bool background_thread_stats_read(tsdn_t *tsdn,  in background_thread_create()
    67   void background_thread_ctl_init(tsdn_t *tsdn) NOT_REACHED  in background_thread_create()
    74   background_thread_info_init(tsdn_t *tsdn, background_thread_info_t *info) {
    118  arena_decay_compute_purge_interval_impl(tsdn_t *tsdn, arena_decay_t *decay,
    204  arena_decay_compute_purge_interval(tsdn_t *tsdn, arena_t *arena) {
    [all …]

D | rtree.c
    31   rtree_node_alloc_impl(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {  in rtree_node_alloc_impl()
    38   rtree_node_dalloc_impl(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *node) {  in rtree_node_dalloc_impl()
    46   rtree_leaf_alloc_impl(tsdn_t *tsdn, rtree_t *rtree, size_t nelms) {  in rtree_leaf_alloc_impl()
    53   rtree_leaf_dalloc_impl(tsdn_t *tsdn, rtree_t *rtree, rtree_leaf_elm_t *leaf) {  in rtree_leaf_dalloc_impl()
    63   rtree_delete_subtree(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *subtree,  in rtree_delete_subtree()
    94   rtree_delete(tsdn_t *tsdn, rtree_t *rtree) {  in rtree_delete()
    102  rtree_node_init(tsdn_t *tsdn, rtree_t *rtree, unsigned level,  in rtree_node_init()
    129  rtree_leaf_init(tsdn_t *tsdn, rtree_t *rtree, atomic_p_t *elmp) {  in rtree_leaf_init()
    181  rtree_child_node_read(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *elm,  in rtree_child_node_read()
    210  rtree_child_leaf_read(tsdn_t *tsdn, rtree_t *rtree, rtree_node_elm_t *elm,  in rtree_child_leaf_read()
    [all …]

/external/jemalloc/src/
D | base.c
    18   base_node_try_alloc(tsdn_t *tsdn)  in base_node_try_alloc()
    33   base_node_dalloc(tsdn_t *tsdn, extent_node_t *node)  in base_node_dalloc()
    52   base_chunk_alloc(tsdn_t *tsdn, size_t minsize)  in base_chunk_alloc()
    90   base_alloc(tsdn_t *tsdn, size_t size)  in base_alloc()
    142  base_stats_get(tsdn_t *tsdn, size_t *allocated, size_t *resident,  in base_stats_get()
    169  base_prefork(tsdn_t *tsdn)  in base_prefork()
    176  base_postfork_parent(tsdn_t *tsdn)  in base_postfork_parent()
    183  base_postfork_child(tsdn_t *tsdn)  in base_postfork_child()