
Searched refs: tsd_tsdn (Results 1–18 of 18, sorted by relevance)

tsd_tsdn() converts a thread's tsd_t * (thread-specific data) into the tsdn_t * ("nullable tsd") form that most jemalloc-internal APIs take as their first argument. The hits below are its call sites in external/jemalloc_new (jemalloc 5.x).

/external/jemalloc_new/src/
tcache.c
121 item_extent[i] = iealloc(tsd_tsdn(tsd), *(tbin->avail - 1 - i)); in tcache_bin_flush_small()
131 if (arena_prof_accum(tsd_tsdn(tsd), arena, in tcache_bin_flush_small()
133 prof_idump(tsd_tsdn(tsd)); in tcache_bin_flush_small()
138 malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock); in tcache_bin_flush_small()
155 arena_dalloc_bin_junked_locked(tsd_tsdn(tsd), in tcache_bin_flush_small()
169 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock); in tcache_bin_flush_small()
170 arena_decay_ticks(tsd_tsdn(tsd), bin_arena, nflush - ndeferred); in tcache_bin_flush_small()
179 malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock); in tcache_bin_flush_small()
185 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock); in tcache_bin_flush_small()
212 item_extent[i] = iealloc(tsd_tsdn(tsd), *(tbin->avail - 1 - i)); in tcache_bin_flush_large()
[all …]
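
All of the tcache.c hits sit inside the flush paths (tcache_bin_flush_small()/tcache_bin_flush_large()): the tsdn derived from the caller's tsd is threaded through the extent lookups, the bin lock, and the post-flush decay tick. Below is a minimal compilable sketch of that lock-once, drain-batch shape; all the types and helpers are hypothetical stand-ins, not jemalloc's real definitions.

/* Hypothetical stand-ins; jemalloc's real types are more elaborate. */
typedef struct tsdn_s tsdn_t;
typedef struct { int held; } malloc_mutex_t;
typedef struct { malloc_mutex_t lock; } bin_t;

static void malloc_mutex_lock(tsdn_t *tsdn, malloc_mutex_t *m)   { (void)tsdn; m->held = 1; }
static void malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *m) { (void)tsdn; m->held = 0; }
static void dalloc_bin_locked(tsdn_t *tsdn, void *ptr)           { (void)tsdn; (void)ptr; }
static void arena_decay_ticks_sketch(tsdn_t *tsdn, unsigned n)   { (void)tsdn; (void)n; }

/* Flush shape: one lock acquisition amortized over the whole batch. */
static void
bin_flush_sketch(tsdn_t *tsdn, bin_t *bin, void **items, unsigned nflush) {
	malloc_mutex_lock(tsdn, &bin->lock);
	for (unsigned i = 0; i < nflush; i++) {
		dalloc_bin_locked(tsdn, items[i]);
	}
	malloc_mutex_unlock(tsdn, &bin->lock);
	arena_decay_ticks_sketch(tsdn, nflush); /* ticks after unlock, as at hit 170 */
}

The point of the tcache is that the common free path takes no lock at all; the flush amortizes a single acquisition over nflush items, and the second lock/unlock pair at hits 179/185 covers a shorter follow-up critical section.
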
jemalloc.c
372 arena_t *arena = arena_get(tsd_tsdn(tsd), ind, false); in arena_bind()
386 oldarena = arena_get(tsd_tsdn(tsd), oldind, false); in arena_migrate()
387 newarena = arena_get(tsd_tsdn(tsd), newind, false); in arena_migrate()
397 arena = arena_get(tsd_tsdn(tsd), ind, false); in arena_unbind()
490 ret = arena_get(tsd_tsdn(tsd), choose, true); in arena_choose_hard()
516 malloc_mutex_lock(tsd_tsdn(tsd), &arenas_lock); in arena_choose_hard()
517 assert(arena_get(tsd_tsdn(tsd), 0, false) != NULL); in arena_choose_hard()
519 if (arena_get(tsd_tsdn(tsd), i, false) != NULL) { in arena_choose_hard()
526 tsd_tsdn(tsd), i, false), !!j) < in arena_choose_hard()
528 tsd_tsdn(tsd), choose[j], false), in arena_choose_hard()
[all …]
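
Every jemalloc.c hit goes through arena_get(tsd_tsdn(tsd), ind, init_if_missing): arena_bind()/arena_unbind()/arena_migrate() pass false because the arena must already exist, while arena_choose_hard() passes true when it may have to create one. Here is a hedged sketch of that contract; the flat arena table and _sketch helpers are illustrative, not jemalloc's internals.

#include <stdbool.h>
#include <stddef.h>

typedef struct tsdn_s tsdn_t;             /* opaque stand-in */
typedef struct { unsigned ind; } arena_t;

#define NARENAS_SKETCH 8
static arena_t *g_arenas[NARENAS_SKETCH]; /* illustrative arena table */

static arena_t *
arena_init_sketch(tsdn_t *tsdn, unsigned ind) {
	static arena_t storage[NARENAS_SKETCH];
	(void)tsdn;
	storage[ind].ind = ind;
	g_arenas[ind] = &storage[ind];
	return g_arenas[ind];
}

/*
 * Lookup contract: with init_if_missing == false the caller tolerates
 * (or asserts against) NULL; with true, a missing slot gets created.
 */
static arena_t *
arena_get_sketch(tsdn_t *tsdn, unsigned ind, bool init_if_missing) {
	arena_t *arena = g_arenas[ind];
	if (arena == NULL && init_if_missing) {
		arena = arena_init_sketch(tsdn, ind);
	}
	return arena;
}
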
prof.c
230 malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock); in rb_gen()
232 if (prof_tctx_should_destroy(tsd_tsdn(tsd), tctx)) { in rb_gen()
235 malloc_mutex_unlock(tsd_tsdn(tsd), tctx->tdata->lock); in rb_gen()
258 malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock); in prof_free_sampled_object()
264 if (prof_tctx_should_destroy(tsd_tsdn(tsd), tctx)) { in prof_free_sampled_object()
267 malloc_mutex_unlock(tsd_tsdn(tsd), tctx->tdata->lock); in prof_free_sampled_object()
289 malloc_mutex_lock(tsd_tsdn(tsd), &bt2gctx_mtx); in prof_enter()
297 malloc_mutex_unlock(tsd_tsdn(tsd), &bt2gctx_mtx); in prof_leave()
310 prof_idump(tsd_tsdn(tsd)); in prof_leave()
313 prof_gdump(tsd_tsdn(tsd)); in prof_leave()
[all …]
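
The prof.c pattern: prof_enter() takes bt2gctx_mtx through the tsdn, prof_leave() releases it, and any idump/gdump that became due inside the critical section runs only after the unlock (hits 310/313). A rough sketch of that deferred-dump shape follows; the types and the single deferred flag are stand-ins, not jemalloc's.

#include <stdbool.h>

typedef struct tsd_s tsd_t;              /* opaque stand-ins */
typedef struct tsdn_s tsdn_t;
typedef struct { int held; } malloc_mutex_t;

static tsdn_t *tsd_tsdn_sketch(tsd_t *tsd) { return (tsdn_t *)tsd; }
static void malloc_mutex_lock(tsdn_t *t, malloc_mutex_t *m)   { (void)t; m->held = 1; }
static void malloc_mutex_unlock(tsdn_t *t, malloc_mutex_t *m) { (void)t; m->held = 0; }
static void prof_idump_sketch(tsdn_t *t) { (void)t; /* write an interval dump */ }

static malloc_mutex_t bt2gctx_mtx_sketch;
static bool deferred_idump;              /* set while the mutex is held */

static void
prof_enter_sketch(tsd_t *tsd) {
	malloc_mutex_lock(tsd_tsdn_sketch(tsd), &bt2gctx_mtx_sketch);
}

/* A dump that became due inside the critical section runs after unlock. */
static void
prof_leave_sketch(tsd_t *tsd) {
	bool idump = deferred_idump;
	deferred_idump = false;
	malloc_mutex_unlock(tsd_tsdn_sketch(tsd), &bt2gctx_mtx_sketch);
	if (idump) {
		prof_idump_sketch(tsd_tsdn_sketch(tsd));
	}
}
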
background_thread.c
315 malloc_mutex_assert_owner(tsd_tsdn(tsd),
318 malloc_mutex_assert_not_owner(tsd_tsdn(tsd),
323 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
333 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
393 tsdn_t *tsdn = tsd_tsdn(tsd);
446 if (background_thread_pause_check(tsd_tsdn(tsd),
454 background_work_sleep_once(tsd_tsdn(tsd),
469 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
477 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
488 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
[all …]
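
background_thread.c also passes the tsdn to debug-build ownership assertions (hits 315/318) before touching info->mtx. Below is a small sketch of what such assertions can look like, assuming a mutex that records its holder; jemalloc's real malloc_mutex_t is more elaborate (witness tracking, stats).

#include <assert.h>
#include <stddef.h>

typedef struct tsdn_s tsdn_t;
typedef struct { tsdn_t *holder; } malloc_mutex_t; /* sketch: record the holder */

static void malloc_mutex_lock(tsdn_t *tsdn, malloc_mutex_t *m)   { m->holder = tsdn; }
static void malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *m) { (void)tsdn; m->holder = NULL; }

/* Debug contracts: the tsdn identifies "this thread" to the check. */
static void
malloc_mutex_assert_owner(tsdn_t *tsdn, malloc_mutex_t *m) {
	assert(m->holder == tsdn);
}

static void
malloc_mutex_assert_not_owner(tsdn_t *tsdn, malloc_mutex_t *m) {
	assert(m->holder != tsdn);
}
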
ctl.c
655 (struct container_s *)base_alloc(tsd_tsdn(tsd), in arenas_i_impl()
663 ret = (ctl_arena_t *)base_alloc(tsd_tsdn(tsd), b0get(), in arenas_i_impl()
903 if (arena_init(tsd_tsdn(tsd), arena_ind, extent_hooks) == NULL) { in ctl_arena_init()
1001 tsdn_t *tsdn = tsd_tsdn(tsd); in ctl_init()
1187 ret = ctl_lookup(tsd_tsdn(tsd), name, nodes, mib, &depth); in ctl_byname()
1213 ret = ctl_lookup(tsd_tsdn(tsd), name, NULL, mibp, miblenp); in ctl_nametomib()
1247 node = inode->index(tsd_tsdn(tsd), mib, miblen, mib[i]); in ctl_bymib()
1365 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); \
1374 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \
1389 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); \
[all …]
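
The trailing backslashes on the ctl.c hits at 1365/1374/1389 show they live inside macro bodies: the generated mallctl handlers bracket their work with ctl_mtx. Here is an invented generator in that spirit; the macro name, signature, and example value are made up for the sketch and are not ctl.c's real generators.

typedef struct tsd_s tsd_t;              /* opaque stand-ins */
typedef struct tsdn_s tsdn_t;
typedef struct { int held; } malloc_mutex_t;

static malloc_mutex_t ctl_mtx_sketch;
static tsdn_t *tsd_tsdn_sketch(tsd_t *tsd) { return (tsdn_t *)tsd; }
static void malloc_mutex_lock(tsdn_t *t, malloc_mutex_t *m)   { (void)t; m->held = 1; }
static void malloc_mutex_unlock(tsdn_t *t, malloc_mutex_t *m) { (void)t; m->held = 0; }

/* Invented generator in the spirit of ctl.c's handler macros. */
#define CTL_RO_SKETCH(name, type, value)				\
static int								\
name##_ctl_sketch(tsd_t *tsd, type *oldp) {				\
	malloc_mutex_lock(tsd_tsdn_sketch(tsd), &ctl_mtx_sketch);	\
	*oldp = (value);						\
	malloc_mutex_unlock(tsd_tsdn_sketch(tsd), &ctl_mtx_sketch);	\
	return 0;							\
}

CTL_RO_SKETCH(opt_narenas, unsigned, 8u) /* expands to opt_narenas_ctl_sketch() */
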
ckh.c
282 tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, in ckh_grow()
295 idalloctm(tsd_tsdn(tsd), tab, NULL, NULL, true, true); in ckh_grow()
300 idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, NULL, true, true); in ckh_grow()
326 tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, NULL, in ckh_shrink()
342 idalloctm(tsd_tsdn(tsd), tab, NULL, NULL, true, true); in ckh_shrink()
350 idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, NULL, true, true); in ckh_shrink()
403 ckh->tab = (ckhc_t *)ipallocztm(tsd_tsdn(tsd), usize, CACHELINE, true, in ckh_new()
431 idalloctm(tsd_tsdn(tsd), ckh->tab, NULL, NULL, true, true); in ckh_delete()
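
ckh.c (the cuckoo hash) shows the allocate-aside/swap/free shape: ckh_grow() and ckh_shrink() build a replacement table with ipallocztm(), free the new table on failure while keeping the old, and free the old on success (hits 282/295/300). A simplified sketch follows, with calloc()/free() standing in for ipallocztm()/idalloctm(), which really take several more arguments, and a trivial rehash stub.

#include <stdbool.h>
#include <stdlib.h>

typedef struct tsdn_s tsdn_t;
typedef struct { void **tab; size_t ncells; } ckh_sketch_t;

/* calloc/free stand in for ipallocztm()/idalloctm(). */
static void *ipallocztm_sketch(tsdn_t *t, size_t sz) { (void)t; return calloc(1, sz); }
static void idalloctm_sketch(tsdn_t *t, void *p)     { (void)t; free(p); }

static bool
rehash_into(ckh_sketch_t *ckh, void **tab, size_t n) {
	(void)ckh; (void)tab; (void)n;
	return true; /* pretend every key found a cell */
}

/* Grow shape: build the bigger table aside, swap on success, free the loser. */
static bool
ckh_grow_sketch(tsdn_t *tsdn, ckh_sketch_t *ckh) {
	size_t n = ckh->ncells * 2;
	void **tab = ipallocztm_sketch(tsdn, n * sizeof(void *));
	if (tab == NULL) {
		return true;                 /* OOM: the old table stays valid */
	}
	if (!rehash_into(ckh, tab, n)) {
		idalloctm_sketch(tsdn, tab); /* rebuild failed: discard new */
		return true;
	}
	idalloctm_sketch(tsdn, ckh->tab);    /* success: discard old */
	ckh->tab = tab;
	ckh->ncells = n;
	return false;
}
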
arena.c
974 malloc_mutex_lock(tsd_tsdn(tsd), &arena->large_mtx); in arena_reset()
981 malloc_mutex_unlock(tsd_tsdn(tsd), &arena->large_mtx); in arena_reset()
984 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx, in arena_reset()
990 assert(usize == isalloc(tsd_tsdn(tsd), ptr)); in arena_reset()
996 large_dalloc(tsd_tsdn(tsd), extent); in arena_reset()
997 malloc_mutex_lock(tsd_tsdn(tsd), &arena->large_mtx); in arena_reset()
999 malloc_mutex_unlock(tsd_tsdn(tsd), &arena->large_mtx); in arena_reset()
1005 malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock); in arena_reset()
1009 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock); in arena_reset()
1010 arena_slab_dalloc(tsd_tsdn(tsd), arena, slab); in arena_reset()
[all …]
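
The arena.c hits trace arena_reset() tearing down all large allocations: it pops one extent at a time and drops large_mtx around each large_dalloc() (hits 996/997), so the lock is never held across teardown. A sketch of that unlock-around-the-body loop, with an illustrative intrusive list instead of jemalloc's extent list:

#include <stddef.h>

typedef struct tsdn_s tsdn_t;
typedef struct extent_sketch_s {
	struct extent_sketch_s *next;
} extent_sketch_t;
typedef struct { int held; } malloc_mutex_t;
typedef struct {
	malloc_mutex_t large_mtx;
	extent_sketch_t *large_head;   /* illustrative intrusive list */
} arena_sketch_t;

static void malloc_mutex_lock(tsdn_t *t, malloc_mutex_t *m)    { (void)t; m->held = 1; }
static void malloc_mutex_unlock(tsdn_t *t, malloc_mutex_t *m)  { (void)t; m->held = 0; }
static void large_dalloc_sketch(tsdn_t *t, extent_sketch_t *e) { (void)t; (void)e; }

/* Pop one extent at a time; never hold the list lock across teardown. */
static void
arena_reset_large_sketch(tsdn_t *tsdn, arena_sketch_t *arena) {
	malloc_mutex_lock(tsdn, &arena->large_mtx);
	while (arena->large_head != NULL) {
		extent_sketch_t *e = arena->large_head;
		arena->large_head = e->next;
		malloc_mutex_unlock(tsdn, &arena->large_mtx);
		large_dalloc_sketch(tsdn, e);
		malloc_mutex_lock(tsdn, &arena->large_mtx);
	}
	malloc_mutex_unlock(tsdn, &arena->large_mtx);
}
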
android_je_iterate.c
30 extent_t* extent = iealloc(tsd_tsdn(tsd), (void*)ptr); in je_malloc_iterate()
45 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx, ptr, true, &szind, &slab); in je_malloc_iterate()
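
Both android_je_iterate.c hits resolve an arbitrary address to its metadata: iealloc() maps ptr to its extent_t, and rtree_szind_slab_read() fetches the size-class index and slab flag from the global extents_rtree (the tsdn gives the lookup access to a per-thread rtree cache). A toy stand-in using a flat direct-mapped table instead of a radix tree; everything here is invented for illustration.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef struct tsdn_s tsdn_t;
typedef struct { size_t szind; bool slab; } extent_meta_sketch_t;

/* Toy "rtree": a tiny direct-mapped table keyed by page number. */
#define NPAGES_SKETCH 256
static extent_meta_sketch_t g_meta[NPAGES_SKETCH];

static bool
rtree_szind_slab_read_sketch(tsdn_t *tsdn, uintptr_t ptr, size_t *szind,
    bool *slab) {
	(void)tsdn;   /* the real lookup consults a per-thread rtree cache */
	extent_meta_sketch_t *m = &g_meta[(ptr >> 12) % NPAGES_SKETCH];
	*szind = m->szind;
	*slab = m->slab;
	return false; /* no error; jemalloc's rtree reads also return bool error flags */
}
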
extent.c
206 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx); in extent_hooks_set()
210 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx); in extent_hooks_set()
1216 if (arena == arena_get(tsd_tsdn(tsd), 0, false)) { in extent_hook_pre_reentrancy()
/external/jemalloc_new/include/jemalloc/internal/
jemalloc_internal_inlines_b.h
17 return arena_get(tsd_tsdn(tsd), 0, true); in arena_choose_impl()
29 arena_get(tsd_tsdn(tsd), 0, false)); in arena_choose_impl()
31 tcache_arena_reassociate(tsd_tsdn(tsd), in arena_choose_impl()
35 tcache_arena_associate(tsd_tsdn(tsd), tcache, in arena_choose_impl()
49 tsd_tsdn(tsd))) { in arena_choose_impl()
55 ret->last_thd = tsd_tsdn(tsd); in arena_choose_impl()
prof_inlines_b.h
146 assert(usize == isalloc(tsd_tsdn(tsd), ptr)); in prof_realloc()
165 prof_malloc_sample_object(tsd_tsdn(tsd), ptr, usize, tctx); in prof_realloc()
167 prof_tctx_set(tsd_tsdn(tsd), ptr, usize, NULL, in prof_realloc()
176 prof_tctx_reset(tsd_tsdn(tsd), ptr, tctx); in prof_realloc()
178 assert((uintptr_t)prof_tctx_get(tsd_tsdn(tsd), ptr, NULL) == in prof_realloc()
196 prof_tctx_t *tctx = prof_tctx_get(tsd_tsdn(tsd), ptr, alloc_ctx); in prof_free()
199 assert(usize == isalloc(tsd_tsdn(tsd), ptr)); in prof_free()
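
prof_inlines_b.h's prof_realloc() splits on whether the new allocation is sampled: a sampled result records a full tctx via prof_malloc_sample_object(), an unsampled one just stamps the unsampled marker via prof_tctx_set(), and the old object's sample is released afterwards. A much-reduced sketch of that branch structure; every _sketch name is a stand-in, and the sentinel value mirrors jemalloc's unsampled marker but should be treated as illustrative.

#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>

typedef struct tsdn_s tsdn_t;
typedef struct prof_tctx_s prof_tctx_t;

/* Sentinel in the spirit of jemalloc's unsampled marker. */
#define PROF_TCTX_UNSAMPLED_SKETCH ((prof_tctx_t *)(uintptr_t)1U)

static void prof_malloc_sample_object_sketch(tsdn_t *t, const void *p,
    size_t usize, prof_tctx_t *tctx) { (void)t; (void)p; (void)usize; (void)tctx; }
static void prof_tctx_set_sketch(tsdn_t *t, const void *p, size_t usize,
    prof_tctx_t *tctx) { (void)t; (void)p; (void)usize; (void)tctx; }
static void prof_free_sampled_object_sketch(tsdn_t *t, prof_tctx_t *tctx) {
	(void)t; (void)tctx;
}

static void
prof_realloc_sketch(tsdn_t *tsdn, const void *ptr, size_t usize,
    prof_tctx_t *tctx, bool sampled, bool old_sampled, prof_tctx_t *old_tctx) {
	if (sampled) {
		prof_malloc_sample_object_sketch(tsdn, ptr, usize, tctx);
	} else {
		prof_tctx_set_sketch(tsdn, ptr, usize,
		    PROF_TCTX_UNSAMPLED_SKETCH);
	}
	/* Release the old sample only after the new object is recorded. */
	if (old_sampled) {
		prof_free_sampled_object_sketch(tsdn, old_tctx);
	}
}
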
tcache_inlines.h
60 ret = tcache_alloc_small_hard(tsd_tsdn(tsd), arena, tcache, in tcache_alloc_small()
74 assert(tcache_salloc(tsd_tsdn(tsd), ret) == usize); in tcache_alloc_small()
126 ret = large_malloc(tsd_tsdn(tsd), arena, sz_s2u(size), zero); in tcache_alloc_large()
173 assert(tcache_salloc(tsd_tsdn(tsd), ptr) <= SMALL_MAXCLASS); in tcache_dalloc_small()
198 assert(tcache_salloc(tsd_tsdn(tsd), ptr) > SMALL_MAXCLASS); in tcache_dalloc_large()
199 assert(tcache_salloc(tsd_tsdn(tsd), ptr) <= tcache_maxclass); in tcache_dalloc_large()
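
The tcache_inlines.h hits are mostly debug assertions pinning each fast path to its size-class range: the small dalloc path requires tcache_salloc() <= SMALL_MAXCLASS, and the large path requires it to be above SMALL_MAXCLASS but within tcache_maxclass (hits 173/198/199). A sketch with made-up boundary values; the real constants come from jemalloc's size-class machinery.

#include <assert.h>
#include <stddef.h>

typedef struct tsdn_s tsdn_t;

#define SMALL_MAXCLASS_SKETCH ((size_t)14336)        /* illustrative boundary */
static const size_t tcache_maxclass_sketch = 32768;  /* illustrative cap */

static size_t
tcache_salloc_sketch(tsdn_t *t, void *ptr) {
	(void)t; (void)ptr;
	return 4096; /* pretend: the real code reads the extent's size class */
}

/* Each dalloc fast path pins its size-class range in debug builds. */
static void
tcache_dalloc_small_sketch(tsdn_t *tsdn, void *ptr) {
	assert(tcache_salloc_sketch(tsdn, ptr) <= SMALL_MAXCLASS_SKETCH);
	/* ... push ptr onto the small bin's cache ... */
}

static void
tcache_dalloc_large_sketch(tsdn_t *tsdn, void *ptr) {
	assert(tcache_salloc_sketch(tsdn, ptr) > SMALL_MAXCLASS_SKETCH);
	assert(tcache_salloc_sketch(tsdn, ptr) <= tcache_maxclass_sketch);
	/* ... push ptr onto the large bin's cache ... */
}
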
arena_inlines_a.h
44 arena_t *newarena = arena_get(tsd_tsdn(tsd), newind, true); in percpu_arena_update()
51 tcache_arena_reassociate(tsd_tsdn(tsd), tcache, in percpu_arena_update()
jemalloc_internal_inlines_c.h
62 return iallocztm(tsd_tsdn(tsd), size, ind, zero, tcache_get(tsd), false, in ialloc()
94 return ipallocztm(tsd_tsdn(tsd), usize, alignment, zero, in ipalloc()
123 idalloctm(tsd_tsdn(tsd), ptr, tcache_get(tsd), NULL, false, true); in idalloc()
197 return iralloct(tsd_tsdn(tsd), ptr, oldsize, size, alignment, zero, in iralloc()
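
jemalloc_internal_inlines_c.h shows the layering rule behind most hits in this search: the tsd-level convenience wrappers (ialloc(), idalloc(), iralloc(), ...) immediately widen tsd to tsdn and delegate to the explicit-everything *tm variants. A sketch of that wrapper shape, with a calloc/malloc-backed stand-in for iallocztm():

#include <stdbool.h>
#include <stddef.h>
#include <stdlib.h>

/* Stand-in types: tsdn_t is just the nullable view of tsd_t. */
typedef struct tsd_s { int unused; } tsd_t;
typedef struct tsdn_s { tsd_t tsd; } tsdn_t;
typedef struct tcache_s tcache_t;

static tsdn_t *tsd_tsdn_sketch(tsd_t *tsd) { return (tsdn_t *)tsd; }
static tcache_t *tcache_get_sketch(tsd_t *tsd) { (void)tsd; return NULL; }

/* The tsdn-level worker: explicit cache, callable with or without a tsd. */
static void *
iallocztm_sketch(tsdn_t *tsdn, size_t size, bool zero, tcache_t *tcache) {
	(void)tsdn; (void)tcache;
	return zero ? calloc(1, size) : malloc(size);
}

/* The tsd-level convenience wrapper, mirroring ialloc(): widen and delegate. */
static void *
ialloc_sketch(tsd_t *tsd, size_t size, bool zero) {
	return iallocztm_sketch(tsd_tsdn_sketch(tsd), size, zero,
	    tcache_get_sketch(tsd));
}
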
tsd.h
137 tsd_tsdn(tsd_t *tsd) { in tsd_tsdn() function
305 return tsd_tsdn(tsd_fetch_impl(false, false)); in tsdn_fetch()
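
The tsd.h hit at 137 is the definition itself, and it explains why every call site above can afford the conversion: tsdn_t is the "nullable" view of tsd_t (a tsdn_t * may be NULL, while a tsd_t * is assumed valid), and tsd_tsdn() is effectively a free cast because tsdn_t wraps a tsd_t as its first member. A compilable sketch; the dummy_state field is a stand-in, not jemalloc's tsd layout.

#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

typedef struct tsd_s { int dummy_state; } tsd_t;  /* stand-in contents */
typedef struct tsdn_s { tsd_t tsd; } tsdn_t;      /* nullable wrapper */

/* Effectively free: tsdn_t's first (and only) member is a tsd_t. */
static inline tsdn_t *
tsd_tsdn(tsd_t *tsd) {
	return (tsdn_t *)tsd;
}

static inline bool
tsdn_null(const tsdn_t *tsdn) {
	return tsdn == NULL;
}

static inline tsd_t *
tsdn_tsd(tsdn_t *tsdn) {
	return &tsdn->tsd; /* only valid after a tsdn_null() check */
}

int
main(void) {
	tsd_t tsd = { .dummy_state = 42 };
	tsdn_t *tsdn = tsd_tsdn(&tsd);
	if (!tsdn_null(tsdn)) {
		printf("state = %d\n", tsdn_tsd(tsdn)->dummy_state);
	}
	return 0;
}

The tsdn_fetch() hit at 305 composes the two directions: fetch the thread's tsd without forcing initialization, then widen it, so a thread with no TSD yields NULL rather than a half-built state.
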
jemalloc_internal_inlines_a.h
151 assert(arena != arena_get(tsd_tsdn(tsd), 0, false)); in pre_reentrancy()
/external/jemalloc_new/test/unit/
background_thread.c
92 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx); in TEST_BEGIN()
96 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx); in TEST_BEGIN()
base.c
33 tsdn_t *tsdn = tsd_tsdn(tsd_fetch()); in TEST_BEGIN()
75 tsdn_t *tsdn = tsd_tsdn(tsd_fetch()); in TEST_BEGIN()
120 tsdn_t *tsdn = tsd_tsdn(tsd_fetch()); in TEST_BEGIN()
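
The unit tests use the composed idiom tsd_tsdn(tsd_fetch()): fetch (and, in the real code, lazily initialize) the calling thread's tsd, then widen it for tsdn-taking APIs. A sketch of the composition; the static-variable "lazy init" below is illustrative only and not per-thread.

#include <stddef.h>

typedef struct tsd_s { int initialized; } tsd_t;  /* stand-in */
typedef struct tsdn_s { tsd_t tsd; } tsdn_t;

static tsdn_t *tsd_tsdn_sketch(tsd_t *tsd) { return (tsdn_t *)tsd; }

/* Fake lazy init; the real tsd_fetch() uses thread-specific data. */
static tsd_t *
tsd_fetch_sketch(void) {
	static tsd_t tsd;  /* illustrative only: not actually per-thread */
	tsd.initialized = 1;
	return &tsd;
}

/* The test-file idiom: one expression yields a usable tsdn. */
static tsdn_t *
tsdn_fetch_sketch(void) {
	return tsd_tsdn_sketch(tsd_fetch_sketch());
}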