Searched refs:malloc_mutex_unlock (Results 1 – 21 of 21) sorted by relevance

/external/jemalloc_new/src/
background_thread.c 197 malloc_mutex_unlock(tsdn, &decay->mtx);
276 malloc_mutex_unlock(tsdn, &info->mtx);
279 malloc_mutex_unlock(tsdn, &background_thread_lock);
333 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
394 malloc_mutex_unlock(tsdn, &background_thread_info[0].mtx);
406 malloc_mutex_unlock(tsdn, &info->mtx);
477 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
504 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
554 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
564 malloc_mutex_unlock(tsd_tsdn(tsd), &t0->mtx);
[all …]
prof.c 235 malloc_mutex_unlock(tsd_tsdn(tsd), tctx->tdata->lock); in rb_gen()
253 malloc_mutex_unlock(tsdn, tctx->tdata->lock); in prof_malloc_sample_object()
267 malloc_mutex_unlock(tsd_tsdn(tsd), tctx->tdata->lock); in prof_free_sampled_object()
297 malloc_mutex_unlock(tsd_tsdn(tsd), &bt2gctx_mtx); in prof_leave()
600 malloc_mutex_unlock(tsd_tsdn(tsd), gctx->lock); in prof_gctx_try_destroy()
608 malloc_mutex_unlock(tsd_tsdn(tsd), gctx->lock); in prof_gctx_try_destroy()
659 malloc_mutex_unlock(tsd_tsdn(tsd), tdata->lock); in prof_tctx_destroy()
702 malloc_mutex_unlock(tsd_tsdn(tsd), gctx->lock); in prof_tctx_destroy()
767 malloc_mutex_unlock(tsd_tsdn(tsd), gctx.p->lock); in prof_lookup_global()
805 malloc_mutex_unlock(tsd_tsdn(tsd), tdata->lock); in prof_lookup()
[all …]
android_je_mallinfo.c 42 malloc_mutex_unlock(TSDN_NULL, &bin->lock); in accumulate_small_allocs()
65 malloc_mutex_unlock(TSDN_NULL, &arenas_lock); in je_mallinfo()
92 malloc_mutex_unlock(TSDN_NULL, &arenas_lock); in je_mallinfo_arena_info()
110 malloc_mutex_unlock(TSDN_NULL, &bin->lock); in je_mallinfo_bin_info()
113 malloc_mutex_unlock(TSDN_NULL, &arenas_lock); in je_mallinfo_bin_info()
tcache.c 169 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock); in tcache_bin_flush_small()
185 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock); in tcache_bin_flush_small()
251 malloc_mutex_unlock(tsd_tsdn(tsd), &locked_arena->large_mtx); in tcache_bin_flush_large()
317 malloc_mutex_unlock(tsdn, &arena->tcache_ql_mtx); in tcache_arena_associate()
343 malloc_mutex_unlock(tsdn, &arena->tcache_ql_mtx); in tcache_arena_dissociate()
559 malloc_mutex_unlock(tsdn, &bin->lock); in tcache_stats_merge()
594 malloc_mutex_unlock(tsd_tsdn(tsd), &tcaches_mtx); in tcaches_create_prep()
628 malloc_mutex_unlock(tsd_tsdn(tsd), &tcaches_mtx); in tcaches_create()
652 malloc_mutex_unlock(tsd_tsdn(tsd), &tcaches_mtx); in tcaches_flush()
665 malloc_mutex_unlock(tsd_tsdn(tsd), &tcaches_mtx); in tcaches_destroy()
rtree.c 114 malloc_mutex_unlock(tsdn, &rtree->init_lock); in rtree_node_init()
123 malloc_mutex_unlock(tsdn, &rtree->init_lock); in rtree_node_init()
140 malloc_mutex_unlock(tsdn, &rtree->init_lock); in rtree_leaf_init()
149 malloc_mutex_unlock(tsdn, &rtree->init_lock); in rtree_leaf_init()
tsd.c 333 malloc_mutex_unlock(TSDN_NULL, &head->lock); in tsd_init_check_recursion()
341 malloc_mutex_unlock(TSDN_NULL, &head->lock); in tsd_init_check_recursion()
349 malloc_mutex_unlock(TSDN_NULL, &head->lock); in tsd_init_finish()
ctl.c 974 malloc_mutex_unlock(tsdn, &mtx); in ctl_refresh()
1070 malloc_mutex_unlock(tsdn, &ctl_mtx); in ctl_init()
1374 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \
1396 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \
1414 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \
1511 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in CTL_RO_NL_GEN()
1567 malloc_mutex_unlock(tsd_tsdn(tsd), &background_thread_lock); in background_thread_ctl()
1568 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in background_thread_ctl()
1632 malloc_mutex_unlock(tsd_tsdn(tsd), &background_thread_lock); in max_background_threads_ctl()
1633 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in max_background_threads_ctl()
[all …]
arena.c 173 malloc_mutex_unlock(tsdn, &arena->tcache_ql_mtx); in arena_stats_merge()
179 malloc_mutex_unlock(tsdn, &arena->mtx); in arena_stats_merge()
704 malloc_mutex_unlock(tsdn, &decay->mtx); in arena_decay_ms_set()
827 malloc_mutex_unlock(tsdn, &decay->mtx); in arena_decay_to_limit()
854 malloc_mutex_unlock(tsdn, &decay->mtx); in arena_decay_impl()
871 malloc_mutex_unlock(tsdn, &decay->mtx); in arena_decay_impl()
981 malloc_mutex_unlock(tsd_tsdn(tsd), &arena->large_mtx); in arena_reset()
999 malloc_mutex_unlock(tsd_tsdn(tsd), &arena->large_mtx); in arena_reset()
1009 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock); in arena_reset()
1015 malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock); in arena_reset()
[all …]
mutex.c 192 malloc_mutex_unlock(tsdn, mutex); in malloc_mutex_postfork_parent()
198 malloc_mutex_unlock(tsdn, mutex); in malloc_mutex_postfork_child()
base.c 290 malloc_mutex_unlock(tsdn, &base->mtx); in base_block_alloc()
316 malloc_mutex_unlock(tsdn, &base->mtx); in base_extent_alloc()
451 malloc_mutex_unlock(tsdn, &base->mtx); in base_alloc_impl()
492 malloc_mutex_unlock(tsdn, &base->mtx); in base_stats_get()
extent.c 181 malloc_mutex_unlock(tsdn, &arena->extent_avail_mtx); in extent_alloc()
185 malloc_mutex_unlock(tsdn, &arena->extent_avail_mtx); in extent_alloc()
193 malloc_mutex_unlock(tsdn, &arena->extent_avail_mtx); in extent_dalloc()
210 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx); in extent_hooks_set()
607 malloc_mutex_unlock(tsdn, &extents->mtx); in extents_evict()
659 malloc_mutex_unlock(tsdn, &extents->mtx); in extent_deactivate()
914 malloc_mutex_unlock(tsdn, &extents->mtx); in extent_recycle_extract()
919 malloc_mutex_unlock(tsdn, &extents->mtx); in extent_recycle_extract()
1374 malloc_mutex_unlock(tsdn, &arena->extent_grow_mtx); in extent_grow_retained()
1401 malloc_mutex_unlock(tsdn, &arena->extent_grow_mtx); in extent_grow_retained()
[all …]
jemalloc.c 348 malloc_mutex_unlock(tsdn, &background_thread_lock); in arena_new_create_background_thread()
363 malloc_mutex_unlock(tsdn, &arenas_lock); in arena_init()
568 malloc_mutex_unlock(tsd_tsdn(tsd), in arena_choose_hard()
579 malloc_mutex_unlock(tsd_tsdn(tsd), &arenas_lock); in arena_choose_hard()
656 malloc_mutex_unlock(tsdn, in stats_print_atexit()
1271 malloc_mutex_unlock(TSDN_NULL, &init_lock);
1347 malloc_mutex_unlock(TSDN_NULL, &init_lock);
1506 malloc_mutex_unlock(tsdn, &init_lock);
1537 malloc_mutex_unlock(TSDN_NULL, &init_lock);
1564 malloc_mutex_unlock(tsd_tsdn(tsd), &init_lock);
[all …]
large.c 65 malloc_mutex_unlock(tsdn, &arena->large_mtx); in large_palloc()
324 malloc_mutex_unlock(tsdn, &arena->large_mtx); in large_dalloc_prep_impl()
/external/jemalloc_new/include/jemalloc/internal/
mutex_pool.h 51 malloc_mutex_unlock(tsdn, mutex); in mutex_pool_unlock()
80 malloc_mutex_unlock(tsdn, mutex1); in mutex_pool_unlock2()
82 malloc_mutex_unlock(tsdn, mutex1); in mutex_pool_unlock2()
83 malloc_mutex_unlock(tsdn, mutex2); in mutex_pool_unlock2()
prof_inlines_a.h 39 malloc_mutex_unlock(tsdn, &prof_accum->mtx); in prof_accum_add()
68 malloc_mutex_unlock(tsdn, &prof_accum->mtx); in prof_accum_cancel()
bin.h 103 malloc_mutex_unlock(tsdn, &bin->lock); in bin_stats_merge()
arena_stats.h 124 malloc_mutex_unlock(tsdn, &arena_stats->mtx); in arena_stats_unlock()
mutex.h 213 malloc_mutex_unlock(tsdn_t *tsdn, malloc_mutex_t *mutex) { in malloc_mutex_unlock() function
/external/jemalloc_new/test/unit/
rtree.c 21 malloc_mutex_unlock(tsdn, &rtree->init_lock); in rtree_node_alloc_intercept()
48 malloc_mutex_unlock(tsdn, &rtree->init_lock); in rtree_leaf_alloc_intercept()
background_thread.c 96 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx); in TEST_BEGIN()
arena_reset.c 153 malloc_mutex_unlock(tsdn, in do_arena_reset_post()
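
The mutex.h hit above is the definition site of malloc_mutex_unlock(); the remaining hits release a mutex that was acquired earlier with malloc_mutex_lock() on the same malloc_mutex_t. A minimal sketch of that pairing follows, assuming jemalloc's internal headers are on the include path; the mutex, counter, and function names below are hypothetical illustrations, not taken from the files listed above:

    #include "jemalloc/internal/jemalloc_preamble.h"
    #include "jemalloc/internal/jemalloc_internal_includes.h"

    /* Hypothetical mutex and counter; the real call sites use per-subsystem
     * mutexes such as arenas_lock or decay->mtx, as seen in the hits above. */
    static malloc_mutex_t example_mtx;  /* assumed already set up via malloc_mutex_init() */
    static size_t example_count;

    static void
    example_bump(tsdn_t *tsdn) {
    	malloc_mutex_lock(tsdn, &example_mtx);    /* acquire */
    	example_count++;                          /* critical section */
    	malloc_mutex_unlock(tsdn, &example_mtx);  /* release: the symbol searched above */
    }

Call sites that run before thread-specific data is available pass TSDN_NULL instead of a real tsdn_t, as in the android_je_mallinfo.c and tsd.c hits above.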