
Lines Matching refs:tsd_tsdn

655 (struct container_s *)base_alloc(tsd_tsdn(tsd), in arenas_i_impl()
663 ret = (ctl_arena_t *)base_alloc(tsd_tsdn(tsd), b0get(), in arenas_i_impl()
903 if (arena_init(tsd_tsdn(tsd), arena_ind, extent_hooks) == NULL) { in ctl_arena_init()
1001 tsdn_t *tsdn = tsd_tsdn(tsd); in ctl_init()
1187 ret = ctl_lookup(tsd_tsdn(tsd), name, nodes, mib, &depth); in ctl_byname()
1213 ret = ctl_lookup(tsd_tsdn(tsd), name, NULL, mibp, miblenp); in ctl_nametomib()
1247 node = inode->index(tsd_tsdn(tsd), mib, miblen, mib[i]); in ctl_bymib()
1365 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); \
1374 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \
1389 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); \
1396 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \
1407 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); \
1414 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); \
1502 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); in CTL_RO_NL_GEN()
1505 ctl_refresh(tsd_tsdn(tsd)); in CTL_RO_NL_GEN()
1511 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in CTL_RO_NL_GEN()
1524 background_thread_ctl_init(tsd_tsdn(tsd)); in background_thread_ctl()
1526 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); in background_thread_ctl()
1527 malloc_mutex_lock(tsd_tsdn(tsd), &background_thread_lock); in background_thread_ctl()
1545 background_thread_enabled_set(tsd_tsdn(tsd), newval); in background_thread_ctl()
1567 malloc_mutex_unlock(tsd_tsdn(tsd), &background_thread_lock); in background_thread_ctl()
1568 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in background_thread_ctl()
1582 background_thread_ctl_init(tsd_tsdn(tsd)); in max_background_threads_ctl()
1584 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); in max_background_threads_ctl()
1585 malloc_mutex_lock(tsd_tsdn(tsd), &background_thread_lock); in max_background_threads_ctl()
1615 background_thread_enabled_set(tsd_tsdn(tsd), false); in max_background_threads_ctl()
1621 background_thread_enabled_set(tsd_tsdn(tsd), true); in max_background_threads_ctl()
1632 malloc_mutex_unlock(tsd_tsdn(tsd), &background_thread_lock); in max_background_threads_ctl()
1633 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in max_background_threads_ctl()
1730 newarena = arena_get(tsd_tsdn(tsd), newind, true); in CTL_RO_CONFIG_GEN()
1738 tcache_arena_reassociate(tsd_tsdn(tsd), in CTL_RO_CONFIG_GEN()
1923 tsdn_t *tsdn = tsd_tsdn(tsd); in arena_i_initialized_ctl()
1997 arena_i_decay(tsd_tsdn(tsd), arena_ind, false); in arena_i_decay_ctl()
2013 arena_i_decay(tsd_tsdn(tsd), arena_ind, true); in arena_i_purge_ctl()
2030 *arena = arena_get(tsd_tsdn(tsd), *arena_ind, false); in arena_i_reset_destroy_helper()
2045 malloc_mutex_lock(tsd_tsdn(tsd), &background_thread_lock); in arena_reset_prepare_background_thread()
2051 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx); in arena_reset_prepare_background_thread()
2053 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx); in arena_reset_prepare_background_thread()
2066 malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx); in arena_reset_finish_background_thread()
2068 malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx); in arena_reset_finish_background_thread()
2070 malloc_mutex_unlock(tsd_tsdn(tsd), &background_thread_lock); in arena_reset_finish_background_thread()
2117 arena_decay(tsd_tsdn(tsd), arena, false, true); in arena_i_destroy_ctl()
2120 ctl_arena_refresh(tsd_tsdn(tsd), arena, ctl_darena, arena_ind, true); in arena_i_destroy_ctl()
2144 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); in arena_i_dss_ctl()
2178 arena_t *arena = arena_get(tsd_tsdn(tsd), arena_ind, false); in arena_i_dss_ctl()
2192 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in arena_i_dss_ctl()
2204 arena = arena_get(tsd_tsdn(tsd), arena_ind, false); in arena_i_decay_ms_ctl_impl()
2220 if (dirty ? arena_dirty_decay_ms_set(tsd_tsdn(tsd), arena, in arena_i_decay_ms_ctl_impl()
2221 *(ssize_t *)newp) : arena_muzzy_decay_ms_set(tsd_tsdn(tsd), in arena_i_decay_ms_ctl_impl()
2254 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); in arena_i_extent_hooks_ctl()
2258 arena = arena_get(tsd_tsdn(tsd), arena_ind, false); in arena_i_extent_hooks_ctl()
2272 arena = arena_init(tsd_tsdn(tsd), arena_ind, in arena_i_extent_hooks_ctl()
2298 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in arena_i_extent_hooks_ctl()
2314 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); in arena_i_retain_grow_limit_ctl()
2317 arena_get(tsd_tsdn(tsd), arena_ind, false)) != NULL) { in arena_i_retain_grow_limit_ctl()
2334 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in arena_i_retain_grow_limit_ctl()
2369 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); in arenas_narenas_ctl()
2380 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in arenas_narenas_ctl()
2460 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); in arenas_create_ctl()
2472 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in arenas_create_ctl()
2487 malloc_mutex_lock(tsd_tsdn(tsd), &ctl_mtx); in arenas_lookup_ctl()
2489 extent = iealloc(tsd_tsdn(tsd), ptr); in arenas_lookup_ctl()
2502 malloc_mutex_unlock(tsd_tsdn(tsd), &ctl_mtx); in arenas_lookup_ctl()
2523 oldval = prof_thread_active_init_set(tsd_tsdn(tsd), in prof_thread_active_init_ctl()
2526 oldval = prof_thread_active_init_get(tsd_tsdn(tsd)); in prof_thread_active_init_ctl()
2550 oldval = prof_active_set(tsd_tsdn(tsd), *(bool *)newp); in prof_active_ctl()
2552 oldval = prof_active_get(tsd_tsdn(tsd)); in prof_active_ctl()
2599 oldval = prof_gdump_set(tsd_tsdn(tsd), *(bool *)newp); in prof_gdump_ctl()
2601 oldval = prof_gdump_get(tsd_tsdn(tsd)); in prof_gdump_ctl()
2774 tsdn_t *tsdn = tsd_tsdn(tsd); in CTL_RO_NL_CGEN()
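
The common thread in every match above is the call tsd_tsdn(tsd): ctl.c holds a known-valid tsd_t * for the calling thread and converts it into the nullable tsdn_t * handle that lower-level helpers such as malloc_mutex_lock(), base_alloc(), and arena_get() take as their first argument. The sketch below illustrates that conversion pattern only; the type layouts and helper bodies are simplified stand-ins written for this note, not jemalloc's exact definitions.

/*
 * Minimal sketch of the tsd -> tsdn conversion pattern repeated in the
 * listing above. Simplified stand-in types, not jemalloc's real layout:
 * the point is that tsd_tsdn() turns a known-valid per-thread state
 * pointer into the "nullable" handle that lower-level callees accept.
 */
#include <assert.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdio.h>

typedef struct tsd_s {      /* per-thread state (simplified) */
	int thread_id;
} tsd_t;

typedef struct tsdn_s {     /* "nullable tsd": NULL means no TSD available */
	tsd_t tsd;
} tsdn_t;

/* Convert a non-NULL tsd_t * into the nullable handle (effectively a cast). */
static inline tsdn_t *
tsd_tsdn(tsd_t *tsd) {
	return (tsdn_t *)tsd;
}

static inline bool
tsdn_null(const tsdn_t *tsdn) {
	return tsdn == NULL;
}

/* A callee that tolerates a NULL handle, e.g. before TSD is initialized. */
static void
do_locked_work(tsdn_t *tsdn) {
	if (tsdn_null(tsdn)) {
		printf("no TSD yet; skipping per-thread bookkeeping\n");
		return;
	}
	printf("working on behalf of thread %d\n", tsdn->tsd.thread_id);
}

int
main(void) {
	tsd_t tsd = { .thread_id = 42 };

	do_locked_work(tsd_tsdn(&tsd)); /* the pattern seen throughout ctl.c */
	do_locked_work(NULL);           /* callers without TSD pass NULL */
	return 0;
}

A plausible reading of this convention, consistent with the calls in the listing, is that a single tsdn_t *-taking API can serve both fully initialized threads (which pass tsd_tsdn(tsd)) and early bootstrap paths that have no thread-specific data yet.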