Lines matching refs:tsd — cross-reference of the tsd (thread-specific data) symbol in jemalloc's src/jemalloc.c. Each entry gives the source line number, the matching line, and (where the indexer recorded it) the enclosing function plus whether the match is an argument or a local.
371 arena_bind(tsd_t *tsd, unsigned ind, bool internal) { in arena_bind() argument
372 arena_t *arena = arena_get(tsd_tsdn(tsd), ind, false); in arena_bind()
376 tsd_iarena_set(tsd, arena); in arena_bind()
378 tsd_arena_set(tsd, arena); in arena_bind()
383 arena_migrate(tsd_t *tsd, unsigned oldind, unsigned newind) { in arena_migrate() argument
386 oldarena = arena_get(tsd_tsdn(tsd), oldind, false); in arena_migrate()
387 newarena = arena_get(tsd_tsdn(tsd), newind, false); in arena_migrate()
390 tsd_arena_set(tsd, newarena); in arena_migrate()
394 arena_unbind(tsd_t *tsd, unsigned ind, bool internal) { in arena_unbind() argument
397 arena = arena_get(tsd_tsdn(tsd), ind, false); in arena_unbind()
401 tsd_iarena_set(tsd, NULL); in arena_unbind()
403 tsd_arena_set(tsd, NULL); in arena_unbind()
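The three functions above implement thread-to-arena binding: arena_bind() stores the chosen arena in the thread's TSD slot (a separate slot exists for internal allocation), arena_migrate() swaps the application slot, and arena_unbind() clears it; the real code also adjusts the arena's bound-thread count on each transition (those lines don't match tsd, so they aren't shown). A minimal sketch of the pattern, using C11 thread-locals and a toy arena type in place of jemalloc's real definitions:

/* Toy stand-ins; tsd_arena_set()/tsd_iarena_set() write real TSD slots. */
#include <stdatomic.h>
#include <stdbool.h>
#include <stddef.h>

typedef struct {
    atomic_uint nthreads;            /* threads bound to this arena */
} toy_arena_t;

static _Thread_local toy_arena_t *tls_arena;   /* tsd_arena_get/set analogue */
static _Thread_local toy_arena_t *tls_iarena;  /* tsd_iarena_get/set analogue */

static void
toy_arena_bind(toy_arena_t *arena, bool internal) {
    atomic_fetch_add(&arena->nthreads, 1);
    *(internal ? &tls_iarena : &tls_arena) = arena;
}

static void
toy_arena_migrate(toy_arena_t *oldarena, toy_arena_t *newarena) {
    atomic_fetch_sub(&oldarena->nthreads, 1);
    atomic_fetch_add(&newarena->nthreads, 1);
    tls_arena = newarena;            /* tsd_arena_set(tsd, newarena) */
}

static void
toy_arena_unbind(bool internal) {
    toy_arena_t **slot = internal ? &tls_iarena : &tls_arena;
    if (*slot != NULL) {
        atomic_fetch_sub(&(*slot)->nthreads, 1);
        *slot = NULL;
    }
}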
408 arena_tdata_get_hard(tsd_t *tsd, unsigned ind) { in arena_tdata_get_hard() argument
410 arena_tdata_t *arenas_tdata = tsd_arenas_tdata_get(tsd); in arena_tdata_get_hard()
412 unsigned narenas_tdata = tsd_narenas_tdata_get(tsd); in arena_tdata_get_hard()
424 tsd_arenas_tdata_set(tsd, arenas_tdata); in arena_tdata_get_hard()
425 tsd_narenas_tdata_set(tsd, narenas_tdata); in arena_tdata_get_hard()
433 bool *arenas_tdata_bypassp = tsd_arenas_tdata_bypassp_get(tsd); in arena_tdata_get_hard()
436 if (tsd_nominal(tsd) && !*arenas_tdata_bypassp) { in arena_tdata_get_hard()
446 assert(tsd_nominal(tsd) && !*arenas_tdata_bypassp); in arena_tdata_get_hard()
447 tsd_arenas_tdata_set(tsd, arenas_tdata); in arena_tdata_get_hard()
448 tsd_narenas_tdata_set(tsd, narenas_tdata); in arena_tdata_get_hard()
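arena_tdata_get_hard() is the slow path that (re)allocates the per-thread arenas_tdata array so it covers index ind, then publishes the new pointer and length back into TSD; the bypass flag keeps a thread that is tearing down from rebuilding the array. A sketch of that lazy-growth shape, with toy thread-locals standing in for the TSD slots (the real code sizes the array to the arena count, not just ind + 1):

#include <stdlib.h>
#include <string.h>

typedef struct { unsigned dummy; } toy_tdata_t;  /* hypothetical payload */

static _Thread_local toy_tdata_t *tls_tdata;     /* arenas_tdata slot */
static _Thread_local unsigned tls_ntdata;        /* narenas_tdata slot */
static _Thread_local int tls_tdata_bypass;       /* teardown guard */

static toy_tdata_t *
toy_tdata_get_hard(unsigned ind) {
    if (tls_tdata_bypass) {
        return NULL;            /* thread is exiting; do not rebuild */
    }
    if (ind >= tls_ntdata) {
        unsigned n = ind + 1;
        toy_tdata_t *grown = calloc(n, sizeof(*grown));
        if (grown == NULL) {
            return NULL;
        }
        if (tls_tdata != NULL) {
            memcpy(grown, tls_tdata, tls_ntdata * sizeof(*grown));
            free(tls_tdata);
        }
        tls_tdata = grown;      /* tsd_arenas_tdata_set() analogue */
        tls_ntdata = n;         /* tsd_narenas_tdata_set() analogue */
    }
    return &tls_tdata[ind];
}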
485 arena_choose_hard(tsd_t *tsd, bool internal) { in arena_choose_hard() argument
490 ret = arena_get(tsd_tsdn(tsd), choose, true); in arena_choose_hard()
492 arena_bind(tsd, arena_ind_get(ret), false); in arena_choose_hard()
493 arena_bind(tsd, arena_ind_get(ret), true); in arena_choose_hard()
516 malloc_mutex_lock(tsd_tsdn(tsd), &arenas_lock); in arena_choose_hard()
517 assert(arena_get(tsd_tsdn(tsd), 0, false) != NULL); in arena_choose_hard()
519 if (arena_get(tsd_tsdn(tsd), i, false) != NULL) { in arena_choose_hard()
526 tsd_tsdn(tsd), i, false), !!j) < in arena_choose_hard()
528 tsd_tsdn(tsd), choose[j], false), in arena_choose_hard()
548 if (arena_nthreads_get(arena_get(tsd_tsdn(tsd), in arena_choose_hard()
556 ret = arena_get(tsd_tsdn(tsd), in arena_choose_hard()
564 arena = arena_init_locked(tsd_tsdn(tsd), in arena_choose_hard()
568 malloc_mutex_unlock(tsd_tsdn(tsd), in arena_choose_hard()
577 arena_bind(tsd, choose[j], !!j); in arena_choose_hard()
579 malloc_mutex_unlock(tsd_tsdn(tsd), &arenas_lock); in arena_choose_hard()
585 tsd_tsdn(tsd), choose[j]); in arena_choose_hard()
590 ret = arena_get(tsd_tsdn(tsd), 0, false); in arena_choose_hard()
591 arena_bind(tsd, 0, false); in arena_choose_hard()
592 arena_bind(tsd, 0, true); in arena_choose_hard()
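arena_choose_hard() runs under arenas_lock and, among already-initialized arenas, prefers the one with the fewest bound threads, initializing a fresh arena when there is room (arena_init_locked above) and falling back to arena 0 otherwise. The least-loaded scan, reduced to a sketch under stated assumptions (fixed toy arena table, no lazy initialization, no internal/application split):

#include <stdatomic.h>
#include <stddef.h>

typedef struct {
    atomic_uint nthreads;       /* threads currently bound */
} toy_arena_t;

static toy_arena_t *toy_arenas[8];   /* hypothetical arena table */

/* Caller is assumed to hold the table lock (arenas_lock analogue). */
static toy_arena_t *
toy_choose_least_loaded(void) {
    toy_arena_t *best = NULL;
    for (size_t i = 0; i < sizeof(toy_arenas) / sizeof(toy_arenas[0]); i++) {
        toy_arena_t *a = toy_arenas[i];
        if (a == NULL) {
            continue;           /* uninitialized slot; real code may init one */
        }
        if (best == NULL ||
            atomic_load(&a->nthreads) < atomic_load(&best->nthreads)) {
            best = a;
        }
    }
    return best;                /* NULL only if no arena is initialized */
}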
599 iarena_cleanup(tsd_t *tsd) { in iarena_cleanup() argument
602 iarena = tsd_iarena_get(tsd); in iarena_cleanup()
604 arena_unbind(tsd, arena_ind_get(iarena), true); in iarena_cleanup()
609 arena_cleanup(tsd_t *tsd) { in arena_cleanup() argument
612 arena = tsd_arena_get(tsd); in arena_cleanup()
614 arena_unbind(tsd, arena_ind_get(arena), false); in arena_cleanup()
619 arenas_tdata_cleanup(tsd_t *tsd) { in arenas_tdata_cleanup() argument
623 *tsd_arenas_tdata_bypassp_get(tsd) = true; in arenas_tdata_cleanup()
625 arenas_tdata = tsd_arenas_tdata_get(tsd); in arenas_tdata_cleanup()
627 tsd_arenas_tdata_set(tsd, NULL); in arenas_tdata_cleanup()
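The three cleanup hooks run at thread exit; note that arenas_tdata_cleanup() raises the bypass flag before freeing the tdata array, so a re-entrant allocation during teardown cannot rebuild it. A sketch of that ordering wired to a pthreads key destructor, one common way such TSD cleanup gets triggered (toy state, not jemalloc's tsd machinery; the destructor only fires for threads that stored a non-NULL specific value):

#include <pthread.h>
#include <stdlib.h>

static pthread_key_t toy_key;
static _Thread_local int *toy_tdata;    /* arenas_tdata analogue */
static _Thread_local int toy_bypass;    /* arenas_tdata_bypass analogue */

static void
toy_thread_cleanup(void *arg) {
    (void)arg;                 /* pthread_setspecific() value, unused here */
    toy_bypass = 1;            /* set bypass first: no rebuild during teardown */
    free(toy_tdata);
    toy_tdata = NULL;          /* tsd_arenas_tdata_set(tsd, NULL) analogue */
}

static void
toy_tsd_boot(void) {
    (void)pthread_key_create(&toy_key, toy_thread_cleanup);
}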
677 tsd_t *tsd = tsdn_tsd(tsdn); in check_entry_exit_locking() local
682 int8_t reentrancy_level = tsd_reentrancy_level_get(tsd); in check_entry_exit_locking()
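check_entry_exit_locking() asserts at every public entry and exit that a thread which is not re-entering the allocator holds no internal locks; the real check rides on the witness lock-order machinery seen elsewhere in this listing. A toy equivalent with plain per-thread counters:

#include <assert.h>

static _Thread_local int toy_locks_held;        /* witness depth analogue */
static _Thread_local int toy_reentrancy_level;

static void
toy_check_entry_exit_locking(void) {
    if (toy_reentrancy_level == 0) {
        assert(toy_locks_held == 0);  /* no leaked locks at API boundary */
    }
}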
1509 tsd_t *tsd = tsdn_tsd(tsdn); local
1510 assert(tsd_reentrancy_level_get(tsd) > 0);
1511 post_reentrancy(tsd);
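The assert/post_reentrancy() pair above is one half of the reentrancy guard; pre_reentrancy() (visible further down in malloc_init_hard) raises a per-thread level so that any allocation performed inside the guarded region takes a safe fallback path. A sketch with a plain thread-local counter:

#include <assert.h>
#include <stdint.h>

static _Thread_local int8_t toy_reentrancy_level;

static void
toy_pre_reentrancy(void) {
    toy_reentrancy_level++;
}

static void
toy_post_reentrancy(void) {
    assert(toy_reentrancy_level > 0);
    toy_reentrancy_level--;
}

static int
toy_is_reentrant(void) {
    return toy_reentrancy_level != 0;  /* tsd_reentrancy_level_get() != 0 */
}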
1517 tsd_t *tsd; local
1539 tsd = malloc_tsd_boot0();
1540 if (tsd == NULL) {
1547 malloc_mutex_lock(tsd_tsdn(tsd), &init_lock);
1549 pre_reentrancy(tsd, NULL);
1551 if (malloc_init_narenas() || background_thread_boot1(tsd_tsdn(tsd))) {
1552 UNLOCK_RETURN(tsd_tsdn(tsd), true, true)
1554 if (config_prof && prof_boot2(tsd)) {
1555 UNLOCK_RETURN(tsd_tsdn(tsd), true, true)
1561 UNLOCK_RETURN(tsd_tsdn(tsd), true, true)
1563 post_reentrancy(tsd);
1564 malloc_mutex_unlock(tsd_tsdn(tsd), &init_lock);
1567 tsd_witness_tsdp_get_unsafe(tsd)));
1570 tsd = tsd_fetch();
1578 background_thread_ctl_init(tsd_tsdn(tsd));
1580 malloc_mutex_lock(tsd_tsdn(tsd), &background_thread_lock);
1581 bool err = background_thread_create(tsd, 0);
1582 malloc_mutex_unlock(tsd_tsdn(tsd), &background_thread_lock);
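malloc_init_hard() stages its boot steps under init_lock, and the UNLOCK_RETURN macro bails out with the lock released whenever a stage fails (each boot function returns true on error). The shape, reduced to a portable sketch with hypothetical stage functions in place of malloc_init_narenas()/prof_boot2()/etc.:

#include <pthread.h>
#include <stdbool.h>

static pthread_mutex_t toy_init_lock = PTHREAD_MUTEX_INITIALIZER;

static bool toy_stage_a(void) { return false; }  /* false == success */
static bool toy_stage_b(void) { return false; }

static bool
toy_init_hard(void) {
    pthread_mutex_lock(&toy_init_lock);
    if (toy_stage_a() || toy_stage_b()) {
        pthread_mutex_unlock(&toy_init_lock);    /* UNLOCK_RETURN analogue */
        return true;                             /* boot failed */
    }
    pthread_mutex_unlock(&toy_init_lock);
    return false;
}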
1688 imalloc_no_sample(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd, argument
1697 tcache = tsd_tcachep_get(tsd);
1698 assert(tcache == tcache_get(tsd));
1700 tcache = tcache_get(tsd);
1705 tcache = tcaches_get(tsd, dopts->tcache_ind);
1717 arena = arena_get(tsd_tsdn(tsd), dopts->arena_ind, true);
1721 return ipalloct(tsd_tsdn(tsd), usize, dopts->alignment,
1725 return iallocztm(tsd_tsdn(tsd), size, ind, dopts->zero, tcache, false,
1730 imalloc_sample(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd, argument
1747 ret = imalloc_no_sample(sopts, dopts, tsd, bumped_usize,
1752 arena_prof_promote(tsd_tsdn(tsd), ret, usize);
1754 ret = imalloc_no_sample(sopts, dopts, tsd, usize, usize, ind);
1803 imalloc_body(static_opts_t *sopts, dynamic_opts_t *dopts, tsd_t *tsd) { argument
1863 check_entry_exit_locking(tsd_tsdn(tsd));
1869 reentrancy_level = tsd_reentrancy_level_get(tsd);
1890 tsd, usize, prof_active_get_unlocked(), true);
1896 sopts, dopts, tsd, usize, usize, ind);
1903 sopts, dopts, tsd, usize, ind);
1910 prof_alloc_rollback(tsd, tctx, true);
1913 prof_malloc(tsd_tsdn(tsd), allocation, usize, &alloc_ctx, tctx);
1921 allocation = imalloc_no_sample(sopts, dopts, tsd, size, usize,
1936 assert(usize == isalloc(tsd_tsdn(tsd), allocation));
1937 *tsd_thread_allocatedp_get(tsd) += usize;
1945 check_entry_exit_locking(tsd_tsdn(tsd));
1959 check_entry_exit_locking(tsd_tsdn(tsd));
1990 check_entry_exit_locking(tsd_tsdn(tsd));
2015 tsd_t *tsd = tsd_fetch(); local
2016 assert(tsd);
2017 if (likely(tsd_fast(tsd))) {
2019 tsd_assert_fast(tsd);
2021 return imalloc_body(sopts, dopts, tsd);
2024 return imalloc_body(sopts, dopts, tsd);
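imalloc() dispatches on tsd_fast(tsd): when the TSD is in its fast state the body can assert invariants instead of checking them, otherwise it runs with full entry/exit checking; both paths share imalloc_body(). A toy version of that dispatch:

#include <assert.h>
#include <stdbool.h>
#include <stddef.h>

static _Thread_local bool toy_tsd_fast = true;

static void *
toy_body(bool slow_checks) {        /* imalloc_body() stand-in */
    (void)slow_checks;
    return NULL;
}

static void *
toy_imalloc(void) {
    if (toy_tsd_fast) {             /* likely(tsd_fast(tsd)) */
        assert(toy_tsd_fast);       /* tsd_assert_fast() analogue */
        return toy_body(false);     /* fast: assertions only */
    }
    return toy_body(true);          /* slow: full checking */
}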
2162 irealloc_prof_sample(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize, argument
2170 p = iralloc(tsd, old_ptr, old_usize, LARGE_MINCLASS, 0, false);
2174 arena_prof_promote(tsd_tsdn(tsd), p, usize);
2176 p = iralloc(tsd, old_ptr, old_usize, usize, 0, false);
2183 irealloc_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t usize, argument
2190 old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr, alloc_ctx);
2191 tctx = prof_alloc_prep(tsd, usize, prof_active, true);
2193 p = irealloc_prof_sample(tsd, old_ptr, old_usize, usize, tctx);
2195 p = iralloc(tsd, old_ptr, old_usize, usize, 0, false);
2198 prof_alloc_rollback(tsd, tctx, true);
2201 prof_realloc(tsd, p, usize, tctx, prof_active, true, old_ptr, old_usize,
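irealloc_prof() shows the heap-profiling sample pattern used throughout this file: prof_alloc_prep() hands back a tctx; a sampled allocation is made at a bumped (large) size class so it can be promoted and tracked, an unsampled one goes through plain iralloc(); on failure the tctx is rolled back. A hedged sketch with toy prof hooks and an arbitrary stand-in for LARGE_MINCLASS:

#include <stddef.h>
#include <stdlib.h>

typedef struct { int live; } toy_tctx_t;   /* hypothetical sample context */

#define TOY_LARGE_MIN 4096                 /* stand-in for LARGE_MINCLASS */

static toy_tctx_t *toy_prof_prep(size_t usize) { (void)usize; return NULL; }
static void toy_prof_rollback(toy_tctx_t *t)   { (void)t; }

static void *
toy_realloc_prof(void *old_ptr, size_t usize) {
    toy_tctx_t *tctx = toy_prof_prep(usize);  /* prof_alloc_prep() analogue */
    void *p;
    if (tctx != NULL) {
        /* Sampled: allocate at the bumped size class; the real code then
         * promotes the result via arena_prof_promote(). */
        p = realloc(old_ptr, usize < TOY_LARGE_MIN ? TOY_LARGE_MIN : usize);
    } else {
        p = realloc(old_ptr, usize);          /* iralloc() analogue */
    }
    if (p == NULL) {
        toy_prof_rollback(tctx);              /* prof_alloc_rollback() */
        return NULL;
    }
    /* prof_realloc() records p against tctx here in the real code. */
    return p;
}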
2208 ifree(tsd_t *tsd, void *ptr, tcache_t *tcache, bool slow_path) { argument
2210 tsd_assert_fast(tsd);
2212 check_entry_exit_locking(tsd_tsdn(tsd));
2213 if (tsd_reentrancy_level_get(tsd) != 0) {
2221 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
2222 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
2229 prof_free(tsd, ptr, usize, &alloc_ctx);
2234 *tsd_thread_deallocatedp_get(tsd) += usize;
2238 idalloctm(tsd_tsdn(tsd), ptr, tcache, &alloc_ctx, false,
2241 idalloctm(tsd_tsdn(tsd), ptr, tcache, &alloc_ctx, false,
2247 isfree(tsd_t *tsd, void *ptr, size_t usize, tcache_t *tcache, bool slow_path) { argument
2249 tsd_assert_fast(tsd);
2251 check_entry_exit_locking(tsd_tsdn(tsd));
2252 if (tsd_reentrancy_level_get(tsd) != 0) {
2271 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
2272 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree,
2279 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
2280 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
2289 prof_free(tsd, ptr, usize, ctx);
2292 *tsd_thread_deallocatedp_get(tsd) += usize;
2296 isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, ctx, false);
2298 isdalloct(tsd_tsdn(tsd), ptr, usize, tcache, ctx, true);
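ifree() and isfree() differ mainly in that the sized variant trusts (and, in debug builds, verifies) the caller-supplied usable size instead of looking it up; both credit the thread's deallocated-bytes counter before releasing the memory. The bookkeeping, reduced to a sketch:

#include <stddef.h>
#include <stdlib.h>

static _Thread_local size_t toy_thread_deallocated;

static void
toy_isfree(void *ptr, size_t usize) {
    if (ptr == NULL) {
        return;
    }
    toy_thread_deallocated += usize;  /* *tsd_thread_deallocatedp_get(tsd) += usize */
    free(ptr);                        /* isdalloct() analogue */
}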
2318 tsd_t *tsd = tsd_fetch(); local
2319 if (tsd_reentrancy_level_get(tsd) == 0) {
2320 tcache = tcache_get(tsd);
2324 ifree(tsd, ptr, tcache, true);
2334 tsd_t *tsd = tsd_fetch(); local
2336 check_entry_exit_locking(tsd_tsdn(tsd));
2339 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
2340 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
2344 assert(old_usize == isalloc(tsd_tsdn(tsd), ptr));
2348 NULL : irealloc_prof(tsd, ptr, old_usize, usize,
2354 ret = iralloc(tsd, ptr, old_usize, size, 0, false);
2356 tsdn = tsd_tsdn(tsd);
2373 tsd_t *tsd; local
2376 tsd = tsdn_tsd(tsdn);
2377 *tsd_thread_allocatedp_get(tsd) += usize;
2378 *tsd_thread_deallocatedp_get(tsd) += old_usize;
2401 tsd_t *tsd = tsd_fetch_min(); local
2402 check_entry_exit_locking(tsd_tsdn(tsd));
2405 if (likely(tsd_fast(tsd))) {
2406 tsd_assert_fast(tsd);
2408 tcache = tsd_tcachep_get(tsd);
2409 ifree(tsd, ptr, tcache, false);
2411 if (likely(tsd_reentrancy_level_get(tsd) == 0)) {
2412 tcache = tcache_get(tsd);
2416 ifree(tsd, ptr, tcache, true);
2418 check_entry_exit_locking(tsd_tsdn(tsd));
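The free path above picks its tcache by TSD state: the fast path reuses the cached per-thread tcache pointer, while a reentrant thread ends up with NULL so the deallocation cannot recurse into tcache code. A toy selection helper:

#include <stdbool.h>
#include <stddef.h>

typedef struct { int dummy; } toy_tcache_t;   /* stand-in */

static _Thread_local toy_tcache_t toy_tcache_storage;
static _Thread_local bool toy_fast = true;
static _Thread_local int toy_reentrancy;

static toy_tcache_t *
toy_tcache_for_free(void) {
    if (toy_fast) {
        return &toy_tcache_storage;   /* tsd_tcachep_get() analogue */
    }
    if (toy_reentrancy == 0) {
        return &toy_tcache_storage;   /* tcache_get() analogue */
    }
    return NULL;                      /* reentrant: bypass the tcache */
}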
2635 irallocx_prof(tsd_t *tsd, void *old_ptr, size_t old_usize, size_t size, argument
2643 old_tctx = prof_tctx_get(tsd_tsdn(tsd), old_ptr, alloc_ctx);
2644 tctx = prof_alloc_prep(tsd, *usize, prof_active, false);
2646 p = irallocx_prof_sample(tsd_tsdn(tsd), old_ptr, old_usize,
2649 p = iralloct(tsd_tsdn(tsd), old_ptr, old_usize, size, alignment,
2653 prof_alloc_rollback(tsd, tctx, false);
2666 *usize = isalloc(tsd_tsdn(tsd), p);
2668 prof_realloc(tsd, p, *usize, tctx, prof_active, false, old_ptr,
2679 tsd_t *tsd; local
2694 tsd = tsd_fetch();
2695 check_entry_exit_locking(tsd_tsdn(tsd));
2699 arena = arena_get(tsd_tsdn(tsd), arena_ind, true);
2711 tcache = tcaches_get(tsd, MALLOCX_TCACHE_GET(flags));
2714 tcache = tcache_get(tsd);
2718 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
2719 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
2723 assert(old_usize == isalloc(tsd_tsdn(tsd), ptr));
2730 p = irallocx_prof(tsd, ptr, old_usize, size, alignment, &usize,
2736 p = iralloct(tsd_tsdn(tsd), ptr, old_usize, size, alignment,
2742 usize = isalloc(tsd_tsdn(tsd), p);
2748 *tsd_thread_allocatedp_get(tsd) += usize;
2749 *tsd_thread_deallocatedp_get(tsd) += old_usize;
2752 check_entry_exit_locking(tsd_tsdn(tsd));
2762 check_entry_exit_locking(tsd_tsdn(tsd));
2796 ixallocx_prof(tsd_t *tsd, void *ptr, size_t old_usize, size_t size, argument
2803 old_tctx = prof_tctx_get(tsd_tsdn(tsd), ptr, alloc_ctx);
2825 tctx = prof_alloc_prep(tsd, usize_max, prof_active, false);
2828 usize = ixallocx_prof_sample(tsd_tsdn(tsd), ptr, old_usize,
2831 usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size,
2835 prof_alloc_rollback(tsd, tctx, false);
2838 prof_realloc(tsd, ptr, usize, tctx, prof_active, false, ptr, old_usize,
2846 tsd_t *tsd; local
2858 tsd = tsd_fetch();
2859 check_entry_exit_locking(tsd_tsdn(tsd));
2862 rtree_ctx_t *rtree_ctx = tsd_rtree_ctx(tsd);
2863 rtree_szind_slab_read(tsd_tsdn(tsd), &extents_rtree, rtree_ctx,
2867 assert(old_usize == isalloc(tsd_tsdn(tsd), ptr));
2886 usize = ixallocx_prof(tsd, ptr, old_usize, size, extra,
2889 usize = ixallocx_helper(tsd_tsdn(tsd), ptr, old_usize, size,
2897 *tsd_thread_allocatedp_get(tsd) += usize;
2898 *tsd_thread_deallocatedp_get(tsd) += old_usize;
2902 check_entry_exit_locking(tsd_tsdn(tsd));
2942 tsd_t *tsd = tsd_fetch(); local
2943 bool fast = tsd_fast(tsd);
2944 check_entry_exit_locking(tsd_tsdn(tsd));
2949 assert(tsd_reentrancy_level_get(tsd) == 0);
2953 tcache = tcaches_get(tsd, MALLOCX_TCACHE_GET(flags));
2957 tcache = tsd_tcachep_get(tsd);
2958 assert(tcache == tcache_get(tsd));
2960 if (likely(tsd_reentrancy_level_get(tsd) == 0)) {
2961 tcache = tcache_get(tsd);
2970 tsd_assert_fast(tsd);
2971 ifree(tsd, ptr, tcache, false);
2973 ifree(tsd, ptr, tcache, true);
2975 check_entry_exit_locking(tsd_tsdn(tsd));
3002 tsd_t *tsd = tsd_fetch(); local
3003 bool fast = tsd_fast(tsd);
3004 size_t usize = inallocx(tsd_tsdn(tsd), size, flags);
3005 assert(usize == isalloc(tsd_tsdn(tsd), ptr));
3006 check_entry_exit_locking(tsd_tsdn(tsd));
3011 assert(tsd_reentrancy_level_get(tsd) == 0);
3015 tcache = tcaches_get(tsd, MALLOCX_TCACHE_GET(flags));
3019 tcache = tsd_tcachep_get(tsd);
3020 assert(tcache == tcache_get(tsd));
3022 if (likely(tsd_reentrancy_level_get(tsd) == 0)) {
3023 tcache = tcache_get(tsd);
3032 tsd_assert_fast(tsd);
3033 isfree(tsd, ptr, usize, tcache, false);
3035 isfree(tsd, ptr, usize, tcache, true);
3037 check_entry_exit_locking(tsd_tsdn(tsd));
3073 tsd_t *tsd; local
3082 tsd = tsd_fetch();
3083 check_entry_exit_locking(tsd_tsdn(tsd));
3084 ret = ctl_byname(tsd, name, oldp, oldlenp, newp, newlen);
3085 check_entry_exit_locking(tsd_tsdn(tsd));
3102 tsd_t *tsd = tsd_fetch(); local
3103 check_entry_exit_locking(tsd_tsdn(tsd));
3104 ret = ctl_nametomib(tsd, name, mibp, miblenp);
3105 check_entry_exit_locking(tsd_tsdn(tsd));
3115 tsd_t *tsd; local
3124 tsd = tsd_fetch();
3125 check_entry_exit_locking(tsd_tsdn(tsd));
3126 ret = ctl_bymib(tsd, mib, miblen, oldp, oldlenp, newp, newlen);
3127 check_entry_exit_locking(tsd_tsdn(tsd));
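ctl_byname()/ctl_nametomib()/ctl_bymib() back the public mallctl family. Unlike the internals above, that API is documented in jemalloc(3), so this is a standard usage example (link with -ljemalloc; assumes the default unprefixed public names): translate a name once, then read repeatedly by MIB:

#include <jemalloc/jemalloc.h>
#include <stdio.h>

int
main(void) {
    size_t allocated, len = sizeof(allocated);
    /* By name. */
    if (mallctl("stats.allocated", &allocated, &len, NULL, 0) == 0) {
        printf("allocated: %zu\n", allocated);
    }
    /* By MIB: resolve the name once, then reuse the MIB on each read. */
    size_t mib[2], miblen = 2;
    if (mallctlnametomib("stats.allocated", mib, &miblen) == 0 &&
        mallctlbymib(mib, miblen, &allocated, &len, NULL, 0) == 0) {
        printf("allocated (by MIB): %zu\n", allocated);
    }
    return 0;
}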
3212 tsd_t *tsd; local
3223 tsd = tsd_fetch();
3227 witness_prefork(tsd_witness_tsdp_get(tsd));
3229 ctl_prefork(tsd_tsdn(tsd));
3230 tcache_prefork(tsd_tsdn(tsd));
3231 malloc_mutex_prefork(tsd_tsdn(tsd), &arenas_lock);
3233 background_thread_prefork0(tsd_tsdn(tsd));
3235 prof_prefork0(tsd_tsdn(tsd));
3237 background_thread_prefork1(tsd_tsdn(tsd));
3242 if ((arena = arena_get(tsd_tsdn(tsd), j, false)) !=
3246 arena_prefork0(tsd_tsdn(tsd), arena);
3249 arena_prefork1(tsd_tsdn(tsd), arena);
3252 arena_prefork2(tsd_tsdn(tsd), arena);
3255 arena_prefork3(tsd_tsdn(tsd), arena);
3258 arena_prefork4(tsd_tsdn(tsd), arena);
3261 arena_prefork5(tsd_tsdn(tsd), arena);
3264 arena_prefork6(tsd_tsdn(tsd), arena);
3267 arena_prefork7(tsd_tsdn(tsd), arena);
3274 prof_prefork1(tsd_tsdn(tsd));
3285 tsd_t *tsd; local
3295 tsd = tsd_fetch();
3297 witness_postfork_parent(tsd_witness_tsdp_get(tsd));
3302 if ((arena = arena_get(tsd_tsdn(tsd), i, false)) != NULL) {
3303 arena_postfork_parent(tsd_tsdn(tsd), arena);
3306 prof_postfork_parent(tsd_tsdn(tsd));
3308 background_thread_postfork_parent(tsd_tsdn(tsd));
3310 malloc_mutex_postfork_parent(tsd_tsdn(tsd), &arenas_lock);
3311 tcache_postfork_parent(tsd_tsdn(tsd));
3312 ctl_postfork_parent(tsd_tsdn(tsd));
3317 tsd_t *tsd; local
3322 tsd = tsd_fetch();
3324 witness_postfork_child(tsd_witness_tsdp_get(tsd));
3329 if ((arena = arena_get(tsd_tsdn(tsd), i, false)) != NULL) {
3330 arena_postfork_child(tsd_tsdn(tsd), arena);
3333 prof_postfork_child(tsd_tsdn(tsd));
3335 background_thread_postfork_child(tsd_tsdn(tsd));
3337 malloc_mutex_postfork_child(tsd_tsdn(tsd), &arenas_lock);
3338 tcache_postfork_child(tsd_tsdn(tsd));
3339 ctl_postfork_child(tsd_tsdn(tsd));
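The prefork/postfork_parent/postfork_child trio is the classic pthread_atfork() discipline: acquire every allocator mutex (in a fixed order) before fork() so the child never inherits a lock held by a thread that no longer exists, then release on both sides afterward. A one-lock sketch; note that real allocators, jemalloc included, tend to reinitialize mutexes in the child rather than unlock them:

#include <pthread.h>

static pthread_mutex_t toy_lock = PTHREAD_MUTEX_INITIALIZER;

static void toy_prefork(void)         { pthread_mutex_lock(&toy_lock); }
static void toy_postfork_parent(void) { pthread_mutex_unlock(&toy_lock); }
static void toy_postfork_child(void)  {
    /* Sketch only: a robust child handler reinitializes the lock. */
    pthread_mutex_unlock(&toy_lock);
}

static void
toy_register_fork_handlers(void) {
    (void)pthread_atfork(toy_prefork, toy_postfork_parent,
        toy_postfork_child);
}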