/external/jemalloc_new/src/
tsd.c
    55  tsd_slow_update(tsd_t *tsd) {
    56      if (tsd_nominal(tsd)) {
    57          if (malloc_slow || !tsd_tcache_enabled_get(tsd) ||
    58              tsd_reentrancy_level_get(tsd) > 0) {
    59              tsd->state = tsd_state_nominal_slow;
    61              tsd->state = tsd_state_nominal;
    67  tsd_data_init(tsd_t *tsd) {
    72      rtree_ctx_data_init(tsd_rtree_ctxp_get_unsafe(tsd));
    81      *tsd_offset_statep_get(tsd) = config_debug ? 0 :
    82          (uint64_t)(uintptr_t)tsd;
    [all …]
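The tsd.c matches above show jemalloc deciding whether a thread can stay on the allocator's fast path. As a rough, self-contained sketch of that state choice (simplified names; the real tsd_t tracks several more states and fields):

    #include <stdbool.h>

    /* Condensed view of the choice made in tsd_slow_update(): a thread stays on
     * the fast path only while no slow-path feature applies to it. */
    typedef enum {
        STATE_NOMINAL,       /* fast path, no extra per-allocation checks */
        STATE_NOMINAL_SLOW   /* still usable, but allocation takes the slow path */
    } demo_state_t;

    typedef struct {
        demo_state_t state;
        bool tcache_enabled;
        int reentrancy_level;
    } demo_tsd_t;

    static bool demo_malloc_slow;   /* e.g. junk filling or profiling is enabled */

    static void demo_slow_update(demo_tsd_t *tsd) {
        if (demo_malloc_slow || !tsd->tcache_enabled || tsd->reentrancy_level > 0)
            tsd->state = STATE_NOMINAL_SLOW;
        else
            tsd->state = STATE_NOMINAL;
    }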
jemalloc.c
   371  arena_bind(tsd_t *tsd, unsigned ind, bool internal) {
   372      arena_t *arena = arena_get(tsd_tsdn(tsd), ind, false);
   376      tsd_iarena_set(tsd, arena);
   378      tsd_arena_set(tsd, arena);
   383  arena_migrate(tsd_t *tsd, unsigned oldind, unsigned newind) {
   386      oldarena = arena_get(tsd_tsdn(tsd), oldind, false);
   387      newarena = arena_get(tsd_tsdn(tsd), newind, false);
   390      tsd_arena_set(tsd, newarena);
   394  arena_unbind(tsd_t *tsd, unsigned ind, bool internal) {
   397      arena = arena_get(tsd_tsdn(tsd), ind, false);
    [all …]
tcache.c
    44  tcache_event_hard(tsd_t *tsd, tcache_t *tcache) {
    58      tcache_bin_flush_small(tsd, tcache, tbin, binind,
    71      tcache_bin_flush_large(tsd, tbin, binind, tbin->ncached
   108  tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
   121      item_extent[i] = iealloc(tsd_tsdn(tsd), *(tbin->avail - 1 - i));
   131      if (arena_prof_accum(tsd_tsdn(tsd), arena,
   133          prof_idump(tsd_tsdn(tsd));
   138      malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);
   155      arena_dalloc_bin_junked_locked(tsd_tsdn(tsd),
   169      malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock);
    [all …]
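tcache_bin_flush_small batches cached pointers back to their owning bin under a single lock acquisition. A toy version of that flush shape, with invented placeholder types instead of jemalloc's cache_bin_t/arena machinery:

    #include <pthread.h>
    #include <stdlib.h>

    #define CACHE_SLOTS 64

    typedef struct {
        void *slots[CACHE_SLOTS];   /* slots[0 .. ncached-1] hold cached pointers */
        int ncached;
    } thread_bin_t;

    typedef struct {
        pthread_mutex_t lock;
    } owner_bin_t;

    /* Stand-in for handing an object back to its arena bin; caller holds lock. */
    static void owner_dalloc_locked(owner_bin_t *bin, void *ptr) {
        (void)bin;
        free(ptr);
    }

    /* Flush everything above the low-water mark `rem` under one lock acquisition,
     * mirroring the lock .. loop .. unlock shape of the snippet above. */
    static void thread_bin_flush(thread_bin_t *tbin, owner_bin_t *bin, int rem) {
        pthread_mutex_lock(&bin->lock);
        while (tbin->ncached > rem)
            owner_dalloc_locked(bin, tbin->slots[--tbin->ncached]);
        pthread_mutex_unlock(&bin->lock);
    }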
prof.c
   141  static void prof_tctx_destroy(tsd_t *tsd, prof_tctx_t *tctx);
   144  static void prof_tdata_destroy(tsd_t *tsd, prof_tdata_t *tdata,
   211  prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated) {
   223      tdata = prof_tdata_get(tsd, true);
   230      malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock);
   232      if (prof_tctx_should_destroy(tsd_tsdn(tsd), tctx)) {
   233          prof_tctx_destroy(tsd, tctx);
   235      malloc_mutex_unlock(tsd_tsdn(tsd), tctx->tdata->lock);
   257  prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx) {
   258      malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock);
    [all …]
background_thread.c
    56  bool background_thread_create(tsd_t *tsd, unsigned arena_ind) NOT_REACHED
    57  bool background_threads_enable(tsd_t *tsd) NOT_REACHED
    58  bool background_threads_disable(tsd_t *tsd) NOT_REACHED
   313  background_threads_disable_single(tsd_t *tsd, background_thread_info_t *info) {
   315      malloc_mutex_assert_owner(tsd_tsdn(tsd),
   318      malloc_mutex_assert_not_owner(tsd_tsdn(tsd),
   322      pre_reentrancy(tsd, NULL);
   323      malloc_mutex_lock(tsd_tsdn(tsd), &info->mtx);
   333      malloc_mutex_unlock(tsd_tsdn(tsd), &info->mtx);
   336      post_reentrancy(tsd);
    [all …]
ctl.c
    50  static int n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, \
   642  arenas_i_impl(tsd_t *tsd, size_t i, bool compat, bool init) {
   655      (struct container_s *)base_alloc(tsd_tsdn(tsd),
   663      ret = (ctl_arena_t *)base_alloc(tsd_tsdn(tsd), b0get(),
   885  ctl_arena_init(tsd_t *tsd, extent_hooks_t *extent_hooks) {
   898      if (arenas_i_impl(tsd, arena_ind, false, true) == NULL) {
   903      if (arena_init(tsd_tsdn(tsd), arena_ind, extent_hooks) == NULL) {
   999  ctl_init(tsd_t *tsd) {
  1001      tsdn_t *tsdn = tsd_tsdn(tsd);
  1035      if ((ctl_sarena = arenas_i_impl(tsd, MALLCTL_ARENAS_ALL, false,
    [all …]
/external/curl/lib/
asyn-thread.c
   178  struct thread_sync_data tsd;
   183  return &(((struct thread_data *)conn->async.os_specific)->tsd);    /* in conn_thread_sync_data() */
   188  void destroy_thread_sync_data(struct thread_sync_data * tsd)
   190      if(tsd->mtx) {
   191          Curl_mutex_destroy(tsd->mtx);
   192          free(tsd->mtx);
   195      free(tsd->hostname);
   197      if(tsd->res)
   198          Curl_freeaddrinfo(tsd->res);
   200      memset(tsd, 0, sizeof(*tsd));
    [all …]
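destroy_thread_sync_data is a typical tear-down helper: release each owned resource, guard the optional ones, and clear the struct so a repeat call is harmless. A generic sketch of the same pattern with hypothetical names (not curl's API):

    #include <netdb.h>
    #include <pthread.h>
    #include <stdlib.h>
    #include <string.h>

    /* Hypothetical state shared between a resolver thread and its parent. */
    struct sync_data {
        pthread_mutex_t *mtx;    /* heap-allocated so ownership can be handed off */
        char *hostname;
        struct addrinfo *res;    /* resolver result, may be NULL */
    };

    static void destroy_sync_data(struct sync_data *sd) {
        if (sd->mtx) {
            pthread_mutex_destroy(sd->mtx);
            free(sd->mtx);
        }
        free(sd->hostname);              /* free(NULL) is a no-op */
        if (sd->res)
            freeaddrinfo(sd->res);
        memset(sd, 0, sizeof(*sd));      /* safe to destroy twice */
    }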
/external/jemalloc_new/include/jemalloc/internal/
tsd.h
   133  tsd_t tsd;
   137  tsd_tsdn(tsd_t *tsd) {
   138      return (tsdn_t *)tsd;
   150      return &tsdn->tsd;    /* in tsdn_tsd() */
   159  tsd_t *tsd_fetch_slow(tsd_t *tsd, bool internal);
   160  void tsd_slow_update(tsd_t *tsd);
   184  tsd_##n##p_get_unsafe(tsd_t *tsd) { \
   185      return &tsd->use_a_getter_or_setter_instead_##n; \
   193  tsd_##n##p_get(tsd_t *tsd) { \
   194      assert(tsd->state == tsd_state_nominal || \
    [all …]
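The tsd_##n##p_get lines come from a token-pasting macro that stamps out one typed accessor per tsd field. A cut-down illustration of that X-macro technique with made-up fields (the real macro also enforces the state assertion shown at line 194):

    #include <stdint.h>

    /* Field list: O(name, type).  Struct members and accessors are both
     * generated from this one table, keeping them in sync. */
    #define DEMO_TSD_FIELDS(O)     \
        O(reentrancy, int)         \
        O(thread_bytes, uint64_t)

    typedef struct {
        int state;
    #define DEMO_MEMBER(n, t) t n;
        DEMO_TSD_FIELDS(DEMO_MEMBER)
    #undef DEMO_MEMBER
    } field_tsd_t;

    /* Expands to field_tsd_reentrancyp_get() and field_tsd_thread_bytesp_get(). */
    #define DEMO_GETTER(n, t)                                      \
        static inline t *field_tsd_##n##p_get(field_tsd_t *tsd) { \
            return &tsd->n;                                        \
        }
    DEMO_TSD_FIELDS(DEMO_GETTER)
    #undef DEMO_GETTER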
jemalloc_internal_inlines_a.h
    58  arena_tdata_get(tsd_t *tsd, unsigned ind, bool refresh_if_missing) {
    60      arena_tdata_t *arenas_tdata = tsd_arenas_tdata_get(tsd);
    64      return arena_tdata_get_hard(tsd, ind);
    66      if (unlikely(ind >= tsd_narenas_tdata_get(tsd))) {
    71      return (refresh_if_missing ? arena_tdata_get_hard(tsd, ind) :
    79      return arena_tdata_get_hard(tsd, ind);
    99  decay_ticker_get(tsd_t *tsd, unsigned ind) {
   102      tdata = arena_tdata_get(tsd, ind, true);
   122  tcache_available(tsd_t *tsd) {
   128      if (likely(tsd_tcache_enabled_get(tsd))) {
    [all …]
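arena_tdata_get keeps a per-thread array of per-arena metadata and punts to a *_hard slow path when the array is missing or too small. A simplified, self-contained sketch of that lookup shape (all names invented):

    #include <stdbool.h>
    #include <stdlib.h>
    #include <string.h>

    /* Invented stand-ins for jemalloc's per-arena tdata bookkeeping. */
    typedef struct { unsigned ticks; } tdata_entry_t;

    typedef struct {
        tdata_entry_t *tdata;   /* per-thread array, one slot per arena index */
        unsigned ntdata;        /* current length of the array */
    } thread_state_t;

    /* Slow path: (re)allocate the array so that slot `ind` exists. */
    static tdata_entry_t *tdata_get_hard(thread_state_t *ts, unsigned ind) {
        if (ind < ts->ntdata)
            return &ts->tdata[ind];
        unsigned n = ind + 1;
        tdata_entry_t *p = realloc(ts->tdata, n * sizeof(*p));
        if (p == NULL)
            return NULL;
        memset(p + ts->ntdata, 0, (n - ts->ntdata) * sizeof(*p));
        ts->tdata = p;
        ts->ntdata = n;
        return &ts->tdata[ind];
    }

    /* Fast path mirrors the snippet: fall back to the slow path when the array
     * is missing or too small, unless the caller asked not to refresh. */
    static tdata_entry_t *tdata_get(thread_state_t *ts, unsigned ind, bool refresh) {
        if (ts->tdata != NULL && ind < ts->ntdata)
            return &ts->tdata[ind];
        return refresh ? tdata_get_hard(ts, ind) : NULL;
    }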
tcache_inlines.h
    12  tcache_enabled_get(tsd_t *tsd) {
    13      return tsd_tcache_enabled_get(tsd);
    17  tcache_enabled_set(tsd_t *tsd, bool enabled) {
    18      bool was_enabled = tsd_tcache_enabled_get(tsd);
    21          tsd_tcache_data_init(tsd);
    23          tcache_cleanup(tsd);
    26      tsd_tcache_enabled_set(tsd, enabled);
    27      tsd_slow_update(tsd);
    31  tcache_event(tsd_t *tsd, tcache_t *tcache) {
    37      tcache_event_hard(tsd, tcache);
    [all …]
jemalloc_internal_inlines_b.h
     8  arena_choose_impl(tsd_t *tsd, arena_t *arena, bool internal) {
    16      if (unlikely(tsd_reentrancy_level_get(tsd) > 0)) {
    17          return arena_get(tsd_tsdn(tsd), 0, true);
    20      ret = internal ? tsd_iarena_get(tsd) : tsd_arena_get(tsd);
    22      ret = arena_choose_hard(tsd, internal);
    24      if (tcache_available(tsd)) {
    25          tcache_t *tcache = tcache_get(tsd);
    29          arena_get(tsd_tsdn(tsd), 0, false));
    31          tcache_arena_reassociate(tsd_tsdn(tsd),
    35          tcache_arena_associate(tsd_tsdn(tsd), tcache,
    [all …]
prof_inlines_b.h
    17  prof_tdata_get(tsd_t *tsd, bool create) {
    22      tdata = tsd_prof_tdata_get(tsd);
    25      if (tsd_nominal(tsd)) {
    26          tdata = prof_tdata_init(tsd);
    27          tsd_prof_tdata_set(tsd, tdata);
    30          tdata = prof_tdata_reinit(tsd, tdata);
    31          tsd_prof_tdata_set(tsd, tdata);
    65  prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update,
    71      tdata = prof_tdata_get(tsd, true);
    90      if (tsd_reentrancy_level_get(tsd) > 0) {
    [all …]
prof_externs.h
    45  void prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated);
    48  void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
    51  prof_tctx_t *prof_lookup(tsd_t *tsd, prof_bt_t *bt);
    67  bool prof_mdump(tsd_t *tsd, const char *filename);
    69  prof_tdata_t *prof_tdata_init(tsd_t *tsd);
    70  prof_tdata_t *prof_tdata_reinit(tsd_t *tsd, prof_tdata_t *tdata);
    71  void prof_reset(tsd_t *tsd, size_t lg_sample);
    72  void prof_tdata_cleanup(tsd_t *tsd);
    75  const char *prof_thread_name_get(tsd_t *tsd);
    76  int prof_thread_name_set(tsd_t *tsd, const char *thread_name);
    [all …]
/external/jemalloc/src/
jemalloc.c
   476  arena_bind(tsd_t *tsd, unsigned ind, bool internal)
   480      if (!tsd_nominal(tsd))
   483      arena = arena_get(tsd_tsdn(tsd), ind, false);
   487      tsd_iarena_set(tsd, arena);
   489      tsd_arena_set(tsd, arena);
   493  arena_migrate(tsd_t *tsd, unsigned oldind, unsigned newind)
   497      oldarena = arena_get(tsd_tsdn(tsd), oldind, false);
   498      newarena = arena_get(tsd_tsdn(tsd), newind, false);
   501      tsd_arena_set(tsd, newarena);
   505  arena_unbind(tsd_t *tsd, unsigned ind, bool internal)
    [all …]
tcache.c
    37  tcache_event_hard(tsd_t *tsd, tcache_t *tcache)
    48      tcache_bin_flush_small(tsd, tcache, tbin, binind,
    52      tcache_bin_flush_large(tsd, tbin, binind, tbin->ncached
    92  tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
   103      arena = arena_choose(tsd, NULL);
   113      if (arena_prof_accum(tsd_tsdn(tsd), arena,
   115          prof_idump(tsd_tsdn(tsd));
   119      malloc_mutex_lock(tsd_tsdn(tsd), &bin->lock);
   137      arena_dalloc_bin_junked_locked(tsd_tsdn(tsd),
   150      malloc_mutex_unlock(tsd_tsdn(tsd), &bin->lock);
    [all …]
prof.c
   125  static void prof_tctx_destroy(tsd_t *tsd, prof_tctx_t *tctx);
   128  static void prof_tdata_destroy(tsd_t *tsd, prof_tdata_t *tdata,
   197  prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated)
   210      tdata = prof_tdata_get(tsd, true);
   216      malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock);
   218      if (prof_tctx_should_destroy(tsd_tsdn(tsd), tctx))
   219          prof_tctx_destroy(tsd, tctx);
   221      malloc_mutex_unlock(tsd_tsdn(tsd), tctx->tdata->lock);
   244  prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx)
   247      malloc_mutex_lock(tsd_tsdn(tsd), tctx->tdata->lock);
    [all …]
quarantine.c
    15  static quarantine_t *quarantine_grow(tsd_t *tsd, quarantine_t *quarantine);
    43  quarantine_alloc_hook_work(tsd_t *tsd)
    47      if (!tsd_nominal(tsd))
    50      quarantine = quarantine_init(tsd_tsdn(tsd), LG_MAXOBJS_INIT);
    55      if (tsd_quarantine_get(tsd) == NULL)
    56          tsd_quarantine_set(tsd, quarantine);
    58          idalloctm(tsd_tsdn(tsd), quarantine, NULL, true, true);
    62  quarantine_grow(tsd_t *tsd, quarantine_t *quarantine)
    66      ret = quarantine_init(tsd_tsdn(tsd), quarantine->lg_maxobjs + 1);
    68      quarantine_drain_one(tsd_tsdn(tsd), quarantine);
    [all …]
/external/jemalloc/include/jemalloc/internal/
tcache.h
   146  void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
   149  void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, tcache_bin_t *tbin,
   151  void tcache_bin_flush_large(tsd_t *tsd, tcache_bin_t *tbin, szind_t binind,
   155  tcache_t *tcache_get_hard(tsd_t *tsd);
   157  void tcache_cleanup(tsd_t *tsd);
   158  void tcache_enabled_cleanup(tsd_t *tsd);
   160  bool tcaches_create(tsd_t *tsd, unsigned *r_ind);
   161  void tcaches_flush(tsd_t *tsd, unsigned ind);
   162  void tcaches_destroy(tsd_t *tsd, unsigned ind);
   173  void tcache_event(tsd_t *tsd, tcache_t *tcache);
    [all …]
prof.h
   283  void prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated);
   286  void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
   289  prof_tctx_t *prof_lookup(tsd_t *tsd, prof_bt_t *bt);
   300  bool prof_mdump(tsd_t *tsd, const char *filename);
   302  prof_tdata_t *prof_tdata_init(tsd_t *tsd);
   303  prof_tdata_t *prof_tdata_reinit(tsd_t *tsd, prof_tdata_t *tdata);
   304  void prof_reset(tsd_t *tsd, size_t lg_sample);
   305  void prof_tdata_cleanup(tsd_t *tsd);
   308  const char *prof_thread_name_get(tsd_t *tsd);
   309  int prof_thread_name_set(tsd_t *tsd, const char *thread_name);
    [all …]
witness.h
   101  void witnesses_cleanup(tsd_t *tsd);
   102  void witness_fork_cleanup(tsd_t *tsd);
   103  void witness_prefork(tsd_t *tsd);
   104  void witness_postfork_parent(tsd_t *tsd);
   105  void witness_postfork_child(tsd_t *tsd);
   112  bool witness_owner(tsd_t *tsd, const witness_t *witness);
   122  witness_owner(tsd_t *tsd, const witness_t *witness)
   127      witnesses = tsd_witnessesp_get(tsd);
   139      tsd_t *tsd;    /* in witness_assert_owner() */
   146      tsd = tsdn_tsd(tsdn);
    [all …]
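The witness machinery records which locks the current thread holds so lock-ordering bugs can be asserted in debug builds; witness_owner simply walks that per-thread list. A toy held-locks check along the same lines (invented types, not jemalloc's ql_-based lists):

    #include <stdbool.h>
    #include <stddef.h>

    /* Toy lock-order witness: one node per lock the thread currently holds. */
    typedef struct demo_witness_s {
        const char *name;
        unsigned rank;                 /* locks must be taken in increasing rank */
        struct demo_witness_s *next;
    } demo_witness_t;

    typedef struct {
        demo_witness_t *held;          /* per-thread list of held witnesses */
    } demo_witnesses_t;

    /* Analogue of witness_owner(): is this lock already held by the thread? */
    static bool demo_witness_owner(const demo_witnesses_t *tw, const demo_witness_t *w) {
        for (const demo_witness_t *it = tw->held; it != NULL; it = it->next)
            if (it == w)
                return true;
        return false;
    }

    /* Before acquiring, a debug build would check the new lock's rank against
     * everything already held, catching lock-order inversions early. */
    static bool demo_witness_rank_ok(const demo_witnesses_t *tw, const demo_witness_t *w) {
        for (const demo_witness_t *it = tw->held; it != NULL; it = it->next)
            if (it->rank >= w->rank)
                return false;
        return true;
    }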
jemalloc_internal.h
   490  arena_tdata_t *arena_tdata_get_hard(tsd_t *tsd, unsigned ind);
   491  arena_t *arena_choose_hard(tsd_t *tsd, bool internal);
   492  void arena_migrate(tsd_t *tsd, unsigned oldind, unsigned newind);
   493  void thread_allocated_cleanup(tsd_t *tsd);
   494  void thread_deallocated_cleanup(tsd_t *tsd);
   495  void iarena_cleanup(tsd_t *tsd);
   496  void arena_cleanup(tsd_t *tsd);
   497  void arenas_tdata_cleanup(tsd_t *tsd);
   498  void narenas_tdata_cleanup(tsd_t *tsd);
   499  void arenas_tdata_bypass_cleanup(tsd_t *tsd);
    [all …]
/external/jemalloc_new/test/unit/
tsd.c
    46  tsd_t *tsd = tsd_fetch();    /* in thd_start() */
    47  assert_x_eq(tsd_test_data_get(tsd), MALLOC_TSD_TEST_DATA_INIT,
    53  tsd_test_data_set(tsd, d);
    54  assert_x_eq(tsd_test_data_get(tsd), d,
    58  assert_x_eq(tsd_test_data_get(tsd), (int)(uintptr_t)arg,
    61  tsd_test_callback_set(tsd, &data_cleanup);
    89  tsd_t *tsd = tsd_fetch();    /* in thd_start_reincarnated() */
    90  assert(tsd);
    96  assert_ptr_not_null(tsd_arena_get(tsd),
    98  tsd_cleanup((void *)tsd);
    [all …]
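This unit test fetches the thread's tsd, checks the initial value, stores new data, and registers a cleanup callback. A minimal stand-alone analogue using plain pthread TLS, showing the same get/set/destructor flow (an assumed simplification, not the jemalloc test harness):

    #include <assert.h>
    #include <pthread.h>
    #include <stdint.h>

    static pthread_key_t key;
    static int cleanup_ran;

    static void data_cleanup(void *arg) {
        /* Destructor receives the thread's final value at thread exit. */
        assert((intptr_t)arg == 42);
        cleanup_ran = 1;
    }

    static void *thd_start(void *arg) {
        (void)arg;
        assert(pthread_getspecific(key) == NULL);          /* starts out unset */
        pthread_setspecific(key, (void *)(intptr_t)42);
        assert((intptr_t)pthread_getspecific(key) == 42);
        return NULL;
    }

    int main(void) {
        pthread_t thd;
        pthread_key_create(&key, data_cleanup);
        pthread_create(&thd, NULL, thd_start, NULL);
        pthread_join(thd, NULL);    /* join orders cleanup_ran before the check */
        assert(cleanup_ran == 1);
        return 0;
    }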
ckh.c
     4  tsd_t *tsd;    /* in TEST_BEGIN() */
     7  tsd = tsd_fetch();
     9  assert_false(ckh_new(tsd, &ckh, 2, ckh_string_hash,
    11  ckh_delete(tsd, &ckh);
    13  assert_false(ckh_new(tsd, &ckh, 3, ckh_pointer_hash,
    15  ckh_delete(tsd, &ckh);
    20  tsd_t *tsd;
    31  tsd = tsd_fetch();
    33  assert_false(ckh_new(tsd, &ckh, 2, ckh_string_hash,
    41  ckh_insert(tsd, &ckh, strs[i], strs[i]);
    [all …]
/external/jemalloc/test/unit/
ckh.c
     5  tsd_t *tsd;    /* in TEST_BEGIN() */
     8  tsd = tsd_fetch();
    10  assert_false(ckh_new(tsd, &ckh, 2, ckh_string_hash,
    12  ckh_delete(tsd, &ckh);
    14  assert_false(ckh_new(tsd, &ckh, 3, ckh_pointer_hash,
    16  ckh_delete(tsd, &ckh);
    22  tsd_t *tsd;
    33  tsd = tsd_fetch();
    35  assert_false(ckh_new(tsd, &ckh, 2, ckh_string_hash,
    43  ckh_insert(tsd, &ckh, strs[i], strs[i]);
    [all …]
/external/toybox/toys/pending/
syslogd.c
   407  struct unsocks *tsd;    /* in syslogd_main() */
   419  tsd = xzalloc(sizeof(struct unsocks));
   421  tsd->path = (toys.optflags & FLAG_p) ? TT.unix_socket : "/dev/log"; // DEFLOGSOCK
   422  TT.lsocks = tsd;
   427  tsd = xzalloc(sizeof(struct unsocks));
   428  tsd->path = temp;
   429  tsd->next = TT.lsocks;
   430  TT.lsocks = tsd;
   439  for (tsd = TT.lsocks; tsd; tsd = tsd->next) {
   440      tsd->sdu.sun_family = AF_UNIX;
    [all …]
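syslogd builds a singly linked list of unix-domain sockets (the default log socket plus any -p paths) and then binds each one in a loop. A self-contained sketch of that list-then-bind pattern, with demo paths and error handling omitted:

    #include <stdlib.h>
    #include <string.h>
    #include <sys/socket.h>
    #include <sys/un.h>
    #include <unistd.h>

    struct unsock {
        const char *path;
        struct sockaddr_un sdu;
        int sd;
        struct unsock *next;
    };

    /* Prepend one entry to the list head, as the snippet does with TT.lsocks. */
    static struct unsock *unsock_add(struct unsock *head, const char *path) {
        struct unsock *s = calloc(1, sizeof(*s));
        s->path = path;
        s->next = head;
        return s;
    }

    static void unsock_bind_all(struct unsock *head) {
        for (struct unsock *s = head; s; s = s->next) {
            s->sdu.sun_family = AF_UNIX;
            strncpy(s->sdu.sun_path, s->path, sizeof(s->sdu.sun_path) - 1);
            unlink(s->path);                 /* clear a stale socket file */
            s->sd = socket(AF_UNIX, SOCK_DGRAM, 0);
            bind(s->sd, (struct sockaddr *)&s->sdu, sizeof(s->sdu));
        }
    }

    int main(void) {
        /* Demo paths only; the real daemon defaults to /dev/log and adds -p paths. */
        struct unsock *socks = unsock_add(NULL, "/tmp/demo-log.sock");
        socks = unsock_add(socks, "/tmp/demo-log2.sock");
        unsock_bind_all(socks);
        return 0;
    }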