
Searched refs:tsd_t (Results 1 – 25 of 36) sorted by relevance

/external/jemalloc_new/include/jemalloc/internal/
tsd.h
133 tsd_t tsd;
137 tsd_tsdn(tsd_t *tsd) { in tsd_tsdn()
146 JEMALLOC_ALWAYS_INLINE tsd_t *
156 tsd_t *malloc_tsd_boot0(void);
159 tsd_t *tsd_fetch_slow(tsd_t *tsd, bool internal);
160 void tsd_slow_update(tsd_t *tsd);
184 tsd_##n##p_get_unsafe(tsd_t *tsd) { \
193 tsd_##n##p_get(tsd_t *tsd) { \
213 tsd_t *tsd = tsdn_tsd(tsdn); \
222 tsd_##n##_get(tsd_t *tsd) { \
[all …]
prof_externs.h
45 void prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated);
48 void prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx);
51 prof_tctx_t *prof_lookup(tsd_t *tsd, prof_bt_t *bt);
67 bool prof_mdump(tsd_t *tsd, const char *filename);
69 prof_tdata_t *prof_tdata_init(tsd_t *tsd);
70 prof_tdata_t *prof_tdata_reinit(tsd_t *tsd, prof_tdata_t *tdata);
71 void prof_reset(tsd_t *tsd, size_t lg_sample);
72 void prof_tdata_cleanup(tsd_t *tsd);
75 const char *prof_thread_name_get(tsd_t *tsd);
76 int prof_thread_name_set(tsd_t *tsd, const char *thread_name);
[all …]
tcache_externs.h
31 void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
34 void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
36 void tcache_bin_flush_large(tsd_t *tsd, cache_bin_t *tbin, szind_t binind,
40 tcache_t *tcache_create_explicit(tsd_t *tsd);
41 void tcache_cleanup(tsd_t *tsd);
43 bool tcaches_create(tsd_t *tsd, unsigned *r_ind);
44 void tcaches_flush(tsd_t *tsd, unsigned ind);
45 void tcaches_destroy(tsd_t *tsd, unsigned ind);
51 void tcache_flush(tsd_t *tsd);
52 bool tsd_tcache_data_init(tsd_t *tsd);
[all …]
jemalloc_internal_externs.h
42 arena_tdata_t *arena_tdata_get_hard(tsd_t *tsd, unsigned ind);
43 arena_t *arena_choose_hard(tsd_t *tsd, bool internal);
44 void arena_migrate(tsd_t *tsd, unsigned oldind, unsigned newind);
45 void iarena_cleanup(tsd_t *tsd);
46 void arena_cleanup(tsd_t *tsd);
47 void arenas_tdata_cleanup(tsd_t *tsd);
jemalloc_internal_inlines_a.h
58 arena_tdata_get(tsd_t *tsd, unsigned ind, bool refresh_if_missing) { in arena_tdata_get()
99 decay_ticker_get(tsd_t *tsd, unsigned ind) { in decay_ticker_get()
122 tcache_available(tsd_t *tsd) { in tcache_available()
140 tcache_get(tsd_t *tsd) { in tcache_get()
149 pre_reentrancy(tsd_t *tsd, arena_t *arena) { in pre_reentrancy()
164 post_reentrancy(tsd_t *tsd) { in post_reentrancy()
tcache_inlines.h
12 tcache_enabled_get(tsd_t *tsd) { in tcache_enabled_get()
17 tcache_enabled_set(tsd_t *tsd, bool enabled) { in tcache_enabled_set()
31 tcache_event(tsd_t *tsd, tcache_t *tcache) { in tcache_event()
42 tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache, in tcache_alloc_small()
106 tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size, in tcache_alloc_large()
168 tcache_dalloc_small(tsd_t *tsd, tcache_t *tcache, void *ptr, szind_t binind, in tcache_dalloc_small()
193 tcache_dalloc_large(tsd_t *tsd, tcache_t *tcache, void *ptr, szind_t binind, in tcache_dalloc_large()
219 tcaches_get(tsd_t *tsd, unsigned ind) { in tcaches_get()
tsd_win.h
8 tsd_t val;
61 tsd_t tsd_initializer = TSD_INITIALIZER; in tsd_wrapper_get()
93 tsd_t initializer = TSD_INITIALIZER; in tsd_boot1()
117 JEMALLOC_ALWAYS_INLINE tsd_t *
130 tsd_set(tsd_t *val) { in tsd_set()
ckh.h
70 bool ckh_new(tsd_t *tsd, ckh_t *ckh, size_t minitems, ckh_hash_t *hash,
72 void ckh_delete(tsd_t *tsd, ckh_t *ckh);
90 bool ckh_insert(tsd_t *tsd, ckh_t *ckh, const void *key, const void *data);
91 bool ckh_remove(tsd_t *tsd, ckh_t *ckh, const void *searchkey, void **key,
tsd_generic.h
18 tsd_t val;
80 tsd_t initializer = TSD_INITIALIZER; in tsd_wrapper_get()
110 tsd_t initializer = TSD_INITIALIZER; in tsd_boot1()
135 JEMALLOC_ALWAYS_INLINE tsd_t *
148 tsd_set(tsd_t *val) { in tsd_set()
tsd_tls.h
6 extern __thread tsd_t tsd_tls;
41 JEMALLOC_ALWAYS_INLINE tsd_t *
48 tsd_set(tsd_t *val) { in tsd_set()
tsd_malloc_thread_cleanup.h
6 extern __thread tsd_t tsd_tls;
48 JEMALLOC_ALWAYS_INLINE tsd_t *
54 tsd_set(tsd_t *val) { in tsd_set()
background_thread_externs.h
13 bool background_thread_create(tsd_t *tsd, unsigned arena_ind);
14 bool background_threads_enable(tsd_t *tsd);
15 bool background_threads_disable(tsd_t *tsd);
ctl.h
24 int (*ctl)(tsd_t *, const size_t *, size_t, void *, size_t *, void *,
93 int ctl_byname(tsd_t *tsd, const char *name, void *oldp, size_t *oldlenp,
95 int ctl_nametomib(tsd_t *tsd, const char *name, size_t *mibp, size_t *miblenp);
97 int ctl_bymib(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp,
prof_inlines_b.h
17 prof_tdata_get(tsd_t *tsd, bool create) { in prof_tdata_get()
65 prof_sample_accum_update(tsd_t *tsd, size_t usize, bool update, in prof_sample_accum_update()
102 prof_alloc_prep(tsd_t *tsd, size_t usize, bool prof_active, bool update) { in prof_alloc_prep()
137 prof_realloc(tsd_t *tsd, const void *ptr, size_t usize, prof_tctx_t *tctx, in prof_realloc()
195 prof_free(tsd_t *tsd, const void *ptr, size_t usize, alloc_ctx_t *alloc_ctx) { in prof_free()
jemalloc_internal_inlines_b.h
8 arena_choose_impl(tsd_t *tsd, arena_t *arena, bool internal) { in arena_choose_impl()
62 arena_choose(tsd_t *tsd, arena_t *arena) { in arena_choose()
67 arena_ichoose(tsd_t *tsd, arena_t *arena) { in arena_ichoose()
jemalloc_internal_inlines_c.h
61 ialloc(tsd_t *tsd, size_t size, szind_t ind, bool zero, bool slow_path) { in ialloc()
93 ipalloc(tsd_t *tsd, size_t usize, size_t alignment, bool zero) { in ipalloc()
122 idalloc(tsd_t *tsd, void *ptr) { in idalloc()
195 iralloc(tsd_t *tsd, void *ptr, size_t oldsize, size_t size, size_t alignment, in iralloc()
arena_externs.h
45 void arena_reset(tsd_t *tsd, arena_t *arena);
46 void arena_destroy(tsd_t *tsd, arena_t *arena);
75 bool arena_retain_grow_limit_get_set(tsd_t *tsd, arena_t *arena,
/external/jemalloc_new/src/
tsd.c
16 __thread tsd_t JEMALLOC_TLS_MODEL tsd_tls = TSD_INITIALIZER;
20 __thread tsd_t JEMALLOC_TLS_MODEL tsd_tls = TSD_INITIALIZER;
55 tsd_slow_update(tsd_t *tsd) { in tsd_slow_update()
67 tsd_data_init(tsd_t *tsd) { in tsd_data_init()
88 assert_tsd_data_cleanup_done(tsd_t *tsd) { in assert_tsd_data_cleanup_done()
99 tsd_data_init_nocleanup(tsd_t *tsd) { in tsd_data_init_nocleanup()
116 tsd_t *
117 tsd_fetch_slow(tsd_t *tsd, bool minimal) { in tsd_fetch_slow()
204 tsd_do_data_cleanup(tsd_t *tsd) { in tsd_do_data_cleanup()
215 tsd_t *tsd = (tsd_t *)arg; in tsd_cleanup()
[all …]
tcache.c
44 tcache_event_hard(tsd_t *tsd, tcache_t *tcache) { in tcache_event_hard()
108 tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin, in tcache_bin_flush_small()
197 tcache_bin_flush_large(tsd_t *tsd, cache_bin_t *tbin, szind_t binind, in tcache_bin_flush_large()
355 tsd_tcache_enabled_data_init(tsd_t *tsd) { in tsd_tcache_enabled_data_init()
370 tcache_init(tsd_t *tsd, tcache_t *tcache, void *avail_stack) { in tcache_init()
404 tsd_tcache_data_init(tsd_t *tsd) { in tsd_tcache_data_init()
446 tcache_create_explicit(tsd_t *tsd) { in tcache_create_explicit()
472 tcache_flush_cache(tsd_t *tsd, tcache_t *tcache) { in tcache_flush_cache()
504 tcache_flush(tsd_t *tsd) { in tcache_flush()
510 tcache_destroy(tsd_t *tsd, tcache_t *tcache, bool tsd_tcache) { in tcache_destroy()
[all …]
ctl.c
50 static int n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, \
642 arenas_i_impl(tsd_t *tsd, size_t i, bool compat, bool init) { in arenas_i_impl()
885 ctl_arena_init(tsd_t *tsd, extent_hooks_t *extent_hooks) { in ctl_arena_init()
999 ctl_init(tsd_t *tsd) { in ctl_init()
1173 ctl_byname(tsd_t *tsd, const char *name, void *oldp, size_t *oldlenp, in ctl_byname()
1205 ctl_nametomib(tsd_t *tsd, const char *name, size_t *mibp, size_t *miblenp) { in ctl_nametomib()
1219 ctl_bymib(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, in ctl_bymib()
1356 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \
1381 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \
1402 n##_ctl(tsd_t *tsd, const size_t *mib, size_t miblen, void *oldp, \
[all …]
prof.c
141 static void prof_tctx_destroy(tsd_t *tsd, prof_tctx_t *tctx);
144 static void prof_tdata_destroy(tsd_t *tsd, prof_tdata_t *tdata,
211 prof_alloc_rollback(tsd_t *tsd, prof_tctx_t *tctx, bool updated) { in rb_gen()
257 prof_free_sampled_object(tsd_t *tsd, size_t usize, prof_tctx_t *tctx) { in prof_free_sampled_object()
280 prof_enter(tsd_t *tsd, prof_tdata_t *tdata) { in prof_enter()
293 prof_leave(tsd_t *tsd, prof_tdata_t *tdata) { in prof_leave()
579 prof_gctx_try_destroy(tsd_t *tsd, prof_tdata_t *tdata_self, prof_gctx_t *gctx, in prof_gctx_try_destroy()
644 prof_tctx_destroy(tsd_t *tsd, prof_tctx_t *tctx) { in prof_tctx_destroy()
720 prof_lookup_global(tsd_t *tsd, prof_bt_t *bt, prof_tdata_t *tdata, in prof_lookup_global()
785 prof_lookup(tsd_t *tsd, prof_bt_t *bt) { in prof_lookup()
[all …]
jemalloc.c
371 arena_bind(tsd_t *tsd, unsigned ind, bool internal) { in arena_bind()
383 arena_migrate(tsd_t *tsd, unsigned oldind, unsigned newind) { in arena_migrate()
394 arena_unbind(tsd_t *tsd, unsigned ind, bool internal) { in arena_unbind()
408 arena_tdata_get_hard(tsd_t *tsd, unsigned ind) { in arena_tdata_get_hard()
485 arena_choose_hard(tsd_t *tsd, bool internal) { in arena_choose_hard()
599 iarena_cleanup(tsd_t *tsd) { in iarena_cleanup()
609 arena_cleanup(tsd_t *tsd) { in arena_cleanup()
619 arenas_tdata_cleanup(tsd_t *tsd) { in arenas_tdata_cleanup()
677 tsd_t *tsd = tsdn_tsd(tsdn); in check_entry_exit_locking()
1509 tsd_t *tsd = tsdn_tsd(tsdn);
[all …]
ckh.c
53 static bool ckh_grow(tsd_t *tsd, ckh_t *ckh);
54 static void ckh_shrink(tsd_t *tsd, ckh_t *ckh);
257 ckh_grow(tsd_t *tsd, ckh_t *ckh) { in ckh_grow()
311 ckh_shrink(tsd_t *tsd, ckh_t *ckh) { in ckh_shrink()
359 ckh_new(tsd_t *tsd, ckh_t *ckh, size_t minitems, ckh_hash_t *hash, in ckh_new()
416 ckh_delete(tsd_t *tsd, ckh_t *ckh) { in ckh_delete()
466 ckh_insert(tsd_t *tsd, ckh_t *ckh, const void *key, const void *data) { in ckh_insert()
489 ckh_remove(tsd_t *tsd, ckh_t *ckh, const void *searchkey, void **key, in ckh_remove()
background_thread.c
56 bool background_thread_create(tsd_t *tsd, unsigned arena_ind) NOT_REACHED in background_thread_create()
57 bool background_threads_enable(tsd_t *tsd) NOT_REACHED in background_thread_create()
58 bool background_threads_disable(tsd_t *tsd) NOT_REACHED in background_thread_create()
313 background_threads_disable_single(tsd_t *tsd, background_thread_info_t *info) {
386 check_background_thread_creation(tsd_t *tsd, unsigned *n_created,
436 background_thread0_work(tsd_t *tsd) {
485 background_work(tsd_t *tsd, unsigned ind) {
530 background_thread_init(tsd_t *tsd, background_thread_info_t *info) {
539 background_thread_create(tsd_t *tsd, unsigned arena_ind) {
593 background_threads_enable(tsd_t *tsd) {
[all …]
/external/jemalloc_new/test/unit/
tsd.c
46 tsd_t *tsd = tsd_fetch(); in thd_start()
89 tsd_t *tsd = tsd_fetch(); in thd_start_reincarnated()
