/external/jemalloc_new/src/ |
D | tcache.c |
    44  tcache_event_hard(tsd_t *tsd, tcache_t *tcache) {    in tcache_event_hard() argument
    45  szind_t binind = tcache->next_gc_bin;    in tcache_event_hard()
    49  tbin = tcache_small_bin_get(tcache, binind);    in tcache_event_hard()
    51  tbin = tcache_large_bin_get(tcache, binind);    in tcache_event_hard()
    58  tcache_bin_flush_small(tsd, tcache, tbin, binind,    in tcache_event_hard()
    67  (tcache->lg_fill_div[binind] + 1)) >= 1) {    in tcache_event_hard()
    68  tcache->lg_fill_div[binind]++;    in tcache_event_hard()
    72  - tbin->low_water + (tbin->low_water >> 2), tcache);    in tcache_event_hard()
    79  if (binind < NBINS && tcache->lg_fill_div[binind] > 1) {    in tcache_event_hard()
    80  tcache->lg_fill_div[binind]--;    in tcache_event_hard()
    [all …]
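
The hits above are jemalloc's incremental thread-cache GC: tcache_event_hard() visits one bin per GC tick, flushes part of any unused surplus, and adjusts lg_fill_div so that rarely used bins are refilled with fewer objects. The standalone toy model below sketches only that adjustment policy; the struct, field semantics, and numbers are invented here and do not mirror jemalloc's real cache_bin_t.

    #include <stdio.h>

    /* Toy model of one tcache bin's GC bookkeeping (all names invented). */
    struct toy_bin {
        int ncached;     /* objects currently cached in the bin          */
        int low_water;   /* lowest fill level seen since the last GC;
                            negative here means "the bin ran empty"      */
        int lg_fill_div; /* refill size is divided by 1 << lg_fill_div   */
    };

    /* One GC visit: drop most of the idle surplus, then tune the divisor. */
    static void toy_gc_bin(struct toy_bin *bin) {
        if (bin->low_water > 0) {
            /* Part of the cache sat unused: flush ~3/4 of the idle
             * objects and refill less aggressively next time. */
            bin->ncached -= bin->low_water - (bin->low_water >> 2);
            bin->lg_fill_div++;
        } else if (bin->low_water < 0 && bin->lg_fill_div > 1) {
            /* The bin ran dry: refill more aggressively next time,
             * but never let lg_fill_div drop below 1. */
            bin->lg_fill_div--;
        }
        bin->low_water = bin->ncached; /* reset the watermark */
    }

    int main(void) {
        struct toy_bin bin = { .ncached = 16, .low_water = 8, .lg_fill_div = 1 };
        toy_gc_bin(&bin);
        printf("ncached=%d lg_fill_div=%d\n", bin.ncached, bin.lg_fill_div);
        return 0;
    }

Running it shrinks the bin from 16 to 10 cached objects and bumps lg_fill_div from 1 to 2.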
|
D | jemalloc.c |
    650  tcache_t *tcache;    in stats_print_atexit() local
    653  ql_foreach(tcache, &arena->tcache_ql, link) {    in stats_print_atexit()
    654  tcache_stats_merge(tsdn, tcache, arena);    in stats_print_atexit()
    1690 tcache_t *tcache;    local
    1697 tcache = tsd_tcachep_get(tsd);
    1698 assert(tcache == tcache_get(tsd));
    1700 tcache = tcache_get(tsd);
    1703 tcache = NULL;
    1705 tcache = tcaches_get(tsd, dopts->tcache_ind);
    1722 dopts->zero, tcache, arena);
    [all …]
|
D | arena.c |
    1250 arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,    in arena_tcache_fill_small() argument
    1263 tcache->lg_fill_div[binind]); i < nfill; i++) {    in arena_tcache_fill_small()
    1394 bool zero, tcache_t *tcache) {    in arena_palloc() argument
    1401 zero, tcache, true);    in arena_palloc()
    1453 arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache,    in arena_dalloc_promoted() argument
    1461 tcache_dalloc_large(tsdn_tsd(tsdn), tcache, ptr,    in arena_dalloc_promoted()
    1623 size_t alignment, bool zero, tcache_t *tcache) {    in arena_ralloc_move_helper() argument
    1626 zero, tcache, true);    in arena_ralloc_move_helper()
    1632 return ipalloct(tsdn, usize, alignment, zero, tcache, arena);    in arena_ralloc_move_helper()
    1637 size_t size, size_t alignment, bool zero, tcache_t *tcache) {    in arena_ralloc() argument
    [all …]
|
D | large.c |
    282  size_t alignment, bool zero, tcache_t *tcache) {    in large_ralloc() argument
    308  isdalloct(tsdn, extent_addr_get(extent), oldusize, tcache, NULL, true);    in large_ralloc()
|
/external/jemalloc_new/include/jemalloc/internal/ |
D | tcache_inlines.h |
    31   tcache_event(tsd_t *tsd, tcache_t *tcache) {    in tcache_event() argument
    36   if (unlikely(ticker_tick(&tcache->gc_ticker))) {    in tcache_event()
    37   tcache_event_hard(tsd, tcache);    in tcache_event()
    42   tcache_alloc_small(tsd_t *tsd, arena_t *arena, tcache_t *tcache,    in tcache_alloc_small() argument
    50   bin = tcache_small_bin_get(tcache, binind);    in tcache_alloc_small()
    60   ret = tcache_alloc_small_hard(tsd_tsdn(tsd), arena, tcache,    in tcache_alloc_small()
    99   tcache->prof_accumbytes += usize;    in tcache_alloc_small()
    101  tcache_event(tsd, tcache);    in tcache_alloc_small()
    106  tcache_alloc_large(tsd_t *tsd, arena_t *arena, tcache_t *tcache, size_t size,    in tcache_alloc_large() argument
    113  bin = tcache_large_bin_get(tcache, binind);    in tcache_alloc_large()
    [all …]
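
In tcache_alloc_small()/tcache_alloc_large(), the common case is a pointer popped straight off a per-thread bin, with the arena touched only when the bin is empty (tcache_alloc_small_hard). Below is a minimal sketch of such an array-backed LIFO bin with low-water tracking; the names and layout are invented for illustration and are not jemalloc's cache_bin_t.

    #include <stdio.h>

    /* Minimal LIFO cache bin: a stack of free pointers (names invented). */
    struct toy_cache_bin {
        void **ptrs;   /* cached pointers; top of stack is ptrs[ncached-1] */
        int ncached;   /* number of valid entries                          */
        int low_water; /* lowest ncached observed since the last GC pass   */
    };

    /* Fast path: pop the most recently freed pointer, or NULL if empty. */
    static void *toy_bin_alloc(struct toy_cache_bin *bin) {
        if (bin->ncached == 0) {
            return NULL; /* caller would take the slow path into the arena */
        }
        bin->ncached--;
        if (bin->ncached < bin->low_water) {
            bin->low_water = bin->ncached;
        }
        return bin->ptrs[bin->ncached];
    }

    int main(void) {
        int a, b;
        void *slots[2] = { &a, &b };
        struct toy_cache_bin bin = { .ptrs = slots, .ncached = 2, .low_water = 2 };
        void *p1 = toy_bin_alloc(&bin); /* &b: most recently cached     */
        void *p2 = toy_bin_alloc(&bin); /* &a                           */
        void *p3 = toy_bin_alloc(&bin); /* NULL: bin empty, go to arena */
        printf("%p %p %p low_water=%d\n", p1, p2, p3, bin.low_water);
        return 0;
    }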
|
D | jemalloc_internal_inlines_c.h |
    41   iallocztm(tsdn_t *tsdn, size_t size, szind_t ind, bool zero, tcache_t *tcache,    in iallocztm() argument
    46   assert(!is_internal || tcache == NULL);    in iallocztm()
    53   ret = arena_malloc(tsdn, arena, size, ind, zero, tcache, slow_path);    in iallocztm()
    68   tcache_t *tcache, bool is_internal, arena_t *arena) {    in ipallocztm() argument
    73   assert(!is_internal || tcache == NULL);    in ipallocztm()
    78   ret = arena_palloc(tsdn, arena, usize, alignment, zero, tcache);    in ipallocztm()
    88   tcache_t *tcache, arena_t *arena) {    in ipalloct() argument
    89   return ipallocztm(tsdn, usize, alignment, zero, tcache, false, arena);    in ipalloct()
    104  idalloctm(tsdn_t *tsdn, void *ptr, tcache_t *tcache, alloc_ctx_t *alloc_ctx,    in idalloctm() argument
    107  assert(!is_internal || tcache == NULL);    in idalloctm()
    [all …]
|
D | tcache_externs.h |
    31   void tcache_event_hard(tsd_t *tsd, tcache_t *tcache);
    32   void *tcache_alloc_small_hard(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
    34   void tcache_bin_flush_small(tsd_t *tsd, tcache_t *tcache, cache_bin_t *tbin,
    37   unsigned rem, tcache_t *tcache);
    38   void tcache_arena_reassociate(tsdn_t *tsdn, tcache_t *tcache,
    42   void tcache_stats_merge(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
    47   void tcache_arena_associate(tsdn_t *tsdn, tcache_t *tcache, arena_t *arena);
|
D | arena_inlines_b.h |
    88   tcache_t *tcache, bool slow_path) {    in arena_malloc() argument
    89   assert(!tsdn_null(tsdn) || tcache == NULL);    in arena_malloc()
    92   if (likely(tcache != NULL)) {    in arena_malloc()
    95   tcache, size, ind, zero, slow_path);    in arena_malloc()
    99   tcache, size, ind, zero, slow_path);    in arena_malloc()
    190  arena_dalloc(tsdn_t *tsdn, void *ptr, tcache_t *tcache,    in arena_dalloc() argument
    192  assert(!tsdn_null(tsdn) || tcache == NULL);    in arena_dalloc()
    195  if (unlikely(tcache == NULL)) {    in arena_dalloc()
    224  tcache_dalloc_small(tsdn_tsd(tsdn), tcache, ptr, szind,    in arena_dalloc()
    229  arena_dalloc_promoted(tsdn, ptr, tcache,    in arena_dalloc()
    [all …]
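
arena_malloc() and arena_dalloc() route through the tcache whenever the thread has one and the request fits a cached size class, otherwise they fall through to the arena. A rough self-contained sketch of that dispatch shape, with placeholder thresholds standing in for NBINS and tcache_maxclass:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdio.h>

    /* Placeholder thresholds standing in for NBINS / tcache_maxclass. */
    #define TOY_SMALL_MAX  ((size_t)(14 * 1024))
    #define TOY_TCACHE_MAX ((size_t)(32 * 1024))

    enum toy_path { TOY_TCACHE_SMALL, TOY_TCACHE_LARGE, TOY_ARENA_DIRECT };

    /* Same shape as the arena_malloc() dispatch: prefer the thread cache
     * when it exists and the request fits a cached size class. */
    static enum toy_path toy_choose_path(bool have_tcache, size_t size) {
        if (have_tcache && size <= TOY_SMALL_MAX) {
            return TOY_TCACHE_SMALL;
        }
        if (have_tcache && size <= TOY_TCACHE_MAX) {
            return TOY_TCACHE_LARGE;
        }
        return TOY_ARENA_DIRECT;
    }

    int main(void) {
        printf("%d %d %d\n",
            toy_choose_path(true, 64),                /* 0: small tcache bin */
            toy_choose_path(true, (size_t)20 * 1024), /* 1: large tcache bin */
            toy_choose_path(false, 64));              /* 2: no tcache, arena */
        return 0;
    }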
|
D | jemalloc_internal_inlines_b.h |
    25   tcache_t *tcache = tcache_get(tsd);    in arena_choose_impl() local
    26   if (tcache->arena != NULL) {    in arena_choose_impl()
    28   assert(tcache->arena ==    in arena_choose_impl()
    30   if (tcache->arena != ret) {    in arena_choose_impl()
    32   tcache, ret);    in arena_choose_impl()
    35   tcache_arena_associate(tsd_tsdn(tsd), tcache,    in arena_choose_impl()
|
D | arena_inlines_a.h |
    49   tcache_t *tcache = tcache_get(tsd);    in percpu_arena_update() local
    50   if (tcache != NULL) {    in percpu_arena_update()
    51   tcache_arena_reassociate(tsd_tsdn(tsd), tcache,    in percpu_arena_update()
|
D | jemalloc_internal_inlines_a.h |
    110  tcache_small_bin_get(tcache_t *tcache, szind_t binind) {    in tcache_small_bin_get() argument
    112  return &tcache->bins_small[binind];    in tcache_small_bin_get()
    116  tcache_large_bin_get(tcache_t *tcache, szind_t binind) {    in tcache_large_bin_get() argument
    118  return &tcache->bins_large[binind - NBINS];    in tcache_large_bin_get()
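
These two accessors split one flat size-class index space across two arrays: indices below NBINS land in bins_small, anything above is rebased into bins_large. A tiny standalone illustration of the same rebasing, with made-up array sizes:

    #include <assert.h>
    #include <stdio.h>

    #define TOY_NBINS       4  /* small size classes (made-up count)        */
    #define TOY_NLARGE_BINS 3  /* cached large size classes (made-up count) */

    static int bins_small[TOY_NBINS];
    static int bins_large[TOY_NLARGE_BINS];

    /* One flat index space, two backing arrays: large indices are rebased. */
    static int *toy_bin_get(int binind) {
        if (binind < TOY_NBINS) {
            return &bins_small[binind];
        }
        assert(binind < TOY_NBINS + TOY_NLARGE_BINS);
        return &bins_large[binind - TOY_NBINS];
    }

    int main(void) {
        *toy_bin_get(1) = 11;         /* lands in bins_small[1] */
        *toy_bin_get(TOY_NBINS) = 22; /* lands in bins_large[0] */
        printf("%d %d\n", bins_small[1], bins_large[0]);
        return 0;
    }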
|
D | arena_externs.h |
    47   void arena_tcache_fill_small(tsdn_t *tsdn, arena_t *arena, tcache_t *tcache,
    58   size_t alignment, bool zero, tcache_t *tcache);
    60   void arena_dalloc_promoted(tsdn_t *tsdn, void *ptr, tcache_t *tcache,
    68   size_t size, size_t alignment, bool zero, tcache_t *tcache);
|
D | tcache_structs.h | 56 tcache_t *tcache; member
|
D | large_externs.h | 10 size_t alignment, bool zero, tcache_t *tcache);
|
D | tsd.h | 76 O(tcache, tcache_t, tcache_t) \
|
/external/strace/tests/ |
D | getcpu.c |
    53   long *tcache = tail_alloc(128);    in main() local
    62   res = syscall(__NR_getcpu, cpu, node, tcache);    in main()
    66   printf("getcpu([%u], [%u], %p) = 0\n", *cpu, *node, tcache);    in main()
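
Despite the matching identifier, these strace tests are unrelated to jemalloc: the third getcpu(2) argument is a legacy cache pointer (historically struct getcpu_cache *, hence the name tcache) that the kernel has ignored since Linux 2.6.24, so the test just hands it an arbitrary 128-byte buffer. In ordinary code it can simply be NULL, as in this minimal sketch of the raw syscall:

    #define _GNU_SOURCE
    #include <stdio.h>
    #include <sys/syscall.h>
    #include <unistd.h>

    int main(void) {
        unsigned int cpu = 0, node = 0;

        /* The third argument (the old getcpu_cache pointer) is ignored by
         * the kernel since Linux 2.6.24, so NULL is fine here. */
        if (syscall(SYS_getcpu, &cpu, &node, NULL) != 0) {
            perror("getcpu");
            return 1;
        }
        printf("running on cpu %u, node %u\n", cpu, node);
        return 0;
    }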
|
/external/strace/tests-m32/ |
D | getcpu.c |
    53   long *tcache = tail_alloc(128);    in main() local
    62   res = syscall(__NR_getcpu, cpu, node, tcache);    in main()
    66   printf("getcpu([%u], [%u], %p) = 0\n", *cpu, *node, tcache);    in main()
|
/external/strace/tests-mx32/ |
D | getcpu.c |
    53   long *tcache = tail_alloc(128);    in main() local
    62   res = syscall(__NR_getcpu, cpu, node, tcache);    in main()
    66   printf("getcpu([%u], [%u], %p) = 0\n", *cpu, *node, tcache);    in main()
|
/external/jemalloc_new/ |
D | Android.bp |
    81   // The minimum number of small slots held in the tcache. This must be
    84   // The number of small slots held in the tcache. The higher this number
    88   // The number of large slots held in the tcache. The higher this number
    92   // 1 << XX is the maximum sized allocation that will be in the tcache.
    105  // Only enable the tcache on non-svelte configurations, to save PSS.
    171  "src/tcache.c",
|
D | ChangeLog |
    79   - Refactor arena / tcache interactions. (@davidtgoldblatt)
    251  - Embed per thread automatic tcache into thread-specific data, which reduces
    252    conditional branches and dereferences. Also reorganize tcache to increase
    295  - Remove --disable-tcache. (@jasone)
    308    + config.tcache
    367  - Fix/refactor tcache synchronization. This regression was first released in
    707  - Add support for explicit tcaches. The "tcache.create", "tcache.flush", and
    708    "tcache.destroy" mallctls control tcache lifetime and flushing, and the
    710    control which tcache is used for each operation.
    805    MALLOCX_TCACHE(tc) and MALLOCX_TCACHE_NONE flags to control tcache usage.
    [all …]
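
The explicit-tcache entries refer to public jemalloc interfaces: tcache.create returns an index that MALLOCX_TCACHE() then routes through mallocx()/dallocx(). A usage sketch against the documented API (assumes a build exposing the unprefixed names):

    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    int main(void) {
        unsigned tci;
        size_t sz = sizeof(tci);

        /* Create an explicit thread cache and read back its index. */
        if (mallctl("tcache.create", &tci, &sz, NULL, 0) != 0) {
            fprintf(stderr, "tcache.create failed\n");
            return 1;
        }

        /* Route an allocation and its free through that specific tcache. */
        void *p = mallocx(256, MALLOCX_TCACHE(tci));
        if (p != NULL) {
            dallocx(p, MALLOCX_TCACHE(tci));
        }

        /* Flush and destroy the cache once it is no longer needed. */
        mallctl("tcache.flush", NULL, NULL, &tci, sizeof(tci));
        mallctl("tcache.destroy", NULL, NULL, &tci, sizeof(tci));
        return 0;
    }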
|
D | TUNING.md | 93 `narenas:1,tcache:false,dirty_decay_ms:0,muzzy_decay_ms:0`
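
That option string can be supplied at run time through the MALLOC_CONF environment variable or compiled in via the malloc_conf global that jemalloc reads at startup. A minimal example of the compiled-in form, assuming an unprefixed jemalloc build (a prefixed build renames this symbol accordingly):

    #include <stdlib.h>

    /* Read by jemalloc at startup; mirrors the TUNING.md string above:
     * one arena, no thread caches, immediate purging of unused pages. */
    const char *malloc_conf = "narenas:1,tcache:false,dirty_decay_ms:0,muzzy_decay_ms:0";

    int main(void) {
        void *p = malloc(4096); /* served without a per-thread cache */
        free(p);
        return 0;
    }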
|
D | Makefile.in | 118 $(srcroot)src/tcache.c \
|
/external/jemalloc_new/include/jemalloc/ |
D | jemalloc_macros.h.in | 24 * Bias tcache index bits so that 0 encodes "automatic tcache management", and 1
|
/external/mesa3d/docs/relnotes/ |
D | 19.3.3.rst | 36 tcache 2\`
|
/external/jemalloc_new/test/unit/ |
D | mallctl.c | 174 TEST_MALLCTL_OPT(bool, tcache, always); in TEST_BEGIN()
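
The unit test exercises the read-only opt.tcache mallctl; the same query works from application code to confirm whether thread caching is active. A small sketch using the documented mallctl interface:

    #include <stdbool.h>
    #include <stdio.h>
    #include <jemalloc/jemalloc.h>

    int main(void) {
        bool tcache_enabled;
        size_t sz = sizeof(tcache_enabled);

        /* opt.* mallctls are read-only snapshots of the options in effect. */
        if (mallctl("opt.tcache", &tcache_enabled, &sz, NULL, 0) != 0) {
            fprintf(stderr, "mallctl(\"opt.tcache\") failed\n");
            return 1;
        }
        printf("thread caching %s\n", tcache_enabled ? "enabled" : "disabled");
        return 0;
    }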
|